Merge lp:~sidnei/charms/precise/haproxy/trunk into lp:charms/haproxy
- Precise Pangolin (12.04)
- trunk
- Merge into trunk
Proposed by
Sidnei da Silva
Status: | Merged |
---|---|
Merged at revision: | 68 |
Proposed branch: | lp:~sidnei/charms/precise/haproxy/trunk |
Merge into: | lp:charms/haproxy |
Diff against target: |
5102 lines (+3700/-904) 30 files modified
.bzrignore (+10/-0) Makefile (+39/-0) README.md (+22/-15) charm-helpers.yaml (+4/-0) cm.py (+193/-0) config-manager.txt (+6/-0) config.yaml (+13/-1) files/nrpe/check_haproxy.sh (+2/-3) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+218/-0) hooks/charmhelpers/contrib/charmsupport/volumes.py (+156/-0) hooks/charmhelpers/core/hookenv.py (+340/-0) hooks/charmhelpers/core/host.py (+239/-0) hooks/charmhelpers/fetch/__init__.py (+209/-0) hooks/charmhelpers/fetch/archiveurl.py (+48/-0) hooks/charmhelpers/fetch/bzrurl.py (+44/-0) hooks/hooks.py (+522/-450) hooks/install (+13/-0) hooks/nrpe.py (+0/-170) hooks/test_hooks.py (+0/-263) hooks/tests/test_config_changed_hooks.py (+120/-0) hooks/tests/test_helpers.py (+750/-0) hooks/tests/test_nrpe_hooks.py (+24/-0) hooks/tests/test_peer_hooks.py (+200/-0) hooks/tests/test_reverseproxy_hooks.py (+345/-0) hooks/tests/test_website_hooks.py (+145/-0) hooks/tests/utils_for_tests.py (+21/-0) metadata.yaml (+7/-1) revision (+0/-1) setup.cfg (+4/-0) tarmac_tests.sh (+6/-0) |
To merge this branch: | bzr merge lp:~sidnei/charms/precise/haproxy/trunk |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Marco Ceppi (community) | Approve | ||
Review via email: mp+190501@code.launchpad.net |
Commit message
Description of the change
* The 'all_services' config now supports a static list of servers to be used *in addition* to the ones provided via relation.
* When more than one haproxy unit exists, the configured service is upgraded in-place to a mode where traffic is routed to a single haproxy unit (the first one in unit-name order) and the remaining ones are configured as 'backup'. This is done to allow the enforcement of a 'maxconn' setting in the configured services, which would not be possible to enforce otherwise.
* Changes to the configured services are properly propagated to the upstream relation.
To post a comment you must log in.
- 87. By JuanJo Ciarlante
-
[sidnei, r=jjo] Dupe mode http/tcp and option httplog/tcplog between frontend and backend
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file '.bzrignore' | |||
2 | --- .bzrignore 1970-01-01 00:00:00 +0000 | |||
3 | +++ .bzrignore 2013-10-16 14:05:24 +0000 | |||
4 | @@ -0,0 +1,10 @@ | |||
5 | 1 | revision | ||
6 | 2 | _trial_temp | ||
7 | 3 | .coverage | ||
8 | 4 | coverage.xml | ||
9 | 5 | *.crt | ||
10 | 6 | *.key | ||
11 | 7 | lib/* | ||
12 | 8 | *.pyc | ||
13 | 9 | exec.d | ||
14 | 10 | build/charm-helpers | ||
15 | 0 | 11 | ||
16 | === added file 'Makefile' | |||
17 | --- Makefile 1970-01-01 00:00:00 +0000 | |||
18 | +++ Makefile 2013-10-16 14:05:24 +0000 | |||
19 | @@ -0,0 +1,39 @@ | |||
20 | 1 | PWD := $(shell pwd) | ||
21 | 2 | SOURCEDEPS_DIR ?= $(shell dirname $(PWD))/.sourcecode | ||
22 | 3 | HOOKS_DIR := $(PWD)/hooks | ||
23 | 4 | TEST_PREFIX := PYTHONPATH=$(HOOKS_DIR) | ||
24 | 5 | TEST_DIR := $(PWD)/hooks/tests | ||
25 | 6 | CHARM_DIR := $(PWD) | ||
26 | 7 | PYTHON := /usr/bin/env python | ||
27 | 8 | |||
28 | 9 | |||
29 | 10 | build: test lint proof | ||
30 | 11 | |||
31 | 12 | revision: | ||
32 | 13 | @test -f revision || echo 0 > revision | ||
33 | 14 | |||
34 | 15 | proof: revision | ||
35 | 16 | @echo Proofing charm... | ||
36 | 17 | @(charm proof $(PWD) || [ $$? -eq 100 ]) && echo OK | ||
37 | 18 | @test `cat revision` = 0 && rm revision | ||
38 | 19 | |||
39 | 20 | test: | ||
40 | 21 | @echo Starting tests... | ||
41 | 22 | @CHARM_DIR=$(CHARM_DIR) $(TEST_PREFIX) nosetests $(TEST_DIR) | ||
42 | 23 | |||
43 | 24 | lint: | ||
44 | 25 | @echo Checking for Python syntax... | ||
45 | 26 | @flake8 $(HOOKS_DIR) --ignore=E123 --exclude=$(HOOKS_DIR)/charmhelpers && echo OK | ||
46 | 27 | |||
47 | 28 | sourcedeps: $(PWD)/config-manager.txt | ||
48 | 29 | @echo Updating source dependencies... | ||
49 | 30 | @$(PYTHON) cm.py -c $(PWD)/config-manager.txt \ | ||
50 | 31 | -p $(SOURCEDEPS_DIR) \ | ||
51 | 32 | -t $(PWD) | ||
52 | 33 | @$(PYTHON) build/charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | ||
53 | 34 | -c charm-helpers.yaml \ | ||
54 | 35 | -b build/charm-helpers \ | ||
55 | 36 | -d hooks/charmhelpers | ||
56 | 37 | @echo Do not forget to commit the updated files if any. | ||
57 | 38 | |||
58 | 39 | .PHONY: revision proof test lint sourcedeps charm-payload | ||
59 | 0 | 40 | ||
60 | === modified file 'README.md' | |||
61 | --- README.md 2013-02-12 23:43:54 +0000 | |||
62 | +++ README.md 2013-10-16 14:05:24 +0000 | |||
63 | @@ -1,5 +1,5 @@ | |||
66 | 1 | Juju charm haproxy | 1 | Juju charm for HAProxy |
67 | 2 | ================== | 2 | ====================== |
68 | 3 | 3 | ||
69 | 4 | HAProxy is a free, very fast and reliable solution offering high availability, | 4 | HAProxy is a free, very fast and reliable solution offering high availability, |
70 | 5 | load balancing, and proxying for TCP and HTTP-based applications. It is | 5 | load balancing, and proxying for TCP and HTTP-based applications. It is |
71 | @@ -9,6 +9,23 @@ | |||
72 | 9 | integration into existing architectures very easy and riskless, while still | 9 | integration into existing architectures very easy and riskless, while still |
73 | 10 | offering the possibility not to expose fragile web servers to the Net. | 10 | offering the possibility not to expose fragile web servers to the Net. |
74 | 11 | 11 | ||
75 | 12 | Development | ||
76 | 13 | ----------- | ||
77 | 14 | The following steps are needed for testing and development of the charm, | ||
78 | 15 | but **not** for deployment: | ||
79 | 16 | |||
80 | 17 | sudo apt-get install python-software-properties | ||
81 | 18 | sudo add-apt-repository ppa:cjohnston/flake8 | ||
82 | 19 | sudo apt-get update | ||
83 | 20 | sudo apt-get install python-mock python-flake8 python-nose python-nosexcover | ||
84 | 21 | |||
85 | 22 | To run the tests: | ||
86 | 23 | |||
87 | 24 | make build | ||
88 | 25 | |||
89 | 26 | ... will run the unit tests, run flake8 over the source to warn about | ||
90 | 27 | formatting issues and output a code coverage summary of the 'hooks.py' module. | ||
91 | 28 | |||
92 | 12 | How to deploy the charm | 29 | How to deploy the charm |
93 | 13 | ----------------------- | 30 | ----------------------- |
94 | 14 | juju deploy haproxy | 31 | juju deploy haproxy |
95 | @@ -27,7 +44,7 @@ | |||
96 | 27 | the "Website Relation" section for more information about that. | 44 | the "Website Relation" section for more information about that. |
97 | 28 | 45 | ||
98 | 29 | When your charm hooks into reverseproxy you have two general approaches | 46 | When your charm hooks into reverseproxy you have two general approaches |
100 | 30 | which can be used to notify haproxy about what services you are running. | 47 | which can be used to notify haproxy about what services you are running. |
101 | 31 | 1) Single-service proxying or 2) Multi-service or relation-driven proxying. | 48 | 1) Single-service proxying or 2) Multi-service or relation-driven proxying. |
102 | 32 | 49 | ||
103 | 33 | ** 1) Single-Service Proxying ** | 50 | ** 1) Single-Service Proxying ** |
104 | @@ -67,7 +84,7 @@ | |||
105 | 67 | 84 | ||
106 | 68 | #!/bin/bash | 85 | #!/bin/bash |
107 | 69 | # hooks/website-relation-changed | 86 | # hooks/website-relation-changed |
109 | 70 | 87 | ||
110 | 71 | host=$(unit-get private-address) | 88 | host=$(unit-get private-address) |
111 | 72 | port=80 | 89 | port=80 |
112 | 73 | 90 | ||
113 | @@ -80,7 +97,7 @@ | |||
114 | 80 | " | 97 | " |
115 | 81 | 98 | ||
116 | 82 | Once set, haproxy will union multiple `servers` stanzas from any units | 99 | Once set, haproxy will union multiple `servers` stanzas from any units |
118 | 83 | joining with the same `service_name` under one listen stanza. | 100 | joining with the same `service_name` under one listen stanza. |
119 | 84 | `service-options` and `server_options` will be overwritten, so ensure they | 101 | `service-options` and `server_options` will be overwritten, so ensure they |
120 | 85 | are set uniformly on all services with the same name. | 102 | are set uniformly on all services with the same name. |
121 | 86 | 103 | ||
122 | @@ -102,18 +119,8 @@ | |||
123 | 102 | Many of the haproxy settings can be altered via the standard juju configuration | 119 | Many of the haproxy settings can be altered via the standard juju configuration |
124 | 103 | settings. Please see the config.yaml file as each is fairly clearly documented. | 120 | settings. Please see the config.yaml file as each is fairly clearly documented. |
125 | 104 | 121 | ||
126 | 105 | Testing | ||
127 | 106 | ------- | ||
128 | 107 | This charm has a simple unit-test program. Please expand it and make sure new | ||
129 | 108 | changes are covered by simple unit tests. To run the unit tests: | ||
130 | 109 | |||
131 | 110 | sudo apt-get install python-mocker | ||
132 | 111 | sudo apt-get install python-twisted-core | ||
133 | 112 | cd hooks; trial test_hooks | ||
134 | 113 | |||
135 | 114 | TODO: | 122 | TODO: |
136 | 115 | ----- | 123 | ----- |
137 | 116 | 124 | ||
138 | 117 | * Expand Single-Service section as I have not tested that mode fully. | 125 | * Expand Single-Service section as I have not tested that mode fully. |
139 | 118 | * Trigger website-relation-changed when the reverse-proxy relation changes | 126 | * Trigger website-relation-changed when the reverse-proxy relation changes |
140 | 119 | |||
141 | 120 | 127 | ||
142 | === added directory 'build' | |||
143 | === added file 'charm-helpers.yaml' | |||
144 | --- charm-helpers.yaml 1970-01-01 00:00:00 +0000 | |||
145 | +++ charm-helpers.yaml 2013-10-16 14:05:24 +0000 | |||
146 | @@ -0,0 +1,4 @@ | |||
147 | 1 | include: | ||
148 | 2 | - core | ||
149 | 3 | - fetch | ||
150 | 4 | - contrib.charmsupport | ||
151 | 0 | \ No newline at end of file | 5 | \ No newline at end of file |
152 | 1 | 6 | ||
153 | === added file 'cm.py' | |||
154 | --- cm.py 1970-01-01 00:00:00 +0000 | |||
155 | +++ cm.py 2013-10-16 14:05:24 +0000 | |||
156 | @@ -0,0 +1,193 @@ | |||
157 | 1 | # Copyright 2010-2013 Canonical Ltd. All rights reserved. | ||
158 | 2 | import os | ||
159 | 3 | import re | ||
160 | 4 | import sys | ||
161 | 5 | import errno | ||
162 | 6 | import hashlib | ||
163 | 7 | import subprocess | ||
164 | 8 | import optparse | ||
165 | 9 | |||
166 | 10 | from os import curdir | ||
167 | 11 | from bzrlib.branch import Branch | ||
168 | 12 | from bzrlib.plugin import load_plugins | ||
169 | 13 | load_plugins() | ||
170 | 14 | from bzrlib.plugins.launchpad import account as lp_account | ||
171 | 15 | |||
172 | 16 | if 'GlobalConfig' in dir(lp_account): | ||
173 | 17 | from bzrlib.config import LocationConfig as LocationConfiguration | ||
174 | 18 | _ = LocationConfiguration | ||
175 | 19 | else: | ||
176 | 20 | from bzrlib.config import LocationStack as LocationConfiguration | ||
177 | 21 | _ = LocationConfiguration | ||
178 | 22 | |||
179 | 23 | |||
180 | 24 | def get_branch_config(config_file): | ||
181 | 25 | """ | ||
182 | 26 | Retrieves the sourcedeps configuration for a source dir. | ||
183 | 27 | Returns a dict of (branch, revspec) tuples, keyed by branch name. | ||
184 | 28 | """ | ||
185 | 29 | branches = {} | ||
186 | 30 | with open(config_file, 'r') as stream: | ||
187 | 31 | for line in stream: | ||
188 | 32 | line = line.split('#')[0].strip() | ||
189 | 33 | bzr_match = re.match(r'(\S+)\s+' | ||
190 | 34 | 'lp:([^;]+)' | ||
191 | 35 | '(?:;revno=(\d+))?', line) | ||
192 | 36 | if bzr_match: | ||
193 | 37 | name, branch, revno = bzr_match.group(1, 2, 3) | ||
194 | 38 | if revno is None: | ||
195 | 39 | revspec = -1 | ||
196 | 40 | else: | ||
197 | 41 | revspec = revno | ||
198 | 42 | branches[name] = (branch, revspec) | ||
199 | 43 | continue | ||
200 | 44 | dir_match = re.match(r'(\S+)\s+' | ||
201 | 45 | '\(directory\)', line) | ||
202 | 46 | if dir_match: | ||
203 | 47 | name = dir_match.group(1) | ||
204 | 48 | branches[name] = None | ||
205 | 49 | return branches | ||
206 | 50 | |||
207 | 51 | |||
208 | 52 | def main(config_file, parent_dir, target_dir, verbose): | ||
209 | 53 | """Do the deed.""" | ||
210 | 54 | |||
211 | 55 | try: | ||
212 | 56 | os.makedirs(parent_dir) | ||
213 | 57 | except OSError, e: | ||
214 | 58 | if e.errno != errno.EEXIST: | ||
215 | 59 | raise | ||
216 | 60 | |||
217 | 61 | branches = sorted(get_branch_config(config_file).items()) | ||
218 | 62 | for branch_name, spec in branches: | ||
219 | 63 | if spec is None: | ||
220 | 64 | # It's a directory, just create it and move on. | ||
221 | 65 | destination_path = os.path.join(target_dir, branch_name) | ||
222 | 66 | if not os.path.isdir(destination_path): | ||
223 | 67 | os.makedirs(destination_path) | ||
224 | 68 | continue | ||
225 | 69 | |||
226 | 70 | (quoted_branch_spec, revspec) = spec | ||
227 | 71 | revno = int(revspec) | ||
228 | 72 | |||
229 | 73 | # qualify mirror branch name with hash of remote repo path to deal | ||
230 | 74 | # with changes to the remote branch URL over time | ||
231 | 75 | branch_spec_digest = hashlib.sha1(quoted_branch_spec).hexdigest() | ||
232 | 76 | branch_directory = branch_spec_digest | ||
233 | 77 | |||
234 | 78 | source_path = os.path.join(parent_dir, branch_directory) | ||
235 | 79 | destination_path = os.path.join(target_dir, branch_name) | ||
236 | 80 | |||
237 | 81 | # Remove leftover symlinks/stray files. | ||
238 | 82 | try: | ||
239 | 83 | os.remove(destination_path) | ||
240 | 84 | except OSError, e: | ||
241 | 85 | if e.errno != errno.EISDIR and e.errno != errno.ENOENT: | ||
242 | 86 | raise | ||
243 | 87 | |||
244 | 88 | lp_url = "lp:" + quoted_branch_spec | ||
245 | 89 | |||
246 | 90 | # Create the local mirror branch if it doesn't already exist | ||
247 | 91 | if verbose: | ||
248 | 92 | sys.stderr.write('%30s: ' % (branch_name,)) | ||
249 | 93 | sys.stderr.flush() | ||
250 | 94 | |||
251 | 95 | fresh = False | ||
252 | 96 | if not os.path.exists(source_path): | ||
253 | 97 | subprocess.check_call(['bzr', 'branch', '-q', '--no-tree', | ||
254 | 98 | '--', lp_url, source_path]) | ||
255 | 99 | fresh = True | ||
256 | 100 | |||
257 | 101 | if not fresh: | ||
258 | 102 | source_branch = Branch.open(source_path) | ||
259 | 103 | if revno == -1: | ||
260 | 104 | orig_branch = Branch.open(lp_url) | ||
261 | 105 | fresh = source_branch.revno() == orig_branch.revno() | ||
262 | 106 | else: | ||
263 | 107 | fresh = source_branch.revno() == revno | ||
264 | 108 | |||
265 | 109 | # Freshen the source branch if required. | ||
266 | 110 | if not fresh: | ||
267 | 111 | subprocess.check_call(['bzr', 'pull', '-q', '--overwrite', '-r', | ||
268 | 112 | str(revno), '-d', source_path, | ||
269 | 113 | '--', lp_url]) | ||
270 | 114 | |||
271 | 115 | if os.path.exists(destination_path): | ||
272 | 116 | # Overwrite the destination with the appropriate revision. | ||
273 | 117 | subprocess.check_call(['bzr', 'clean-tree', '--force', '-q', | ||
274 | 118 | '--ignored', '-d', destination_path]) | ||
275 | 119 | subprocess.check_call(['bzr', 'pull', '-q', '--overwrite', | ||
276 | 120 | '-r', str(revno), | ||
277 | 121 | '-d', destination_path, '--', source_path]) | ||
278 | 122 | else: | ||
279 | 123 | # Create a new branch. | ||
280 | 124 | subprocess.check_call(['bzr', 'branch', '-q', '--hardlink', | ||
281 | 125 | '-r', str(revno), | ||
282 | 126 | '--', source_path, destination_path]) | ||
283 | 127 | |||
284 | 128 | # Check the state of the destination branch. | ||
285 | 129 | destination_branch = Branch.open(destination_path) | ||
286 | 130 | destination_revno = destination_branch.revno() | ||
287 | 131 | |||
288 | 132 | if verbose: | ||
289 | 133 | sys.stderr.write('checked out %4s of %s\n' % | ||
290 | 134 | ("r" + str(destination_revno), lp_url)) | ||
291 | 135 | sys.stderr.flush() | ||
292 | 136 | |||
293 | 137 | if revno != -1 and destination_revno != revno: | ||
294 | 138 | raise RuntimeError("Expected revno %d but got revno %d" % | ||
295 | 139 | (revno, destination_revno)) | ||
296 | 140 | |||
297 | 141 | if __name__ == '__main__': | ||
298 | 142 | parser = optparse.OptionParser( | ||
299 | 143 | usage="%prog [options]", | ||
300 | 144 | description=( | ||
301 | 145 | "Add a lightweight checkout in <target> for each " | ||
302 | 146 | "corresponding file in <parent>."), | ||
303 | 147 | add_help_option=False) | ||
304 | 148 | parser.add_option( | ||
305 | 149 | '-p', '--parent', dest='parent', | ||
306 | 150 | default=None, | ||
307 | 151 | help=("The directory of the parent tree."), | ||
308 | 152 | metavar="DIR") | ||
309 | 153 | parser.add_option( | ||
310 | 154 | '-t', '--target', dest='target', default=curdir, | ||
311 | 155 | help=("The directory of the target tree."), | ||
312 | 156 | metavar="DIR") | ||
313 | 157 | parser.add_option( | ||
314 | 158 | '-c', '--config', dest='config', default=None, | ||
315 | 159 | help=("The config file to be used for config-manager."), | ||
316 | 160 | metavar="DIR") | ||
317 | 161 | parser.add_option( | ||
318 | 162 | '-q', '--quiet', dest='verbose', action='store_false', | ||
319 | 163 | help="Be less verbose.") | ||
320 | 164 | parser.add_option( | ||
321 | 165 | '-v', '--verbose', dest='verbose', action='store_true', | ||
322 | 166 | help="Be more verbose.") | ||
323 | 167 | parser.add_option( | ||
324 | 168 | '-h', '--help', action='help', | ||
325 | 169 | help="Show this help message and exit.") | ||
326 | 170 | parser.set_defaults(verbose=True) | ||
327 | 171 | |||
328 | 172 | options, args = parser.parse_args() | ||
329 | 173 | |||
330 | 174 | if options.parent is None: | ||
331 | 175 | options.parent = os.environ.get( | ||
332 | 176 | "SOURCEDEPS_DIR", | ||
333 | 177 | os.path.join(curdir, ".sourcecode")) | ||
334 | 178 | |||
335 | 179 | if options.target is None: | ||
336 | 180 | parser.error( | ||
337 | 181 | "Target directory not specified.") | ||
338 | 182 | |||
339 | 183 | if options.config is None: | ||
340 | 184 | config = [arg for arg in args | ||
341 | 185 | if arg != "update"] | ||
342 | 186 | if not config or len(config) > 1: | ||
343 | 187 | parser.error("Config not specified") | ||
344 | 188 | options.config = config[0] | ||
345 | 189 | |||
346 | 190 | sys.exit(main(config_file=options.config, | ||
347 | 191 | parent_dir=options.parent, | ||
348 | 192 | target_dir=options.target, | ||
349 | 193 | verbose=options.verbose)) | ||
350 | 0 | 194 | ||
351 | === added file 'config-manager.txt' | |||
352 | --- config-manager.txt 1970-01-01 00:00:00 +0000 | |||
353 | +++ config-manager.txt 2013-10-16 14:05:24 +0000 | |||
354 | @@ -0,0 +1,6 @@ | |||
355 | 1 | # After making changes to this file, to ensure that your sourcedeps are | ||
356 | 2 | # up-to-date do: | ||
357 | 3 | # | ||
358 | 4 | # make sourcedeps | ||
359 | 5 | |||
360 | 6 | ./build/charm-helpers lp:charm-helpers;revno=70 | ||
361 | 0 | 7 | ||
362 | === modified file 'config.yaml' | |||
363 | --- config.yaml 2012-10-10 14:38:47 +0000 | |||
364 | +++ config.yaml 2013-10-16 14:05:24 +0000 | |||
365 | @@ -59,7 +59,7 @@ | |||
366 | 59 | restarting, a turn-around timer of 1 second is applied before a retry | 59 | restarting, a turn-around timer of 1 second is applied before a retry |
367 | 60 | occurs. | 60 | occurs. |
368 | 61 | default_timeouts: | 61 | default_timeouts: |
370 | 62 | default: "queue 1000, connect 1000, client 1000, server 1000" | 62 | default: "queue 20000, client 50000, connect 5000, server 50000" |
371 | 63 | type: string | 63 | type: string |
372 | 64 | description: Default timeouts | 64 | description: Default timeouts |
373 | 65 | enable_monitoring: | 65 | enable_monitoring: |
374 | @@ -90,6 +90,12 @@ | |||
375 | 90 | default: 3 | 90 | default: 3 |
376 | 91 | type: int | 91 | type: int |
377 | 92 | description: Monitoring interface refresh interval (in seconds) | 92 | description: Monitoring interface refresh interval (in seconds) |
378 | 93 | package_status: | ||
379 | 94 | default: "install" | ||
380 | 95 | type: "string" | ||
381 | 96 | description: | | ||
382 | 97 | The status of service-affecting packages will be set to this value in the dpkg database. | ||
383 | 98 | Useful valid values are "install" and "hold". | ||
384 | 93 | services: | 99 | services: |
385 | 94 | default: | | 100 | default: | |
386 | 95 | - service_name: haproxy_service | 101 | - service_name: haproxy_service |
387 | @@ -106,6 +112,12 @@ | |||
388 | 106 | before the first variable, service_name, as above. Service options is a | 112 | before the first variable, service_name, as above. Service options is a |
389 | 107 | comma separated list, server options will be appended as a string to | 113 | comma separated list, server options will be appended as a string to |
390 | 108 | the individual server lines for a given listen stanza. | 114 | the individual server lines for a given listen stanza. |
391 | 115 | sysctl: | ||
392 | 116 | default: "" | ||
393 | 117 | type: string | ||
394 | 118 | description: > | ||
395 | 119 | YAML-formatted list of sysctl values, e.g.: | ||
396 | 120 | '{ net.ipv4.tcp_max_syn_backlog : 65536 }' | ||
397 | 109 | nagios_context: | 121 | nagios_context: |
398 | 110 | default: "juju" | 122 | default: "juju" |
399 | 111 | type: string | 123 | type: string |
400 | 112 | 124 | ||
401 | === renamed directory 'files/nrpe-external-master' => 'files/nrpe' | |||
402 | === modified file 'files/nrpe/check_haproxy.sh' | |||
403 | --- files/nrpe-external-master/check_haproxy.sh 2012-11-07 22:32:06 +0000 | |||
404 | +++ files/nrpe/check_haproxy.sh 2013-10-16 14:05:24 +0000 | |||
405 | @@ -2,7 +2,7 @@ | |||
406 | 2 | #-------------------------------------------- | 2 | #-------------------------------------------- |
407 | 3 | # This file is managed by Juju | 3 | # This file is managed by Juju |
408 | 4 | #-------------------------------------------- | 4 | #-------------------------------------------- |
410 | 5 | # | 5 | # |
411 | 6 | # Copyright 2009,2012 Canonical Ltd. | 6 | # Copyright 2009,2012 Canonical Ltd. |
412 | 7 | # Author: Tom Haddon | 7 | # Author: Tom Haddon |
413 | 8 | 8 | ||
414 | @@ -13,7 +13,7 @@ | |||
415 | 13 | 13 | ||
416 | 14 | for appserver in $(grep ' server' /etc/haproxy/haproxy.cfg | awk '{print $2'}); | 14 | for appserver in $(grep ' server' /etc/haproxy/haproxy.cfg | awk '{print $2'}); |
417 | 15 | do | 15 | do |
419 | 16 | output=$(/usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 10000 --regex="class=\"active(2|3).*${appserver}" -e ' 200 OK') | 16 | output=$(/usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 10000 --regex="class=\"(active|backup)(2|3).*${appserver}" -e ' 200 OK') |
420 | 17 | if [ $? != 0 ]; then | 17 | if [ $? != 0 ]; then |
421 | 18 | date >> $LOGFILE | 18 | date >> $LOGFILE |
422 | 19 | echo $output >> $LOGFILE | 19 | echo $output >> $LOGFILE |
423 | @@ -30,4 +30,3 @@ | |||
424 | 30 | 30 | ||
425 | 31 | echo "OK: All haproxy instances looking good" | 31 | echo "OK: All haproxy instances looking good" |
426 | 32 | exit 0 | 32 | exit 0 |
427 | 33 | |||
428 | 34 | 33 | ||
429 | === added directory 'hooks/charmhelpers' | |||
430 | === added file 'hooks/charmhelpers/__init__.py' | |||
431 | === added directory 'hooks/charmhelpers/contrib' | |||
432 | === added file 'hooks/charmhelpers/contrib/__init__.py' | |||
433 | === added directory 'hooks/charmhelpers/contrib/charmsupport' | |||
434 | === added file 'hooks/charmhelpers/contrib/charmsupport/__init__.py' | |||
435 | === added file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py' | |||
436 | --- hooks/charmhelpers/contrib/charmsupport/nrpe.py 1970-01-01 00:00:00 +0000 | |||
437 | +++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2013-10-16 14:05:24 +0000 | |||
438 | @@ -0,0 +1,218 @@ | |||
439 | 1 | """Compatibility with the nrpe-external-master charm""" | ||
440 | 2 | # Copyright 2012 Canonical Ltd. | ||
441 | 3 | # | ||
442 | 4 | # Authors: | ||
443 | 5 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
444 | 6 | |||
445 | 7 | import subprocess | ||
446 | 8 | import pwd | ||
447 | 9 | import grp | ||
448 | 10 | import os | ||
449 | 11 | import re | ||
450 | 12 | import shlex | ||
451 | 13 | import yaml | ||
452 | 14 | |||
453 | 15 | from charmhelpers.core.hookenv import ( | ||
454 | 16 | config, | ||
455 | 17 | local_unit, | ||
456 | 18 | log, | ||
457 | 19 | relation_ids, | ||
458 | 20 | relation_set, | ||
459 | 21 | ) | ||
460 | 22 | |||
461 | 23 | from charmhelpers.core.host import service | ||
462 | 24 | |||
463 | 25 | # This module adds compatibility with the nrpe-external-master and plain nrpe | ||
464 | 26 | # subordinate charms. To use it in your charm: | ||
465 | 27 | # | ||
466 | 28 | # 1. Update metadata.yaml | ||
467 | 29 | # | ||
468 | 30 | # provides: | ||
469 | 31 | # (...) | ||
470 | 32 | # nrpe-external-master: | ||
471 | 33 | # interface: nrpe-external-master | ||
472 | 34 | # scope: container | ||
473 | 35 | # | ||
474 | 36 | # and/or | ||
475 | 37 | # | ||
476 | 38 | # provides: | ||
477 | 39 | # (...) | ||
478 | 40 | # local-monitors: | ||
479 | 41 | # interface: local-monitors | ||
480 | 42 | # scope: container | ||
481 | 43 | |||
482 | 44 | # | ||
483 | 45 | # 2. Add the following to config.yaml | ||
484 | 46 | # | ||
485 | 47 | # nagios_context: | ||
486 | 48 | # default: "juju" | ||
487 | 49 | # type: string | ||
488 | 50 | # description: | | ||
489 | 51 | # Used by the nrpe subordinate charms. | ||
490 | 52 | # A string that will be prepended to instance name to set the host name | ||
491 | 53 | # in nagios. So for instance the hostname would be something like: | ||
492 | 54 | # juju-myservice-0 | ||
493 | 55 | # If you're running multiple environments with the same services in them | ||
494 | 56 | # this allows you to differentiate between them. | ||
495 | 57 | # | ||
496 | 58 | # 3. Add custom checks (Nagios plugins) to files/nrpe-external-master | ||
497 | 59 | # | ||
498 | 60 | # 4. Update your hooks.py with something like this: | ||
499 | 61 | # | ||
500 | 62 | # from charmsupport.nrpe import NRPE | ||
501 | 63 | # (...) | ||
502 | 64 | # def update_nrpe_config(): | ||
503 | 65 | # nrpe_compat = NRPE() | ||
504 | 66 | # nrpe_compat.add_check( | ||
505 | 67 | # shortname = "myservice", | ||
506 | 68 | # description = "Check MyService", | ||
507 | 69 | # check_cmd = "check_http -w 2 -c 10 http://localhost" | ||
508 | 70 | # ) | ||
509 | 71 | # nrpe_compat.add_check( | ||
510 | 72 | # "myservice_other", | ||
511 | 73 | # "Check for widget failures", | ||
512 | 74 | # check_cmd = "/srv/myapp/scripts/widget_check" | ||
513 | 75 | # ) | ||
514 | 76 | # nrpe_compat.write() | ||
515 | 77 | # | ||
516 | 78 | # def config_changed(): | ||
517 | 79 | # (...) | ||
518 | 80 | # update_nrpe_config() | ||
519 | 81 | # | ||
520 | 82 | # def nrpe_external_master_relation_changed(): | ||
521 | 83 | # update_nrpe_config() | ||
522 | 84 | # | ||
523 | 85 | # def local_monitors_relation_changed(): | ||
524 | 86 | # update_nrpe_config() | ||
525 | 87 | # | ||
526 | 88 | # 5. ln -s hooks.py nrpe-external-master-relation-changed | ||
527 | 89 | # ln -s hooks.py local-monitors-relation-changed | ||
528 | 90 | |||
529 | 91 | |||
530 | 92 | class CheckException(Exception): | ||
531 | 93 | pass | ||
532 | 94 | |||
533 | 95 | |||
534 | 96 | class Check(object): | ||
535 | 97 | shortname_re = '[A-Za-z0-9-_]+$' | ||
536 | 98 | service_template = (""" | ||
537 | 99 | #--------------------------------------------------- | ||
538 | 100 | # This file is Juju managed | ||
539 | 101 | #--------------------------------------------------- | ||
540 | 102 | define service {{ | ||
541 | 103 | use active-service | ||
542 | 104 | host_name {nagios_hostname} | ||
543 | 105 | service_description {nagios_hostname}[{shortname}] """ | ||
544 | 106 | """{description} | ||
545 | 107 | check_command check_nrpe!{command} | ||
546 | 108 | servicegroups {nagios_servicegroup} | ||
547 | 109 | }} | ||
548 | 110 | """) | ||
549 | 111 | |||
550 | 112 | def __init__(self, shortname, description, check_cmd): | ||
551 | 113 | super(Check, self).__init__() | ||
552 | 114 | # XXX: could be better to calculate this from the service name | ||
553 | 115 | if not re.match(self.shortname_re, shortname): | ||
554 | 116 | raise CheckException("shortname must match {}".format( | ||
555 | 117 | Check.shortname_re)) | ||
556 | 118 | self.shortname = shortname | ||
557 | 119 | self.command = "check_{}".format(shortname) | ||
558 | 120 | # Note: a set of invalid characters is defined by the | ||
559 | 121 | # Nagios server config | ||
560 | 122 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= | ||
561 | 123 | self.description = description | ||
562 | 124 | self.check_cmd = self._locate_cmd(check_cmd) | ||
563 | 125 | |||
564 | 126 | def _locate_cmd(self, check_cmd): | ||
565 | 127 | search_path = ( | ||
566 | 128 | '/', | ||
567 | 129 | os.path.join(os.environ['CHARM_DIR'], | ||
568 | 130 | 'files/nrpe-external-master'), | ||
569 | 131 | '/usr/lib/nagios/plugins', | ||
570 | 132 | ) | ||
571 | 133 | parts = shlex.split(check_cmd) | ||
572 | 134 | for path in search_path: | ||
573 | 135 | if os.path.exists(os.path.join(path, parts[0])): | ||
574 | 136 | command = os.path.join(path, parts[0]) | ||
575 | 137 | if len(parts) > 1: | ||
576 | 138 | command += " " + " ".join(parts[1:]) | ||
577 | 139 | return command | ||
578 | 140 | log('Check command not found: {}'.format(parts[0])) | ||
579 | 141 | return '' | ||
580 | 142 | |||
581 | 143 | def write(self, nagios_context, hostname): | ||
582 | 144 | nrpe_check_file = '/etc/nagios/nrpe.d/{}.cfg'.format( | ||
583 | 145 | self.command) | ||
584 | 146 | with open(nrpe_check_file, 'w') as nrpe_check_config: | ||
585 | 147 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | ||
586 | 148 | nrpe_check_config.write("command[{}]={}\n".format( | ||
587 | 149 | self.command, self.check_cmd)) | ||
588 | 150 | |||
589 | 151 | if not os.path.exists(NRPE.nagios_exportdir): | ||
590 | 152 | log('Not writing service config as {} is not accessible'.format( | ||
591 | 153 | NRPE.nagios_exportdir)) | ||
592 | 154 | else: | ||
593 | 155 | self.write_service_config(nagios_context, hostname) | ||
594 | 156 | |||
595 | 157 | def write_service_config(self, nagios_context, hostname): | ||
596 | 158 | for f in os.listdir(NRPE.nagios_exportdir): | ||
597 | 159 | if re.search('.*{}.cfg'.format(self.command), f): | ||
598 | 160 | os.remove(os.path.join(NRPE.nagios_exportdir, f)) | ||
599 | 161 | |||
600 | 162 | templ_vars = { | ||
601 | 163 | 'nagios_hostname': hostname, | ||
602 | 164 | 'nagios_servicegroup': nagios_context, | ||
603 | 165 | 'description': self.description, | ||
604 | 166 | 'shortname': self.shortname, | ||
605 | 167 | 'command': self.command, | ||
606 | 168 | } | ||
607 | 169 | nrpe_service_text = Check.service_template.format(**templ_vars) | ||
608 | 170 | nrpe_service_file = '{}/service__{}_{}.cfg'.format( | ||
609 | 171 | NRPE.nagios_exportdir, hostname, self.command) | ||
610 | 172 | with open(nrpe_service_file, 'w') as nrpe_service_config: | ||
611 | 173 | nrpe_service_config.write(str(nrpe_service_text)) | ||
612 | 174 | |||
613 | 175 | def run(self): | ||
614 | 176 | subprocess.call(self.check_cmd) | ||
615 | 177 | |||
616 | 178 | |||
class NRPE(object):
    """Collect NRPE checks and publish them to Nagios.

    Checks are queued with add_check() and materialised by write(), which
    renders the per-check config files, restarts the NRPE daemon and
    advertises the monitors on any 'local-monitors' relations.
    """
    nagios_logdir = '/var/log/nagios'
    nagios_exportdir = '/var/lib/nagios/export'
    nrpe_confdir = '/etc/nagios/nrpe.d'

    def __init__(self):
        super(NRPE, self).__init__()
        self.config = config()
        self.nagios_context = self.config['nagios_context']
        # '/' in unit names is not filesystem/hostname safe; use '-'.
        self.unit_name = local_unit().replace('/', '-')
        self.hostname = "{}-{}".format(self.nagios_context, self.unit_name)
        self.checks = []

    def add_check(self, *args, **kwargs):
        """Queue a Check(*args, **kwargs) to be written out by write()."""
        self.checks.append(Check(*args, **kwargs))

    def write(self):
        """Write all queued checks, restart NRPE and publish the monitors."""
        try:
            nagios_uid = pwd.getpwnam('nagios').pw_uid
            nagios_gid = grp.getgrnam('nagios').gr_gid
        except KeyError:
            # pwd.getpwnam/grp.getgrnam raise KeyError for a missing
            # user/group; anything else should propagate (was a bare
            # except: that hid unrelated failures).
            log("Nagios user not set up, nrpe checks not updated")
            return

        if not os.path.exists(NRPE.nagios_logdir):
            os.mkdir(NRPE.nagios_logdir)
            os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid)

        # Monitors document published to the local-monitors relation;
        # nrpe_monitors is filled in as each check is written.
        nrpe_monitors = {}
        monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}}
        for nrpecheck in self.checks:
            nrpecheck.write(self.nagios_context, self.hostname)
            nrpe_monitors[nrpecheck.shortname] = {
                "command": nrpecheck.command,
            }

        service('restart', 'nagios-nrpe-server')

        for rid in relation_ids("local-monitors"):
            relation_set(relation_id=rid, monitors=yaml.dump(monitors))
657 | 0 | 219 | ||
658 | === added file 'hooks/charmhelpers/contrib/charmsupport/volumes.py' | |||
659 | --- hooks/charmhelpers/contrib/charmsupport/volumes.py 1970-01-01 00:00:00 +0000 | |||
660 | +++ hooks/charmhelpers/contrib/charmsupport/volumes.py 2013-10-16 14:05:24 +0000 | |||
661 | @@ -0,0 +1,156 @@ | |||
662 | 1 | ''' | ||
663 | 2 | Functions for managing volumes in juju units. One volume is supported per unit. | ||
664 | 3 | Subordinates may have their own storage, provided it is on its own partition. | ||
665 | 4 | |||
666 | 5 | Configuration stanzas: | ||
667 | 6 | volume-ephemeral: | ||
668 | 7 | type: boolean | ||
669 | 8 | default: true | ||
670 | 9 | description: > | ||
        If false, a volume is mounted as specified in "volume-map"
672 | 11 | If true, ephemeral storage will be used, meaning that log data | ||
673 | 12 | will only exist as long as the machine. YOU HAVE BEEN WARNED. | ||
674 | 13 | volume-map: | ||
675 | 14 | type: string | ||
676 | 15 | default: {} | ||
677 | 16 | description: > | ||
678 | 17 | YAML map of units to device names, e.g: | ||
679 | 18 | "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }" | ||
680 | 19 | Service units will raise a configure-error if volume-ephemeral | ||
681 | 20 | is 'true' and no volume-map value is set. Use 'juju set' to set a | ||
682 | 21 | value and 'juju resolved' to complete configuration. | ||
683 | 22 | |||
684 | 23 | Usage: | ||
685 | 24 | from charmsupport.volumes import configure_volume, VolumeConfigurationError | ||
686 | 25 | from charmsupport.hookenv import log, ERROR | ||
    def pre_mount_hook():
        stop_service('myservice')
689 | 28 | def post_mount_hook(): | ||
690 | 29 | start_service('myservice') | ||
691 | 30 | |||
692 | 31 | if __name__ == '__main__': | ||
693 | 32 | try: | ||
694 | 33 | configure_volume(before_change=pre_mount_hook, | ||
695 | 34 | after_change=post_mount_hook) | ||
696 | 35 | except VolumeConfigurationError: | ||
697 | 36 | log('Storage could not be configured', ERROR) | ||
698 | 37 | ''' | ||
699 | 38 | |||
700 | 39 | # XXX: Known limitations | ||
701 | 40 | # - fstab is neither consulted nor updated | ||
702 | 41 | |||
703 | 42 | import os | ||
704 | 43 | from charmhelpers.core import hookenv | ||
705 | 44 | from charmhelpers.core import host | ||
706 | 45 | import yaml | ||
707 | 46 | |||
708 | 47 | |||
709 | 48 | MOUNT_BASE = '/srv/juju/volumes' | ||
710 | 49 | |||
711 | 50 | |||
class VolumeConfigurationError(Exception):
    '''Raised when volume configuration data is missing or invalid.'''
715 | 54 | |||
716 | 55 | |||
def get_config():
    '''Gather and sanity-check volume configuration data.

    Returns a dict with 'ephemeral', 'device' and 'mountpoint' keys, or
    None when the configuration is inconsistent or unparseable.
    '''
    volume_config = {}
    config = hookenv.config()

    errors = False

    if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'):
        volume_config['ephemeral'] = True
    else:
        volume_config['ephemeral'] = False

    # BUG FIX: initialise volume_map before the try block.  Previously a
    # YAML parse error left volume_map unbound and the 'is None' check
    # below raised NameError instead of returning None.
    volume_map = {}
    try:
        volume_map = yaml.safe_load(config.get('volume-map', '{}'))
    except yaml.YAMLError as e:
        hookenv.log("Error parsing YAML volume-map: {}".format(e),
                    hookenv.ERROR)
        errors = True
    if volume_map is None:
        # probably an empty string
        volume_map = {}
    elif not isinstance(volume_map, dict):
        hookenv.log("Volume-map should be a dictionary, not {}".format(
            type(volume_map)), hookenv.ERROR)
        errors = True
        # Fall back to an empty map so the .get() below cannot crash on
        # e.g. a list (previously raised AttributeError).
        volume_map = {}

    volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME'])
    if volume_config['device'] and volume_config['ephemeral']:
        # asked for ephemeral storage but also defined a volume ID
        hookenv.log('A volume is defined for this unit, but ephemeral '
                    'storage was requested', hookenv.ERROR)
        errors = True
    elif not volume_config['device'] and not volume_config['ephemeral']:
        # asked for permanent storage but did not define volume ID
        # (message previously said the opposite).
        hookenv.log('Persistent storage was requested, but there is no '
                    'volume defined for this unit.', hookenv.ERROR)
        errors = True

    unit_mount_name = hookenv.local_unit().replace('/', '-')
    volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name)

    if errors:
        return None
    return volume_config
761 | 100 | |||
762 | 101 | |||
def mount_volume(config):
    """Mount config['device'] at config['mountpoint'], persisted.

    Raises VolumeConfigurationError when the mountpoint is not a
    directory or the mount command fails.
    """
    mountpoint = config['mountpoint']
    if not os.path.exists(mountpoint):
        host.mkdir(mountpoint)
    elif not os.path.isdir(mountpoint):
        hookenv.log('Not a directory: {}'.format(mountpoint))
        raise VolumeConfigurationError()
    # Re-mount cleanly if something is already mounted there.
    if os.path.ismount(mountpoint):
        unmount_volume(config)
    if not host.mount(config['device'], mountpoint, persist=True):
        raise VolumeConfigurationError()
774 | 113 | |||
775 | 114 | |||
def unmount_volume(config):
    """Unmount config['mountpoint'] if it is currently mounted."""
    mountpoint = config['mountpoint']
    if not os.path.ismount(mountpoint):
        return
    if not host.umount(mountpoint, persist=True):
        raise VolumeConfigurationError()
780 | 119 | |||
781 | 120 | |||
def managed_mounts():
    '''List of all mounted managed volumes (mountpoints under MOUNT_BASE).'''
    return [entry for entry in host.mounts()
            if entry[0].startswith(MOUNT_BASE)]
785 | 124 | |||
786 | 125 | |||
def configure_volume(before_change=lambda: None, after_change=lambda: None):
    '''Set up storage (or don't) according to the charm's volume configuration.
    Returns the mount point or "ephemeral". before_change and after_change
    are optional functions to be called if the volume configuration changes.
    '''

    config = get_config()
    if not config:
        hookenv.log('Failed to read volume configuration', hookenv.CRITICAL)
        raise VolumeConfigurationError()

    if config['ephemeral']:
        # Ephemeral mode: make sure nothing is mounted at our mountpoint,
        # bracketing the change with the caller's hooks.
        if os.path.ismount(config['mountpoint']):
            before_change()
            unmount_volume(config)
            after_change()
        return 'ephemeral'
    else:
        # persistent storage
        if os.path.ismount(config['mountpoint']):
            # Something is mounted; remount only if it is not already the
            # configured device.
            mounts = dict(managed_mounts())
            if mounts.get(config['mountpoint']) != config['device']:
                before_change()
                unmount_volume(config)
                mount_volume(config)
                after_change()
        else:
            before_change()
            mount_volume(config)
            after_change()
        return config['mountpoint']
818 | 0 | 157 | ||
819 | === added directory 'hooks/charmhelpers/core' | |||
820 | === added file 'hooks/charmhelpers/core/__init__.py' | |||
821 | === added file 'hooks/charmhelpers/core/hookenv.py' | |||
822 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 | |||
823 | +++ hooks/charmhelpers/core/hookenv.py 2013-10-16 14:05:24 +0000 | |||
824 | @@ -0,0 +1,340 @@ | |||
825 | 1 | "Interactions with the Juju environment" | ||
826 | 2 | # Copyright 2013 Canonical Ltd. | ||
827 | 3 | # | ||
828 | 4 | # Authors: | ||
829 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
830 | 6 | |||
831 | 7 | import os | ||
832 | 8 | import json | ||
833 | 9 | import yaml | ||
834 | 10 | import subprocess | ||
835 | 11 | import UserDict | ||
836 | 12 | |||
# Log-level names accepted by juju-log.
CRITICAL = "CRITICAL"
ERROR = "ERROR"
WARNING = "WARNING"
INFO = "INFO"
DEBUG = "DEBUG"
# Sentinel distinguishing "attribute missing" from "attribute is None".
MARKER = object()

# Module-level memoisation store shared by every @cached function.
cache = {}


def cached(func):
    ''' Cache return values for multiple executions of func + args

    For example:

        @cached
        def unit_get(attribute):
            pass

        unit_get('test')

    will cache the result of unit_get + 'test' for future calls.
    '''
    from functools import wraps

    # functools.wraps preserves the decorated function's __name__ and
    # docstring (previously every cached function appeared as 'wrapper').
    @wraps(func)
    def wrapper(*args, **kwargs):
        global cache
        key = str((func, args, kwargs))
        try:
            return cache[key]
        except KeyError:
            res = func(*args, **kwargs)
            cache[key] = res
            return res
    return wrapper


def flush(key):
    ''' Flushes any entries from function cache where the
    key is found in the function+args '''
    # Collect first, then delete: the cache must not change size while
    # being iterated.
    flush_list = []
    for item in cache:
        if key in item:
            flush_list.append(item)
    for item in flush_list:
        del cache[item]
881 | 57 | |||
882 | 58 | |||
def log(message, level=None):
    "Write a message to the juju log"
    command = ['juju-log']
    if level:
        command.extend(['-l', level])
    command.append(message)
    subprocess.call(command)
890 | 66 | |||
891 | 67 | |||
class Serializable(UserDict.IterableUserDict):
    "Wrapper, an object that can be serialized to yaml or json"

    def __init__(self, obj):
        # wrap the object
        UserDict.IterableUserDict.__init__(self)
        self.data = obj

    def __getattr__(self, attr):
        # Only invoked when normal attribute lookup fails.
        # See if this object has attribute.
        if attr in ("json", "yaml", "data"):
            # Read via __dict__ directly to avoid re-entering __getattr__.
            return self.__dict__[attr]
        # Check for attribute in wrapped object.
        got = getattr(self.data, attr, MARKER)
        if got is not MARKER:
            return got
        # Proxy to the wrapped object via dict interface.
        try:
            return self.data[attr]
        except KeyError:
            raise AttributeError(attr)

    def __getstate__(self):
        # Pickle as a standard dictionary.
        return self.data

    def __setstate__(self, state):
        # Unpickle into our wrapper.
        self.data = state

    def json(self):
        "Serialize the object to json"
        return json.dumps(self.data)

    def yaml(self):
        "Serialize the object to yaml"
        return yaml.dump(self.data)
929 | 105 | |||
930 | 106 | |||
def execution_environment():
    """A convenient bundling of the current execution context"""
    context = {}
    context['conf'] = config()
    current_relid = relation_id()
    # Relation details are only available inside a relation hook.
    if current_relid:
        context['reltype'] = relation_type()
        context['relid'] = current_relid
        context['rel'] = relation_get()
    context['unit'] = local_unit()
    context['rels'] = relations()
    context['env'] = os.environ
    return context
943 | 119 | |||
944 | 120 | |||
def in_relation_hook():
    "Determine whether we're running in a relation hook"
    return 'JUJU_RELATION' in os.environ


def relation_type():
    "The scope for the current relation hook"
    return os.environ.get('JUJU_RELATION')


def relation_id():
    "The relation ID for the current relation hook"
    return os.environ.get('JUJU_RELATION_ID')


def local_unit():
    "Local unit ID"
    return os.environ['JUJU_UNIT_NAME']


def remote_unit():
    "The remote unit for the current relation hook"
    return os.environ['JUJU_REMOTE_UNIT']


def service_name():
    "The name service group this unit belongs to"
    unit = local_unit()
    return unit.split('/', 1)[0]
973 | 149 | |||
974 | 150 | |||
@cached
def config(scope=None):
    "Juju charm configuration"
    cmd = ['config-get']
    if scope is not None:
        cmd.append(scope)
    cmd.append('--format=json')
    try:
        # config-get emits JSON; a non-JSON payload yields None.
        return json.loads(subprocess.check_output(cmd))
    except ValueError:
        return None
986 | 162 | |||
987 | 163 | |||
@cached
def relation_get(attribute=None, unit=None, rid=None):
    "Read a relation setting via relation-get; None on non-JSON output."
    cmd = ['relation-get', '--format=json']
    if rid:
        cmd.append('-r')
        cmd.append(rid)
    # '-' asks relation-get for the full settings map.
    cmd.append(attribute or '-')
    if unit:
        cmd.append(unit)
    try:
        return json.loads(subprocess.check_output(cmd))
    except ValueError:
        return None
1001 | 177 | |||
1002 | 178 | |||
def relation_set(relation_id=None, relation_settings=None, **kwargs):
    """Publish settings on a relation via relation-set.

    relation_id: target relation (defaults to the current hook's relation).
    relation_settings/kwargs: key/value pairs to set; a None value clears
    the key.
    """
    # BUG FIX: the previous signature used a mutable default ({}), the
    # classic shared-default pitfall.
    if relation_settings is None:
        relation_settings = {}
    relation_cmd_line = ['relation-set']
    if relation_id is not None:
        relation_cmd_line.extend(('-r', relation_id))
    # list() keeps this correct on python3 too (dict views don't support +).
    for k, v in (list(relation_settings.items()) + list(kwargs.items())):
        if v is None:
            relation_cmd_line.append('{}='.format(k))
        else:
            relation_cmd_line.append('{}={}'.format(k, v))
    subprocess.check_call(relation_cmd_line)
    # Flush cache of any relation-gets for local unit
    flush(local_unit())
1015 | 191 | |||
1016 | 192 | |||
@cached
def relation_ids(reltype=None):
    """A list of relation IDs for `reltype` (default: current hook's type).

    Returns [] when relation-ids reports none.
    """
    reltype = reltype or relation_type()
    relid_cmd_line = ['relation-ids', '--format=json']
    if reltype is not None:
        relid_cmd_line.append(reltype)
    # The original had an unreachable second `return []` after this line.
    return json.loads(subprocess.check_output(relid_cmd_line)) or []
1026 | 202 | |||
1027 | 203 | |||
@cached
def related_units(relid=None):
    "A list of related units"
    relid = relid or relation_id()
    cmd = ['relation-list', '--format=json']
    if relid is not None:
        cmd.extend(('-r', relid))
    return json.loads(subprocess.check_output(cmd)) or []
1036 | 212 | |||
1037 | 213 | |||
@cached
def relation_for_unit(unit=None, rid=None):
    "Get the JSON representation of a unit's relation settings."
    unit = unit or remote_unit()
    relation = relation_get(unit=unit, rid=rid)
    # Keys ending in '-list' hold whitespace-separated values; expand them.
    for key in relation:
        if key.endswith('-list'):
            relation[key] = relation[key].split()
    relation['__unit__'] = unit
    return relation
1048 | 224 | |||
1049 | 225 | |||
@cached
def relations_for_id(relid=None):
    "Get relations of a specific relation ID"
    relation_data = []
    # NOTE(review): when called without an argument this falls back to
    # relation_ids(), which returns a *list*, yet related_units() and
    # relation_for_unit() expect a single relation-id string -- looks
    # buggy; confirm callers always pass relid explicitly.
    relid = relid or relation_ids()
    for unit in related_units(relid):
        unit_data = relation_for_unit(unit, relid)
        # Tag each unit's settings with the relation they came from.
        unit_data['__relid__'] = relid
        relation_data.append(unit_data)
    return relation_data
1060 | 236 | |||
1061 | 237 | |||
@cached
def relations_of_type(reltype=None):
    "Get relations of a specific type"
    relation_data = []
    reltype = reltype or relation_type()
    for relid in relation_ids(reltype):
        for rel in relations_for_id(relid):
            rel['__relid__'] = relid
            relation_data.append(rel)
    return relation_data
1072 | 248 | |||
1073 | 249 | |||
@cached
def relation_types():
    """Get a list of relation types supported by this charm.

    Reads provides/requires/peers from $CHARM_DIR/metadata.yaml.
    """
    charmdir = os.environ.get('CHARM_DIR', '')
    # Context manager closes the file even if yaml parsing raises
    # (the original leaked the handle in that case).
    with open(os.path.join(charmdir, 'metadata.yaml')) as mdf:
        md = yaml.safe_load(mdf)
    rel_types = []
    for key in ('provides', 'requires', 'peers'):
        section = md.get(key)
        if section:
            rel_types.extend(section.keys())
    return rel_types
1087 | 263 | |||
1088 | 264 | |||
@cached
def relations():
    "Nested map {reltype: {relid: {unit: settings}}} of all relations."
    rels = {}
    for reltype in relation_types():
        relids = {}
        for relid in relation_ids(reltype):
            # Include our own unit's settings alongside the remote units'.
            units = {local_unit(): relation_get(unit=local_unit(), rid=relid)}
            for unit in related_units(relid):
                units[unit] = relation_get(unit=unit, rid=relid)
            relids[relid] = units
        rels[reltype] = relids
    return rels
1102 | 278 | |||
1103 | 279 | |||
def open_port(port, protocol="TCP"):
    "Open a service network port"
    subprocess.check_call(['open-port', '{}/{}'.format(port, protocol)])


def close_port(port, protocol="TCP"):
    "Close a service network port"
    subprocess.check_call(['close-port', '{}/{}'.format(port, protocol)])
1116 | 292 | |||
1117 | 293 | |||
@cached
def unit_get(attribute):
    "Query unit-get for `attribute`; None when the output is not JSON."
    cmd = ['unit-get', '--format=json', attribute]
    try:
        return json.loads(subprocess.check_output(cmd))
    except ValueError:
        return None


def unit_private_ip():
    "This unit's private-address."
    return unit_get('private-address')
1129 | 305 | |||
1130 | 306 | |||
class UnregisteredHookError(Exception):
    """Raised by Hooks.execute() when no handler matches the hook name."""
    pass


class Hooks(object):
    """Registry dispatching Juju hook names to Python callables."""

    def __init__(self):
        super(Hooks, self).__init__()
        self._hooks = {}

    def register(self, name, function):
        """Register `function` as the handler for hook `name`."""
        self._hooks[name] = function

    def execute(self, args):
        """Run the handler for the hook named by basename(args[0]).

        Raises UnregisteredHookError when no handler is registered.
        """
        hook_name = os.path.basename(args[0])
        if hook_name in self._hooks:
            self._hooks[hook_name]()
        else:
            raise UnregisteredHookError(hook_name)

    def hook(self, *hook_names):
        """Decorator registering a hook handler.

        The function is registered under every name in *hook_names*, under
        its own __name__, and (when that contains underscores) under the
        dashed variant as well.
        """
        def wrapper(decorated):
            for hook_name in hook_names:
                self.register(hook_name, decorated)
            # The original used `for ... else:` here; since the loop never
            # breaks, the else branch always ran -- make that explicit.
            self.register(decorated.__name__, decorated)
            if '_' in decorated.__name__:
                self.register(
                    decorated.__name__.replace('_', '-'), decorated)
            return decorated
        return wrapper
1161 | 337 | |||
1162 | 338 | |||
def charm_dir():
    """Path of the charm directory (None outside a hook context)."""
    return os.environ.get('CHARM_DIR')
1165 | 0 | 341 | ||
1166 | === added file 'hooks/charmhelpers/core/host.py' | |||
1167 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 | |||
1168 | +++ hooks/charmhelpers/core/host.py 2013-10-16 14:05:24 +0000 | |||
1169 | @@ -0,0 +1,239 @@ | |||
1170 | 1 | """Tools for working with the host system""" | ||
1171 | 2 | # Copyright 2012 Canonical Ltd. | ||
1172 | 3 | # | ||
1173 | 4 | # Authors: | ||
1174 | 5 | # Nick Moffitt <nick.moffitt@canonical.com> | ||
1175 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
1176 | 7 | |||
1177 | 8 | import os | ||
1178 | 9 | import pwd | ||
1179 | 10 | import grp | ||
1180 | 11 | import random | ||
1181 | 12 | import string | ||
1182 | 13 | import subprocess | ||
1183 | 14 | import hashlib | ||
1184 | 15 | |||
1185 | 16 | from collections import OrderedDict | ||
1186 | 17 | |||
1187 | 18 | from hookenv import log | ||
1188 | 19 | |||
1189 | 20 | |||
def service_start(service_name):
    """Start a system service."""
    service('start', service_name)


def service_stop(service_name):
    """Stop a system service."""
    service('stop', service_name)


def service_restart(service_name):
    """Restart a system service."""
    service('restart', service_name)


def service_reload(service_name, restart_on_failure=False):
    """Reload a service, optionally falling back to a restart on failure."""
    reloaded = service('reload', service_name)
    if restart_on_failure and not reloaded:
        service('restart', service_name)


def service(action, service_name):
    """Run `service <name> <action>`; True when the command exits 0."""
    return subprocess.call(['service', service_name, action]) == 0
1210 | 41 | |||
1211 | 42 | |||
def service_running(service):
    """Best-effort check whether a system service is currently running.

    NOTE: the parameter shadows the module-level service() helper; kept
    for interface compatibility.
    """
    try:
        output = subprocess.check_output(['service', service, 'status'])
    except subprocess.CalledProcessError:
        # Non-zero exit status: treat as not running.
        return False
    # Upstart reports "start/running"; sysv scripts say "is running".
    return "start/running" in output or "is running" in output
1222 | 53 | |||
1223 | 54 | |||
def adduser(username, password=None, shell='/bin/bash', system_user=False):
    """Ensure `username` exists; return its pwd database entry."""
    try:
        user_info = pwd.getpwnam(username)
        log('user {0} already exists!'.format(username))
    except KeyError:
        log('creating user {0}'.format(username))
        cmd = ['useradd']
        if system_user or password is None:
            cmd.append('--system')
        else:
            cmd.extend([
                '--create-home',
                '--shell', shell,
                '--password', password,
            ])
        cmd.append(username)
        subprocess.check_call(cmd)
        user_info = pwd.getpwnam(username)
    return user_info
1244 | 75 | |||
1245 | 76 | |||
def add_user_to_group(username, group):
    """Add a user to a group"""
    log("Adding user {} to group {}".format(username, group))
    subprocess.check_call(['gpasswd', '-a', username, group])
1255 | 86 | |||
1256 | 87 | |||
def rsync(from_path, to_path, flags='-r', options=None):
    """Replicate the contents of a path"""
    # NB: `or` means an explicitly-passed empty list also falls back to
    # the defaults, matching the long-standing behaviour.
    options = options or ['--delete', '--executability']
    cmd = ['/usr/bin/rsync', flags] + list(options) + [from_path, to_path]
    log(" ".join(cmd))
    return subprocess.check_output(cmd).strip()
1266 | 97 | |||
1267 | 98 | |||
def symlink(source, destination):
    """Create a symbolic link"""
    log("Symlinking {} as {}".format(source, destination))
    # -sf: symbolic, force-replace any existing link.
    subprocess.check_call(['ln', '-sf', source, destination])
1278 | 109 | |||
1279 | 110 | |||
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory owned by owner:group with mode `perms`.

    When `force` is true and a non-directory file occupies `path`, it is
    removed and the directory created in its place.
    """
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    if os.path.exists(realpath):
        if force and not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            # BUG FIX: the original removed the blocking file but never
            # created the directory, leaving `path` absent and making the
            # chown below fail.
            os.makedirs(realpath, perms)
    else:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
1294 | 125 | |||
1295 | 126 | |||
def write_file(path, content, owner='root', group='root', perms=0o444):
    """Create or overwrite a file with the contents of a string"""
    log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    with open(path, 'w') as target:
        # Set ownership and mode on the open descriptor before writing.
        os.fchown(target.fileno(), uid, gid)
        os.fchmod(target.fileno(), perms)
        target.write(content)
1305 | 136 | |||
1306 | 137 | |||
def mount(device, mountpoint, options=None, persist=False):
    '''Mount a filesystem; True on success, False on mount failure.'''
    cmd = ['mount']
    if options is not None:
        cmd.extend(['-o', options])
    cmd.extend([device, mountpoint])
    try:
        subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
        return False
    if persist:
        # TODO: update fstab
        pass
    return True
1322 | 153 | |||
1323 | 154 | |||
def umount(mountpoint, persist=False):
    '''Unmount a filesystem; True on success, False on umount failure.'''
    try:
        subprocess.check_output(['umount', mountpoint])
    except subprocess.CalledProcessError as e:
        log('Error unmounting {}\n{}'.format(mountpoint, e.output))
        return False
    if persist:
        # TODO: update fstab
        pass
    return True
1336 | 167 | |||
1337 | 168 | |||
def mounts():
    '''List of all mounted volumes as [[mountpoint,device],[...]]'''
    system_mounts = []
    with open('/proc/mounts') as f:
        for line in f:
            fields = line.strip().split()
            # /proc/mounts rows start "<device> <mountpoint> ...";
            # report them flipped as [mountpoint, device].
            system_mounts.append([fields[1], fields[0]])
    return system_mounts
1345 | 176 | |||
1346 | 177 | |||
def file_hash(path):
    '''Return the md5 hex digest of the file at `path`, or None if absent.'''
    if not os.path.exists(path):
        return None
    h = hashlib.md5()
    # Read in binary mode: the digest is identical on POSIX/python2, and
    # it avoids the str-vs-bytes TypeError under python3.  `with` makes
    # sure the handle is closed.
    with open(path, 'rb') as source:
        h.update(source.read())
    return h.hexdigest()
1356 | 187 | |||
1357 | 188 | |||
def restart_on_change(restart_map):
    ''' Restart services based on configuration files changing

    This function is used a decorator, for example

        @restart_on_change({
            '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
        })
        def ceph_client_changed():
            ...

    In this example, the cinder-api and cinder-volume services
    would be restarted if /etc/ceph/ceph.conf is changed by the
    ceph_client_changed function.  Each service is restarted at most
    once, in map order.
    '''
    from functools import wraps

    def wrap(f):
        @wraps(f)
        def wrapped_f(*args):
            # Snapshot file hashes before running the wrapped function.
            checksums = {}
            for path in restart_map:
                checksums[path] = file_hash(path)
            result = f(*args)
            restarts = []
            for path in restart_map:
                if checksums[path] != file_hash(path):
                    restarts += restart_map[path]
            # OrderedDict.fromkeys de-duplicates while preserving order.
            for service_name in list(OrderedDict.fromkeys(restarts)):
                service('restart', service_name)
            # BUG FIX: propagate the wrapped function's return value
            # (the original silently discarded it).
            return result
        return wrapped_f
    return wrap
1387 | 218 | |||
1388 | 219 | |||
def lsb_release():
    '''Return /etc/lsb-release parsed into a dict.'''
    d = {}
    with open('/etc/lsb-release', 'r') as lsb:
        for line in lsb:
            # Split on the first '=' only: values such as
            # DISTRIB_DESCRIPTION may themselves contain '='
            # (the original's bare split('=') raised ValueError there).
            k, v = line.split('=', 1)
            d[k.strip()] = v.strip()
    return d
1397 | 228 | |||
1398 | 229 | |||
def pwgen(length=None):
    '''Generate a random password.

    NOTE: uses the non-cryptographic `random` module; do not use the
    result for security-sensitive secrets.
    '''
    if length is None:
        # Random length between 35 and 44 characters inclusive.
        length = random.choice(range(35, 45))
    # string.ascii_letters replaces the python2-only, locale-dependent
    # string.letters; identical under the default locale.  Ambiguous
    # characters and vowels are excluded.
    alphanumeric_chars = [
        l for l in (string.ascii_letters + string.digits)
        if l not in 'l0QD1vAEIOUaeiou']
    random_chars = [
        random.choice(alphanumeric_chars) for _ in range(length)]
    return ''.join(random_chars)
1409 | 0 | 240 | ||
1410 | === added directory 'hooks/charmhelpers/fetch' | |||
1411 | === added file 'hooks/charmhelpers/fetch/__init__.py' | |||
1412 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 | |||
1413 | +++ hooks/charmhelpers/fetch/__init__.py 2013-10-16 14:05:24 +0000 | |||
1414 | @@ -0,0 +1,209 @@ | |||
1415 | 1 | import importlib | ||
1416 | 2 | from yaml import safe_load | ||
1417 | 3 | from charmhelpers.core.host import ( | ||
1418 | 4 | lsb_release | ||
1419 | 5 | ) | ||
1420 | 6 | from urlparse import ( | ||
1421 | 7 | urlparse, | ||
1422 | 8 | urlunparse, | ||
1423 | 9 | ) | ||
1424 | 10 | import subprocess | ||
1425 | 11 | from charmhelpers.core.hookenv import ( | ||
1426 | 12 | config, | ||
1427 | 13 | log, | ||
1428 | 14 | ) | ||
1429 | 15 | import apt_pkg | ||
1430 | 16 | |||
1431 | 17 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
1432 | 18 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1433 | 19 | """ | ||
1434 | 20 | PROPOSED_POCKET = """# Proposed | ||
1435 | 21 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
1436 | 22 | """ | ||
1437 | 23 | |||
1438 | 24 | |||
def filter_installed_packages(packages):
    """Return the subset of ``packages`` that still requires installation.

    Unknown package names are logged at WARNING level and included in
    the returned list.
    """
    apt_pkg.init()
    cache = apt_pkg.Cache()
    missing = []
    for name in packages:
        try:
            entry = cache[name]
        except KeyError:
            log('Package {} has no installation candidate.'.format(name),
                level='WARNING')
            missing.append(name)
        else:
            if not entry.current_ver:
                missing.append(name)
    return missing
1453 | 39 | |||
1454 | 40 | |||
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages via apt-get.

    :param packages: a single package name or an iterable of names.
    :param options: extra apt-get command line options.
    :param fatal: when True, a non-zero apt-get exit status raises
        CalledProcessError; otherwise it is ignored.
    """
    options = options or []
    if isinstance(packages, basestring):
        targets = [packages]
    else:
        targets = list(packages)
    cmd = ['apt-get', '-y'] + list(options) + ['install'] + targets
    log("Installing {} with options: {}".format(packages,
                                                options))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
1471 | 57 | |||
1472 | 58 | |||
def apt_update(fatal=False):
    """Refresh the local apt package index.

    :param fatal: when True, a failing apt-get raises CalledProcessError.
    """
    runner = subprocess.check_call if fatal else subprocess.call
    runner(['apt-get', 'update'])
1480 | 66 | |||
1481 | 67 | |||
def apt_purge(packages, fatal=False):
    """Purge one or more packages (removes configuration files too).

    :param packages: a single package name or an iterable of names.
    :param fatal: when True, a failing apt-get raises CalledProcessError.
    """
    if isinstance(packages, basestring):
        targets = [packages]
    else:
        targets = list(packages)
    log("Purging {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(['apt-get', '-y', 'purge'] + targets)
1494 | 80 | |||
1495 | 81 | |||
def add_source(source, key=None):
    """Add an apt package source.

    :param source: a PPA ('ppa:...'), an http(s) repository line, a
        cloud archive pocket ('cloud:...'), or the literal 'proposed'.
    :param key: optional GPG key to register for the source.
    """
    # 'http' also matches 'https:'; the original check accepted only
    # plain 'http:' and silently ignored TLS repository URLs.
    if ((source.startswith('ppa:') or
         source.startswith('http'))):
        subprocess.check_call(['add-apt-repository', '--yes', source])
    elif source.startswith('cloud:'):
        # The cloud archive is signed with its own keyring package.
        apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
                    fatal=True)
        pocket = source.split(':')[-1]
        with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
            apt.write(CLOUD_ARCHIVE.format(pocket))
    elif source == 'proposed':
        release = lsb_release()['DISTRIB_CODENAME']
        with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
            apt.write(PROPOSED_POCKET.format(release))
    if key:
        subprocess.check_call(['apt-key', 'import', key])
1512 | 98 | |||
1513 | 99 | |||
class SourceConfigError(Exception):
    """Raised when the configured install sources/keys are malformed."""
    pass
1516 | 102 | |||
1517 | 103 | |||
def configure_sources(update=False,
                      sources_var='install_sources',
                      keys_var='install_keys'):
    """
    Configure multiple sources from charm configuration

    Example config:
        install_sources:
          - "ppa:foo"
          - "http://example.com/repo precise main"
        install_keys:
          - null
          - "a1b2c3d4"

    Note that 'null' (a.k.a. None) should not be quoted.

    :param update: when True, run ``apt-get update`` after adding sources.
    :param sources_var: config option holding the source list (or string).
    :param keys_var: config option holding the matching key list.
    :raises SourceConfigError: if sources and keys differ in length.
    """
    sources = safe_load(config(sources_var))
    keys = safe_load(config(keys_var))
    # Normalize both values to lists. The original code only handled the
    # "both strings" and "both lists" cases; a single string source with
    # no key (keys is None) fell into the list branch and crashed with
    # TypeError on len(None).
    if isinstance(sources, basestring):
        sources = [sources]
    if keys is None:
        keys = [None] * len(sources)
    elif isinstance(keys, basestring):
        keys = [keys]
    if len(sources) != len(keys):
        raise SourceConfigError(
            'Install sources and keys lists are different lengths')
    for source, key in zip(sources, keys):
        add_source(source, key)
    if update:
        apt_update(fatal=True)
1546 | 132 | |||
1547 | 133 | # The order of this list is very important. Handlers should be listed in from | ||
1548 | 134 | # least- to most-specific URL matching. | ||
1549 | 135 | FETCH_HANDLERS = ( | ||
1550 | 136 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', | ||
1551 | 137 | 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', | ||
1552 | 138 | ) | ||
1553 | 139 | |||
1554 | 140 | |||
class UnhandledSource(Exception):
    """Raised when no fetch handler is able to process a source URL."""
    pass
1557 | 143 | |||
1558 | 144 | |||
def install_remote(source):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific

    :returns: the path the source was unpacked to.
    :raises UnhandledSource: if no registered handler accepts the URL.
    """
    # We ONLY check for True here because can_handle may return a string
    # explaining why it can't handle a given source.
    handlers = [h for h in plugins() if h.can_handle(source) is True]
    installed_to = None
    for handler in handlers:
        try:
            installed_to = handler.install(source)
            # Stop at the first handler that succeeds; the original loop
            # kept going and a later handler could overwrite a
            # successful install.
            break
        except UnhandledSource:
            pass
    if not installed_to:
        raise UnhandledSource("No handler found for source {}".format(source))
    return installed_to
1580 | 166 | |||
1581 | 167 | |||
def install_from_config(config_var_name):
    """Fetch and install the remote source named by a config option.

    :param config_var_name: charm config key holding the source URL.
    :returns: the path the source was unpacked to.
    """
    return install_remote(config()[config_var_name])
1586 | 172 | |||
1587 | 173 | |||
class BaseFetchHandler(object):
    """Base class for FetchHandler implementations in fetch plugins."""

    def can_handle(self, source):
        """Return True when the source can be handled; otherwise return
        a string explaining why it cannot."""
        return "Wrong source type"

    def install(self, source):
        """Download and unpack the source, returning the path to the
        unpacked files, or raise UnhandledSource."""
        raise UnhandledSource("Wrong source type {}".format(source))

    def parse_url(self, url):
        """Split ``url`` into its urlparse components."""
        return urlparse(url)

    def base_url(self, url):
        """Return ``url`` with the querystring and fragment stripped."""
        scheme, netloc, path, params = tuple(self.parse_url(url))[:4]
        return urlunparse((scheme, netloc, path, params, '', ''))
1608 | 194 | |||
1609 | 195 | |||
def plugins(fetch_handlers=None):
    """Instantiate each configured fetch handler class.

    Handlers whose module or class cannot be imported are logged and
    skipped, so optional plugins can be omitted from an installation.

    :param fetch_handlers: iterable of dotted class paths; defaults to
        FETCH_HANDLERS when empty or None.
    :returns: list of handler instances.
    """
    handler_names = fetch_handlers if fetch_handlers else FETCH_HANDLERS
    loaded = []
    for handler_name in handler_names:
        package, classname = handler_name.rsplit('.', 1)
        try:
            module = importlib.import_module(package)
            loaded.append(getattr(module, classname)())
        except (ImportError, AttributeError):
            log("FetchHandler {} not found, skipping plugin".format(handler_name))
    return loaded
1624 | 0 | 210 | ||
1625 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
1626 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 | |||
1627 | +++ hooks/charmhelpers/fetch/archiveurl.py 2013-10-16 14:05:24 +0000 | |||
1628 | @@ -0,0 +1,48 @@ | |||
1629 | 1 | import os | ||
1630 | 2 | import urllib2 | ||
1631 | 3 | from charmhelpers.fetch import ( | ||
1632 | 4 | BaseFetchHandler, | ||
1633 | 5 | UnhandledSource | ||
1634 | 6 | ) | ||
1635 | 7 | from charmhelpers.payload.archive import ( | ||
1636 | 8 | get_archive_handler, | ||
1637 | 9 | extract, | ||
1638 | 10 | ) | ||
1639 | 11 | from charmhelpers.core.host import mkdir | ||
1640 | 12 | |||
1641 | 13 | |||
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for archives via generic URLs."""

    def can_handle(self, source):
        """Accept http/https/ftp/file URLs that point at a supported
        archive type; otherwise return an explanatory string / False."""
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            return "Wrong source type"
        if get_archive_handler(self.base_url(source)):
            return True
        return False

    def download(self, source, dest):
        """Download ``source`` to the local path ``dest``.

        Propagates all exceptions (URLError, OSError, etc.); a partially
        written destination file is removed on failure.
        """
        response = urllib2.urlopen(source)
        try:
            # Archives are binary data: write in 'wb' mode. The original
            # used text mode 'w', which can corrupt the payload on
            # platforms that translate line endings.
            with open(dest, 'wb') as dest_file:
                dest_file.write(response.read())
        except Exception:
            if os.path.isfile(dest):
                os.unlink(dest)
            # Bare raise preserves the original traceback; 'raise e'
            # discarded it on Python 2.
            raise

    def install(self, source):
        """Download the archive into $CHARM_DIR/fetched and extract it.

        :returns: the extraction path.
        :raises UnhandledSource: on download or filesystem errors.
        """
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except urllib2.URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return extract(dld_file)
1677 | 0 | 49 | ||
1678 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
1679 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 | |||
1680 | +++ hooks/charmhelpers/fetch/bzrurl.py 2013-10-16 14:05:24 +0000 | |||
1681 | @@ -0,0 +1,44 @@ | |||
1682 | 1 | import os | ||
1683 | 2 | from bzrlib.branch import Branch | ||
1684 | 3 | from charmhelpers.fetch import ( | ||
1685 | 4 | BaseFetchHandler, | ||
1686 | 5 | UnhandledSource | ||
1687 | 6 | ) | ||
1688 | 7 | from charmhelpers.core.host import mkdir | ||
1689 | 8 | |||
1690 | 9 | |||
class BzrUrlFetchHandler(BaseFetchHandler):
    """Handler for bazaar branches via generic and lp URLs."""

    def can_handle(self, source):
        """Return True for bzr+ssh:// and lp: URLs, False otherwise."""
        url_parts = self.parse_url(source)
        return url_parts.scheme in ('bzr+ssh', 'lp')

    def branch(self, source, dest):
        """Branch ``source`` into the local directory ``dest``.

        :raises UnhandledSource: if the URL scheme is not supported.
        """
        url_parts = self.parse_url(source)
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        # The lp: scheme is provided by a bzr plugin, so plugins must be
        # loaded before the branch can be opened.
        if url_parts.scheme == "lp":
            from bzrlib.plugin import load_plugins
            load_plugins()
        # The original wrapped the next two lines in
        # 'except Exception as e: raise e', which added no handling and
        # destroyed the traceback on Python 2; let errors propagate.
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()

    def install(self, source):
        """Branch the source into $CHARM_DIR/fetched/<branch_name>.

        :returns: the destination directory.
        :raises UnhandledSource: on OS errors while branching.
        """
        url_parts = self.parse_url(source)
        branch_name = url_parts.path.strip("/").split("/")[-1]
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                branch_name)
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        try:
            self.branch(source, dest_dir)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return dest_dir
1725 | 44 | |||
1726 | 0 | 45 | ||
1727 | === modified file 'hooks/hooks.py' | |||
1728 | --- hooks/hooks.py 2013-05-23 21:52:06 +0000 | |||
1729 | +++ hooks/hooks.py 2013-10-16 14:05:24 +0000 | |||
1730 | @@ -1,17 +1,31 @@ | |||
1731 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python |
1732 | 2 | 2 | ||
1733 | 3 | import json | ||
1734 | 4 | import glob | 3 | import glob |
1735 | 5 | import os | 4 | import os |
1736 | 6 | import random | ||
1737 | 7 | import re | 5 | import re |
1738 | 8 | import socket | 6 | import socket |
1740 | 9 | import string | 7 | import shutil |
1741 | 10 | import subprocess | 8 | import subprocess |
1742 | 11 | import sys | 9 | import sys |
1743 | 12 | import yaml | 10 | import yaml |
1746 | 13 | import nrpe | 11 | |
1747 | 14 | import time | 12 | from itertools import izip, tee |
1748 | 13 | |||
1749 | 14 | from charmhelpers.core.host import pwgen | ||
1750 | 15 | from charmhelpers.core.hookenv import ( | ||
1751 | 16 | log, | ||
1752 | 17 | config as config_get, | ||
1753 | 18 | relation_set, | ||
1754 | 19 | relation_ids as get_relation_ids, | ||
1755 | 20 | relations_of_type, | ||
1756 | 21 | relations_for_id, | ||
1757 | 22 | relation_id, | ||
1758 | 23 | open_port, | ||
1759 | 24 | close_port, | ||
1760 | 25 | unit_get, | ||
1761 | 26 | ) | ||
1762 | 27 | from charmhelpers.fetch import apt_install | ||
1763 | 28 | from charmhelpers.contrib.charmsupport import nrpe | ||
1764 | 15 | 29 | ||
1765 | 16 | 30 | ||
1766 | 17 | ############################################################################### | 31 | ############################################################################### |
1767 | @@ -20,92 +34,59 @@ | |||
1768 | 20 | default_haproxy_config_dir = "/etc/haproxy" | 34 | default_haproxy_config_dir = "/etc/haproxy" |
1769 | 21 | default_haproxy_config = "%s/haproxy.cfg" % default_haproxy_config_dir | 35 | default_haproxy_config = "%s/haproxy.cfg" % default_haproxy_config_dir |
1770 | 22 | default_haproxy_service_config_dir = "/var/run/haproxy" | 36 | default_haproxy_service_config_dir = "/var/run/haproxy" |
1772 | 23 | HOOK_NAME = os.path.basename(sys.argv[0]) | 37 | service_affecting_packages = ['haproxy'] |
1773 | 38 | |||
1774 | 39 | dupe_options = [ | ||
1775 | 40 | "mode tcp", | ||
1776 | 41 | "option tcplog", | ||
1777 | 42 | "mode http", | ||
1778 | 43 | "option httplog", | ||
1779 | 44 | ] | ||
1780 | 45 | |||
1781 | 46 | frontend_only_options = [ | ||
1782 | 47 | "backlog", | ||
1783 | 48 | "bind", | ||
1784 | 49 | "capture cookie", | ||
1785 | 50 | "capture request header", | ||
1786 | 51 | "capture response header", | ||
1787 | 52 | "clitimeout", | ||
1788 | 53 | "default_backend", | ||
1789 | 54 | "maxconn", | ||
1790 | 55 | "monitor fail", | ||
1791 | 56 | "monitor-net", | ||
1792 | 57 | "monitor-uri", | ||
1793 | 58 | "option accept-invalid-http-request", | ||
1794 | 59 | "option clitcpka", | ||
1795 | 60 | "option contstats", | ||
1796 | 61 | "option dontlog-normal", | ||
1797 | 62 | "option dontlognull", | ||
1798 | 63 | "option http-use-proxy-header", | ||
1799 | 64 | "option log-separate-errors", | ||
1800 | 65 | "option logasap", | ||
1801 | 66 | "option socket-stats", | ||
1802 | 67 | "option tcp-smart-accept", | ||
1803 | 68 | "rate-limit sessions", | ||
1804 | 69 | "tcp-request content accept", | ||
1805 | 70 | "tcp-request content reject", | ||
1806 | 71 | "tcp-request inspect-delay", | ||
1807 | 72 | "timeout client", | ||
1808 | 73 | "timeout clitimeout", | ||
1809 | 74 | "use_backend", | ||
1810 | 75 | ] | ||
1811 | 76 | |||
1812 | 24 | 77 | ||
1813 | 25 | ############################################################################### | 78 | ############################################################################### |
1814 | 26 | # Supporting functions | 79 | # Supporting functions |
1815 | 27 | ############################################################################### | 80 | ############################################################################### |
1816 | 28 | 81 | ||
1897 | 29 | def unit_get(*args): | 82 | |
1898 | 30 | """Simple wrapper around unit-get, all arguments passed untouched""" | 83 | def ensure_package_status(packages, status): |
1899 | 31 | get_args = ["unit-get"] | 84 | if status in ['install', 'hold']: |
1900 | 32 | get_args.extend(args) | 85 | selections = ''.join(['{} {}\n'.format(package, status) |
1901 | 33 | return subprocess.check_output(get_args) | 86 | for package in packages]) |
1902 | 34 | 87 | dpkg = subprocess.Popen(['dpkg', '--set-selections'], | |
1903 | 35 | def juju_log(*args): | 88 | stdin=subprocess.PIPE) |
1904 | 36 | """Simple wrapper around juju-log, all arguments are passed untouched""" | 89 | dpkg.communicate(input=selections) |
1825 | 37 | log_args = ["juju-log"] | ||
1826 | 38 | log_args.extend(args) | ||
1827 | 39 | subprocess.call(log_args) | ||
1828 | 40 | |||
1829 | 41 | #------------------------------------------------------------------------------ | ||
1830 | 42 | # config_get: Returns a dictionary containing all of the config information | ||
1831 | 43 | # Optional parameter: scope | ||
1832 | 44 | # scope: limits the scope of the returned configuration to the | ||
1833 | 45 | # desired config item. | ||
1834 | 46 | #------------------------------------------------------------------------------ | ||
1835 | 47 | def config_get(scope=None): | ||
1836 | 48 | try: | ||
1837 | 49 | config_cmd_line = ['config-get'] | ||
1838 | 50 | if scope is not None: | ||
1839 | 51 | config_cmd_line.append(scope) | ||
1840 | 52 | config_cmd_line.append('--format=json') | ||
1841 | 53 | config_data = json.loads(subprocess.check_output(config_cmd_line)) | ||
1842 | 54 | except Exception, e: | ||
1843 | 55 | subprocess.call(['juju-log', str(e)]) | ||
1844 | 56 | config_data = None | ||
1845 | 57 | finally: | ||
1846 | 58 | return(config_data) | ||
1847 | 59 | |||
1848 | 60 | |||
1849 | 61 | #------------------------------------------------------------------------------ | ||
1850 | 62 | # relation_get: Returns a dictionary containing the relation information | ||
1851 | 63 | # Optional parameters: scope, relation_id | ||
1852 | 64 | # scope: limits the scope of the returned data to the | ||
1853 | 65 | # desired item. | ||
1854 | 66 | # unit_name: limits the data ( and optionally the scope ) | ||
1855 | 67 | # to the specified unit | ||
1856 | 68 | # relation_id: specify relation id for out of context usage. | ||
1857 | 69 | #------------------------------------------------------------------------------ | ||
1858 | 70 | def relation_get(scope=None, unit_name=None, relation_id=None): | ||
1859 | 71 | try: | ||
1860 | 72 | relation_cmd_line = ['relation-get', '--format=json'] | ||
1861 | 73 | if relation_id is not None: | ||
1862 | 74 | relation_cmd_line.extend(('-r', relation_id)) | ||
1863 | 75 | if scope is not None: | ||
1864 | 76 | relation_cmd_line.append(scope) | ||
1865 | 77 | else: | ||
1866 | 78 | relation_cmd_line.append('') | ||
1867 | 79 | if unit_name is not None: | ||
1868 | 80 | relation_cmd_line.append(unit_name) | ||
1869 | 81 | relation_data = json.loads(subprocess.check_output(relation_cmd_line)) | ||
1870 | 82 | except Exception, e: | ||
1871 | 83 | subprocess.call(['juju-log', str(e)]) | ||
1872 | 84 | relation_data = None | ||
1873 | 85 | finally: | ||
1874 | 86 | return(relation_data) | ||
1875 | 87 | |||
1876 | 88 | def relation_set(arguments, relation_id=None): | ||
1877 | 89 | """ | ||
1878 | 90 | Wrapper around relation-set | ||
1879 | 91 | @param arguments: list of command line arguments | ||
1880 | 92 | @param relation_id: optional relation-id (passed to -r parameter) to use | ||
1881 | 93 | """ | ||
1882 | 94 | set_args = ["relation-set"] | ||
1883 | 95 | if relation_id is not None: | ||
1884 | 96 | set_args.extend(["-r", str(relation_id)]) | ||
1885 | 97 | set_args.extend(arguments) | ||
1886 | 98 | subprocess.check_call(set_args) | ||
1887 | 99 | |||
1888 | 100 | #------------------------------------------------------------------------------ | ||
1889 | 101 | # apt_get_install( package ): Installs a package | ||
1890 | 102 | #------------------------------------------------------------------------------ | ||
1891 | 103 | def apt_get_install(packages=None): | ||
1892 | 104 | if packages is None: | ||
1893 | 105 | return(False) | ||
1894 | 106 | cmd_line = ['apt-get', '-y', 'install', '-qq'] | ||
1895 | 107 | cmd_line.append(packages) | ||
1896 | 108 | return(subprocess.call(cmd_line)) | ||
1905 | 109 | 90 | ||
1906 | 110 | 91 | ||
1907 | 111 | #------------------------------------------------------------------------------ | 92 | #------------------------------------------------------------------------------ |
1908 | @@ -113,8 +94,8 @@ | |||
1909 | 113 | #------------------------------------------------------------------------------ | 94 | #------------------------------------------------------------------------------ |
1910 | 114 | def enable_haproxy(): | 95 | def enable_haproxy(): |
1911 | 115 | default_haproxy = "/etc/default/haproxy" | 96 | default_haproxy = "/etc/default/haproxy" |
1914 | 116 | enabled_haproxy = \ | 97 | with open(default_haproxy) as f: |
1915 | 117 | open(default_haproxy).read().replace('ENABLED=0', 'ENABLED=1') | 98 | enabled_haproxy = f.read().replace('ENABLED=0', 'ENABLED=1') |
1916 | 118 | with open(default_haproxy, 'w') as f: | 99 | with open(default_haproxy, 'w') as f: |
1917 | 119 | f.write(enabled_haproxy) | 100 | f.write(enabled_haproxy) |
1918 | 120 | 101 | ||
1919 | @@ -137,8 +118,8 @@ | |||
1920 | 137 | if config_data['global_quiet'] is True: | 118 | if config_data['global_quiet'] is True: |
1921 | 138 | haproxy_globals.append(" quiet") | 119 | haproxy_globals.append(" quiet") |
1922 | 139 | haproxy_globals.append(" spread-checks %d" % | 120 | haproxy_globals.append(" spread-checks %d" % |
1925 | 140 | config_data['global_spread_checks']) | 121 | config_data['global_spread_checks']) |
1926 | 141 | return('\n'.join(haproxy_globals)) | 122 | return '\n'.join(haproxy_globals) |
1927 | 142 | 123 | ||
1928 | 143 | 124 | ||
1929 | 144 | #------------------------------------------------------------------------------ | 125 | #------------------------------------------------------------------------------ |
1930 | @@ -157,7 +138,7 @@ | |||
1931 | 157 | haproxy_defaults.append(" retries %d" % config_data['default_retries']) | 138 | haproxy_defaults.append(" retries %d" % config_data['default_retries']) |
1932 | 158 | for timeout_item in default_timeouts: | 139 | for timeout_item in default_timeouts: |
1933 | 159 | haproxy_defaults.append(" timeout %s" % timeout_item.strip()) | 140 | haproxy_defaults.append(" timeout %s" % timeout_item.strip()) |
1935 | 160 | return('\n'.join(haproxy_defaults)) | 141 | return '\n'.join(haproxy_defaults) |
1936 | 161 | 142 | ||
1937 | 162 | 143 | ||
1938 | 163 | #------------------------------------------------------------------------------ | 144 | #------------------------------------------------------------------------------ |
1939 | @@ -168,9 +149,9 @@ | |||
1940 | 168 | #------------------------------------------------------------------------------ | 149 | #------------------------------------------------------------------------------ |
1941 | 169 | def load_haproxy_config(haproxy_config_file="/etc/haproxy/haproxy.cfg"): | 150 | def load_haproxy_config(haproxy_config_file="/etc/haproxy/haproxy.cfg"): |
1942 | 170 | if os.path.isfile(haproxy_config_file): | 151 | if os.path.isfile(haproxy_config_file): |
1944 | 171 | return(open(haproxy_config_file).read()) | 152 | return open(haproxy_config_file).read() |
1945 | 172 | else: | 153 | else: |
1947 | 173 | return(None) | 154 | return None |
1948 | 174 | 155 | ||
1949 | 175 | 156 | ||
1950 | 176 | #------------------------------------------------------------------------------ | 157 | #------------------------------------------------------------------------------ |
1951 | @@ -182,12 +163,12 @@ | |||
1952 | 182 | def get_monitoring_password(haproxy_config_file="/etc/haproxy/haproxy.cfg"): | 163 | def get_monitoring_password(haproxy_config_file="/etc/haproxy/haproxy.cfg"): |
1953 | 183 | haproxy_config = load_haproxy_config(haproxy_config_file) | 164 | haproxy_config = load_haproxy_config(haproxy_config_file) |
1954 | 184 | if haproxy_config is None: | 165 | if haproxy_config is None: |
1956 | 185 | return(None) | 166 | return None |
1957 | 186 | m = re.search("stats auth\s+(\w+):(\w+)", haproxy_config) | 167 | m = re.search("stats auth\s+(\w+):(\w+)", haproxy_config) |
1958 | 187 | if m is not None: | 168 | if m is not None: |
1960 | 188 | return(m.group(2)) | 169 | return m.group(2) |
1961 | 189 | else: | 170 | else: |
1963 | 190 | return(None) | 171 | return None |
1964 | 191 | 172 | ||
1965 | 192 | 173 | ||
1966 | 193 | #------------------------------------------------------------------------------ | 174 | #------------------------------------------------------------------------------ |
1967 | @@ -197,32 +178,29 @@ | |||
1968 | 197 | # to open and close when exposing/unexposing a service | 178 | # to open and close when exposing/unexposing a service |
1969 | 198 | #------------------------------------------------------------------------------ | 179 | #------------------------------------------------------------------------------ |
1970 | 199 | def get_service_ports(haproxy_config_file="/etc/haproxy/haproxy.cfg"): | 180 | def get_service_ports(haproxy_config_file="/etc/haproxy/haproxy.cfg"): |
1971 | 181 | stanzas = get_listen_stanzas(haproxy_config_file=haproxy_config_file) | ||
1972 | 182 | return tuple((int(port) for service, addr, port in stanzas)) | ||
1973 | 183 | |||
1974 | 184 | |||
1975 | 185 | #------------------------------------------------------------------------------ | ||
1976 | 186 | # get_listen_stanzas: Convenience function that scans the existing haproxy | ||
1977 | 187 | # configuration file and returns a list of the existing | ||
1978 | 188 | # listen stanzas configured. | ||
1979 | 189 | #------------------------------------------------------------------------------ | ||
1980 | 190 | def get_listen_stanzas(haproxy_config_file="/etc/haproxy/haproxy.cfg"): | ||
1981 | 200 | haproxy_config = load_haproxy_config(haproxy_config_file) | 191 | haproxy_config = load_haproxy_config(haproxy_config_file) |
1982 | 201 | if haproxy_config is None: | 192 | if haproxy_config is None: |
2007 | 202 | return(None) | 193 | return () |
2008 | 203 | return(re.findall("listen.*:(.*)", haproxy_config)) | 194 | listen_stanzas = re.findall( |
2009 | 204 | 195 | "listen\s+([^\s]+)\s+([^:]+):(.*)", | |
2010 | 205 | 196 | haproxy_config) | |
2011 | 206 | #------------------------------------------------------------------------------ | 197 | bind_stanzas = re.findall( |
2012 | 207 | # open_port: Convenience function to open a port in juju to | 198 | "\s+bind\s+([^:]+):(\d+)\s*\n\s+default_backend\s+([^\s]+)", |
2013 | 208 | # expose a service | 199 | haproxy_config, re.M) |
2014 | 209 | #------------------------------------------------------------------------------ | 200 | return (tuple(((service, addr, int(port)) |
2015 | 210 | def open_port(port=None, protocol="TCP"): | 201 | for service, addr, port in listen_stanzas)) + |
2016 | 211 | if port is None: | 202 | tuple(((service, addr, int(port)) |
2017 | 212 | return(None) | 203 | for addr, port, service in bind_stanzas))) |
1994 | 213 | return(subprocess.call(['open-port', "%d/%s" % | ||
1995 | 214 | (int(port), protocol)])) | ||
1996 | 215 | |||
1997 | 216 | |||
1998 | 217 | #------------------------------------------------------------------------------ | ||
1999 | 218 | # close_port: Convenience function to close a port in juju to | ||
2000 | 219 | # unexpose a service | ||
2001 | 220 | #------------------------------------------------------------------------------ | ||
2002 | 221 | def close_port(port=None, protocol="TCP"): | ||
2003 | 222 | if port is None: | ||
2004 | 223 | return(None) | ||
2005 | 224 | return(subprocess.call(['close-port', "%d/%s" % | ||
2006 | 225 | (int(port), protocol)])) | ||
2018 | 226 | 204 | ||
2019 | 227 | 205 | ||
2020 | 228 | #------------------------------------------------------------------------------ | 206 | #------------------------------------------------------------------------------ |
2021 | @@ -232,26 +210,25 @@ | |||
2022 | 232 | #------------------------------------------------------------------------------ | 210 | #------------------------------------------------------------------------------ |
2023 | 233 | def update_service_ports(old_service_ports=None, new_service_ports=None): | 211 | def update_service_ports(old_service_ports=None, new_service_ports=None): |
2024 | 234 | if old_service_ports is None or new_service_ports is None: | 212 | if old_service_ports is None or new_service_ports is None: |
2026 | 235 | return(None) | 213 | return None |
2027 | 236 | for port in old_service_ports: | 214 | for port in old_service_ports: |
2028 | 237 | if port not in new_service_ports: | 215 | if port not in new_service_ports: |
2029 | 238 | close_port(port) | 216 | close_port(port) |
2030 | 239 | for port in new_service_ports: | 217 | for port in new_service_ports: |
2046 | 240 | if port not in old_service_ports: | 218 | open_port(port) |
2047 | 241 | open_port(port) | 219 | |
2048 | 242 | 220 | ||
2049 | 243 | 221 | #------------------------------------------------------------------------------ | |
2050 | 244 | #------------------------------------------------------------------------------ | 222 | # update_sysctl: create a sysctl.conf file from YAML-formatted 'sysctl' config |
2051 | 245 | # pwgen: Generates a random password | 223 | #------------------------------------------------------------------------------ |
2052 | 246 | # pwd_length: Defines the length of the password to generate | 224 | def update_sysctl(config_data): |
2053 | 247 | # default: 20 | 225 | sysctl_dict = yaml.load(config_data.get("sysctl", "{}")) |
2054 | 248 | #------------------------------------------------------------------------------ | 226 | if sysctl_dict: |
2055 | 249 | def pwgen(pwd_length=20): | 227 | sysctl_file = open("/etc/sysctl.d/50-haproxy.conf", "w") |
2056 | 250 | alphanumeric_chars = [l for l in (string.letters + string.digits) | 228 | for key in sysctl_dict: |
2057 | 251 | if l not in 'Iil0oO1'] | 229 | sysctl_file.write("{}={}\n".format(key, sysctl_dict[key])) |
2058 | 252 | random_chars = [random.choice(alphanumeric_chars) | 230 | sysctl_file.close() |
2059 | 253 | for i in range(pwd_length)] | 231 | subprocess.call(["sysctl", "-p", "/etc/sysctl.d/50-haproxy.conf"]) |
2045 | 254 | return(''.join(random_chars)) | ||
2060 | 255 | 232 | ||
2061 | 256 | 233 | ||
2062 | 257 | #------------------------------------------------------------------------------ | 234 | #------------------------------------------------------------------------------ |
2063 | @@ -271,22 +248,47 @@ | |||
2064 | 271 | service_port=None, service_options=None, | 248 | service_port=None, service_options=None, |
2065 | 272 | server_entries=None): | 249 | server_entries=None): |
2066 | 273 | if service_name is None or service_ip is None or service_port is None: | 250 | if service_name is None or service_ip is None or service_port is None: |
2068 | 274 | return(None) | 251 | return None |
2069 | 252 | fe_options = [] | ||
2070 | 253 | be_options = [] | ||
2071 | 254 | if service_options is not None: | ||
2072 | 255 | # For options that should be duplicated in both frontend and backend, | ||
2073 | 256 | # copy them to both. | ||
2074 | 257 | for o in dupe_options: | ||
2075 | 258 | if any(map(o.strip().startswith, service_options)): | ||
2076 | 259 | fe_options.append(o) | ||
2077 | 260 | be_options.append(o) | ||
2078 | 261 | # Filter provided service options into frontend-only and backend-only. | ||
2079 | 262 | results = izip( | ||
2080 | 263 | (fe_options, be_options), | ||
2081 | 264 | (True, False), | ||
2082 | 265 | tee((o, any(map(o.strip().startswith, | ||
2083 | 266 | frontend_only_options))) | ||
2084 | 267 | for o in service_options)) | ||
2085 | 268 | for out, cond, result in results: | ||
2086 | 269 | out.extend(option for option, match in result | ||
2087 | 270 | if match is cond and option not in out) | ||
2088 | 275 | service_config = [] | 271 | service_config = [] |
2097 | 276 | service_config.append("listen %s %s:%s" % | 272 | unit_name = os.environ["JUJU_UNIT_NAME"].replace("/", "-") |
2098 | 277 | (service_name, service_ip, service_port)) | 273 | service_config.append("frontend %s-%s" % (unit_name, service_port)) |
2099 | 278 | if service_options is not None: | 274 | service_config.append(" bind %s:%s" % |
2100 | 279 | for service_option in service_options: | 275 | (service_ip, service_port)) |
2101 | 280 | service_config.append(" %s" % service_option.strip()) | 276 | service_config.append(" default_backend %s" % (service_name,)) |
2102 | 281 | if server_entries is not None and isinstance(server_entries, list): | 277 | service_config.extend(" %s" % service_option.strip() |
2103 | 282 | for (server_name, server_ip, server_port, server_options) \ | 278 | for service_option in fe_options) |
2104 | 283 | in server_entries: | 279 | service_config.append("") |
2105 | 280 | service_config.append("backend %s" % (service_name,)) | ||
2106 | 281 | service_config.extend(" %s" % service_option.strip() | ||
2107 | 282 | for service_option in be_options) | ||
2108 | 283 | if isinstance(server_entries, (list, tuple)): | ||
2109 | 284 | for (server_name, server_ip, server_port, | ||
2110 | 285 | server_options) in server_entries: | ||
2111 | 284 | server_line = " server %s %s:%s" % \ | 286 | server_line = " server %s %s:%s" % \ |
2113 | 285 | (server_name, server_ip, server_port) | 287 | (server_name, server_ip, server_port) |
2114 | 286 | if server_options is not None: | 288 | if server_options is not None: |
2116 | 287 | server_line += " %s" % server_options | 289 | server_line += " %s" % " ".join(server_options) |
2117 | 288 | service_config.append(server_line) | 290 | service_config.append(server_line) |
2119 | 289 | return('\n'.join(service_config)) | 291 | return '\n'.join(service_config) |
2120 | 290 | 292 | ||
2121 | 291 | 293 | ||
2122 | 292 | #------------------------------------------------------------------------------ | 294 | #------------------------------------------------------------------------------ |
2123 | @@ -296,216 +298,234 @@ | |||
2124 | 296 | def create_monitoring_stanza(service_name="haproxy_monitoring"): | 298 | def create_monitoring_stanza(service_name="haproxy_monitoring"): |
2125 | 297 | config_data = config_get() | 299 | config_data = config_get() |
2126 | 298 | if config_data['enable_monitoring'] is False: | 300 | if config_data['enable_monitoring'] is False: |
2128 | 299 | return(None) | 301 | return None |
2129 | 300 | monitoring_password = get_monitoring_password() | 302 | monitoring_password = get_monitoring_password() |
2130 | 301 | if config_data['monitoring_password'] != "changeme": | 303 | if config_data['monitoring_password'] != "changeme": |
2131 | 302 | monitoring_password = config_data['monitoring_password'] | 304 | monitoring_password = config_data['monitoring_password'] |
2135 | 303 | elif monitoring_password is None and \ | 305 | elif (monitoring_password is None and |
2136 | 304 | config_data['monitoring_password'] == "changeme": | 306 | config_data['monitoring_password'] == "changeme"): |
2137 | 305 | monitoring_password = pwgen() | 307 | monitoring_password = pwgen(length=20) |
2138 | 306 | monitoring_config = [] | 308 | monitoring_config = [] |
2139 | 307 | monitoring_config.append("mode http") | 309 | monitoring_config.append("mode http") |
2140 | 308 | monitoring_config.append("acl allowed_cidr src %s" % | 310 | monitoring_config.append("acl allowed_cidr src %s" % |
2142 | 309 | config_data['monitoring_allowed_cidr']) | 311 | config_data['monitoring_allowed_cidr']) |
2143 | 310 | monitoring_config.append("block unless allowed_cidr") | 312 | monitoring_config.append("block unless allowed_cidr") |
2144 | 311 | monitoring_config.append("stats enable") | 313 | monitoring_config.append("stats enable") |
2145 | 312 | monitoring_config.append("stats uri /") | 314 | monitoring_config.append("stats uri /") |
2146 | 313 | monitoring_config.append("stats realm Haproxy\ Statistics") | 315 | monitoring_config.append("stats realm Haproxy\ Statistics") |
2147 | 314 | monitoring_config.append("stats auth %s:%s" % | 316 | monitoring_config.append("stats auth %s:%s" % |
2149 | 315 | (config_data['monitoring_username'], monitoring_password)) | 317 | (config_data['monitoring_username'], |
2150 | 318 | monitoring_password)) | ||
2151 | 316 | monitoring_config.append("stats refresh %d" % | 319 | monitoring_config.append("stats refresh %d" % |
2154 | 317 | config_data['monitoring_stats_refresh']) | 320 | config_data['monitoring_stats_refresh']) |
2155 | 318 | return(create_listen_stanza(service_name, | 321 | return create_listen_stanza(service_name, |
2156 | 319 | "0.0.0.0", | 322 | "0.0.0.0", |
2157 | 320 | config_data['monitoring_port'], | 323 | config_data['monitoring_port'], |
2169 | 321 | monitoring_config)) | 324 | monitoring_config) |
2170 | 322 | 325 | ||
2171 | 323 | def get_host_port(services_list): | 326 | |
2172 | 324 | """ | 327 | #------------------------------------------------------------------------------ |
2173 | 325 | Given a services list and global juju information, get a host | 328 | # get_config_services: Convenience function that returns a mapping containing |
2174 | 326 | and port for this system. | 329 | # all of the services configuration |
2175 | 327 | """ | 330 | #------------------------------------------------------------------------------ |
2165 | 328 | host = services_list[0]["service_host"] | ||
2166 | 329 | port = int(services_list[0]["service_port"]) | ||
2167 | 330 | return (host, port) | ||
2168 | 331 | |||
2176 | 332 | def get_config_services(): | 331 | def get_config_services(): |
2177 | 333 | """ | ||
2178 | 334 | Return dict of all services in the configuration, and in the relation | ||
2179 | 335 | where appropriate. If a relation contains a "services" key, read | ||
2180 | 336 | it in as yaml as is the case with the configuration. Set the host and | ||
2181 | 337 | port for any relation initiated service entry as those items cannot be | ||
2182 | 338 | known by the other side of the relation. In the case of a | ||
2183 | 339 | proxy configuration found, ensure the forward for option is set. | ||
2184 | 340 | """ | ||
2185 | 341 | config_data = config_get() | 332 | config_data = config_get() |
2203 | 342 | config_services_list = yaml.load(config_data['services']) | 333 | services = {} |
2204 | 343 | (host, port) = get_host_port(config_services_list) | 334 | for service in yaml.safe_load(config_data['services']): |
2205 | 344 | all_relations = relation_get_all("reverseproxy") | 335 | service_name = service["service_name"] |
2206 | 345 | services_list = [] | 336 | if not services: |
2207 | 346 | if hasattr(all_relations, "iteritems"): | 337 | # 'None' is used as a marker for the first service defined, which |
2208 | 347 | for relid, reldata in all_relations.iteritems(): | 338 | # is used as the default service if a proxied server doesn't |
2209 | 348 | for unit, relation_info in reldata.iteritems(): | 339 | # specify which service it is bound to. |
2210 | 349 | if relation_info.has_key("services"): | 340 | services[None] = {"service_name": service_name} |
2211 | 350 | rservices = yaml.load(relation_info["services"]) | 341 | if is_proxy(service_name) and ("option forwardfor" not in |
2212 | 351 | for r in rservices: | 342 | service["service_options"]): |
2213 | 352 | r["service_host"] = host | 343 | service["service_options"].append("option forwardfor") |
2214 | 353 | r["service_port"] = port | 344 | |
2215 | 354 | port += 1 | 345 | if isinstance(service["server_options"], basestring): |
2216 | 355 | services_list.extend(rservices) | 346 | service["server_options"] = service["server_options"].split() |
2217 | 356 | if len(services_list) == 0: | 347 | |
2218 | 357 | services_list = config_services_list | 348 | services[service_name] = service |
2219 | 358 | return(services_list) | 349 | |
2220 | 350 | return services | ||
2221 | 359 | 351 | ||
2222 | 360 | 352 | ||
2223 | 361 | #------------------------------------------------------------------------------ | 353 | #------------------------------------------------------------------------------ |
2224 | 362 | # get_config_service: Convenience function that returns a dictionary | 354 | # get_config_service: Convenience function that returns a dictionary |
2226 | 363 | # of the configuration of a given services configuration | 355 | # of the configuration of a given service's configuration |
2227 | 364 | #------------------------------------------------------------------------------ | 356 | #------------------------------------------------------------------------------ |
2228 | 365 | def get_config_service(service_name=None): | 357 | def get_config_service(service_name=None): |
2308 | 366 | services_list = get_config_services() | 358 | return get_config_services().get(service_name, None) |
2309 | 367 | for service_item in services_list: | 359 | |
2310 | 368 | if service_item['service_name'] == service_name: | 360 | |
2311 | 369 | return(service_item) | 361 | def is_proxy(service_name): |
2312 | 370 | return(None) | 362 | flag_path = os.path.join(default_haproxy_service_config_dir, |
2313 | 371 | 363 | "%s.is.proxy" % service_name) | |
2314 | 372 | 364 | return os.path.exists(flag_path) | |
2315 | 373 | def relation_get_all(relation_name): | 365 | |
2237 | 374 | """ | ||
2238 | 375 | Iterate through all relations, and return large data structure with the | ||
2239 | 376 | relation data set: | ||
2240 | 377 | |||
2241 | 378 | @param relation_name: The name of the relation to check | ||
2242 | 379 | |||
2243 | 380 | Returns: | ||
2244 | 381 | |||
2245 | 382 | relation_id: | ||
2246 | 383 | unit: | ||
2247 | 384 | key: value | ||
2248 | 385 | key2: value | ||
2249 | 386 | """ | ||
2250 | 387 | result = {} | ||
2251 | 388 | try: | ||
2252 | 389 | relids = subprocess.Popen( | ||
2253 | 390 | ['relation-ids', relation_name], stdout=subprocess.PIPE) | ||
2254 | 391 | for relid in [x.strip() for x in relids.stdout]: | ||
2255 | 392 | result[relid] = {} | ||
2256 | 393 | for unit in json.loads( | ||
2257 | 394 | subprocess.check_output( | ||
2258 | 395 | ['relation-list', '--format=json', '-r', relid])): | ||
2259 | 396 | result[relid][unit] = relation_get(None, unit, relid) | ||
2260 | 397 | return result | ||
2261 | 398 | except Exception, e: | ||
2262 | 399 | subprocess.call(['juju-log', str(e)]) | ||
2263 | 400 | |||
2264 | 401 | def get_services_dict(): | ||
2265 | 402 | """ | ||
2266 | 403 | Transform the services list into a dict for easier comprehension, | ||
2267 | 404 | and to ensure that we have only one entry per service type. If multiple | ||
2268 | 405 | relations specify the same server_name, try to union the servers | ||
2269 | 406 | entries. | ||
2270 | 407 | """ | ||
2271 | 408 | services_list = get_config_services() | ||
2272 | 409 | services_dict = {} | ||
2273 | 410 | |||
2274 | 411 | for service_item in services_list: | ||
2275 | 412 | if not hasattr(service_item, "iteritems"): | ||
2276 | 413 | juju_log("Each 'services' entry must be a dict: %s" % service_item) | ||
2277 | 414 | continue; | ||
2278 | 415 | if "service_name" not in service_item: | ||
2279 | 416 | juju_log("Missing 'service_name': %s" % service_item) | ||
2280 | 417 | continue; | ||
2281 | 418 | name = service_item["service_name"] | ||
2282 | 419 | options = service_item["service_options"] | ||
2283 | 420 | if name in services_dict: | ||
2284 | 421 | if "servers" in services_dict[name]: | ||
2285 | 422 | services_dict[name]["servers"].extend(service_item["servers"]) | ||
2286 | 423 | else: | ||
2287 | 424 | services_dict[name] = service_item | ||
2288 | 425 | if os.path.exists("%s/%s.is.proxy" % ( | ||
2289 | 426 | default_haproxy_service_config_dir, name)): | ||
2290 | 427 | if 'option forwardfor' not in options: | ||
2291 | 428 | options.append("option forwardfor") | ||
2292 | 429 | |||
2293 | 430 | return services_dict | ||
2294 | 431 | |||
2295 | 432 | def get_all_services(): | ||
2296 | 433 | """ | ||
2297 | 434 | Transform a services dict into an "all_services" relation setting expected | ||
2298 | 435 | by apache2. This is needed to ensure we have the port and hostname setting | ||
2299 | 436 | correct and in the proper format | ||
2300 | 437 | """ | ||
2301 | 438 | services = get_services_dict() | ||
2302 | 439 | all_services = [] | ||
2303 | 440 | for name in services: | ||
2304 | 441 | s = {"service_name": name, | ||
2305 | 442 | "service_port": services[name]["service_port"]} | ||
2306 | 443 | all_services.append(s) | ||
2307 | 444 | return all_services | ||
2316 | 445 | 366 | ||
2317 | 446 | #------------------------------------------------------------------------------ | 367 | #------------------------------------------------------------------------------ |
2318 | 447 | # create_services: Function that will create the services configuration | 368 | # create_services: Function that will create the services configuration |
2319 | 448 | # from the config data and/or relation information | 369 | # from the config data and/or relation information |
2320 | 449 | #------------------------------------------------------------------------------ | 370 | #------------------------------------------------------------------------------ |
2321 | 450 | def create_services(): | 371 | def create_services(): |
2355 | 451 | services_list = get_config_services() | 372 | services_dict = get_config_services() |
2356 | 452 | services_dict = get_services_dict() | 373 | if len(services_dict) == 0: |
2357 | 453 | 374 | log("No services configured, exiting.") | |
2358 | 454 | # service definitions overwrites user specified haproxy file in | 375 | return |
2359 | 455 | # a pseudo-template form | 376 | |
2360 | 456 | all_relations = relation_get_all("reverseproxy") | 377 | relation_data = relations_of_type("reverseproxy") |
2361 | 457 | for relid, reldata in all_relations.iteritems(): | 378 | |
2362 | 458 | for unit, relation_info in reldata.iteritems(): | 379 | for relation_info in relation_data: |
2363 | 459 | if not isinstance(relation_info, dict): | 380 | unit = relation_info['__unit__'] |
2364 | 460 | sys.exit(0) | 381 | juju_service_name = unit.rpartition('/')[0] |
2365 | 461 | if "services" in relation_info: | 382 | |
2366 | 462 | juju_log("Relation %s has services override defined" % relid) | 383 | relation_ok = True |
2367 | 463 | continue; | 384 | for required in ("port", "private-address", "hostname"): |
2368 | 464 | if "hostname" not in relation_info or "port" not in relation_info: | 385 | if not required in relation_info: |
2369 | 465 | juju_log("Relation %s needs hostname and port defined" % relid) | 386 | log("No %s in relation data for '%s', skipping." % |
2370 | 466 | continue; | 387 | (required, unit)) |
2371 | 467 | juju_service_name = unit.rpartition('/')[0] | 388 | relation_ok = False |
2372 | 468 | # Mandatory switches ( hostname, port ) | 389 | break |
2373 | 469 | server_name = "%s__%s" % ( | 390 | |
2374 | 470 | relation_info['hostname'].replace('.', '_'), | 391 | if not relation_ok: |
2375 | 471 | relation_info['port']) | 392 | continue |
2376 | 472 | server_ip = relation_info['hostname'] | 393 | |
2377 | 473 | server_port = relation_info['port'] | 394 | # Mandatory switches ( private-address, port ) |
2378 | 474 | # Optional switches ( service_name ) | 395 | host = relation_info['private-address'] |
2379 | 475 | if 'service_name' in relation_info: | 396 | port = relation_info['port'] |
2380 | 476 | if relation_info['service_name'] in services_dict: | 397 | server_name = ("%s-%s" % (unit.replace("/", "-"), port)) |
2381 | 477 | service_name = relation_info['service_name'] | 398 | |
2382 | 478 | else: | 399 | # Optional switches ( service_name, sitenames ) |
2383 | 479 | juju_log("service %s does not exist." % ( | 400 | service_names = set() |
2384 | 480 | relation_info['service_name'])) | 401 | if 'service_name' in relation_info: |
2385 | 481 | sys.exit(1) | 402 | if relation_info['service_name'] in services_dict: |
2386 | 482 | elif juju_service_name + '_service' in services_dict: | 403 | service_names.add(relation_info['service_name']) |
2354 | 483 | service_name = juju_service_name + '_service' | ||
2387 | 484 | else: | 404 | else: |
2389 | 485 | service_name = services_list[0]['service_name'] | 405 | log("Service '%s' does not exist." % |
2390 | 406 | relation_info['service_name']) | ||
2391 | 407 | continue | ||
2392 | 408 | |||
2393 | 409 | if 'sitenames' in relation_info: | ||
2394 | 410 | sitenames = relation_info['sitenames'].split() | ||
2395 | 411 | for sitename in sitenames: | ||
2396 | 412 | if sitename in services_dict: | ||
2397 | 413 | service_names.add(sitename) | ||
2398 | 414 | |||
2399 | 415 | if juju_service_name + "_service" in services_dict: | ||
2400 | 416 | service_names.add(juju_service_name + "_service") | ||
2401 | 417 | |||
2402 | 418 | if juju_service_name in services_dict: | ||
2403 | 419 | service_names.add(juju_service_name) | ||
2404 | 420 | |||
2405 | 421 | if not service_names: | ||
2406 | 422 | service_names.add(services_dict[None]["service_name"]) | ||
2407 | 423 | |||
2408 | 424 | for service_name in service_names: | ||
2409 | 425 | service = services_dict[service_name] | ||
2410 | 426 | |||
2411 | 486 | # Add the server entries | 427 | # Add the server entries |
2418 | 487 | if not 'servers' in services_dict[service_name]: | 428 | servers = service.setdefault("servers", []) |
2419 | 488 | services_dict[service_name]['servers'] = [] | 429 | servers.append((server_name, host, port, |
2420 | 489 | services_dict[service_name]['servers'].append(( | 430 | services_dict[service_name].get( |
2421 | 490 | server_name, server_ip, server_port, | 431 | 'server_options', []))) |
2422 | 491 | services_dict[service_name]['server_options'])) | 432 | |
2423 | 492 | 433 | has_servers = False | |
2424 | 434 | for service_name, service in services_dict.iteritems(): | ||
2425 | 435 | if service.get("servers", []): | ||
2426 | 436 | has_servers = True | ||
2427 | 437 | |||
2428 | 438 | if not has_servers: | ||
2429 | 439 | log("No backend servers, exiting.") | ||
2430 | 440 | return | ||
2431 | 441 | |||
2432 | 442 | del services_dict[None] | ||
2433 | 443 | services_dict = apply_peer_config(services_dict) | ||
2434 | 444 | write_service_config(services_dict) | ||
2435 | 445 | return services_dict | ||
2436 | 446 | |||
2437 | 447 | |||
2438 | 448 | def apply_peer_config(services_dict): | ||
2439 | 449 | peer_data = relations_of_type("peer") | ||
2440 | 450 | |||
2441 | 451 | peer_services = {} | ||
2442 | 452 | for relation_info in peer_data: | ||
2443 | 453 | unit_name = relation_info["__unit__"] | ||
2444 | 454 | peer_services_data = relation_info.get("all_services") | ||
2445 | 455 | if peer_services_data is None: | ||
2446 | 456 | continue | ||
2447 | 457 | service_data = yaml.safe_load(peer_services_data) | ||
2448 | 458 | for service in service_data: | ||
2449 | 459 | service_name = service["service_name"] | ||
2450 | 460 | if service_name in services_dict: | ||
2451 | 461 | peer_service = peer_services.setdefault(service_name, {}) | ||
2452 | 462 | peer_service["service_name"] = service_name | ||
2453 | 463 | peer_service["service_host"] = service["service_host"] | ||
2454 | 464 | peer_service["service_port"] = service["service_port"] | ||
2455 | 465 | peer_service["service_options"] = ["balance leastconn", | ||
2456 | 466 | "mode tcp", | ||
2457 | 467 | "option tcplog"] | ||
2458 | 468 | servers = peer_service.setdefault("servers", []) | ||
2459 | 469 | servers.append((unit_name.replace("/", "-"), | ||
2460 | 470 | relation_info["private-address"], | ||
2461 | 471 | service["service_port"] + 1, ["check"])) | ||
2462 | 472 | |||
2463 | 473 | if not peer_services: | ||
2464 | 474 | return services_dict | ||
2465 | 475 | |||
2466 | 476 | unit_name = os.environ["JUJU_UNIT_NAME"].replace("/", "-") | ||
2467 | 477 | private_address = unit_get("private-address") | ||
2468 | 478 | for service_name, peer_service in peer_services.iteritems(): | ||
2469 | 479 | original_service = services_dict[service_name] | ||
2470 | 480 | |||
2471 | 481 | # If the original service has timeout settings, copy them over to the | ||
2472 | 482 | # peer service. | ||
2473 | 483 | for option in original_service.get("service_options", ()): | ||
2474 | 484 | if "timeout" in option: | ||
2475 | 485 | peer_service["service_options"].append(option) | ||
2476 | 486 | |||
2477 | 487 | servers = peer_service["servers"] | ||
2478 | 488 | # Add ourselves to the list of servers for the peer listen stanza. | ||
2479 | 489 | servers.append((unit_name, private_address, | ||
2480 | 490 | original_service["service_port"] + 1, | ||
2481 | 491 | ["check"])) | ||
2482 | 492 | |||
2483 | 493 | # Make all but the first server in the peer listen stanza a backup | ||
2484 | 494 | # server. | ||
2485 | 495 | servers.sort() | ||
2486 | 496 | for server in servers[1:]: | ||
2487 | 497 | server[3].append("backup") | ||
2488 | 498 | |||
2489 | 499 | # Remap original service port, will now be used by peer listen stanza. | ||
2490 | 500 | original_service["service_port"] += 1 | ||
2491 | 501 | |||
2492 | 502 | # Remap original service to a new name, stuff peer listen stanza into | ||
2493 | 503 | # it's place. | ||
2494 | 504 | be_service = service_name + "_be" | ||
2495 | 505 | original_service["service_name"] = be_service | ||
2496 | 506 | services_dict[be_service] = original_service | ||
2497 | 507 | services_dict[service_name] = peer_service | ||
2498 | 508 | |||
2499 | 509 | return services_dict | ||
2500 | 510 | |||
2501 | 511 | |||
2502 | 512 | def write_service_config(services_dict): | ||
2503 | 493 | # Construct the new haproxy.cfg file | 513 | # Construct the new haproxy.cfg file |
2519 | 494 | for service in services_dict: | 514 | for service_key, service_config in services_dict.items(): |
2520 | 495 | juju_log("Service: ", service) | 515 | log("Service: %s" % service_key) |
2521 | 496 | server_entries = None | 516 | server_entries = service_config.get('servers') |
2522 | 497 | if 'servers' in services_dict[service]: | 517 | |
2523 | 498 | server_entries = services_dict[service]['servers'] | 518 | service_name = service_config["service_name"] |
2524 | 499 | service_config_file = "%s/%s.service" % ( | 519 | if not os.path.exists(default_haproxy_service_config_dir): |
2525 | 500 | default_haproxy_service_config_dir, | 520 | os.mkdir(default_haproxy_service_config_dir, 0600) |
2526 | 501 | services_dict[service]['service_name']) | 521 | with open(os.path.join(default_haproxy_service_config_dir, |
2527 | 502 | with open(service_config_file, 'w') as service_config: | 522 | "%s.service" % service_name), 'w') as config: |
2528 | 503 | service_config.write( | 523 | config.write(create_listen_stanza( |
2529 | 504 | create_listen_stanza(services_dict[service]['service_name'], | 524 | service_name, |
2530 | 505 | services_dict[service]['service_host'], | 525 | service_config['service_host'], |
2531 | 506 | services_dict[service]['service_port'], | 526 | service_config['service_port'], |
2532 | 507 | services_dict[service]['service_options'], | 527 | service_config['service_options'], |
2533 | 508 | server_entries)) | 528 | server_entries)) |
2534 | 509 | 529 | ||
2535 | 510 | 530 | ||
2536 | 511 | #------------------------------------------------------------------------------ | 531 | #------------------------------------------------------------------------------ |
2537 | @@ -516,17 +536,19 @@ | |||
2538 | 516 | services = '' | 536 | services = '' |
2539 | 517 | if service_name is not None: | 537 | if service_name is not None: |
2540 | 518 | if os.path.exists("%s/%s.service" % | 538 | if os.path.exists("%s/%s.service" % |
2544 | 519 | (default_haproxy_service_config_dir, service_name)): | 539 | (default_haproxy_service_config_dir, service_name)): |
2545 | 520 | services = open("%s/%s.service" % | 540 | with open("%s/%s.service" % (default_haproxy_service_config_dir, |
2546 | 521 | (default_haproxy_service_config_dir, service_name)).read() | 541 | service_name)) as f: |
2547 | 542 | services = f.read() | ||
2548 | 522 | else: | 543 | else: |
2549 | 523 | services = None | 544 | services = None |
2550 | 524 | else: | 545 | else: |
2551 | 525 | for service in glob.glob("%s/*.service" % | 546 | for service in glob.glob("%s/*.service" % |
2556 | 526 | default_haproxy_service_config_dir): | 547 | default_haproxy_service_config_dir): |
2557 | 527 | services += open(service).read() | 548 | with open(service) as f: |
2558 | 528 | services += "\n\n" | 549 | services += f.read() |
2559 | 529 | return(services) | 550 | services += "\n\n" |
2560 | 551 | return services | ||
2561 | 530 | 552 | ||
2562 | 531 | 553 | ||
2563 | 532 | #------------------------------------------------------------------------------ | 554 | #------------------------------------------------------------------------------ |
2564 | @@ -537,24 +559,24 @@ | |||
2565 | 537 | #------------------------------------------------------------------------------ | 559 | #------------------------------------------------------------------------------ |
2566 | 538 | def remove_services(service_name=None): | 560 | def remove_services(service_name=None): |
2567 | 539 | if service_name is not None: | 561 | if service_name is not None: |
2570 | 540 | if os.path.exists("%s/%s.service" % | 562 | path = "%s/%s.service" % (default_haproxy_service_config_dir, |
2571 | 541 | (default_haproxy_service_config_dir, service_name)): | 563 | service_name) |
2572 | 564 | if os.path.exists(path): | ||
2573 | 542 | try: | 565 | try: |
2577 | 543 | os.remove("%s/%s.service" % | 566 | os.remove(path) |
2575 | 544 | (default_haproxy_service_config_dir, service_name)) | ||
2576 | 545 | return(True) | ||
2578 | 546 | except Exception, e: | 567 | except Exception, e: |
2581 | 547 | subprocess.call(['juju-log', str(e)]) | 568 | log(str(e)) |
2582 | 548 | return(False) | 569 | return False |
2583 | 570 | return True | ||
2584 | 549 | else: | 571 | else: |
2585 | 550 | for service in glob.glob("%s/*.service" % | 572 | for service in glob.glob("%s/*.service" % |
2587 | 551 | default_haproxy_service_config_dir): | 573 | default_haproxy_service_config_dir): |
2588 | 552 | try: | 574 | try: |
2589 | 553 | os.remove(service) | 575 | os.remove(service) |
2590 | 554 | except Exception, e: | 576 | except Exception, e: |
2592 | 555 | subprocess.call(['juju-log', str(e)]) | 577 | log(str(e)) |
2593 | 556 | pass | 578 | pass |
2595 | 557 | return(True) | 579 | return True |
2596 | 558 | 580 | ||
2597 | 559 | 581 | ||
2598 | 560 | #------------------------------------------------------------------------------ | 582 | #------------------------------------------------------------------------------ |
2599 | @@ -567,27 +589,18 @@ | |||
2600 | 567 | # optional arguments | 589 | # optional arguments |
2601 | 568 | #------------------------------------------------------------------------------ | 590 | #------------------------------------------------------------------------------ |
2602 | 569 | def construct_haproxy_config(haproxy_globals=None, | 591 | def construct_haproxy_config(haproxy_globals=None, |
2609 | 570 | haproxy_defaults=None, | 592 | haproxy_defaults=None, |
2610 | 571 | haproxy_monitoring=None, | 593 | haproxy_monitoring=None, |
2611 | 572 | haproxy_services=None): | 594 | haproxy_services=None): |
2612 | 573 | if haproxy_globals is None or \ | 595 | if None in (haproxy_globals, haproxy_defaults): |
2613 | 574 | haproxy_defaults is None: | 596 | return |
2608 | 575 | return(None) | ||
2614 | 576 | with open(default_haproxy_config, 'w') as haproxy_config: | 597 | with open(default_haproxy_config, 'w') as haproxy_config: |
2629 | 577 | haproxy_config.write(haproxy_globals) | 598 | config_string = '' |
2630 | 578 | haproxy_config.write("\n") | 599 | for config in (haproxy_globals, haproxy_defaults, haproxy_monitoring, |
2631 | 579 | haproxy_config.write("\n") | 600 | haproxy_services): |
2632 | 580 | haproxy_config.write(haproxy_defaults) | 601 | if config is not None: |
2633 | 581 | haproxy_config.write("\n") | 602 | config_string += config + '\n\n' |
2634 | 582 | haproxy_config.write("\n") | 603 | haproxy_config.write(config_string) |
2621 | 583 | if haproxy_monitoring is not None: | ||
2622 | 584 | haproxy_config.write(haproxy_monitoring) | ||
2623 | 585 | haproxy_config.write("\n") | ||
2624 | 586 | haproxy_config.write("\n") | ||
2625 | 587 | if haproxy_services is not None: | ||
2626 | 588 | haproxy_config.write(haproxy_services) | ||
2627 | 589 | haproxy_config.write("\n") | ||
2628 | 590 | haproxy_config.write("\n") | ||
2635 | 591 | 604 | ||
2636 | 592 | 605 | ||
2637 | 593 | #------------------------------------------------------------------------------ | 606 | #------------------------------------------------------------------------------ |
2638 | @@ -595,50 +608,37 @@ | |||
2639 | 595 | # the haproxy service | 608 | # the haproxy service |
2640 | 596 | #------------------------------------------------------------------------------ | 609 | #------------------------------------------------------------------------------ |
2641 | 597 | def service_haproxy(action=None, haproxy_config=default_haproxy_config): | 610 | def service_haproxy(action=None, haproxy_config=default_haproxy_config): |
2644 | 598 | if action is None or haproxy_config is None: | 611 | if None in (action, haproxy_config): |
2645 | 599 | return(None) | 612 | return None |
2646 | 600 | elif action == "check": | 613 | elif action == "check": |
2655 | 601 | retVal = subprocess.call( | 614 | command = ['/usr/sbin/haproxy', '-f', haproxy_config, '-c'] |
2648 | 602 | ['/usr/sbin/haproxy', '-f', haproxy_config, '-c']) | ||
2649 | 603 | if retVal == 1: | ||
2650 | 604 | return(False) | ||
2651 | 605 | elif retVal == 0: | ||
2652 | 606 | return(True) | ||
2653 | 607 | else: | ||
2654 | 608 | return(False) | ||
2656 | 609 | else: | 615 | else: |
2672 | 610 | retVal = subprocess.call(['service', 'haproxy', action]) | 616 | command = ['service', 'haproxy', action] |
2673 | 611 | if retVal == 0: | 617 | return_value = subprocess.call(command) |
2674 | 612 | return(True) | 618 | return return_value == 0 |
2660 | 613 | else: | ||
2661 | 614 | return(False) | ||
2662 | 615 | |||
2663 | 616 | def website_notify(): | ||
2664 | 617 | """ | ||
2665 | 618 | Notify any website relations of any configuration changes. | ||
2666 | 619 | """ | ||
2667 | 620 | juju_log("Notifying all website relations of change") | ||
2668 | 621 | all_relations = relation_get_all("website") | ||
2669 | 622 | if hasattr(all_relations, "iteritems"): | ||
2670 | 623 | for relid, reldata in all_relations.iteritems(): | ||
2671 | 624 | relation_set(["time=%s" % time.time()], relation_id=relid) | ||
2675 | 625 | 619 | ||
2676 | 626 | 620 | ||
2677 | 627 | ############################################################################### | 621 | ############################################################################### |
2678 | 628 | # Hook functions | 622 | # Hook functions |
2679 | 629 | ############################################################################### | 623 | ############################################################################### |
2680 | 630 | def install_hook(): | 624 | def install_hook(): |
2681 | 631 | for f in glob.glob('exec.d/*/charm-pre-install'): | ||
2682 | 632 | if os.path.isfile(f) and os.access(f, os.X_OK): | ||
2683 | 633 | subprocess.check_call(['sh', '-c', f]) | ||
2684 | 634 | if not os.path.exists(default_haproxy_service_config_dir): | 625 | if not os.path.exists(default_haproxy_service_config_dir): |
2685 | 635 | os.mkdir(default_haproxy_service_config_dir, 0600) | 626 | os.mkdir(default_haproxy_service_config_dir, 0600) |
2687 | 636 | return ((apt_get_install("haproxy") == enable_haproxy()) is True) | 627 | |
2688 | 628 | apt_install('haproxy', fatal=True) | ||
2689 | 629 | ensure_package_status(service_affecting_packages, | ||
2690 | 630 | config_get('package_status')) | ||
2691 | 631 | enable_haproxy() | ||
2692 | 637 | 632 | ||
2693 | 638 | 633 | ||
2694 | 639 | def config_changed(): | 634 | def config_changed(): |
2695 | 640 | config_data = config_get() | 635 | config_data = config_get() |
2697 | 641 | current_service_ports = get_service_ports() | 636 | |
2698 | 637 | ensure_package_status(service_affecting_packages, | ||
2699 | 638 | config_data['package_status']) | ||
2700 | 639 | |||
2701 | 640 | old_service_ports = get_service_ports() | ||
2702 | 641 | old_stanzas = get_listen_stanzas() | ||
2703 | 642 | haproxy_globals = create_haproxy_globals() | 642 | haproxy_globals = create_haproxy_globals() |
2704 | 643 | haproxy_defaults = create_haproxy_defaults() | 643 | haproxy_defaults = create_haproxy_defaults() |
2705 | 644 | if config_data['enable_monitoring'] is True: | 644 | if config_data['enable_monitoring'] is True: |
2706 | @@ -648,105 +648,177 @@ | |||
2707 | 648 | remove_services() | 648 | remove_services() |
2708 | 649 | create_services() | 649 | create_services() |
2709 | 650 | haproxy_services = load_services() | 650 | haproxy_services = load_services() |
2710 | 651 | update_sysctl(config_data) | ||
2711 | 651 | construct_haproxy_config(haproxy_globals, | 652 | construct_haproxy_config(haproxy_globals, |
2712 | 652 | haproxy_defaults, | 653 | haproxy_defaults, |
2713 | 653 | haproxy_monitoring, | 654 | haproxy_monitoring, |
2714 | 654 | haproxy_services) | 655 | haproxy_services) |
2715 | 655 | 656 | ||
2716 | 656 | if service_haproxy("check"): | 657 | if service_haproxy("check"): |
2719 | 657 | updated_service_ports = get_service_ports() | 658 | update_service_ports(old_service_ports, get_service_ports()) |
2718 | 658 | update_service_ports(current_service_ports, updated_service_ports) | ||
2720 | 659 | service_haproxy("reload") | 659 | service_haproxy("reload") |
2721 | 660 | if not (get_listen_stanzas() == old_stanzas): | ||
2722 | 661 | notify_website() | ||
2723 | 662 | notify_peer() | ||
2724 | 663 | else: | ||
2725 | 664 | # XXX Ideally the config should be restored to a working state if the | ||
2726 | 665 | # check fails, otherwise an inadvertent reload will cause the service | ||
2727 | 666 | # to be broken. | ||
2728 | 667 | log("HAProxy configuration check failed, exiting.") | ||
2729 | 668 | sys.exit(1) | ||
2730 | 660 | 669 | ||
2731 | 661 | 670 | ||
2732 | 662 | def start_hook(): | 671 | def start_hook(): |
2733 | 663 | if service_haproxy("status"): | 672 | if service_haproxy("status"): |
2735 | 664 | return(service_haproxy("restart")) | 673 | return service_haproxy("restart") |
2736 | 665 | else: | 674 | else: |
2738 | 666 | return(service_haproxy("start")) | 675 | return service_haproxy("start") |
2739 | 667 | 676 | ||
2740 | 668 | 677 | ||
2741 | 669 | def stop_hook(): | 678 | def stop_hook(): |
2742 | 670 | if service_haproxy("status"): | 679 | if service_haproxy("status"): |
2744 | 671 | return(service_haproxy("stop")) | 680 | return service_haproxy("stop") |
2745 | 672 | 681 | ||
2746 | 673 | 682 | ||
2747 | 674 | def reverseproxy_interface(hook_name=None): | 683 | def reverseproxy_interface(hook_name=None): |
2748 | 675 | if hook_name is None: | 684 | if hook_name is None: |
2756 | 676 | return(None) | 685 | return None |
2757 | 677 | elif hook_name == "changed": | 686 | if hook_name in ("changed", "departed"): |
2758 | 678 | config_changed() | 687 | config_changed() |
2759 | 679 | website_notify() | 688 | |
2753 | 680 | elif hook_name=="departed": | ||
2754 | 681 | config_changed() | ||
2755 | 682 | website_notify() | ||
2760 | 683 | 689 | ||
2761 | 684 | def website_interface(hook_name=None): | 690 | def website_interface(hook_name=None): |
2762 | 685 | if hook_name is None: | 691 | if hook_name is None: |
2764 | 686 | return(None) | 692 | return None |
2765 | 693 | # Notify website relation but only for the current relation in context. | ||
2766 | 694 | notify_website(changed=hook_name == "changed", | ||
2767 | 695 | relation_ids=(relation_id(),)) | ||
2768 | 696 | |||
2769 | 697 | |||
2770 | 698 | def get_hostname(host=None): | ||
2771 | 699 | my_host = socket.gethostname() | ||
2772 | 700 | if host is None or host == "0.0.0.0": | ||
2773 | 701 | # If the listen ip has been set to 0.0.0.0 then pass back the hostname | ||
2774 | 702 | return socket.getfqdn(my_host) | ||
2775 | 703 | elif host == "localhost": | ||
2776 | 704 | # If the fqdn lookup has returned localhost (lxc setups) then return | ||
2777 | 705 | # hostname | ||
2778 | 706 | return my_host | ||
2779 | 707 | return host | ||
2780 | 708 | |||
2781 | 709 | |||
2782 | 710 | def notify_relation(relation, changed=False, relation_ids=None): | ||
2783 | 711 | config_data = config_get() | ||
2784 | 712 | default_host = get_hostname() | ||
2785 | 687 | default_port = 80 | 713 | default_port = 80 |
2816 | 688 | relation_data = relation_get() | 714 | |
2817 | 689 | 715 | for rid in relation_ids or get_relation_ids(relation): | |
2818 | 690 | # If a specfic service has been asked for then return the ip:port for | 716 | service_names = set() |
2819 | 691 | # that service, else pass back the default | 717 | if rid is None: |
2820 | 692 | if 'service_name' in relation_data: | 718 | rid = relation_id() |
2821 | 693 | service_name = relation_data['service_name'] | 719 | for relation_data in relations_for_id(rid): |
2822 | 694 | requestedservice = get_config_service(service_name) | 720 | if 'service_name' in relation_data: |
2823 | 695 | my_host = requestedservice['service_host'] | 721 | service_names.add(relation_data['service_name']) |
2824 | 696 | my_port = requestedservice['service_port'] | 722 | |
2825 | 697 | else: | 723 | if changed: |
2826 | 698 | my_host = socket.getfqdn(socket.gethostname()) | 724 | if 'is-proxy' in relation_data: |
2827 | 699 | my_port = default_port | 725 | remote_service = ("%s__%d" % (relation_data['hostname'], |
2828 | 700 | 726 | relation_data['port'])) | |
2829 | 701 | # If the listen ip has been set to 0.0.0.0 then pass back the hostname | 727 | open("%s/%s.is.proxy" % ( |
2830 | 702 | if my_host == "0.0.0.0": | 728 | default_haproxy_service_config_dir, |
2831 | 703 | my_host = socket.getfqdn(socket.gethostname()) | 729 | remote_service), 'a').close() |
2832 | 704 | 730 | ||
2833 | 705 | # If the fqdn lookup has returned localhost (lxc setups) then return | 731 | service_name = None |
2834 | 706 | # hostname | 732 | if len(service_names) == 1: |
2835 | 707 | if my_host == "localhost": | 733 | service_name = service_names.pop() |
2836 | 708 | my_host = socket.gethostname() | 734 | elif len(service_names) > 1: |
2837 | 709 | subprocess.call( | 735 | log("Remote units requested more than a single service name. " |
2838 | 710 | ['relation-set', 'port=%d' % my_port, 'hostname=%s' % my_host, | 736 | "Falling back to default host/port.") |
2839 | 711 | 'all_services=%s' % yaml.safe_dump(get_all_services())]) | 737 | |
2840 | 712 | if hook_name == "changed": | 738 | if service_name is not None: |
2841 | 713 | if 'is-proxy' in relation_data: | 739 | # If a specfic service has been asked for then return the ip:port |
2842 | 714 | service_name = "%s__%d" % \ | 739 | # If a specific service has been asked for then return the ip:port |
2843 | 715 | (relation_data['hostname'], relation_data['port']) | 741 | requestedservice = get_config_service(service_name) |
2844 | 716 | open("%s/%s.is.proxy" % | 742 | my_host = get_hostname(requestedservice['service_host']) |
2845 | 717 | (default_haproxy_service_config_dir, service_name), 'a').close() | 743 | my_port = requestedservice['service_port'] |
2846 | 744 | else: | ||
2847 | 745 | my_host = default_host | ||
2848 | 746 | my_port = default_port | ||
2849 | 747 | |||
2850 | 748 | relation_set(relation_id=rid, port=str(my_port), | ||
2851 | 749 | hostname=my_host, | ||
2852 | 750 | all_services=config_data['services']) | ||
2853 | 751 | |||
2854 | 752 | |||
2855 | 753 | def notify_website(changed=False, relation_ids=None): | ||
2856 | 754 | notify_relation("website", changed=changed, relation_ids=relation_ids) | ||
2857 | 755 | |||
2858 | 756 | |||
2859 | 757 | def notify_peer(changed=False, relation_ids=None): | ||
2860 | 758 | notify_relation("peer", changed=changed, relation_ids=relation_ids) | ||
2861 | 759 | |||
2862 | 760 | |||
2863 | 761 | def install_nrpe_scripts(): | ||
2864 | 762 | scripts_src = os.path.join(os.environ["CHARM_DIR"], "files", | ||
2865 | 763 | "nrpe") | ||
2866 | 764 | scripts_dst = "/usr/lib/nagios/plugins" | ||
2867 | 765 | if not os.path.exists(scripts_dst): | ||
2868 | 766 | os.makedirs(scripts_dst) | ||
2869 | 767 | for fname in glob.glob(os.path.join(scripts_src, "*.sh")): | ||
2870 | 768 | shutil.copy2(fname, | ||
2871 | 769 | os.path.join(scripts_dst, os.path.basename(fname))) | ||
2872 | 770 | |||
2873 | 718 | 771 | ||
2874 | 719 | def update_nrpe_config(): | 772 | def update_nrpe_config(): |
2875 | 773 | install_nrpe_scripts() | ||
2876 | 720 | nrpe_compat = nrpe.NRPE() | 774 | nrpe_compat = nrpe.NRPE() |
2879 | 721 | nrpe_compat.add_check('haproxy','Check HAProxy', 'check_haproxy.sh') | 775 | nrpe_compat.add_check('haproxy', 'Check HAProxy', 'check_haproxy.sh') |
2880 | 722 | nrpe_compat.add_check('haproxy_queue','Check HAProxy queue depth', 'check_haproxy_queue_depth.sh') | 776 | nrpe_compat.add_check('haproxy_queue', 'Check HAProxy queue depth', |
2881 | 777 | 'check_haproxy_queue_depth.sh') | ||
2882 | 723 | nrpe_compat.write() | 778 | nrpe_compat.write() |
2883 | 724 | 779 | ||
2884 | 780 | |||
2885 | 725 | ############################################################################### | 781 | ############################################################################### |
2886 | 726 | # Main section | 782 | # Main section |
2887 | 727 | ############################################################################### | 783 | ############################################################################### |
2890 | 728 | if __name__ == "__main__": | 784 | |
2891 | 729 | if HOOK_NAME == "install": | 785 | |
2892 | 786 | def main(hook_name): | ||
2893 | 787 | if hook_name == "install": | ||
2894 | 730 | install_hook() | 788 | install_hook() |
2896 | 731 | elif HOOK_NAME == "config-changed": | 789 | elif hook_name in ("config-changed", "upgrade-charm"): |
2897 | 732 | config_changed() | 790 | config_changed() |
2898 | 733 | update_nrpe_config() | 791 | update_nrpe_config() |
2900 | 734 | elif HOOK_NAME == "start": | 792 | elif hook_name == "start": |
2901 | 735 | start_hook() | 793 | start_hook() |
2903 | 736 | elif HOOK_NAME == "stop": | 794 | elif hook_name == "stop": |
2904 | 737 | stop_hook() | 795 | stop_hook() |
2906 | 738 | elif HOOK_NAME == "reverseproxy-relation-broken": | 796 | elif hook_name == "reverseproxy-relation-broken": |
2907 | 739 | config_changed() | 797 | config_changed() |
2909 | 740 | elif HOOK_NAME == "reverseproxy-relation-changed": | 798 | elif hook_name == "reverseproxy-relation-changed": |
2910 | 741 | reverseproxy_interface("changed") | 799 | reverseproxy_interface("changed") |
2912 | 742 | elif HOOK_NAME == "reverseproxy-relation-departed": | 800 | elif hook_name == "reverseproxy-relation-departed": |
2913 | 743 | reverseproxy_interface("departed") | 801 | reverseproxy_interface("departed") |
2915 | 744 | elif HOOK_NAME == "website-relation-joined": | 802 | elif hook_name == "website-relation-joined": |
2916 | 745 | website_interface("joined") | 803 | website_interface("joined") |
2918 | 746 | elif HOOK_NAME == "website-relation-changed": | 804 | elif hook_name == "website-relation-changed": |
2919 | 747 | website_interface("changed") | 805 | website_interface("changed") |
2921 | 748 | elif HOOK_NAME == "nrpe-external-master-relation-changed": | 806 | elif hook_name == "peer-relation-joined": |
2922 | 807 | website_interface("joined") | ||
2923 | 808 | elif hook_name == "peer-relation-changed": | ||
2924 | 809 | reverseproxy_interface("changed") | ||
2925 | 810 | elif hook_name in ("nrpe-external-master-relation-joined", | ||
2926 | 811 | "local-monitors-relation-joined"): | ||
2927 | 749 | update_nrpe_config() | 812 | update_nrpe_config() |
2928 | 750 | else: | 813 | else: |
2929 | 751 | print "Unknown hook" | 814 | print "Unknown hook" |
2930 | 752 | sys.exit(1) | 815 | sys.exit(1) |
2931 | 816 | |||
2932 | 817 | if __name__ == "__main__": | ||
2933 | 818 | hook_name = os.path.basename(sys.argv[0]) | ||
2934 | 819 | # Also support being invoked directly with hook as argument name. | ||
2935 | 820 | if hook_name == "hooks.py": | ||
2936 | 821 | if len(sys.argv) < 2: | ||
2937 | 822 | sys.exit("Missing required hook name argument.") | ||
2938 | 823 | hook_name = sys.argv[1] | ||
2939 | 824 | main(hook_name) | ||
2940 | 753 | 825 | ||
2941 | === modified symlink 'hooks/install' (properties changed: -x to +x) | |||
2942 | === target was u'./hooks.py' | |||
2943 | --- hooks/install 1970-01-01 00:00:00 +0000 | |||
2944 | +++ hooks/install 2013-10-16 14:05:24 +0000 | |||
2945 | @@ -0,0 +1,13 @@ | |||
2946 | 1 | #!/bin/sh | ||
2947 | 2 | |||
2948 | 3 | set -eu | ||
2949 | 4 | |||
2950 | 5 | apt_get_install() { | ||
2951 | 6 | DEBIAN_FRONTEND=noninteractive apt-get -y -qq -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install $@ | ||
2952 | 7 | } | ||
2953 | 8 | |||
2954 | 9 | juju-log 'Invoking charm-pre-install hooks' | ||
2955 | 10 | [ -d exec.d ] && ( for f in exec.d/*/charm-pre-install; do [ -x $f ] && /bin/sh -c "$f"; done ) | ||
2956 | 11 | |||
2957 | 12 | juju-log 'Invoking python-based install hook' | ||
2958 | 13 | python hooks/hooks.py install | ||
2959 | 0 | 14 | ||
2960 | === added symlink 'hooks/local-monitors-relation-joined' | |||
2961 | === target is u'./hooks.py' | |||
2962 | === renamed symlink 'hooks/nrpe-external-master-relation-changed' => 'hooks/nrpe-external-master-relation-joined' | |||
2963 | === removed file 'hooks/nrpe.py' | |||
2964 | --- hooks/nrpe.py 2012-12-21 11:08:58 +0000 | |||
2965 | +++ hooks/nrpe.py 1970-01-01 00:00:00 +0000 | |||
2966 | @@ -1,170 +0,0 @@ | |||
2967 | 1 | import json | ||
2968 | 2 | import subprocess | ||
2969 | 3 | import pwd | ||
2970 | 4 | import grp | ||
2971 | 5 | import os | ||
2972 | 6 | import re | ||
2973 | 7 | import shlex | ||
2974 | 8 | |||
2975 | 9 | # This module adds compatibility with the nrpe_external_master | ||
2976 | 10 | # subordinate charm. To use it in your charm: | ||
2977 | 11 | # | ||
2978 | 12 | # 1. Update metadata.yaml | ||
2979 | 13 | # | ||
2980 | 14 | # provides: | ||
2981 | 15 | # (...) | ||
2982 | 16 | # nrpe-external-master: | ||
2983 | 17 | # interface: nrpe-external-master | ||
2984 | 18 | # scope: container | ||
2985 | 19 | # | ||
2986 | 20 | # 2. Add the following to config.yaml | ||
2987 | 21 | # | ||
2988 | 22 | # nagios_context: | ||
2989 | 23 | # default: "juju" | ||
2990 | 24 | # type: string | ||
2991 | 25 | # description: | | ||
2992 | 26 | # Used by the nrpe-external-master subordinate charm. | ||
2993 | 27 | # A string that will be prepended to instance name to set the host name | ||
2994 | 28 | # in nagios. So for instance the hostname would be something like: | ||
2995 | 29 | # juju-myservice-0 | ||
2996 | 30 | # If you're running multiple environments with the same services in them | ||
2997 | 31 | # this allows you to differentiate between them. | ||
2998 | 32 | # | ||
2999 | 33 | # 3. Add custom checks (Nagios plugins) to files/nrpe-external-master | ||
3000 | 34 | # | ||
3001 | 35 | # 4. Update your hooks.py with something like this: | ||
3002 | 36 | # | ||
3003 | 37 | # import nrpe | ||
3004 | 38 | # (...) | ||
3005 | 39 | # def update_nrpe_config(): | ||
3006 | 40 | # nrpe_compat = NRPE("myservice") | ||
3007 | 41 | # nrpe_compat.add_check( | ||
3008 | 42 | # shortname = "myservice", | ||
3009 | 43 | # description = "Check MyService", | ||
3010 | 44 | # check_cmd = "check_http -w 2 -c 10 http://localhost" | ||
3011 | 45 | # ) | ||
3012 | 46 | # nrpe_compat.add_check( | ||
3013 | 47 | # "myservice_other", | ||
3014 | 48 | # "Check for widget failures", | ||
3015 | 49 | # check_cmd = "/srv/myapp/scripts/widget_check" | ||
3016 | 50 | # ) | ||
3017 | 51 | # nrpe_compat.write() | ||
3018 | 52 | # | ||
3019 | 53 | # def config_changed(): | ||
3020 | 54 | # (...) | ||
3021 | 55 | # update_nrpe_config() | ||
3022 | 56 | |||
3023 | 57 | class ConfigurationError(Exception): | ||
3024 | 58 | '''An error interacting with the Juju config''' | ||
3025 | 59 | pass | ||
3026 | 60 | def config_get(scope=None): | ||
3027 | 61 | '''Return the Juju config as a dictionary''' | ||
3028 | 62 | try: | ||
3029 | 63 | config_cmd_line = ['config-get'] | ||
3030 | 64 | if scope is not None: | ||
3031 | 65 | config_cmd_line.append(scope) | ||
3032 | 66 | config_cmd_line.append('--format=json') | ||
3033 | 67 | return json.loads(subprocess.check_output(config_cmd_line)) | ||
3034 | 68 | except (ValueError, OSError, subprocess.CalledProcessError) as error: | ||
3035 | 69 | subprocess.call(['juju-log', str(error)]) | ||
3036 | 70 | raise ConfigurationError(str(error)) | ||
3037 | 71 | |||
3038 | 72 | class CheckException(Exception): pass | ||
3039 | 73 | class Check(object): | ||
3040 | 74 | shortname_re = '[A-Za-z0-9-_]*' | ||
3041 | 75 | service_template = """ | ||
3042 | 76 | #--------------------------------------------------- | ||
3043 | 77 | # This file is Juju managed | ||
3044 | 78 | #--------------------------------------------------- | ||
3045 | 79 | define service {{ | ||
3046 | 80 | use active-service | ||
3047 | 81 | host_name {nagios_hostname} | ||
3048 | 82 | service_description {nagios_hostname} {shortname} {description} | ||
3049 | 83 | check_command check_nrpe!check_{shortname} | ||
3050 | 84 | servicegroups {nagios_servicegroup} | ||
3051 | 85 | }} | ||
3052 | 86 | """ | ||
3053 | 87 | def __init__(self, shortname, description, check_cmd): | ||
3054 | 88 | super(Check, self).__init__() | ||
3055 | 89 | # XXX: could be better to calculate this from the service name | ||
3056 | 90 | if not re.match(self.shortname_re, shortname): | ||
3057 | 91 | raise CheckException("shortname must match {}".format(Check.shortname_re)) | ||
3058 | 92 | self.shortname = shortname | ||
3059 | 93 | self.description = description | ||
3060 | 94 | self.check_cmd = self._locate_cmd(check_cmd) | ||
3061 | 95 | |||
3062 | 96 | def _locate_cmd(self, check_cmd): | ||
3063 | 97 | search_path = ( | ||
3064 | 98 | '/', | ||
3065 | 99 | os.path.join(os.environ['CHARM_DIR'], 'files/nrpe-external-master'), | ||
3066 | 100 | '/usr/lib/nagios/plugins', | ||
3067 | 101 | ) | ||
3068 | 102 | command = shlex.split(check_cmd) | ||
3069 | 103 | for path in search_path: | ||
3070 | 104 | if os.path.exists(os.path.join(path,command[0])): | ||
3071 | 105 | return os.path.join(path, command[0]) + " " + " ".join(command[1:]) | ||
3072 | 106 | subprocess.call(['juju-log', 'Check command not found: {}'.format(command[0])]) | ||
3073 | 107 | return '' | ||
3074 | 108 | |||
3075 | 109 | def write(self, nagios_context, hostname): | ||
3076 | 110 | for f in os.listdir(NRPE.nagios_exportdir): | ||
3077 | 111 | if re.search('.*check_{}.cfg'.format(self.shortname), f): | ||
3078 | 112 | os.remove(os.path.join(NRPE.nagios_exportdir, f)) | ||
3079 | 113 | |||
3080 | 114 | templ_vars = { | ||
3081 | 115 | 'nagios_hostname': hostname, | ||
3082 | 116 | 'nagios_servicegroup': nagios_context, | ||
3083 | 117 | 'description': self.description, | ||
3084 | 118 | 'shortname': self.shortname, | ||
3085 | 119 | } | ||
3086 | 120 | nrpe_service_text = Check.service_template.format(**templ_vars) | ||
3087 | 121 | nrpe_service_file = '{}/service__{}_check_{}.cfg'.format( | ||
3088 | 122 | NRPE.nagios_exportdir, hostname, self.shortname) | ||
3089 | 123 | with open(nrpe_service_file, 'w') as nrpe_service_config: | ||
3090 | 124 | nrpe_service_config.write(str(nrpe_service_text)) | ||
3091 | 125 | |||
3092 | 126 | nrpe_check_file = '/etc/nagios/nrpe.d/check_{}.cfg'.format(self.shortname) | ||
3093 | 127 | with open(nrpe_check_file, 'w') as nrpe_check_config: | ||
3094 | 128 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | ||
3095 | 129 | nrpe_check_config.write("command[check_{}]={}\n".format( | ||
3096 | 130 | self.shortname, self.check_cmd)) | ||
3097 | 131 | |||
3098 | 132 | def run(self): | ||
3099 | 133 | subprocess.call(self.check_cmd) | ||
3100 | 134 | |||
3101 | 135 | class NRPE(object): | ||
3102 | 136 | nagios_logdir = '/var/log/nagios' | ||
3103 | 137 | nagios_exportdir = '/var/lib/nagios/export' | ||
3104 | 138 | nrpe_confdir = '/etc/nagios/nrpe.d' | ||
3105 | 139 | def __init__(self): | ||
3106 | 140 | super(NRPE, self).__init__() | ||
3107 | 141 | self.config = config_get() | ||
3108 | 142 | self.nagios_context = self.config['nagios_context'] | ||
3109 | 143 | self.unit_name = os.environ['JUJU_UNIT_NAME'].replace('/', '-') | ||
3110 | 144 | self.hostname = "{}-{}".format(self.nagios_context, self.unit_name) | ||
3111 | 145 | self.checks = [] | ||
3112 | 146 | |||
3113 | 147 | def add_check(self, *args, **kwargs): | ||
3114 | 148 | self.checks.append( Check(*args, **kwargs) ) | ||
3115 | 149 | |||
3116 | 150 | def write(self): | ||
3117 | 151 | try: | ||
3118 | 152 | nagios_uid = pwd.getpwnam('nagios').pw_uid | ||
3119 | 153 | nagios_gid = grp.getgrnam('nagios').gr_gid | ||
3120 | 154 | except: | ||
3121 | 155 | subprocess.call(['juju-log', "Nagios user not set up, nrpe checks not updated"]) | ||
3122 | 156 | return | ||
3123 | 157 | |||
3124 | 158 | if not os.path.exists(NRPE.nagios_exportdir): | ||
3125 | 159 | subprocess.call(['juju-log', 'Exiting as {} is not accessible'.format(NRPE.nagios_exportdir)]) | ||
3126 | 160 | return | ||
3127 | 161 | |||
3128 | 162 | if not os.path.exists(NRPE.nagios_logdir): | ||
3129 | 163 | os.mkdir(NRPE.nagios_logdir) | ||
3130 | 164 | os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid) | ||
3131 | 165 | |||
3132 | 166 | for nrpecheck in self.checks: | ||
3133 | 167 | nrpecheck.write(self.nagios_context, self.hostname) | ||
3134 | 168 | |||
3135 | 169 | if os.path.isfile('/etc/init.d/nagios-nrpe-server'): | ||
3136 | 170 | subprocess.call(['service', 'nagios-nrpe-server', 'reload']) | ||
3137 | 171 | 0 | ||
3138 | === added symlink 'hooks/peer-relation-changed' | |||
3139 | === target is u'./hooks.py' | |||
3140 | === added symlink 'hooks/peer-relation-joined' | |||
3141 | === target is u'./hooks.py' | |||
3142 | === removed file 'hooks/test_hooks.py' | |||
3143 | --- hooks/test_hooks.py 2013-02-14 21:35:47 +0000 | |||
3144 | +++ hooks/test_hooks.py 1970-01-01 00:00:00 +0000 | |||
3145 | @@ -1,263 +0,0 @@ | |||
3146 | 1 | import hooks | ||
3147 | 2 | import yaml | ||
3148 | 3 | from textwrap import dedent | ||
3149 | 4 | from mocker import MockerTestCase, ARGS | ||
3150 | 5 | |||
3151 | 6 | class JujuHookTest(MockerTestCase): | ||
3152 | 7 | |||
3153 | 8 | def setUp(self): | ||
3154 | 9 | self.config_services = [{ | ||
3155 | 10 | "service_name": "haproxy_test", | ||
3156 | 11 | "service_host": "0.0.0.0", | ||
3157 | 12 | "service_port": "88", | ||
3158 | 13 | "service_options": ["balance leastconn"], | ||
3159 | 14 | "server_options": "maxconn 25"}] | ||
3160 | 15 | self.config_services_extended = [ | ||
3161 | 16 | {"service_name": "unit_service", | ||
3162 | 17 | "service_host": "supplied-hostname", | ||
3163 | 18 | "service_port": "999", | ||
3164 | 19 | "service_options": ["balance leastconn"], | ||
3165 | 20 | "server_options": "maxconn 99"}] | ||
3166 | 21 | self.relation_services = [ | ||
3167 | 22 | {"service_name": "foo_svc", | ||
3168 | 23 | "service_options": ["balance leastconn"], | ||
3169 | 24 | "servers": [("A", "hA", "1", "oA1 oA2")]}, | ||
3170 | 25 | {"service_name": "bar_svc", | ||
3171 | 26 | "service_options": ["balance leastconn"], | ||
3172 | 27 | "servers": [ | ||
3173 | 28 | ("A", "hA", "1", "oA1 oA2"), ("B", "hB", "2", "oB1 oB2")]}] | ||
3174 | 29 | self.relation_services2 = [ | ||
3175 | 30 | {"service_name": "foo_svc", | ||
3176 | 31 | "service_options": ["balance leastconn"], | ||
3177 | 32 | "servers": [("A2", "hA2", "12", "oA12 oA22")]}] | ||
3178 | 33 | hooks.default_haproxy_config_dir = self.makeDir() | ||
3179 | 34 | hooks.default_haproxy_config = self.makeFile() | ||
3180 | 35 | hooks.default_haproxy_service_config_dir = self.makeDir() | ||
3181 | 36 | obj = self.mocker.replace("hooks.juju_log") | ||
3182 | 37 | obj(ARGS) | ||
3183 | 38 | self.mocker.count(0, None) | ||
3184 | 39 | obj = self.mocker.replace("hooks.unit_get") | ||
3185 | 40 | obj("public-address") | ||
3186 | 41 | self.mocker.result("test-host.example.com") | ||
3187 | 42 | self.mocker.count(0, None) | ||
3188 | 43 | self.maxDiff = None | ||
3189 | 44 | |||
3190 | 45 | def _expect_config_get(self, **kwargs): | ||
3191 | 46 | result = { | ||
3192 | 47 | "default_timeouts": "queue 1000, connect 1000, client 1000, server 1000", | ||
3193 | 48 | "global_log": "127.0.0.1 local0, 127.0.0.1 local1 notice", | ||
3194 | 49 | "global_spread_checks": 0, | ||
3195 | 50 | "monitoring_allowed_cidr": "127.0.0.1/32", | ||
3196 | 51 | "monitoring_username": "haproxy", | ||
3197 | 52 | "default_log": "global", | ||
3198 | 53 | "global_group": "haproxy", | ||
3199 | 54 | "monitoring_stats_refresh": 3, | ||
3200 | 55 | "default_retries": 3, | ||
3201 | 56 | "services": yaml.dump(self.config_services), | ||
3202 | 57 | "global_maxconn": 4096, | ||
3203 | 58 | "global_user": "haproxy", | ||
3204 | 59 | "default_options": "httplog, dontlognull", | ||
3205 | 60 | "monitoring_port": 10000, | ||
3206 | 61 | "global_debug": False, | ||
3207 | 62 | "nagios_context": "juju", | ||
3208 | 63 | "global_quiet": False, | ||
3209 | 64 | "enable_monitoring": False, | ||
3210 | 65 | "monitoring_password": "changeme", | ||
3211 | 66 | "default_mode": "http"} | ||
3212 | 67 | obj = self.mocker.replace("hooks.config_get") | ||
3213 | 68 | obj() | ||
3214 | 69 | result.update(kwargs) | ||
3215 | 70 | self.mocker.result(result) | ||
3216 | 71 | self.mocker.count(1, None) | ||
3217 | 72 | |||
3218 | 73 | def _expect_relation_get_all(self, relation, extra={}): | ||
3219 | 74 | obj = self.mocker.replace("hooks.relation_get_all") | ||
3220 | 75 | obj(relation) | ||
3221 | 76 | relation = {"hostname": "10.0.1.2", | ||
3222 | 77 | "private-address": "10.0.1.2", | ||
3223 | 78 | "port": "10000"} | ||
3224 | 79 | relation.update(extra) | ||
3225 | 80 | result = {"1": {"unit/0": relation}} | ||
3226 | 81 | self.mocker.result(result) | ||
3227 | 82 | self.mocker.count(1, None) | ||
3228 | 83 | |||
3229 | 84 | def _expect_relation_get_all_multiple(self, relation_name): | ||
3230 | 85 | obj = self.mocker.replace("hooks.relation_get_all") | ||
3231 | 86 | obj(relation_name) | ||
3232 | 87 | result = { | ||
3233 | 88 | "1": {"unit/0": { | ||
3234 | 89 | "hostname": "10.0.1.2", | ||
3235 | 90 | "private-address": "10.0.1.2", | ||
3236 | 91 | "port": "10000", | ||
3237 | 92 | "services": yaml.dump(self.relation_services)}}, | ||
3238 | 93 | "2": {"unit/1": { | ||
3239 | 94 | "hostname": "10.0.1.3", | ||
3240 | 95 | "private-address": "10.0.1.3", | ||
3241 | 96 | "port": "10001", | ||
3242 | 97 | "services": yaml.dump(self.relation_services2)}}} | ||
3243 | 98 | self.mocker.result(result) | ||
3244 | 99 | self.mocker.count(1, None) | ||
3245 | 100 | |||
3246 | 101 | def _expect_relation_get_all_with_services(self, relation, extra={}): | ||
3247 | 102 | extra.update({"services": yaml.dump(self.relation_services)}) | ||
3248 | 103 | return self._expect_relation_get_all(relation, extra) | ||
3249 | 104 | |||
3250 | 105 | def _expect_relation_get(self): | ||
3251 | 106 | obj = self.mocker.replace("hooks.relation_get") | ||
3252 | 107 | obj() | ||
3253 | 108 | result = {} | ||
3254 | 109 | self.mocker.result(result) | ||
3255 | 110 | self.mocker.count(1, None) | ||
3256 | 111 | |||
3257 | 112 | def test_create_services(self): | ||
3258 | 113 | """ | ||
3259 | 114 | Simplest use case, config stanza seeded in config file, server line | ||
3260 | 115 | added through simple relation. Many servers can join this, but | ||
3261 | 116 | multiple services will not be presented to the outside | ||
3262 | 117 | """ | ||
3263 | 118 | self._expect_config_get() | ||
3264 | 119 | self._expect_relation_get_all("reverseproxy") | ||
3265 | 120 | self.mocker.replay() | ||
3266 | 121 | hooks.create_services() | ||
3267 | 122 | services = hooks.load_services() | ||
3268 | 123 | stanza = """\ | ||
3269 | 124 | listen haproxy_test 0.0.0.0:88 | ||
3270 | 125 | balance leastconn | ||
3271 | 126 | server 10_0_1_2__10000 10.0.1.2:10000 maxconn 25 | ||
3272 | 127 | |||
3273 | 128 | """ | ||
3274 | 129 | self.assertEquals(services, dedent(stanza)) | ||
3275 | 130 | |||
3276 | 131 | def test_create_services_extended_with_relation(self): | ||
3277 | 132 | """ | ||
3278 | 133 | This case covers specifying an up-front services file to ha-proxy | ||
3279 | 134 | in the config. The relation then specifies a singular hostname, | ||
3280 | 135 | port and server_options setting which is filled into the appropriate | ||
3281 | 136 | haproxy stanza based on multiple criteria. | ||
3282 | 137 | """ | ||
3283 | 138 | self._expect_config_get( | ||
3284 | 139 | services=yaml.dump(self.config_services_extended)) | ||
3285 | 140 | self._expect_relation_get_all("reverseproxy") | ||
3286 | 141 | self.mocker.replay() | ||
3287 | 142 | hooks.create_services() | ||
3288 | 143 | services = hooks.load_services() | ||
3289 | 144 | stanza = """\ | ||
3290 | 145 | listen unit_service supplied-hostname:999 | ||
3291 | 146 | balance leastconn | ||
3292 | 147 | server 10_0_1_2__10000 10.0.1.2:10000 maxconn 99 | ||
3293 | 148 | |||
3294 | 149 | """ | ||
3295 | 150 | self.assertEquals(dedent(stanza), services) | ||
3296 | 151 | |||
3297 | 152 | def test_create_services_pure_relation(self): | ||
3298 | 153 | """ | ||
3299 | 154 | In this case, the relation is in control of the haproxy config file. | ||
3300 | 155 | Each relation chooses what server it creates in the haproxy file, it | ||
3301 | 156 | relies on the haproxy service only for the hostname and front-end port. | ||
3302 | 157 | Each member of the relation will put a backend server entry under in | ||
3303 | 158 | the desired stanza. Each relation can in fact supply multiple | ||
3304 | 159 | entries from the same juju service unit if desired. | ||
3305 | 160 | """ | ||
3306 | 161 | self._expect_config_get() | ||
3307 | 162 | self._expect_relation_get_all_with_services("reverseproxy") | ||
3308 | 163 | self.mocker.replay() | ||
3309 | 164 | hooks.create_services() | ||
3310 | 165 | services = hooks.load_services() | ||
3311 | 166 | stanza = """\ | ||
3312 | 167 | listen foo_svc 0.0.0.0:88 | ||
3313 | 168 | balance leastconn | ||
3314 | 169 | server A hA:1 oA1 oA2 | ||
3315 | 170 | """ | ||
3316 | 171 | self.assertIn(dedent(stanza), services) | ||
3317 | 172 | stanza = """\ | ||
3318 | 173 | listen bar_svc 0.0.0.0:89 | ||
3319 | 174 | balance leastconn | ||
3320 | 175 | server A hA:1 oA1 oA2 | ||
3321 | 176 | server B hB:2 oB1 oB2 | ||
3322 | 177 | """ | ||
3323 | 178 | self.assertIn(dedent(stanza), services) | ||
3324 | 179 | |||
3325 | 180 | def test_create_services_pure_relation_multiple(self): | ||
3326 | 181 | """ | ||
3327 | 182 | This is much like the pure_relation case, where the relation specifies | ||
3328 | 183 | a "services" override. However, in this case we have multiple relations | ||
3329 | 184 | that partially override each other. We expect that the created haproxy | ||
3330 | 185 | conf file will combine things appropriately. | ||
3331 | 186 | """ | ||
3332 | 187 | self._expect_config_get() | ||
3333 | 188 | self._expect_relation_get_all_multiple("reverseproxy") | ||
3334 | 189 | self.mocker.replay() | ||
3335 | 190 | hooks.create_services() | ||
3336 | 191 | result = hooks.load_services() | ||
3337 | 192 | stanza = """\ | ||
3338 | 193 | listen foo_svc 0.0.0.0:88 | ||
3339 | 194 | balance leastconn | ||
3340 | 195 | server A hA:1 oA1 oA2 | ||
3341 | 196 | server A2 hA2:12 oA12 oA22 | ||
3342 | 197 | """ | ||
3343 | 198 | self.assertIn(dedent(stanza), result) | ||
3344 | 199 | stanza = """\ | ||
3345 | 200 | listen bar_svc 0.0.0.0:89 | ||
3346 | 201 | balance leastconn | ||
3347 | 202 | server A hA:1 oA1 oA2 | ||
3348 | 203 | server B hB:2 oB1 oB2 | ||
3349 | 204 | """ | ||
3350 | 205 | self.assertIn(dedent(stanza), result) | ||
3351 | 206 | |||
3352 | 207 | def test_get_config_services_config_only(self): | ||
3353 | 208 | """ | ||
3354 | 209 | Attempting to catch the case where a relation is not joined yet | ||
3355 | 210 | """ | ||
3356 | 211 | self._expect_config_get() | ||
3357 | 212 | obj = self.mocker.replace("hooks.relation_get_all") | ||
3358 | 213 | obj("reverseproxy") | ||
3359 | 214 | self.mocker.result(None) | ||
3360 | 215 | self.mocker.replay() | ||
3361 | 216 | result = hooks.get_config_services() | ||
3362 | 217 | self.assertEquals(result, self.config_services) | ||
3363 | 218 | |||
3364 | 219 | def test_get_config_services_relation_no_services(self): | ||
3365 | 220 | """ | ||
3366 | 221 | If the config specifies services and the relation does not, just the | ||
3367 | 222 | config services should come through. | ||
3368 | 223 | """ | ||
3369 | 224 | self._expect_config_get() | ||
3370 | 225 | self._expect_relation_get_all("reverseproxy") | ||
3371 | 226 | self.mocker.replay() | ||
3372 | 227 | result = hooks.get_config_services() | ||
3373 | 228 | self.assertEquals(result, self.config_services) | ||
3374 | 229 | |||
3375 | 230 | def test_get_config_services_relation_with_services(self): | ||
3376 | 231 | """ | ||
3377 | 232 | Testing with both the config and relation providing services should | ||
3378 | 233 | yield just the relation | ||
3379 | 234 | """ | ||
3380 | 235 | self._expect_config_get() | ||
3381 | 236 | self._expect_relation_get_all_with_services("reverseproxy") | ||
3382 | 237 | self.mocker.replay() | ||
3383 | 238 | result = hooks.get_config_services() | ||
3384 | 239 | # Just test "servers" since hostname and port and maybe other keys | ||
3385 | 240 | # will be added by the hook | ||
3386 | 241 | self.assertEquals(result[0]["servers"], | ||
3387 | 242 | self.relation_services[0]["servers"]) | ||
3388 | 243 | |||
3389 | 244 | def test_config_generation_indempotent(self): | ||
3390 | 245 | self._expect_config_get() | ||
3391 | 246 | self._expect_relation_get_all_multiple("reverseproxy") | ||
3392 | 247 | self.mocker.replay() | ||
3393 | 248 | |||
3394 | 249 | # Test that we generate the same haproxy.conf file each time | ||
3395 | 250 | hooks.create_services() | ||
3396 | 251 | result1 = hooks.load_services() | ||
3397 | 252 | hooks.create_services() | ||
3398 | 253 | result2 = hooks.load_services() | ||
3399 | 254 | self.assertEqual(result1, result2) | ||
3400 | 255 | |||
3401 | 256 | def test_get_all_services(self): | ||
3402 | 257 | self._expect_config_get() | ||
3403 | 258 | self._expect_relation_get_all_multiple("reverseproxy") | ||
3404 | 259 | self.mocker.replay() | ||
3405 | 260 | baseline = [{"service_name": "foo_svc", "service_port": 88}, | ||
3406 | 261 | {"service_name": "bar_svc", "service_port": 89}] | ||
3407 | 262 | services = hooks.get_all_services() | ||
3408 | 263 | self.assertEqual(baseline, services) | ||
3409 | 264 | 0 | ||
3410 | === added directory 'hooks/tests' | |||
3411 | === added file 'hooks/tests/__init__.py' | |||
3412 | === added file 'hooks/tests/test_config_changed_hooks.py' | |||
3413 | --- hooks/tests/test_config_changed_hooks.py 1970-01-01 00:00:00 +0000 | |||
3414 | +++ hooks/tests/test_config_changed_hooks.py 2013-10-16 14:05:24 +0000 | |||
3415 | @@ -0,0 +1,120 @@ | |||
3416 | 1 | import sys | ||
3417 | 2 | |||
3418 | 3 | from testtools import TestCase | ||
3419 | 4 | from mock import patch | ||
3420 | 5 | |||
3421 | 6 | import hooks | ||
3422 | 7 | from utils_for_tests import patch_open | ||
3423 | 8 | |||
3424 | 9 | |||
3425 | 10 | class ConfigChangedTest(TestCase): | ||
3426 | 11 | |||
3427 | 12 | def setUp(self): | ||
3428 | 13 | super(ConfigChangedTest, self).setUp() | ||
3429 | 14 | self.config_get = self.patch_hook("config_get") | ||
3430 | 15 | self.get_service_ports = self.patch_hook("get_service_ports") | ||
3431 | 16 | self.get_listen_stanzas = self.patch_hook("get_listen_stanzas") | ||
3432 | 17 | self.create_haproxy_globals = self.patch_hook( | ||
3433 | 18 | "create_haproxy_globals") | ||
3434 | 19 | self.create_haproxy_defaults = self.patch_hook( | ||
3435 | 20 | "create_haproxy_defaults") | ||
3436 | 21 | self.remove_services = self.patch_hook("remove_services") | ||
3437 | 22 | self.create_services = self.patch_hook("create_services") | ||
3438 | 23 | self.load_services = self.patch_hook("load_services") | ||
3439 | 24 | self.construct_haproxy_config = self.patch_hook( | ||
3440 | 25 | "construct_haproxy_config") | ||
3441 | 26 | self.service_haproxy = self.patch_hook( | ||
3442 | 27 | "service_haproxy") | ||
3443 | 28 | self.update_sysctl = self.patch_hook( | ||
3444 | 29 | "update_sysctl") | ||
3445 | 30 | self.notify_website = self.patch_hook("notify_website") | ||
3446 | 31 | self.notify_peer = self.patch_hook("notify_peer") | ||
3447 | 32 | self.log = self.patch_hook("log") | ||
3448 | 33 | sys_exit = patch.object(sys, "exit") | ||
3449 | 34 | self.sys_exit = sys_exit.start() | ||
3450 | 35 | self.addCleanup(sys_exit.stop) | ||
3451 | 36 | |||
3452 | 37 | def patch_hook(self, hook_name): | ||
3453 | 38 | mock_controller = patch.object(hooks, hook_name) | ||
3454 | 39 | mock = mock_controller.start() | ||
3455 | 40 | self.addCleanup(mock_controller.stop) | ||
3456 | 41 | return mock | ||
3457 | 42 | |||
3458 | 43 | def test_config_changed_notify_website_changed_stanzas(self): | ||
3459 | 44 | self.service_haproxy.return_value = True | ||
3460 | 45 | self.get_listen_stanzas.side_effect = ( | ||
3461 | 46 | (('foo.internal', '1.2.3.4', 123),), | ||
3462 | 47 | (('foo.internal', '1.2.3.4', 123), | ||
3463 | 48 | ('bar.internal', '1.2.3.5', 234),)) | ||
3464 | 49 | |||
3465 | 50 | hooks.config_changed() | ||
3466 | 51 | |||
3467 | 52 | self.notify_website.assert_called_once_with() | ||
3468 | 53 | self.notify_peer.assert_called_once_with() | ||
3469 | 54 | |||
3470 | 55 | def test_config_changed_no_notify_website_not_changed(self): | ||
3471 | 56 | self.service_haproxy.return_value = True | ||
3472 | 57 | self.get_listen_stanzas.side_effect = ( | ||
3473 | 58 | (('foo.internal', '1.2.3.4', 123),), | ||
3474 | 59 | (('foo.internal', '1.2.3.4', 123),)) | ||
3475 | 60 | |||
3476 | 61 | hooks.config_changed() | ||
3477 | 62 | |||
3478 | 63 | self.notify_website.assert_not_called() | ||
3479 | 64 | self.notify_peer.assert_not_called() | ||
3480 | 65 | |||
3481 | 66 | def test_config_changed_no_notify_website_failed_check(self): | ||
3482 | 67 | self.service_haproxy.return_value = False | ||
3483 | 68 | self.get_listen_stanzas.side_effect = ( | ||
3484 | 69 | (('foo.internal', '1.2.3.4', 123),), | ||
3485 | 70 | (('foo.internal', '1.2.3.4', 123), | ||
3486 | 71 | ('bar.internal', '1.2.3.5', 234),)) | ||
3487 | 72 | |||
3488 | 73 | hooks.config_changed() | ||
3489 | 74 | |||
3490 | 75 | self.notify_website.assert_not_called() | ||
3491 | 76 | self.notify_peer.assert_not_called() | ||
3492 | 77 | self.log.assert_called_once_with( | ||
3493 | 78 | "HAProxy configuration check failed, exiting.") | ||
3494 | 79 | self.sys_exit.assert_called_once_with(1) | ||
3495 | 80 | |||
3496 | 81 | |||
3497 | 82 | class HelpersTest(TestCase): | ||
3498 | 83 | def test_constructs_haproxy_config(self): | ||
3499 | 84 | with patch_open() as (mock_open, mock_file): | ||
3500 | 85 | hooks.construct_haproxy_config('foo-globals', 'foo-defaults', | ||
3501 | 86 | 'foo-monitoring', 'foo-services') | ||
3502 | 87 | |||
3503 | 88 | mock_file.write.assert_called_with( | ||
3504 | 89 | 'foo-globals\n\n' | ||
3505 | 90 | 'foo-defaults\n\n' | ||
3506 | 91 | 'foo-monitoring\n\n' | ||
3507 | 92 | 'foo-services\n\n' | ||
3508 | 93 | ) | ||
3509 | 94 | mock_open.assert_called_with(hooks.default_haproxy_config, 'w') | ||
3510 | 95 | |||
3511 | 96 | def test_constructs_nothing_if_globals_is_none(self): | ||
3512 | 97 | with patch_open() as (mock_open, mock_file): | ||
3513 | 98 | hooks.construct_haproxy_config(None, 'foo-defaults', | ||
3514 | 99 | 'foo-monitoring', 'foo-services') | ||
3515 | 100 | |||
3516 | 101 | self.assertFalse(mock_open.called) | ||
3517 | 102 | self.assertFalse(mock_file.called) | ||
3518 | 103 | |||
3519 | 104 | def test_constructs_nothing_if_defaults_is_none(self): | ||
3520 | 105 | with patch_open() as (mock_open, mock_file): | ||
3521 | 106 | hooks.construct_haproxy_config('foo-globals', None, | ||
3522 | 107 | 'foo-monitoring', 'foo-services') | ||
3523 | 108 | |||
3524 | 109 | self.assertFalse(mock_open.called) | ||
3525 | 110 | self.assertFalse(mock_file.called) | ||
3526 | 111 | |||
3527 | 112 | def test_constructs_haproxy_config_without_optionals(self): | ||
3528 | 113 | with patch_open() as (mock_open, mock_file): | ||
3529 | 114 | hooks.construct_haproxy_config('foo-globals', 'foo-defaults') | ||
3530 | 115 | |||
3531 | 116 | mock_file.write.assert_called_with( | ||
3532 | 117 | 'foo-globals\n\n' | ||
3533 | 118 | 'foo-defaults\n\n' | ||
3534 | 119 | ) | ||
3535 | 120 | mock_open.assert_called_with(hooks.default_haproxy_config, 'w') | ||
3536 | 0 | 121 | ||
3537 | === added file 'hooks/tests/test_helpers.py' | |||
3538 | --- hooks/tests/test_helpers.py 1970-01-01 00:00:00 +0000 | |||
3539 | +++ hooks/tests/test_helpers.py 2013-10-16 14:05:24 +0000 | |||
3540 | @@ -0,0 +1,750 @@ | |||
3541 | 1 | import os | ||
3542 | 2 | |||
3543 | 3 | from contextlib import contextmanager | ||
3544 | 4 | from StringIO import StringIO | ||
3545 | 5 | |||
3546 | 6 | from testtools import TestCase | ||
3547 | 7 | from mock import patch, call, MagicMock | ||
3548 | 8 | |||
3549 | 9 | import hooks | ||
3550 | 10 | from utils_for_tests import patch_open | ||
3551 | 11 | |||
3552 | 12 | |||
3553 | 13 | class HelpersTest(TestCase): | ||
3554 | 14 | |||
3555 | 15 | @patch('hooks.config_get') | ||
3556 | 16 | def test_creates_haproxy_globals(self, config_get): | ||
3557 | 17 | config_get.return_value = { | ||
3558 | 18 | 'global_log': 'foo-log, bar-log', | ||
3559 | 19 | 'global_maxconn': 123, | ||
3560 | 20 | 'global_user': 'foo-user', | ||
3561 | 21 | 'global_group': 'foo-group', | ||
3562 | 22 | 'global_spread_checks': 234, | ||
3563 | 23 | 'global_debug': False, | ||
3564 | 24 | 'global_quiet': False, | ||
3565 | 25 | } | ||
3566 | 26 | result = hooks.create_haproxy_globals() | ||
3567 | 27 | |||
3568 | 28 | expected = '\n'.join([ | ||
3569 | 29 | 'global', | ||
3570 | 30 | ' log foo-log', | ||
3571 | 31 | ' log bar-log', | ||
3572 | 32 | ' maxconn 123', | ||
3573 | 33 | ' user foo-user', | ||
3574 | 34 | ' group foo-group', | ||
3575 | 35 | ' spread-checks 234', | ||
3576 | 36 | ]) | ||
3577 | 37 | self.assertEqual(result, expected) | ||
3578 | 38 | |||
3579 | 39 | @patch('hooks.config_get') | ||
3580 | 40 | def test_creates_haproxy_globals_quietly_with_debug(self, config_get): | ||
3581 | 41 | config_get.return_value = { | ||
3582 | 42 | 'global_log': 'foo-log, bar-log', | ||
3583 | 43 | 'global_maxconn': 123, | ||
3584 | 44 | 'global_user': 'foo-user', | ||
3585 | 45 | 'global_group': 'foo-group', | ||
3586 | 46 | 'global_spread_checks': 234, | ||
3587 | 47 | 'global_debug': True, | ||
3588 | 48 | 'global_quiet': True, | ||
3589 | 49 | } | ||
3590 | 50 | result = hooks.create_haproxy_globals() | ||
3591 | 51 | |||
3592 | 52 | expected = '\n'.join([ | ||
3593 | 53 | 'global', | ||
3594 | 54 | ' log foo-log', | ||
3595 | 55 | ' log bar-log', | ||
3596 | 56 | ' maxconn 123', | ||
3597 | 57 | ' user foo-user', | ||
3598 | 58 | ' group foo-group', | ||
3599 | 59 | ' debug', | ||
3600 | 60 | ' quiet', | ||
3601 | 61 | ' spread-checks 234', | ||
3602 | 62 | ]) | ||
3603 | 63 | self.assertEqual(result, expected) | ||
3604 | 64 | |||
3605 | 65 | def test_enables_haproxy(self): | ||
3606 | 66 | mock_file = MagicMock() | ||
3607 | 67 | |||
3608 | 68 | @contextmanager | ||
3609 | 69 | def mock_open(*args, **kwargs): | ||
3610 | 70 | yield mock_file | ||
3611 | 71 | |||
3612 | 72 | initial_content = """ | ||
3613 | 73 | foo | ||
3614 | 74 | ENABLED=0 | ||
3615 | 75 | bar | ||
3616 | 76 | """ | ||
3617 | 77 | ending_content = initial_content.replace('ENABLED=0', 'ENABLED=1') | ||
3618 | 78 | |||
3619 | 79 | with patch('__builtin__.open', mock_open): | ||
3620 | 80 | mock_file.read.return_value = initial_content | ||
3621 | 81 | |||
3622 | 82 | hooks.enable_haproxy() | ||
3623 | 83 | |||
3624 | 84 | mock_file.write.assert_called_with(ending_content) | ||
3625 | 85 | |||
3626 | 86 | @patch('hooks.config_get') | ||
3627 | 87 | def test_creates_haproxy_defaults(self, config_get): | ||
3628 | 88 | config_get.return_value = { | ||
3629 | 89 | 'default_options': 'foo-option, bar-option', | ||
3630 | 90 | 'default_timeouts': '234, 456', | ||
3631 | 91 | 'default_log': 'foo-log', | ||
3632 | 92 | 'default_mode': 'foo-mode', | ||
3633 | 93 | 'default_retries': 321, | ||
3634 | 94 | } | ||
3635 | 95 | result = hooks.create_haproxy_defaults() | ||
3636 | 96 | |||
3637 | 97 | expected = '\n'.join([ | ||
3638 | 98 | 'defaults', | ||
3639 | 99 | ' log foo-log', | ||
3640 | 100 | ' mode foo-mode', | ||
3641 | 101 | ' option foo-option', | ||
3642 | 102 | ' option bar-option', | ||
3643 | 103 | ' retries 321', | ||
3644 | 104 | ' timeout 234', | ||
3645 | 105 | ' timeout 456', | ||
3646 | 106 | ]) | ||
3647 | 107 | self.assertEqual(result, expected) | ||
3648 | 108 | |||
3649 | 109 | def test_returns_none_when_haproxy_config_doesnt_exist(self): | ||
3650 | 110 | self.assertIsNone(hooks.load_haproxy_config('/some/foo/file')) | ||
3651 | 111 | |||
3652 | 112 | @patch('__builtin__.open') | ||
3653 | 113 | @patch('os.path.isfile') | ||
3654 | 114 | def test_loads_haproxy_config_file(self, isfile, mock_open): | ||
3655 | 115 | content = 'some content' | ||
3656 | 116 | config_file = '/etc/haproxy/haproxy.cfg' | ||
3657 | 117 | file_object = StringIO(content) | ||
3658 | 118 | isfile.return_value = True | ||
3659 | 119 | mock_open.return_value = file_object | ||
3660 | 120 | |||
3661 | 121 | result = hooks.load_haproxy_config() | ||
3662 | 122 | |||
3663 | 123 | self.assertEqual(result, content) | ||
3664 | 124 | isfile.assert_called_with(config_file) | ||
3665 | 125 | mock_open.assert_called_with(config_file) | ||
3666 | 126 | |||
3667 | 127 | @patch('hooks.load_haproxy_config') | ||
3668 | 128 | def test_gets_monitoring_password(self, load_haproxy_config): | ||
3669 | 129 | load_haproxy_config.return_value = 'stats auth foo:bar' | ||
3670 | 130 | |||
3671 | 131 | password = hooks.get_monitoring_password() | ||
3672 | 132 | |||
3673 | 133 | self.assertEqual(password, 'bar') | ||
3674 | 134 | |||
3675 | 135 | @patch('hooks.load_haproxy_config') | ||
3676 | 136 | def test_gets_none_if_different_pattern(self, load_haproxy_config): | ||
3677 | 137 | load_haproxy_config.return_value = 'some other pattern' | ||
3678 | 138 | |||
3679 | 139 | password = hooks.get_monitoring_password() | ||
3680 | 140 | |||
3681 | 141 | self.assertIsNone(password) | ||
3682 | 142 | |||
3683 | 143 | def test_gets_none_pass_if_config_doesnt_exist(self): | ||
3684 | 144 | password = hooks.get_monitoring_password('/some/foo/path') | ||
3685 | 145 | |||
3686 | 146 | self.assertIsNone(password) | ||
3687 | 147 | |||
3688 | 148 | @patch('hooks.load_haproxy_config') | ||
3689 | 149 | def test_gets_service_ports(self, load_haproxy_config): | ||
3690 | 150 | load_haproxy_config.return_value = ''' | ||
3691 | 151 | listen foo.internal 1.2.3.4:123 | ||
3692 | 152 | listen bar.internal 1.2.3.5:234 | ||
3693 | 153 | ''' | ||
3694 | 154 | |||
3695 | 155 | ports = hooks.get_service_ports() | ||
3696 | 156 | |||
3697 | 157 | self.assertEqual(ports, (123, 234)) | ||
3698 | 158 | |||
3699 | 159 | @patch('hooks.load_haproxy_config') | ||
3700 | 160 | def test_get_listen_stanzas(self, load_haproxy_config): | ||
3701 | 161 | load_haproxy_config.return_value = ''' | ||
3702 | 162 | listen foo.internal 1.2.3.4:123 | ||
3703 | 163 | listen bar.internal 1.2.3.5:234 | ||
3704 | 164 | ''' | ||
3705 | 165 | |||
3706 | 166 | stanzas = hooks.get_listen_stanzas() | ||
3707 | 167 | |||
3708 | 168 | self.assertEqual((('foo.internal', '1.2.3.4', 123), | ||
3709 | 169 | ('bar.internal', '1.2.3.5', 234)), | ||
3710 | 170 | stanzas) | ||
3711 | 171 | |||
3712 | 172 | @patch('hooks.load_haproxy_config') | ||
3713 | 173 | def test_get_listen_stanzas_with_frontend(self, load_haproxy_config): | ||
3714 | 174 | load_haproxy_config.return_value = ''' | ||
3715 | 175 | frontend foo-2-123 | ||
3716 | 176 | bind 1.2.3.4:123 | ||
3717 | 177 | default_backend foo.internal | ||
3718 | 178 | frontend foo-2-234 | ||
3719 | 179 | bind 1.2.3.5:234 | ||
3720 | 180 | default_backend bar.internal | ||
3721 | 181 | ''' | ||
3722 | 182 | |||
3723 | 183 | stanzas = hooks.get_listen_stanzas() | ||
3724 | 184 | |||
3725 | 185 | self.assertEqual((('foo.internal', '1.2.3.4', 123), | ||
3726 | 186 | ('bar.internal', '1.2.3.5', 234)), | ||
3727 | 187 | stanzas) | ||
3728 | 188 | |||
3729 | 189 | @patch('hooks.load_haproxy_config') | ||
3730 | 190 | def test_get_empty_tuple_when_no_stanzas(self, load_haproxy_config): | ||
3731 | 191 | load_haproxy_config.return_value = ''' | ||
3732 | 192 | ''' | ||
3733 | 193 | |||
3734 | 194 | stanzas = hooks.get_listen_stanzas() | ||
3735 | 195 | |||
3736 | 196 | self.assertEqual((), stanzas) | ||
3737 | 197 | |||
3738 | 198 | @patch('hooks.load_haproxy_config') | ||
3739 | 199 | def test_get_listen_stanzas_none_configured(self, load_haproxy_config): | ||
3740 | 200 | load_haproxy_config.return_value = "" | ||
3741 | 201 | |||
3742 | 202 | stanzas = hooks.get_listen_stanzas() | ||
3743 | 203 | |||
3744 | 204 | self.assertEqual((), stanzas) | ||
3745 | 205 | |||
3746 | 206 | def test_gets_no_ports_if_config_doesnt_exist(self): | ||
3747 | 207 | ports = hooks.get_service_ports('/some/foo/path') | ||
3748 | 208 | self.assertEqual((), ports) | ||
3749 | 209 | |||
3750 | 210 | @patch('hooks.open_port') | ||
3751 | 211 | @patch('hooks.close_port') | ||
3752 | 212 | def test_updates_service_ports(self, close_port, open_port): | ||
3753 | 213 | old_service_ports = [123, 234, 345] | ||
3754 | 214 | new_service_ports = [345, 456, 567] | ||
3755 | 215 | |||
3756 | 216 | hooks.update_service_ports(old_service_ports, new_service_ports) | ||
3757 | 217 | |||
3758 | 218 | self.assertEqual(close_port.mock_calls, [call(123), call(234)]) | ||
3759 | 219 | self.assertEqual(open_port.mock_calls, | ||
3760 | 220 | [call(345), call(456), call(567)]) | ||
3761 | 221 | |||
3762 | 222 | @patch('hooks.open_port') | ||
3763 | 223 | @patch('hooks.close_port') | ||
3764 | 224 | def test_updates_none_if_service_ports_not_provided(self, close_port, | ||
3765 | 225 | open_port): | ||
3766 | 226 | hooks.update_service_ports() | ||
3767 | 227 | |||
3768 | 228 | self.assertFalse(close_port.called) | ||
3769 | 229 | self.assertFalse(open_port.called) | ||
3770 | 230 | |||
3771 | 231 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
3772 | 232 | def test_creates_a_listen_stanza(self): | ||
3773 | 233 | service_name = 'some-name' | ||
3774 | 234 | service_ip = '10.11.12.13' | ||
3775 | 235 | service_port = 1234 | ||
3776 | 236 | service_options = ('foo', 'bar') | ||
3777 | 237 | server_entries = [ | ||
3778 | 238 | ('name-1', 'ip-1', 'port-1', ('foo1', 'bar1')), | ||
3779 | 239 | ('name-2', 'ip-2', 'port-2', ('foo2', 'bar2')), | ||
3780 | 240 | ] | ||
3781 | 241 | |||
3782 | 242 | result = hooks.create_listen_stanza(service_name, service_ip, | ||
3783 | 243 | service_port, service_options, | ||
3784 | 244 | server_entries) | ||
3785 | 245 | |||
3786 | 246 | expected = '\n'.join(( | ||
3787 | 247 | 'frontend haproxy-2-1234', | ||
3788 | 248 | ' bind 10.11.12.13:1234', | ||
3789 | 249 | ' default_backend some-name', | ||
3790 | 250 | '', | ||
3791 | 251 | 'backend some-name', | ||
3792 | 252 | ' foo', | ||
3793 | 253 | ' bar', | ||
3794 | 254 | ' server name-1 ip-1:port-1 foo1 bar1', | ||
3795 | 255 | ' server name-2 ip-2:port-2 foo2 bar2', | ||
3796 | 256 | )) | ||
3797 | 257 | |||
3798 | 258 | self.assertEqual(expected, result) | ||
3799 | 259 | |||
3800 | 260 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
3801 | 261 | def test_create_listen_stanza_filters_frontend_options(self): | ||
3802 | 262 | service_name = 'some-name' | ||
3803 | 263 | service_ip = '10.11.12.13' | ||
3804 | 264 | service_port = 1234 | ||
3805 | 265 | service_options = ('capture request header X-Man', 'mode http', | ||
3806 | 266 | 'option httplog', 'retries 3', 'balance uri', | ||
3807 | 267 | 'option logasap') | ||
3808 | 268 | server_entries = [ | ||
3809 | 269 | ('name-1', 'ip-1', 'port-1', ('foo1', 'bar1')), | ||
3810 | 270 | ('name-2', 'ip-2', 'port-2', ('foo2', 'bar2')), | ||
3811 | 271 | ] | ||
3812 | 272 | |||
3813 | 273 | result = hooks.create_listen_stanza(service_name, service_ip, | ||
3814 | 274 | service_port, service_options, | ||
3815 | 275 | server_entries) | ||
3816 | 276 | |||
3817 | 277 | expected = '\n'.join(( | ||
3818 | 278 | 'frontend haproxy-2-1234', | ||
3819 | 279 | ' bind 10.11.12.13:1234', | ||
3820 | 280 | ' default_backend some-name', | ||
3821 | 281 | ' mode http', | ||
3822 | 282 | ' option httplog', | ||
3823 | 283 | ' capture request header X-Man', | ||
3824 | 284 | ' option logasap', | ||
3825 | 285 | '', | ||
3826 | 286 | 'backend some-name', | ||
3827 | 287 | ' mode http', | ||
3828 | 288 | ' option httplog', | ||
3829 | 289 | ' retries 3', | ||
3830 | 290 | ' balance uri', | ||
3831 | 291 | ' server name-1 ip-1:port-1 foo1 bar1', | ||
3832 | 292 | ' server name-2 ip-2:port-2 foo2 bar2', | ||
3833 | 293 | )) | ||
3834 | 294 | |||
3835 | 295 | self.assertEqual(expected, result) | ||
3836 | 296 | |||
3837 | 297 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
3838 | 298 | def test_creates_a_listen_stanza_with_tuple_entries(self): | ||
3839 | 299 | service_name = 'some-name' | ||
3840 | 300 | service_ip = '10.11.12.13' | ||
3841 | 301 | service_port = 1234 | ||
3842 | 302 | service_options = ('foo', 'bar') | ||
3843 | 303 | server_entries = ( | ||
3844 | 304 | ('name-1', 'ip-1', 'port-1', ('foo1', 'bar1')), | ||
3845 | 305 | ('name-2', 'ip-2', 'port-2', ('foo2', 'bar2')), | ||
3846 | 306 | ) | ||
3847 | 307 | |||
3848 | 308 | result = hooks.create_listen_stanza(service_name, service_ip, | ||
3849 | 309 | service_port, service_options, | ||
3850 | 310 | server_entries) | ||
3851 | 311 | |||
3852 | 312 | expected = '\n'.join(( | ||
3853 | 313 | 'frontend haproxy-2-1234', | ||
3854 | 314 | ' bind 10.11.12.13:1234', | ||
3855 | 315 | ' default_backend some-name', | ||
3856 | 316 | '', | ||
3857 | 317 | 'backend some-name', | ||
3858 | 318 | ' foo', | ||
3859 | 319 | ' bar', | ||
3860 | 320 | ' server name-1 ip-1:port-1 foo1 bar1', | ||
3861 | 321 | ' server name-2 ip-2:port-2 foo2 bar2', | ||
3862 | 322 | )) | ||
3863 | 323 | |||
3864 | 324 | self.assertEqual(expected, result) | ||
3865 | 325 | |||
3866 | 326 | def test_doesnt_create_listen_stanza_if_args_not_provided(self): | ||
3867 | 327 | self.assertIsNone(hooks.create_listen_stanza()) | ||
3868 | 328 | |||
3869 | 329 | @patch('hooks.create_listen_stanza') | ||
3870 | 330 | @patch('hooks.config_get') | ||
3871 | 331 | @patch('hooks.get_monitoring_password') | ||
3872 | 332 | def test_creates_a_monitoring_stanza(self, get_monitoring_password, | ||
3873 | 333 | config_get, create_listen_stanza): | ||
3874 | 334 | config_get.return_value = { | ||
3875 | 335 | 'enable_monitoring': True, | ||
3876 | 336 | 'monitoring_allowed_cidr': 'some-cidr', | ||
3877 | 337 | 'monitoring_password': 'some-pass', | ||
3878 | 338 | 'monitoring_username': 'some-user', | ||
3879 | 339 | 'monitoring_stats_refresh': 123, | ||
3880 | 340 | 'monitoring_port': 1234, | ||
3881 | 341 | } | ||
3882 | 342 | create_listen_stanza.return_value = 'some result' | ||
3883 | 343 | |||
3884 | 344 | result = hooks.create_monitoring_stanza(service_name="some-service") | ||
3885 | 345 | |||
3886 | 346 | self.assertEqual('some result', result) | ||
3887 | 347 | get_monitoring_password.assert_called_with() | ||
3888 | 348 | create_listen_stanza.assert_called_with( | ||
3889 | 349 | 'some-service', '0.0.0.0', 1234, [ | ||
3890 | 350 | 'mode http', | ||
3891 | 351 | 'acl allowed_cidr src some-cidr', | ||
3892 | 352 | 'block unless allowed_cidr', | ||
3893 | 353 | 'stats enable', | ||
3894 | 354 | 'stats uri /', | ||
3895 | 355 | 'stats realm Haproxy\\ Statistics', | ||
3896 | 356 | 'stats auth some-user:some-pass', | ||
3897 | 357 | 'stats refresh 123', | ||
3898 | 358 | ]) | ||
3899 | 359 | |||
3900 | 360 | @patch('hooks.create_listen_stanza') | ||
3901 | 361 | @patch('hooks.config_get') | ||
3902 | 362 | @patch('hooks.get_monitoring_password') | ||
3903 | 363 | def test_doesnt_create_a_monitoring_stanza_if_monitoring_disabled( | ||
3904 | 364 | self, get_monitoring_password, config_get, create_listen_stanza): | ||
3905 | 365 | config_get.return_value = { | ||
3906 | 366 | 'enable_monitoring': False, | ||
3907 | 367 | } | ||
3908 | 368 | |||
3909 | 369 | result = hooks.create_monitoring_stanza(service_name="some-service") | ||
3910 | 370 | |||
3911 | 371 | self.assertIsNone(result) | ||
3912 | 372 | self.assertFalse(get_monitoring_password.called) | ||
3913 | 373 | self.assertFalse(create_listen_stanza.called) | ||
3914 | 374 | |||
3915 | 375 | @patch('hooks.create_listen_stanza') | ||
3916 | 376 | @patch('hooks.config_get') | ||
3917 | 377 | @patch('hooks.get_monitoring_password') | ||
3918 | 378 | def test_uses_monitoring_password_for_stanza(self, get_monitoring_password, | ||
3919 | 379 | config_get, | ||
3920 | 380 | create_listen_stanza): | ||
3921 | 381 | config_get.return_value = { | ||
3922 | 382 | 'enable_monitoring': True, | ||
3923 | 383 | 'monitoring_allowed_cidr': 'some-cidr', | ||
3924 | 384 | 'monitoring_password': 'changeme', | ||
3925 | 385 | 'monitoring_username': 'some-user', | ||
3926 | 386 | 'monitoring_stats_refresh': 123, | ||
3927 | 387 | 'monitoring_port': 1234, | ||
3928 | 388 | } | ||
3929 | 389 | create_listen_stanza.return_value = 'some result' | ||
3930 | 390 | get_monitoring_password.return_value = 'some-monitoring-pass' | ||
3931 | 391 | |||
3932 | 392 | hooks.create_monitoring_stanza(service_name="some-service") | ||
3933 | 393 | |||
3934 | 394 | get_monitoring_password.assert_called_with() | ||
3935 | 395 | create_listen_stanza.assert_called_with( | ||
3936 | 396 | 'some-service', '0.0.0.0', 1234, [ | ||
3937 | 397 | 'mode http', | ||
3938 | 398 | 'acl allowed_cidr src some-cidr', | ||
3939 | 399 | 'block unless allowed_cidr', | ||
3940 | 400 | 'stats enable', | ||
3941 | 401 | 'stats uri /', | ||
3942 | 402 | 'stats realm Haproxy\\ Statistics', | ||
3943 | 403 | 'stats auth some-user:some-monitoring-pass', | ||
3944 | 404 | 'stats refresh 123', | ||
3945 | 405 | ]) | ||
3946 | 406 | |||
3947 | 407 | @patch('hooks.pwgen') | ||
3948 | 408 | @patch('hooks.create_listen_stanza') | ||
3949 | 409 | @patch('hooks.config_get') | ||
3950 | 410 | @patch('hooks.get_monitoring_password') | ||
3951 | 411 | def test_uses_new_password_for_stanza(self, get_monitoring_password, | ||
3952 | 412 | config_get, create_listen_stanza, | ||
3953 | 413 | pwgen): | ||
3954 | 414 | config_get.return_value = { | ||
3955 | 415 | 'enable_monitoring': True, | ||
3956 | 416 | 'monitoring_allowed_cidr': 'some-cidr', | ||
3957 | 417 | 'monitoring_password': 'changeme', | ||
3958 | 418 | 'monitoring_username': 'some-user', | ||
3959 | 419 | 'monitoring_stats_refresh': 123, | ||
3960 | 420 | 'monitoring_port': 1234, | ||
3961 | 421 | } | ||
3962 | 422 | create_listen_stanza.return_value = 'some result' | ||
3963 | 423 | get_monitoring_password.return_value = None | ||
3964 | 424 | pwgen.return_value = 'some-new-pass' | ||
3965 | 425 | |||
3966 | 426 | hooks.create_monitoring_stanza(service_name="some-service") | ||
3967 | 427 | |||
3968 | 428 | get_monitoring_password.assert_called_with() | ||
3969 | 429 | create_listen_stanza.assert_called_with( | ||
3970 | 430 | 'some-service', '0.0.0.0', 1234, [ | ||
3971 | 431 | 'mode http', | ||
3972 | 432 | 'acl allowed_cidr src some-cidr', | ||
3973 | 433 | 'block unless allowed_cidr', | ||
3974 | 434 | 'stats enable', | ||
3975 | 435 | 'stats uri /', | ||
3976 | 436 | 'stats realm Haproxy\\ Statistics', | ||
3977 | 437 | 'stats auth some-user:some-new-pass', | ||
3978 | 438 | 'stats refresh 123', | ||
3979 | 439 | ]) | ||
3980 | 440 | |||
3981 | 441 | @patch('hooks.is_proxy') | ||
3982 | 442 | @patch('hooks.config_get') | ||
3983 | 443 | @patch('yaml.safe_load') | ||
3984 | 444 | def test_gets_config_services(self, safe_load, config_get, is_proxy): | ||
3985 | 445 | config_get.return_value = { | ||
3986 | 446 | 'services': 'some-services', | ||
3987 | 447 | } | ||
3988 | 448 | safe_load.return_value = [ | ||
3989 | 449 | { | ||
3990 | 450 | 'service_name': 'foo', | ||
3991 | 451 | 'service_options': { | ||
3992 | 452 | 'foo-1': 123, | ||
3993 | 453 | }, | ||
3994 | 454 | 'service_options': ['foo1', 'foo2'], | ||
3995 | 455 | 'server_options': ['baz1', 'baz2'], | ||
3996 | 456 | }, | ||
3997 | 457 | { | ||
3998 | 458 | 'service_name': 'bar', | ||
3999 | 459 | 'service_options': ['bar1', 'bar2'], | ||
4000 | 460 | 'server_options': ['baz1', 'baz2'], | ||
4001 | 461 | }, | ||
4002 | 462 | ] | ||
4003 | 463 | is_proxy.return_value = False | ||
4004 | 464 | |||
4005 | 465 | result = hooks.get_config_services() | ||
4006 | 466 | expected = { | ||
4007 | 467 | None: { | ||
4008 | 468 | 'service_name': 'foo', | ||
4009 | 469 | }, | ||
4010 | 470 | 'foo': { | ||
4011 | 471 | 'service_name': 'foo', | ||
4012 | 472 | 'service_options': ['foo1', 'foo2'], | ||
4013 | 473 | 'server_options': ['baz1', 'baz2'], | ||
4014 | 474 | }, | ||
4015 | 475 | 'bar': { | ||
4016 | 476 | 'service_name': 'bar', | ||
4017 | 477 | 'service_options': ['bar1', 'bar2'], | ||
4018 | 478 | 'server_options': ['baz1', 'baz2'], | ||
4019 | 479 | }, | ||
4020 | 480 | } | ||
4021 | 481 | |||
4022 | 482 | self.assertEqual(expected, result) | ||
4023 | 483 | |||
4024 | 484 | @patch('hooks.is_proxy') | ||
4025 | 485 | @patch('hooks.config_get') | ||
4026 | 486 | @patch('yaml.safe_load') | ||
4027 | 487 | def test_gets_config_services_with_forward_option(self, safe_load, | ||
4028 | 488 | config_get, is_proxy): | ||
4029 | 489 | config_get.return_value = { | ||
4030 | 490 | 'services': 'some-services', | ||
4031 | 491 | } | ||
4032 | 492 | safe_load.return_value = [ | ||
4033 | 493 | { | ||
4034 | 494 | 'service_name': 'foo', | ||
4035 | 495 | 'service_options': { | ||
4036 | 496 | 'foo-1': 123, | ||
4037 | 497 | }, | ||
4038 | 498 | 'service_options': ['foo1', 'foo2'], | ||
4039 | 499 | 'server_options': ['baz1', 'baz2'], | ||
4040 | 500 | }, | ||
4041 | 501 | { | ||
4042 | 502 | 'service_name': 'bar', | ||
4043 | 503 | 'service_options': ['bar1', 'bar2'], | ||
4044 | 504 | 'server_options': ['baz1', 'baz2'], | ||
4045 | 505 | }, | ||
4046 | 506 | ] | ||
4047 | 507 | is_proxy.return_value = True | ||
4048 | 508 | |||
4049 | 509 | result = hooks.get_config_services() | ||
4050 | 510 | expected = { | ||
4051 | 511 | None: { | ||
4052 | 512 | 'service_name': 'foo', | ||
4053 | 513 | }, | ||
4054 | 514 | 'foo': { | ||
4055 | 515 | 'service_name': 'foo', | ||
4056 | 516 | 'service_options': ['foo1', 'foo2', 'option forwardfor'], | ||
4057 | 517 | 'server_options': ['baz1', 'baz2'], | ||
4058 | 518 | }, | ||
4059 | 519 | 'bar': { | ||
4060 | 520 | 'service_name': 'bar', | ||
4061 | 521 | 'service_options': ['bar1', 'bar2', 'option forwardfor'], | ||
4062 | 522 | 'server_options': ['baz1', 'baz2'], | ||
4063 | 523 | }, | ||
4064 | 524 | } | ||
4065 | 525 | |||
4066 | 526 | self.assertEqual(expected, result) | ||
4067 | 527 | |||
4068 | 528 | @patch('hooks.is_proxy') | ||
4069 | 529 | @patch('hooks.config_get') | ||
4070 | 530 | @patch('yaml.safe_load') | ||
4071 | 531 | def test_gets_config_services_with_options_string(self, safe_load, | ||
4072 | 532 | config_get, is_proxy): | ||
4073 | 533 | config_get.return_value = { | ||
4074 | 534 | 'services': 'some-services', | ||
4075 | 535 | } | ||
4076 | 536 | safe_load.return_value = [ | ||
4077 | 537 | { | ||
4078 | 538 | 'service_name': 'foo', | ||
4079 | 539 | 'service_options': { | ||
4080 | 540 | 'foo-1': 123, | ||
4081 | 541 | }, | ||
4082 | 542 | 'service_options': ['foo1', 'foo2'], | ||
4083 | 543 | 'server_options': 'baz1 baz2', | ||
4084 | 544 | }, | ||
4085 | 545 | { | ||
4086 | 546 | 'service_name': 'bar', | ||
4087 | 547 | 'service_options': ['bar1', 'bar2'], | ||
4088 | 548 | 'server_options': 'baz1 baz2', | ||
4089 | 549 | }, | ||
4090 | 550 | ] | ||
4091 | 551 | is_proxy.return_value = False | ||
4092 | 552 | |||
4093 | 553 | result = hooks.get_config_services() | ||
4094 | 554 | expected = { | ||
4095 | 555 | None: { | ||
4096 | 556 | 'service_name': 'foo', | ||
4097 | 557 | }, | ||
4098 | 558 | 'foo': { | ||
4099 | 559 | 'service_name': 'foo', | ||
4100 | 560 | 'service_options': ['foo1', 'foo2'], | ||
4101 | 561 | 'server_options': ['baz1', 'baz2'], | ||
4102 | 562 | }, | ||
4103 | 563 | 'bar': { | ||
4104 | 564 | 'service_name': 'bar', | ||
4105 | 565 | 'service_options': ['bar1', 'bar2'], | ||
4106 | 566 | 'server_options': ['baz1', 'baz2'], | ||
4107 | 567 | }, | ||
4108 | 568 | } | ||
4109 | 569 | |||
4110 | 570 | self.assertEqual(expected, result) | ||
4111 | 571 | |||
4112 | 572 | @patch('hooks.get_config_services') | ||
4113 | 573 | def test_gets_a_service_config(self, get_config_services): | ||
4114 | 574 | get_config_services.return_value = { | ||
4115 | 575 | 'foo': 'bar', | ||
4116 | 576 | } | ||
4117 | 577 | |||
4118 | 578 | self.assertEqual('bar', hooks.get_config_service('foo')) | ||
4119 | 579 | |||
4120 | 580 | @patch('hooks.get_config_services') | ||
4121 | 581 | def test_gets_a_service_config_from_none(self, get_config_services): | ||
4122 | 582 | get_config_services.return_value = { | ||
4123 | 583 | None: 'bar', | ||
4124 | 584 | } | ||
4125 | 585 | |||
4126 | 586 | self.assertEqual('bar', hooks.get_config_service()) | ||
4127 | 587 | |||
4128 | 588 | @patch('hooks.get_config_services') | ||
4129 | 589 | def test_gets_a_service_config_as_none(self, get_config_services): | ||
4130 | 590 | get_config_services.return_value = { | ||
4131 | 591 | 'baz': 'bar', | ||
4132 | 592 | } | ||
4133 | 593 | |||
4134 | 594 | self.assertIsNone(hooks.get_config_service()) | ||
4135 | 595 | |||
4136 | 596 | @patch('os.path.exists') | ||
4137 | 597 | def test_mark_as_proxy_when_path_exists(self, path_exists): | ||
4138 | 598 | path_exists.return_value = True | ||
4139 | 599 | |||
4140 | 600 | self.assertTrue(hooks.is_proxy('foo')) | ||
4141 | 601 | path_exists.assert_called_with('/var/run/haproxy/foo.is.proxy') | ||
4142 | 602 | |||
4143 | 603 | @patch('os.path.exists') | ||
4144 | 604 | def test_doesnt_mark_as_proxy_when_path_doesnt_exist(self, path_exists): | ||
4145 | 605 | path_exists.return_value = False | ||
4146 | 606 | |||
4147 | 607 | self.assertFalse(hooks.is_proxy('foo')) | ||
4148 | 608 | path_exists.assert_called_with('/var/run/haproxy/foo.is.proxy') | ||
4149 | 609 | |||
4150 | 610 | @patch('os.path.exists') | ||
4151 | 611 | def test_loads_services_by_name(self, path_exists): | ||
4152 | 612 | with patch_open() as (mock_open, mock_file): | ||
4153 | 613 | path_exists.return_value = True | ||
4154 | 614 | mock_file.read.return_value = 'some content' | ||
4155 | 615 | |||
4156 | 616 | result = hooks.load_services('some-service') | ||
4157 | 617 | |||
4158 | 618 | self.assertEqual('some content', result) | ||
4159 | 619 | mock_open.assert_called_with( | ||
4160 | 620 | '/var/run/haproxy/some-service.service') | ||
4161 | 621 | mock_file.read.assert_called_with() | ||
4162 | 622 | |||
4163 | 623 | @patch('os.path.exists') | ||
4164 | 624 | def test_loads_no_service_if_path_doesnt_exist(self, path_exists): | ||
4165 | 625 | path_exists.return_value = False | ||
4166 | 626 | |||
4167 | 627 | result = hooks.load_services('some-service') | ||
4168 | 628 | |||
4169 | 629 | self.assertIsNone(result) | ||
4170 | 630 | |||
4171 | 631 | @patch('glob.glob') | ||
4172 | 632 | def test_loads_services_within_dir_if_no_name_provided(self, glob): | ||
4173 | 633 | with patch_open() as (mock_open, mock_file): | ||
4174 | 634 | mock_file.read.side_effect = ['foo', 'bar'] | ||
4175 | 635 | glob.return_value = ['foo-file', 'bar-file'] | ||
4176 | 636 | |||
4177 | 637 | result = hooks.load_services() | ||
4178 | 638 | |||
4179 | 639 | self.assertEqual('foo\n\nbar\n\n', result) | ||
4180 | 640 | mock_open.assert_has_calls([call('foo-file'), call('bar-file')]) | ||
4181 | 641 | mock_file.read.assert_has_calls([call(), call()]) | ||
4182 | 642 | |||
4183 | 643 | @patch('hooks.os') | ||
4184 | 644 | def test_removes_services_by_name(self, os_): | ||
4185 | 645 | service_path = '/var/run/haproxy/some-service.service' | ||
4186 | 646 | os_.path.exists.return_value = True | ||
4187 | 647 | |||
4188 | 648 | self.assertTrue(hooks.remove_services('some-service')) | ||
4189 | 649 | |||
4190 | 650 | os_.path.exists.assert_called_with(service_path) | ||
4191 | 651 | os_.remove.assert_called_with(service_path) | ||
4192 | 652 | |||
4193 | 653 | @patch('hooks.os') | ||
4194 | 654 | def test_removes_nothing_if_service_doesnt_exist(self, os_): | ||
4195 | 655 | service_path = '/var/run/haproxy/some-service.service' | ||
4196 | 656 | os_.path.exists.return_value = False | ||
4197 | 657 | |||
4198 | 658 | self.assertTrue(hooks.remove_services('some-service')) | ||
4199 | 659 | |||
4200 | 660 | os_.path.exists.assert_called_with(service_path) | ||
4201 | 661 | |||
4202 | 662 | @patch('hooks.os') | ||
4203 | 663 | @patch('glob.glob') | ||
4204 | 664 | def test_removes_all_services_in_dir_if_name_not_provided(self, glob, os_): | ||
4205 | 665 | glob.return_value = ['foo', 'bar'] | ||
4206 | 666 | |||
4207 | 667 | self.assertTrue(hooks.remove_services()) | ||
4208 | 668 | |||
4209 | 669 | os_.remove.assert_has_calls([call('foo'), call('bar')]) | ||
4210 | 670 | |||
4211 | 671 | @patch('hooks.os') | ||
4212 | 672 | @patch('hooks.log') | ||
4213 | 673 | def test_logs_error_when_failing_to_remove_service_by_name(self, log, os_): | ||
4214 | 674 | error = Exception('some error') | ||
4215 | 675 | os_.path.exists.return_value = True | ||
4216 | 676 | os_.remove.side_effect = error | ||
4217 | 677 | |||
4218 | 678 | self.assertFalse(hooks.remove_services('some-service')) | ||
4219 | 679 | |||
4220 | 680 | log.assert_called_with(str(error)) | ||
4221 | 681 | |||
4222 | 682 | @patch('hooks.os') | ||
4223 | 683 | @patch('hooks.log') | ||
4224 | 684 | @patch('glob.glob') | ||
4225 | 685 | def test_logs_error_when_failing_to_remove_services(self, glob, log, os_): | ||
4226 | 686 | errors = [Exception('some error 1'), Exception('some error 2')] | ||
4227 | 687 | os_.remove.side_effect = errors | ||
4228 | 688 | glob.return_value = ['foo', 'bar'] | ||
4229 | 689 | |||
4230 | 690 | self.assertTrue(hooks.remove_services()) | ||
4231 | 691 | |||
4232 | 692 | log.assert_has_calls([ | ||
4233 | 693 | call(str(errors[0])), | ||
4234 | 694 | call(str(errors[1])), | ||
4235 | 695 | ]) | ||
4236 | 696 | |||
4237 | 697 | @patch('subprocess.call') | ||
4238 | 698 | def test_calls_check_action(self, mock_call): | ||
4239 | 699 | mock_call.return_value = 0 | ||
4240 | 700 | |||
4241 | 701 | result = hooks.service_haproxy('check') | ||
4242 | 702 | |||
4243 | 703 | self.assertTrue(result) | ||
4244 | 704 | mock_call.assert_called_with(['/usr/sbin/haproxy', '-f', | ||
4245 | 705 | hooks.default_haproxy_config, '-c']) | ||
4246 | 706 | |||
4247 | 707 | @patch('subprocess.call') | ||
4248 | 708 | def test_calls_check_action_with_different_config(self, mock_call): | ||
4249 | 709 | mock_call.return_value = 0 | ||
4250 | 710 | |||
4251 | 711 | result = hooks.service_haproxy('check', 'some-config') | ||
4252 | 712 | |||
4253 | 713 | self.assertTrue(result) | ||
4254 | 714 | mock_call.assert_called_with(['/usr/sbin/haproxy', '-f', | ||
4255 | 715 | 'some-config', '-c']) | ||
4256 | 716 | |||
4257 | 717 | @patch('subprocess.call') | ||
4258 | 718 | def test_fails_to_check_config(self, mock_call): | ||
4259 | 719 | mock_call.return_value = 1 | ||
4260 | 720 | |||
4261 | 721 | result = hooks.service_haproxy('check') | ||
4262 | 722 | |||
4263 | 723 | self.assertFalse(result) | ||
4264 | 724 | |||
4265 | 725 | @patch('subprocess.call') | ||
4266 | 726 | def test_calls_different_actions(self, mock_call): | ||
4267 | 727 | mock_call.return_value = 0 | ||
4268 | 728 | |||
4269 | 729 | result = hooks.service_haproxy('foo') | ||
4270 | 730 | |||
4271 | 731 | self.assertTrue(result) | ||
4272 | 732 | mock_call.assert_called_with(['service', 'haproxy', 'foo']) | ||
4273 | 733 | |||
4274 | 734 | @patch('subprocess.call') | ||
4275 | 735 | def test_fails_to_call_different_actions(self, mock_call): | ||
4276 | 736 | mock_call.return_value = 1 | ||
4277 | 737 | |||
4278 | 738 | result = hooks.service_haproxy('foo') | ||
4279 | 739 | |||
4280 | 740 | self.assertFalse(result) | ||
4281 | 741 | |||
4282 | 742 | @patch('subprocess.call') | ||
4283 | 743 | def test_doesnt_call_actions_if_action_not_provided(self, mock_call): | ||
4284 | 744 | self.assertIsNone(hooks.service_haproxy()) | ||
4285 | 745 | self.assertFalse(mock_call.called) | ||
4286 | 746 | |||
4287 | 747 | @patch('subprocess.call') | ||
4288 | 748 | def test_doesnt_call_actions_if_config_is_none(self, mock_call): | ||
4289 | 749 | self.assertIsNone(hooks.service_haproxy('foo', None)) | ||
4290 | 750 | self.assertFalse(mock_call.called) | ||
4291 | 0 | 751 | ||
4292 | === added file 'hooks/tests/test_nrpe_hooks.py' | |||
4293 | --- hooks/tests/test_nrpe_hooks.py 1970-01-01 00:00:00 +0000 | |||
4294 | +++ hooks/tests/test_nrpe_hooks.py 2013-10-16 14:05:24 +0000 | |||
4295 | @@ -0,0 +1,24 @@ | |||
4296 | 1 | from testtools import TestCase | ||
4297 | 2 | from mock import call, patch, MagicMock | ||
4298 | 3 | |||
4299 | 4 | import hooks | ||
4300 | 5 | |||
4301 | 6 | |||
4302 | 7 | class NRPEHooksTest(TestCase): | ||
4303 | 8 | |||
4304 | 9 | @patch('hooks.install_nrpe_scripts') | ||
4305 | 10 | @patch('charmhelpers.contrib.charmsupport.nrpe.NRPE') | ||
4306 | 11 | def test_update_nrpe_config(self, nrpe, install_nrpe_scripts): | ||
4307 | 12 | nrpe_compat = MagicMock() | ||
4308 | 13 | nrpe_compat.checks = [MagicMock(shortname="haproxy"), | ||
4309 | 14 | MagicMock(shortname="haproxy_queue")] | ||
4310 | 15 | nrpe.return_value = nrpe_compat | ||
4311 | 16 | |||
4312 | 17 | hooks.update_nrpe_config() | ||
4313 | 18 | |||
4314 | 19 | self.assertEqual( | ||
4315 | 20 | nrpe_compat.mock_calls, | ||
4316 | 21 | [call.add_check('haproxy', 'Check HAProxy', 'check_haproxy.sh'), | ||
4317 | 22 | call.add_check('haproxy_queue', 'Check HAProxy queue depth', | ||
4318 | 23 | 'check_haproxy_queue_depth.sh'), | ||
4319 | 24 | call.write()]) | ||
4320 | 0 | 25 | ||
4321 | === added file 'hooks/tests/test_peer_hooks.py' | |||
4322 | --- hooks/tests/test_peer_hooks.py 1970-01-01 00:00:00 +0000 | |||
4323 | +++ hooks/tests/test_peer_hooks.py 2013-10-16 14:05:24 +0000 | |||
4324 | @@ -0,0 +1,200 @@ | |||
4325 | 1 | import os | ||
4326 | 2 | import yaml | ||
4327 | 3 | |||
4328 | 4 | from testtools import TestCase | ||
4329 | 5 | from mock import patch | ||
4330 | 6 | |||
4331 | 7 | import hooks | ||
4332 | 8 | from utils_for_tests import patch_open | ||
4333 | 9 | |||
4334 | 10 | |||
4335 | 11 | class PeerRelationTest(TestCase): | ||
4336 | 12 | |||
4337 | 13 | def setUp(self): | ||
4338 | 14 | super(PeerRelationTest, self).setUp() | ||
4339 | 15 | |||
4340 | 16 | self.relations_of_type = self.patch_hook("relations_of_type") | ||
4341 | 17 | self.log = self.patch_hook("log") | ||
4342 | 18 | self.unit_get = self.patch_hook("unit_get") | ||
4343 | 19 | |||
4344 | 20 | def patch_hook(self, hook_name): | ||
4345 | 21 | mock_controller = patch.object(hooks, hook_name) | ||
4346 | 22 | mock = mock_controller.start() | ||
4347 | 23 | self.addCleanup(mock_controller.stop) | ||
4348 | 24 | return mock | ||
4349 | 25 | |||
4350 | 26 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
4351 | 27 | def test_with_peer_same_services(self): | ||
4352 | 28 | self.unit_get.return_value = "1.2.4.5" | ||
4353 | 29 | self.relations_of_type.return_value = [ | ||
4354 | 30 | {"__unit__": "haproxy/1", | ||
4355 | 31 | "hostname": "haproxy-1", | ||
4356 | 32 | "private-address": "1.2.4.4", | ||
4357 | 33 | "all_services": yaml.dump([ | ||
4358 | 34 | {"service_name": "foo_service", | ||
4359 | 35 | "service_host": "0.0.0.0", | ||
4360 | 36 | "service_options": ["balance leastconn"], | ||
4361 | 37 | "service_port": 4242}, | ||
4362 | 38 | ]) | ||
4363 | 39 | } | ||
4364 | 40 | ] | ||
4365 | 41 | |||
4366 | 42 | services_dict = { | ||
4367 | 43 | "foo_service": { | ||
4368 | 44 | "service_name": "foo_service", | ||
4369 | 45 | "service_host": "0.0.0.0", | ||
4370 | 46 | "service_port": 4242, | ||
4371 | 47 | "service_options": ["balance leastconn"], | ||
4372 | 48 | "server_options": ["maxconn 4"], | ||
4373 | 49 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4374 | 50 | 8080, ["maxconn 4"])], | ||
4375 | 51 | }, | ||
4376 | 52 | } | ||
4377 | 53 | |||
4378 | 54 | expected = { | ||
4379 | 55 | "foo_service": { | ||
4380 | 56 | "service_name": "foo_service", | ||
4381 | 57 | "service_host": "0.0.0.0", | ||
4382 | 58 | "service_port": 4242, | ||
4383 | 59 | "service_options": ["balance leastconn", | ||
4384 | 60 | "mode tcp", | ||
4385 | 61 | "option tcplog"], | ||
4386 | 62 | "servers": [ | ||
4387 | 63 | ("haproxy-1", "1.2.4.4", 4243, ["check"]), | ||
4388 | 64 | ("haproxy-2", "1.2.4.5", 4243, ["check", "backup"]) | ||
4389 | 65 | ], | ||
4390 | 66 | }, | ||
4391 | 67 | "foo_service_be": { | ||
4392 | 68 | "service_name": "foo_service_be", | ||
4393 | 69 | "service_host": "0.0.0.0", | ||
4394 | 70 | "service_port": 4243, | ||
4395 | 71 | "service_options": ["balance leastconn"], | ||
4396 | 72 | "server_options": ["maxconn 4"], | ||
4397 | 73 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4398 | 74 | 8080, ["maxconn 4"])], | ||
4399 | 75 | }, | ||
4400 | 76 | } | ||
4401 | 77 | self.assertEqual(expected, hooks.apply_peer_config(services_dict)) | ||
4402 | 78 | |||
4403 | 79 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
4404 | 80 | def test_inherit_timeout_settings(self): | ||
4405 | 81 | self.unit_get.return_value = "1.2.4.5" | ||
4406 | 82 | self.relations_of_type.return_value = [ | ||
4407 | 83 | {"__unit__": "haproxy/1", | ||
4408 | 84 | "hostname": "haproxy-1", | ||
4409 | 85 | "private-address": "1.2.4.4", | ||
4410 | 86 | "all_services": yaml.dump([ | ||
4411 | 87 | {"service_name": "foo_service", | ||
4412 | 88 | "service_host": "0.0.0.0", | ||
4413 | 89 | "service_options": ["timeout server 5000"], | ||
4414 | 90 | "service_port": 4242}, | ||
4415 | 91 | ]) | ||
4416 | 92 | } | ||
4417 | 93 | ] | ||
4418 | 94 | |||
4419 | 95 | services_dict = { | ||
4420 | 96 | "foo_service": { | ||
4421 | 97 | "service_name": "foo_service", | ||
4422 | 98 | "service_host": "0.0.0.0", | ||
4423 | 99 | "service_port": 4242, | ||
4424 | 100 | "service_options": ["timeout server 5000"], | ||
4425 | 101 | "server_options": ["maxconn 4"], | ||
4426 | 102 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4427 | 103 | 8080, ["maxconn 4"])], | ||
4428 | 104 | }, | ||
4429 | 105 | } | ||
4430 | 106 | |||
4431 | 107 | expected = { | ||
4432 | 108 | "foo_service": { | ||
4433 | 109 | "service_name": "foo_service", | ||
4434 | 110 | "service_host": "0.0.0.0", | ||
4435 | 111 | "service_port": 4242, | ||
4436 | 112 | "service_options": ["balance leastconn", | ||
4437 | 113 | "mode tcp", | ||
4438 | 114 | "option tcplog", | ||
4439 | 115 | "timeout server 5000"], | ||
4440 | 116 | "servers": [ | ||
4441 | 117 | ("haproxy-1", "1.2.4.4", 4243, ["check"]), | ||
4442 | 118 | ("haproxy-2", "1.2.4.5", 4243, ["check", "backup"]) | ||
4443 | 119 | ], | ||
4444 | 120 | }, | ||
4445 | 121 | "foo_service_be": { | ||
4446 | 122 | "service_name": "foo_service_be", | ||
4447 | 123 | "service_host": "0.0.0.0", | ||
4448 | 124 | "service_port": 4243, | ||
4449 | 125 | "service_options": ["timeout server 5000"], | ||
4450 | 126 | "server_options": ["maxconn 4"], | ||
4451 | 127 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4452 | 128 | 8080, ["maxconn 4"])], | ||
4453 | 129 | }, | ||
4454 | 130 | } | ||
4455 | 131 | self.assertEqual(expected, hooks.apply_peer_config(services_dict)) | ||
4456 | 132 | |||
4457 | 133 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
4458 | 134 | def test_with_no_relation_data(self): | ||
4459 | 135 | self.unit_get.return_value = "1.2.4.5" | ||
4460 | 136 | self.relations_of_type.return_value = [] | ||
4461 | 137 | |||
4462 | 138 | services_dict = { | ||
4463 | 139 | "foo_service": { | ||
4464 | 140 | "service_name": "foo_service", | ||
4465 | 141 | "service_host": "0.0.0.0", | ||
4466 | 142 | "service_port": 4242, | ||
4467 | 143 | "service_options": ["balance leastconn"], | ||
4468 | 144 | "server_options": ["maxconn 4"], | ||
4469 | 145 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4470 | 146 | 8080, ["maxconn 4"])], | ||
4471 | 147 | }, | ||
4472 | 148 | } | ||
4473 | 149 | |||
4474 | 150 | expected = services_dict | ||
4475 | 151 | self.assertEqual(expected, hooks.apply_peer_config(services_dict)) | ||
4476 | 152 | |||
4477 | 153 | @patch.dict(os.environ, {"JUJU_UNIT_NAME": "haproxy/2"}) | ||
4478 | 154 | def test_with_missing_all_services(self): | ||
4479 | 155 | self.unit_get.return_value = "1.2.4.5" | ||
4480 | 156 | self.relations_of_type.return_value = [ | ||
4481 | 157 | {"__unit__": "haproxy/1", | ||
4482 | 158 | "hostname": "haproxy-1", | ||
4483 | 159 | "private-address": "1.2.4.4", | ||
4484 | 160 | } | ||
4485 | 161 | ] | ||
4486 | 162 | |||
4487 | 163 | services_dict = { | ||
4488 | 164 | "foo_service": { | ||
4489 | 165 | "service_name": "foo_service", | ||
4490 | 166 | "service_host": "0.0.0.0", | ||
4491 | 167 | "service_port": 4242, | ||
4492 | 168 | "service_options": ["balance leastconn"], | ||
4493 | 169 | "server_options": ["maxconn 4"], | ||
4494 | 170 | "servers": [("backend_1__8080", "1.2.3.4", | ||
4495 | 171 | 8080, ["maxconn 4"])], | ||
4496 | 172 | }, | ||
4497 | 173 | } | ||
4498 | 174 | |||
4499 | 175 | expected = services_dict | ||
4500 | 176 | self.assertEqual(expected, hooks.apply_peer_config(services_dict)) | ||
4501 | 177 | |||
4502 | 178 | @patch('hooks.create_listen_stanza') | ||
4503 | 179 | def test_writes_service_config(self, create_listen_stanza): | ||
4504 | 180 | create_listen_stanza.return_value = 'some content' | ||
4505 | 181 | services_dict = { | ||
4506 | 182 | 'foo': { | ||
4507 | 183 | 'service_name': 'bar', | ||
4508 | 184 | 'service_host': 'some-host', | ||
4509 | 185 | 'service_port': 'some-port', | ||
4510 | 186 | 'service_options': 'some-options', | ||
4511 | 187 | 'servers': (1, 2), | ||
4512 | 188 | }, | ||
4513 | 189 | } | ||
4514 | 190 | |||
4515 | 191 | with patch.object(os.path, "exists") as exists: | ||
4516 | 192 | exists.return_value = True | ||
4517 | 193 | with patch_open() as (mock_open, mock_file): | ||
4518 | 194 | hooks.write_service_config(services_dict) | ||
4519 | 195 | |||
4520 | 196 | create_listen_stanza.assert_called_with( | ||
4521 | 197 | 'bar', 'some-host', 'some-port', 'some-options', (1, 2)) | ||
4522 | 198 | mock_open.assert_called_with( | ||
4523 | 199 | '/var/run/haproxy/bar.service', 'w') | ||
4524 | 200 | mock_file.write.assert_called_with('some content') | ||
4525 | 0 | 201 | ||
4526 | === added file 'hooks/tests/test_reverseproxy_hooks.py' | |||
4527 | --- hooks/tests/test_reverseproxy_hooks.py 1970-01-01 00:00:00 +0000 | |||
4528 | +++ hooks/tests/test_reverseproxy_hooks.py 2013-10-16 14:05:24 +0000 | |||
4529 | @@ -0,0 +1,345 @@ | |||
4530 | 1 | from testtools import TestCase | ||
4531 | 2 | from mock import patch, call | ||
4532 | 3 | |||
4533 | 4 | import hooks | ||
4534 | 5 | |||
4535 | 6 | |||
4536 | 7 | class ReverseProxyRelationTest(TestCase): | ||
4537 | 8 | |||
4538 | 9 | def setUp(self): | ||
4539 | 10 | super(ReverseProxyRelationTest, self).setUp() | ||
4540 | 11 | |||
4541 | 12 | self.relations_of_type = self.patch_hook("relations_of_type") | ||
4542 | 13 | self.get_config_services = self.patch_hook("get_config_services") | ||
4543 | 14 | self.log = self.patch_hook("log") | ||
4544 | 15 | self.write_service_config = self.patch_hook("write_service_config") | ||
4545 | 16 | self.apply_peer_config = self.patch_hook("apply_peer_config") | ||
4546 | 17 | self.apply_peer_config.side_effect = lambda value: value | ||
4547 | 18 | |||
4548 | 19 | def patch_hook(self, hook_name): | ||
4549 | 20 | mock_controller = patch.object(hooks, hook_name) | ||
4550 | 21 | mock = mock_controller.start() | ||
4551 | 22 | self.addCleanup(mock_controller.stop) | ||
4552 | 23 | return mock | ||
4553 | 24 | |||
4554 | 25 | def test_relation_data_returns_none(self): | ||
4555 | 26 | self.get_config_services.return_value = { | ||
4556 | 27 | "service": { | ||
4557 | 28 | "service_name": "service", | ||
4558 | 29 | }, | ||
4559 | 30 | } | ||
4560 | 31 | self.relations_of_type.return_value = [] | ||
4561 | 32 | self.assertIs(None, hooks.create_services()) | ||
4562 | 33 | self.log.assert_called_once_with("No backend servers, exiting.") | ||
4563 | 34 | self.write_service_config.assert_not_called() | ||
4564 | 35 | |||
4565 | 36 | def test_relation_data_returns_no_relations(self): | ||
4566 | 37 | self.get_config_services.return_value = { | ||
4567 | 38 | "service": { | ||
4568 | 39 | "service_name": "service", | ||
4569 | 40 | }, | ||
4570 | 41 | } | ||
4571 | 42 | self.relations_of_type.return_value = [] | ||
4572 | 43 | self.assertIs(None, hooks.create_services()) | ||
4573 | 44 | self.log.assert_called_once_with("No backend servers, exiting.") | ||
4574 | 45 | self.write_service_config.assert_not_called() | ||
4575 | 46 | |||
4576 | 47 | def test_relation_no_services(self): | ||
4577 | 48 | self.get_config_services.return_value = {} | ||
4578 | 49 | self.relations_of_type.return_value = [ | ||
4579 | 50 | {"port": 4242, | ||
4580 | 51 | "__unit__": "foo/0", | ||
4581 | 52 | "hostname": "backend.1", | ||
4582 | 53 | "private-address": "1.2.3.4"}, | ||
4583 | 54 | ] | ||
4584 | 55 | self.assertIs(None, hooks.create_services()) | ||
4585 | 56 | self.log.assert_called_once_with("No services configured, exiting.") | ||
4586 | 57 | self.write_service_config.assert_not_called() | ||
4587 | 58 | |||
4588 | 59 | def test_no_port_in_relation_data(self): | ||
4589 | 60 | self.get_config_services.return_value = { | ||
4590 | 61 | "service": { | ||
4591 | 62 | "service_name": "service", | ||
4592 | 63 | }, | ||
4593 | 64 | } | ||
4594 | 65 | self.relations_of_type.return_value = [ | ||
4595 | 66 | {"private-address": "1.2.3.4", | ||
4596 | 67 | "__unit__": "foo/0"}, | ||
4597 | 68 | ] | ||
4598 | 69 | self.assertIs(None, hooks.create_services()) | ||
4599 | 70 | self.log.assert_has_calls([call.log( | ||
4600 | 71 | "No port in relation data for 'foo/0', skipping.")]) | ||
4601 | 72 | self.write_service_config.assert_not_called() | ||
4602 | 73 | |||
4603 | 74 | def test_no_private_address_in_relation_data(self): | ||
4604 | 75 | self.get_config_services.return_value = { | ||
4605 | 76 | "service": { | ||
4606 | 77 | "service_name": "service", | ||
4607 | 78 | }, | ||
4608 | 79 | } | ||
4609 | 80 | self.relations_of_type.return_value = [ | ||
4610 | 81 | {"port": 4242, | ||
4611 | 82 | "__unit__": "foo/0"}, | ||
4612 | 83 | ] | ||
4613 | 84 | self.assertIs(None, hooks.create_services()) | ||
4614 | 85 | self.log.assert_has_calls([call.log( | ||
4615 | 86 | "No private-address in relation data for 'foo/0', skipping.")]) | ||
4616 | 87 | self.write_service_config.assert_not_called() | ||
4617 | 88 | |||
4618 | 89 | def test_no_hostname_in_relation_data(self): | ||
4619 | 90 | self.get_config_services.return_value = { | ||
4620 | 91 | "service": { | ||
4621 | 92 | "service_name": "service", | ||
4622 | 93 | }, | ||
4623 | 94 | } | ||
4624 | 95 | self.relations_of_type.return_value = [ | ||
4625 | 96 | {"port": 4242, | ||
4626 | 97 | "private-address": "1.2.3.4", | ||
4627 | 98 | "__unit__": "foo/0"}, | ||
4628 | 99 | ] | ||
4629 | 100 | self.assertIs(None, hooks.create_services()) | ||
4630 | 101 | self.log.assert_has_calls([call.log( | ||
4631 | 102 | "No hostname in relation data for 'foo/0', skipping.")]) | ||
4632 | 103 | self.write_service_config.assert_not_called() | ||
4633 | 104 | |||
4634 | 105 | def test_relation_unknown_service(self): | ||
4635 | 106 | self.get_config_services.return_value = { | ||
4636 | 107 | "service": { | ||
4637 | 108 | "service_name": "service", | ||
4638 | 109 | }, | ||
4639 | 110 | } | ||
4640 | 111 | self.relations_of_type.return_value = [ | ||
4641 | 112 | {"port": 4242, | ||
4642 | 113 | "hostname": "backend.1", | ||
4643 | 114 | "service_name": "invalid", | ||
4644 | 115 | "private-address": "1.2.3.4", | ||
4645 | 116 | "__unit__": "foo/0"}, | ||
4646 | 117 | ] | ||
4647 | 118 | self.assertIs(None, hooks.create_services()) | ||
4648 | 119 | self.log.assert_has_calls([call.log( | ||
4649 | 120 | "Service 'invalid' does not exist.")]) | ||
4650 | 121 | self.write_service_config.assert_not_called() | ||
4651 | 122 | |||
4652 | 123 | def test_no_relation_but_has_servers_from_config(self): | ||
4653 | 124 | self.get_config_services.return_value = { | ||
4654 | 125 | None: { | ||
4655 | 126 | "service_name": "service", | ||
4656 | 127 | }, | ||
4657 | 128 | "service": { | ||
4658 | 129 | "service_name": "service", | ||
4659 | 130 | "servers": [ | ||
4660 | 131 | ("legacy-backend", "1.2.3.1", 4242, ["maxconn 42"]), | ||
4661 | 132 | ] | ||
4662 | 133 | }, | ||
4663 | 134 | } | ||
4664 | 135 | self.relations_of_type.return_value = [] | ||
4665 | 136 | |||
4666 | 137 | expected = { | ||
4667 | 138 | 'service': { | ||
4668 | 139 | 'service_name': 'service', | ||
4669 | 140 | 'servers': [ | ||
4670 | 141 | ("legacy-backend", "1.2.3.1", 4242, ["maxconn 42"]), | ||
4671 | 142 | ], | ||
4672 | 143 | }, | ||
4673 | 144 | } | ||
4674 | 145 | self.assertEqual(expected, hooks.create_services()) | ||
4675 | 146 | self.write_service_config.assert_called_with(expected) | ||
4676 | 147 | |||
4677 | 148 | def test_relation_default_service(self): | ||
4678 | 149 | self.get_config_services.return_value = { | ||
4679 | 150 | None: { | ||
4680 | 151 | "service_name": "service", | ||
4681 | 152 | }, | ||
4682 | 153 | "service": { | ||
4683 | 154 | "service_name": "service", | ||
4684 | 155 | }, | ||
4685 | 156 | } | ||
4686 | 157 | self.relations_of_type.return_value = [ | ||
4687 | 158 | {"port": 4242, | ||
4688 | 159 | "hostname": "backend.1", | ||
4689 | 160 | "private-address": "1.2.3.4", | ||
4690 | 161 | "__unit__": "foo/0"}, | ||
4691 | 162 | ] | ||
4692 | 163 | |||
4693 | 164 | expected = { | ||
4694 | 165 | 'service': { | ||
4695 | 166 | 'service_name': 'service', | ||
4696 | 167 | 'servers': [('foo-0-4242', '1.2.3.4', 4242, [])], | ||
4697 | 168 | }, | ||
4698 | 169 | } | ||
4699 | 170 | self.assertEqual(expected, hooks.create_services()) | ||
4700 | 171 | self.write_service_config.assert_called_with(expected) | ||
4701 | 172 | |||
4702 | 173 | def test_with_service_options(self): | ||
4703 | 174 | self.get_config_services.return_value = { | ||
4704 | 175 | None: { | ||
4705 | 176 | "service_name": "service", | ||
4706 | 177 | }, | ||
4707 | 178 | "service": { | ||
4708 | 179 | "service_name": "service", | ||
4709 | 180 | "server_options": ["maxconn 4"], | ||
4710 | 181 | }, | ||
4711 | 182 | } | ||
4712 | 183 | self.relations_of_type.return_value = [ | ||
4713 | 184 | {"port": 4242, | ||
4714 | 185 | "hostname": "backend.1", | ||
4715 | 186 | "private-address": "1.2.3.4", | ||
4716 | 187 | "__unit__": "foo/0"}, | ||
4717 | 188 | ] | ||
4718 | 189 | |||
4719 | 190 | expected = { | ||
4720 | 191 | 'service': { | ||
4721 | 192 | 'service_name': 'service', | ||
4722 | 193 | 'server_options': ["maxconn 4"], | ||
4723 | 194 | 'servers': [('foo-0-4242', '1.2.3.4', | ||
4724 | 195 | 4242, ["maxconn 4"])], | ||
4725 | 196 | }, | ||
4726 | 197 | } | ||
4727 | 198 | self.assertEqual(expected, hooks.create_services()) | ||
4728 | 199 | self.write_service_config.assert_called_with(expected) | ||
4729 | 200 | |||
4730 | 201 | def test_with_service_name(self): | ||
4731 | 202 | self.get_config_services.return_value = { | ||
4732 | 203 | None: { | ||
4733 | 204 | "service_name": "service", | ||
4734 | 205 | }, | ||
4735 | 206 | "foo_service": { | ||
4736 | 207 | "service_name": "foo_service", | ||
4737 | 208 | "server_options": ["maxconn 4"], | ||
4738 | 209 | }, | ||
4739 | 210 | } | ||
4740 | 211 | self.relations_of_type.return_value = [ | ||
4741 | 212 | {"port": 4242, | ||
4742 | 213 | "hostname": "backend.1", | ||
4743 | 214 | "service_name": "foo_service", | ||
4744 | 215 | "private-address": "1.2.3.4", | ||
4745 | 216 | "__unit__": "foo/0"}, | ||
4746 | 217 | ] | ||
4747 | 218 | |||
4748 | 219 | expected = { | ||
4749 | 220 | 'foo_service': { | ||
4750 | 221 | 'service_name': 'foo_service', | ||
4751 | 222 | 'server_options': ["maxconn 4"], | ||
4752 | 223 | 'servers': [('foo-0-4242', '1.2.3.4', | ||
4753 | 224 | 4242, ["maxconn 4"])], | ||
4754 | 225 | }, | ||
4755 | 226 | } | ||
4756 | 227 | self.assertEqual(expected, hooks.create_services()) | ||
4757 | 228 | self.write_service_config.assert_called_with(expected) | ||
4758 | 229 | |||
4759 | 230 | def test_no_service_name_unit_name_match_service_name(self): | ||
4760 | 231 | self.get_config_services.return_value = { | ||
4761 | 232 | None: { | ||
4762 | 233 | "service_name": "foo_service", | ||
4763 | 234 | }, | ||
4764 | 235 | "foo_service": { | ||
4765 | 236 | "service_name": "foo_service", | ||
4766 | 237 | "server_options": ["maxconn 4"], | ||
4767 | 238 | }, | ||
4768 | 239 | } | ||
4769 | 240 | self.relations_of_type.return_value = [ | ||
4770 | 241 | {"port": 4242, | ||
4771 | 242 | "hostname": "backend.1", | ||
4772 | 243 | "private-address": "1.2.3.4", | ||
4773 | 244 | "__unit__": "foo/1"}, | ||
4774 | 245 | ] | ||
4775 | 246 | |||
4776 | 247 | expected = { | ||
4777 | 248 | 'foo_service': { | ||
4778 | 249 | 'service_name': 'foo_service', | ||
4779 | 250 | 'server_options': ["maxconn 4"], | ||
4780 | 251 | 'servers': [('foo-1-4242', '1.2.3.4', | ||
4781 | 252 | 4242, ["maxconn 4"])], | ||
4782 | 253 | }, | ||
4783 | 254 | } | ||
4784 | 255 | self.assertEqual(expected, hooks.create_services()) | ||
4785 | 256 | self.write_service_config.assert_called_with(expected) | ||
4786 | 257 | |||
4787 | 258 | def test_with_sitenames_match_service_name(self): | ||
4788 | 259 | self.get_config_services.return_value = { | ||
4789 | 260 | None: { | ||
4790 | 261 | "service_name": "service", | ||
4791 | 262 | }, | ||
4792 | 263 | "foo_srv": { | ||
4793 | 264 | "service_name": "foo_srv", | ||
4794 | 265 | "server_options": ["maxconn 4"], | ||
4795 | 266 | }, | ||
4796 | 267 | } | ||
4797 | 268 | self.relations_of_type.return_value = [ | ||
4798 | 269 | {"port": 4242, | ||
4799 | 270 | "hostname": "backend.1", | ||
4800 | 271 | "sitenames": "foo_srv bar_srv", | ||
4801 | 272 | "private-address": "1.2.3.4", | ||
4802 | 273 | "__unit__": "foo/0"}, | ||
4803 | 274 | ] | ||
4804 | 275 | |||
4805 | 276 | expected = { | ||
4806 | 277 | 'foo_srv': { | ||
4807 | 278 | 'service_name': 'foo_srv', | ||
4808 | 279 | 'server_options': ["maxconn 4"], | ||
4809 | 280 | 'servers': [('foo-0-4242', '1.2.3.4', | ||
4810 | 281 | 4242, ["maxconn 4"])], | ||
4811 | 282 | }, | ||
4812 | 283 | } | ||
4813 | 284 | self.assertEqual(expected, hooks.create_services()) | ||
4814 | 285 | self.write_service_config.assert_called_with(expected) | ||
4815 | 286 | |||
4816 | 287 | def test_with_juju_services_match_service_name(self): | ||
4817 | 288 | self.get_config_services.return_value = { | ||
4818 | 289 | None: { | ||
4819 | 290 | "service_name": "service", | ||
4820 | 291 | }, | ||
4821 | 292 | "foo_service": { | ||
4822 | 293 | "service_name": "foo_service", | ||
4823 | 294 | "server_options": ["maxconn 4"], | ||
4824 | 295 | }, | ||
4825 | 296 | } | ||
4826 | 297 | self.relations_of_type.return_value = [ | ||
4827 | 298 | {"port": 4242, | ||
4828 | 299 | "hostname": "backend.1", | ||
4829 | 300 | "private-address": "1.2.3.4", | ||
4830 | 301 | "__unit__": "foo/1"}, | ||
4831 | 302 | ] | ||
4832 | 303 | |||
4833 | 304 | expected = { | ||
4834 | 305 | 'foo_service': { | ||
4835 | 306 | 'service_name': 'foo_service', | ||
4836 | 307 | 'server_options': ["maxconn 4"], | ||
4837 | 308 | 'servers': [('foo-1-4242', '1.2.3.4', | ||
4838 | 309 | 4242, ["maxconn 4"])], | ||
4839 | 310 | }, | ||
4840 | 311 | } | ||
4841 | 312 | |||
4842 | 313 | result = hooks.create_services() | ||
4843 | 314 | |||
4844 | 315 | self.assertEqual(expected, result) | ||
4845 | 316 | self.write_service_config.assert_called_with(expected) | ||
4846 | 317 | |||
4847 | 318 | def test_with_sitenames_no_match_but_unit_name(self): | ||
4848 | 319 | self.get_config_services.return_value = { | ||
4849 | 320 | None: { | ||
4850 | 321 | "service_name": "service", | ||
4851 | 322 | }, | ||
4852 | 323 | "foo": { | ||
4853 | 324 | "service_name": "foo", | ||
4854 | 325 | "server_options": ["maxconn 4"], | ||
4855 | 326 | }, | ||
4856 | 327 | } | ||
4857 | 328 | self.relations_of_type.return_value = [ | ||
4858 | 329 | {"port": 4242, | ||
4859 | 330 | "hostname": "backend.1", | ||
4860 | 331 | "sitenames": "bar_service baz_service", | ||
4861 | 332 | "private-address": "1.2.3.4", | ||
4862 | 333 | "__unit__": "foo/0"}, | ||
4863 | 334 | ] | ||
4864 | 335 | |||
4865 | 336 | expected = { | ||
4866 | 337 | 'foo': { | ||
4867 | 338 | 'service_name': 'foo', | ||
4868 | 339 | 'server_options': ["maxconn 4"], | ||
4869 | 340 | 'servers': [('foo-0-4242', '1.2.3.4', | ||
4870 | 341 | 4242, ["maxconn 4"])], | ||
4871 | 342 | }, | ||
4872 | 343 | } | ||
4873 | 344 | self.assertEqual(expected, hooks.create_services()) | ||
4874 | 345 | self.write_service_config.assert_called_with(expected) | ||
4875 | 0 | 346 | ||
4876 | === added file 'hooks/tests/test_website_hooks.py' | |||
4877 | --- hooks/tests/test_website_hooks.py 1970-01-01 00:00:00 +0000 | |||
4878 | +++ hooks/tests/test_website_hooks.py 2013-10-16 14:05:24 +0000 | |||
4879 | @@ -0,0 +1,145 @@ | |||
4880 | 1 | from testtools import TestCase | ||
4881 | 2 | from mock import patch, call | ||
4882 | 3 | |||
4883 | 4 | import hooks | ||
4884 | 5 | |||
4885 | 6 | |||
4886 | 7 | class WebsiteRelationTest(TestCase): | ||
4887 | 8 | |||
4888 | 9 | def setUp(self): | ||
4889 | 10 | super(WebsiteRelationTest, self).setUp() | ||
4890 | 11 | self.notify_website = self.patch_hook("notify_website") | ||
4891 | 12 | |||
4892 | 13 | def patch_hook(self, hook_name): | ||
4893 | 14 | mock_controller = patch.object(hooks, hook_name) | ||
4894 | 15 | mock = mock_controller.start() | ||
4895 | 16 | self.addCleanup(mock_controller.stop) | ||
4896 | 17 | return mock | ||
4897 | 18 | |||
4898 | 19 | def test_website_interface_none(self): | ||
4899 | 20 | self.assertEqual(None, hooks.website_interface(hook_name=None)) | ||
4900 | 21 | self.notify_website.assert_not_called() | ||
4901 | 22 | |||
4902 | 23 | def test_website_interface_joined(self): | ||
4903 | 24 | hooks.website_interface(hook_name="joined") | ||
4904 | 25 | self.notify_website.assert_called_once_with( | ||
4905 | 26 | changed=False, relation_ids=(None,)) | ||
4906 | 27 | |||
4907 | 28 | def test_website_interface_changed(self): | ||
4908 | 29 | hooks.website_interface(hook_name="changed") | ||
4909 | 30 | self.notify_website.assert_called_once_with( | ||
4910 | 31 | changed=True, relation_ids=(None,)) | ||
4911 | 32 | |||
4912 | 33 | |||
4913 | 34 | class NotifyRelationTest(TestCase): | ||
4914 | 35 | |||
4915 | 36 | def setUp(self): | ||
4916 | 37 | super(NotifyRelationTest, self).setUp() | ||
4917 | 38 | |||
4918 | 39 | self.relations_for_id = self.patch_hook("relations_for_id") | ||
4919 | 40 | self.relation_set = self.patch_hook("relation_set") | ||
4920 | 41 | self.config_get = self.patch_hook("config_get") | ||
4921 | 42 | self.get_relation_ids = self.patch_hook("get_relation_ids") | ||
4922 | 43 | self.get_hostname = self.patch_hook("get_hostname") | ||
4923 | 44 | self.log = self.patch_hook("log") | ||
4924 | 45 | self.get_config_services = self.patch_hook("get_config_service") | ||
4925 | 46 | |||
4926 | 47 | def patch_hook(self, hook_name): | ||
4927 | 48 | mock_controller = patch.object(hooks, hook_name) | ||
4928 | 49 | mock = mock_controller.start() | ||
4929 | 50 | self.addCleanup(mock_controller.stop) | ||
4930 | 51 | return mock | ||
4931 | 52 | |||
4932 | 53 | def test_notify_website_relation_no_relation_ids(self): | ||
4933 | 54 | hooks.notify_relation("website") | ||
4934 | 55 | self.get_relation_ids.return_value = () | ||
4935 | 56 | self.relation_set.assert_not_called() | ||
4936 | 57 | self.get_relation_ids.assert_called_once_with("website") | ||
4937 | 58 | |||
4938 | 59 | def test_notify_website_relation_with_default_relation(self): | ||
4939 | 60 | self.get_relation_ids.return_value = () | ||
4940 | 61 | self.get_hostname.return_value = "foo.local" | ||
4941 | 62 | self.relations_for_id.return_value = [{}] | ||
4942 | 63 | self.config_get.return_value = {"services": ""} | ||
4943 | 64 | |||
4944 | 65 | hooks.notify_relation("website", relation_ids=(None,)) | ||
4945 | 66 | |||
4946 | 67 | self.get_hostname.assert_called_once_with() | ||
4947 | 68 | self.relations_for_id.assert_called_once_with(None) | ||
4948 | 69 | self.relation_set.assert_called_once_with( | ||
4949 | 70 | relation_id=None, port="80", hostname="foo.local", | ||
4950 | 71 | all_services="") | ||
4951 | 72 | self.get_relation_ids.assert_not_called() | ||
4952 | 73 | |||
4953 | 74 | def test_notify_website_relation_with_relations(self): | ||
4954 | 75 | self.get_relation_ids.return_value = ("website:1", | ||
4955 | 76 | "website:2") | ||
4956 | 77 | self.get_hostname.return_value = "foo.local" | ||
4957 | 78 | self.relations_for_id.return_value = [{}] | ||
4958 | 79 | self.config_get.return_value = {"services": ""} | ||
4959 | 80 | |||
4960 | 81 | hooks.notify_relation("website") | ||
4961 | 82 | |||
4962 | 83 | self.get_hostname.assert_called_once_with() | ||
4963 | 84 | self.get_relation_ids.assert_called_once_with("website") | ||
4964 | 85 | self.relations_for_id.assert_has_calls([ | ||
4965 | 86 | call("website:1"), | ||
4966 | 87 | call("website:2"), | ||
4967 | 88 | ]) | ||
4968 | 89 | |||
4969 | 90 | self.relation_set.assert_has_calls([ | ||
4970 | 91 | call(relation_id="website:1", port="80", hostname="foo.local", | ||
4971 | 92 | all_services=""), | ||
4972 | 93 | call(relation_id="website:2", port="80", hostname="foo.local", | ||
4973 | 94 | all_services=""), | ||
4974 | 95 | ]) | ||
4975 | 96 | |||
4976 | 97 | def test_notify_website_relation_with_different_sitenames(self): | ||
4977 | 98 | self.get_relation_ids.return_value = ("website:1",) | ||
4978 | 99 | self.get_hostname.return_value = "foo.local" | ||
4979 | 100 | self.relations_for_id.return_value = [{"service_name": "foo"}, | ||
4980 | 101 | {"service_name": "bar"}] | ||
4981 | 102 | self.config_get.return_value = {"services": ""} | ||
4982 | 103 | |||
4983 | 104 | hooks.notify_relation("website") | ||
4984 | 105 | |||
4985 | 106 | self.get_hostname.assert_called_once_with() | ||
4986 | 107 | self.get_relation_ids.assert_called_once_with("website") | ||
4987 | 108 | self.relations_for_id.assert_has_calls([ | ||
4988 | 109 | call("website:1"), | ||
4989 | 110 | ]) | ||
4990 | 111 | |||
4991 | 112 | self.relation_set.assert_has_calls([ | ||
4992 | 113 | call.relation_set( | ||
4993 | 114 | relation_id="website:1", port="80", hostname="foo.local", | ||
4994 | 115 | all_services=""), | ||
4995 | 116 | ]) | ||
4996 | 117 | self.log.assert_called_once_with( | ||
4997 | 118 | "Remote units requested more than a single service name. " | ||
4998 | 119 | "Falling back to default host/port.") | ||
4999 | 120 | |||
5000 | 121 | def test_notify_website_relation_with_same_sitenames(self): |
The diff has been truncated for viewing.
LGTM +1, only comment is from the previous merge about using Hooks.hook() https://code.launchpad.net/~sidnei/charms/precise/apache2/trunk/+merge/190504/comments/440317