Merge lp:~bac/charms/trusty/openstack-dashboard/dashboard-plugin into lp:~yellow/charms/trusty/openstack-dashboard/dashboard-plugin
- Trusty Tahr (14.04)
- dashboard-plugin
- Merge into dashboard-plugin
Proposed by
Brad Crittenden
Status: | Merged |
---|---|
Merged at revision: | 87 |
Proposed branch: | lp:~bac/charms/trusty/openstack-dashboard/dashboard-plugin |
Merge into: | lp:~yellow/charms/trusty/openstack-dashboard/dashboard-plugin |
Diff against target: |
6515 lines (+3109/-999) 60 files modified
Makefile (+1/-0) actions/openstack_upgrade.py (+0/-34) charm-helpers-hooks.yaml (+1/-1) charm-helpers-tests.yaml (+1/-1) config.yaml (+24/-0) hooks/charmhelpers/cli/__init__.py (+0/-191) hooks/charmhelpers/cli/benchmark.py (+0/-36) hooks/charmhelpers/cli/commands.py (+0/-32) hooks/charmhelpers/cli/hookenv.py (+0/-23) hooks/charmhelpers/cli/host.py (+0/-31) hooks/charmhelpers/cli/unitdata.py (+0/-39) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+52/-14) hooks/charmhelpers/contrib/network/ip.py (+26/-22) hooks/charmhelpers/contrib/openstack/amulet/deployment.py (+130/-12) hooks/charmhelpers/contrib/openstack/amulet/utils.py (+381/-0) hooks/charmhelpers/contrib/openstack/context.py (+132/-31) hooks/charmhelpers/contrib/openstack/files/check_haproxy.sh (+7/-5) hooks/charmhelpers/contrib/openstack/neutron.py (+19/-5) hooks/charmhelpers/contrib/openstack/templates/ceph.conf (+6/-0) hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg (+19/-11) hooks/charmhelpers/contrib/openstack/templating.py (+30/-2) hooks/charmhelpers/contrib/openstack/utils.py (+240/-27) hooks/charmhelpers/contrib/python/packages.py (+13/-4) hooks/charmhelpers/contrib/storage/linux/ceph.py (+656/-61) hooks/charmhelpers/contrib/storage/linux/loopback.py (+10/-0) hooks/charmhelpers/core/files.py (+0/-45) hooks/charmhelpers/core/hookenv.py (+84/-4) hooks/charmhelpers/core/host.py (+120/-32) hooks/charmhelpers/core/hugepage.py (+0/-62) hooks/charmhelpers/core/kernel.py (+68/-0) hooks/charmhelpers/core/services/helpers.py (+14/-5) hooks/charmhelpers/core/strutils.py (+30/-0) hooks/charmhelpers/core/templating.py (+21/-8) hooks/charmhelpers/fetch/__init__.py (+10/-2) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+22/-32) hooks/charmhelpers/fetch/giturl.py (+19/-24) hooks/charmhelpers/payload/archive.py (+73/-0) hooks/horizon_hooks.py (+13/-4) hooks/horizon_utils.py (+3/-0) metadata.yaml (+2/-2) templates/icehouse/local_settings.py (+3/-3) 
templates/juno/local_settings.py (+3/-3) tests/018-basic-trusty-liberty (+11/-0) tests/019-basic-trusty-mitaka (+11/-0) tests/020-basic-wily-liberty (+9/-0) tests/021-basic-xenial-mitaka (+9/-0) tests/052-basic-trusty-kilo-git (+0/-12) tests/README (+96/-46) tests/basic_deployment.py (+1/-1) tests/charmhelpers/contrib/amulet/deployment.py (+4/-2) tests/charmhelpers/contrib/amulet/utils.py (+193/-19) tests/charmhelpers/contrib/openstack/amulet/deployment.py (+130/-12) tests/charmhelpers/contrib/openstack/amulet/utils.py (+381/-0) tests/setup/00-setup (+17/-0) tests/tests.yaml (+0/-20) unit_tests/test_actions_openstack_upgrade.py (+0/-53) unit_tests/test_horizon_contexts.py (+2/-2) unit_tests/test_horizon_hooks.py (+10/-22) unit_tests/test_horizon_utils.py (+1/-1) |
To merge this branch: | bzr merge lp:~bac/charms/trusty/openstack-dashboard/dashboard-plugin |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
j.c.sackett (community) | Approve | ||
Review via email: mp+286514@code.launchpad.net |
Commit message
Description of the change
Merge upstream
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'Makefile' | |||
2 | --- Makefile 2015-10-06 15:06:44 +0000 | |||
3 | +++ Makefile 2016-02-18 14:28:13 +0000 | |||
4 | @@ -13,6 +13,7 @@ | |||
5 | 13 | 13 | ||
6 | 14 | functional_test: | 14 | functional_test: |
7 | 15 | @echo Starting Amulet tests... | 15 | @echo Starting Amulet tests... |
8 | 16 | @tests/setup/00-setup | ||
9 | 16 | @juju test -v -p AMULET_HTTP_PROXY,AMULET_OS_VIP --timeout 2700 | 17 | @juju test -v -p AMULET_HTTP_PROXY,AMULET_OS_VIP --timeout 2700 |
10 | 17 | 18 | ||
11 | 18 | bin/charm_helpers_sync.py: | 19 | bin/charm_helpers_sync.py: |
12 | 19 | 20 | ||
13 | === added symlink 'actions/openstack-upgrade' | |||
14 | === target is u'openstack_upgrade.py' | |||
15 | === removed symlink 'actions/openstack-upgrade' | |||
16 | === target was u'openstack_upgrade.py' | |||
17 | === added file 'actions/openstack_upgrade.py' | |||
18 | --- actions/openstack_upgrade.py 1970-01-01 00:00:00 +0000 | |||
19 | +++ actions/openstack_upgrade.py 2016-02-18 14:28:13 +0000 | |||
20 | @@ -0,0 +1,34 @@ | |||
21 | 1 | #!/usr/bin/python | ||
22 | 2 | import sys | ||
23 | 3 | |||
24 | 4 | sys.path.append('hooks/') | ||
25 | 5 | |||
26 | 6 | from charmhelpers.contrib.openstack.utils import ( | ||
27 | 7 | do_action_openstack_upgrade, | ||
28 | 8 | ) | ||
29 | 9 | |||
30 | 10 | from horizon_utils import ( | ||
31 | 11 | do_openstack_upgrade, | ||
32 | 12 | ) | ||
33 | 13 | |||
34 | 14 | from horizon_hooks import ( | ||
35 | 15 | config_changed, | ||
36 | 16 | CONFIGS, | ||
37 | 17 | ) | ||
38 | 18 | |||
39 | 19 | |||
40 | 20 | def openstack_upgrade(): | ||
41 | 21 | """Upgrade packages to config-set Openstack version. | ||
42 | 22 | |||
43 | 23 | If the charm was installed from source we cannot upgrade it. | ||
44 | 24 | For backwards compatibility a config flag must be set for this | ||
45 | 25 | code to run, otherwise a full service level upgrade will fire | ||
46 | 26 | on config-changed.""" | ||
47 | 27 | |||
48 | 28 | if do_action_openstack_upgrade('openstack-dashboard', | ||
49 | 29 | do_openstack_upgrade, | ||
50 | 30 | CONFIGS): | ||
51 | 31 | config_changed() | ||
52 | 32 | |||
53 | 33 | if __name__ == '__main__': | ||
54 | 34 | openstack_upgrade() | ||
55 | 0 | 35 | ||
56 | === removed file 'actions/openstack_upgrade.py' | |||
57 | --- actions/openstack_upgrade.py 2015-09-23 14:37:57 +0000 | |||
58 | +++ actions/openstack_upgrade.py 1970-01-01 00:00:00 +0000 | |||
59 | @@ -1,34 +0,0 @@ | |||
60 | 1 | #!/usr/bin/python | ||
61 | 2 | import sys | ||
62 | 3 | |||
63 | 4 | sys.path.append('hooks/') | ||
64 | 5 | |||
65 | 6 | from charmhelpers.contrib.openstack.utils import ( | ||
66 | 7 | do_action_openstack_upgrade, | ||
67 | 8 | ) | ||
68 | 9 | |||
69 | 10 | from horizon_utils import ( | ||
70 | 11 | do_openstack_upgrade, | ||
71 | 12 | ) | ||
72 | 13 | |||
73 | 14 | from horizon_hooks import ( | ||
74 | 15 | config_changed, | ||
75 | 16 | CONFIGS, | ||
76 | 17 | ) | ||
77 | 18 | |||
78 | 19 | |||
79 | 20 | def openstack_upgrade(): | ||
80 | 21 | """Upgrade packages to config-set Openstack version. | ||
81 | 22 | |||
82 | 23 | If the charm was installed from source we cannot upgrade it. | ||
83 | 24 | For backwards compatibility a config flag must be set for this | ||
84 | 25 | code to run, otherwise a full service level upgrade will fire | ||
85 | 26 | on config-changed.""" | ||
86 | 27 | |||
87 | 28 | if do_action_openstack_upgrade('openstack-dashboard', | ||
88 | 29 | do_openstack_upgrade, | ||
89 | 30 | CONFIGS): | ||
90 | 31 | config_changed() | ||
91 | 32 | |||
92 | 33 | if __name__ == '__main__': | ||
93 | 34 | openstack_upgrade() | ||
94 | 35 | 0 | ||
95 | === modified file 'charm-helpers-hooks.yaml' | |||
96 | --- charm-helpers-hooks.yaml 2015-07-31 13:11:17 +0000 | |||
97 | +++ charm-helpers-hooks.yaml 2016-02-18 14:28:13 +0000 | |||
98 | @@ -1,4 +1,4 @@ | |||
100 | 1 | branch: lp:charm-helpers | 1 | branch: lp:~openstack-charmers/charm-helpers/stable |
101 | 2 | destination: hooks/charmhelpers | 2 | destination: hooks/charmhelpers |
102 | 3 | include: | 3 | include: |
103 | 4 | - core | 4 | - core |
104 | 5 | 5 | ||
105 | === modified file 'charm-helpers-tests.yaml' | |||
106 | --- charm-helpers-tests.yaml 2015-02-10 18:50:39 +0000 | |||
107 | +++ charm-helpers-tests.yaml 2016-02-18 14:28:13 +0000 | |||
108 | @@ -1,4 +1,4 @@ | |||
110 | 1 | branch: lp:charm-helpers | 1 | branch: lp:~openstack-charmers/charm-helpers/stable |
111 | 2 | destination: tests/charmhelpers | 2 | destination: tests/charmhelpers |
112 | 3 | include: | 3 | include: |
113 | 4 | - contrib.amulet | 4 | - contrib.amulet |
114 | 5 | 5 | ||
115 | === modified file 'config.yaml' | |||
116 | --- config.yaml 2015-10-08 20:29:11 +0000 | |||
117 | +++ config.yaml 2016-02-18 14:28:13 +0000 | |||
118 | @@ -193,6 +193,30 @@ | |||
119 | 193 | wait for you to execute the openstack-upgrade action for this charm on | 193 | wait for you to execute the openstack-upgrade action for this charm on |
120 | 194 | each unit. If False it will revert to existing behavior of upgrading | 194 | each unit. If False it will revert to existing behavior of upgrading |
121 | 195 | all units on config change. | 195 | all units on config change. |
122 | 196 | haproxy-server-timeout: | ||
123 | 197 | type: int | ||
124 | 198 | default: | ||
125 | 199 | description: | | ||
126 | 200 | Server timeout configuration in ms for haproxy, used in HA | ||
127 | 201 | configurations. If not provided, default value of 30000ms is used. | ||
128 | 202 | haproxy-client-timeout: | ||
129 | 203 | type: int | ||
130 | 204 | default: | ||
131 | 205 | description: | | ||
132 | 206 | Client timeout configuration in ms for haproxy, used in HA | ||
133 | 207 | configurations. If not provided, default value of 30000ms is used. | ||
134 | 208 | haproxy-queue-timeout: | ||
135 | 209 | type: int | ||
136 | 210 | default: | ||
137 | 211 | description: | | ||
138 | 212 | Queue timeout configuration in ms for haproxy, used in HA | ||
139 | 213 | configurations. If not provided, default value of 5000ms is used. | ||
140 | 214 | haproxy-connect-timeout: | ||
141 | 215 | type: int | ||
142 | 216 | default: | ||
143 | 217 | description: | | ||
144 | 218 | Connect timeout configuration in ms for haproxy, used in HA | ||
145 | 219 | configurations. If not provided, default value of 5000ms is used. | ||
146 | 196 | apache_http_addendum: | 220 | apache_http_addendum: |
147 | 197 | type: string | 221 | type: string |
148 | 198 | default: '' | 222 | default: '' |
149 | 199 | 223 | ||
150 | === added directory 'hooks/charmhelpers/cli' | |||
151 | === removed directory 'hooks/charmhelpers/cli' | |||
152 | === added file 'hooks/charmhelpers/cli/__init__.py' | |||
153 | --- hooks/charmhelpers/cli/__init__.py 1970-01-01 00:00:00 +0000 | |||
154 | +++ hooks/charmhelpers/cli/__init__.py 2016-02-18 14:28:13 +0000 | |||
155 | @@ -0,0 +1,191 @@ | |||
156 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
157 | 2 | # | ||
158 | 3 | # This file is part of charm-helpers. | ||
159 | 4 | # | ||
160 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
161 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
162 | 7 | # published by the Free Software Foundation. | ||
163 | 8 | # | ||
164 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
165 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
166 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
167 | 12 | # GNU Lesser General Public License for more details. | ||
168 | 13 | # | ||
169 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
170 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
171 | 16 | |||
172 | 17 | import inspect | ||
173 | 18 | import argparse | ||
174 | 19 | import sys | ||
175 | 20 | |||
176 | 21 | from six.moves import zip | ||
177 | 22 | |||
178 | 23 | import charmhelpers.core.unitdata | ||
179 | 24 | |||
180 | 25 | |||
181 | 26 | class OutputFormatter(object): | ||
182 | 27 | def __init__(self, outfile=sys.stdout): | ||
183 | 28 | self.formats = ( | ||
184 | 29 | "raw", | ||
185 | 30 | "json", | ||
186 | 31 | "py", | ||
187 | 32 | "yaml", | ||
188 | 33 | "csv", | ||
189 | 34 | "tab", | ||
190 | 35 | ) | ||
191 | 36 | self.outfile = outfile | ||
192 | 37 | |||
193 | 38 | def add_arguments(self, argument_parser): | ||
194 | 39 | formatgroup = argument_parser.add_mutually_exclusive_group() | ||
195 | 40 | choices = self.supported_formats | ||
196 | 41 | formatgroup.add_argument("--format", metavar='FMT', | ||
197 | 42 | help="Select output format for returned data, " | ||
198 | 43 | "where FMT is one of: {}".format(choices), | ||
199 | 44 | choices=choices, default='raw') | ||
200 | 45 | for fmt in self.formats: | ||
201 | 46 | fmtfunc = getattr(self, fmt) | ||
202 | 47 | formatgroup.add_argument("-{}".format(fmt[0]), | ||
203 | 48 | "--{}".format(fmt), action='store_const', | ||
204 | 49 | const=fmt, dest='format', | ||
205 | 50 | help=fmtfunc.__doc__) | ||
206 | 51 | |||
207 | 52 | @property | ||
208 | 53 | def supported_formats(self): | ||
209 | 54 | return self.formats | ||
210 | 55 | |||
211 | 56 | def raw(self, output): | ||
212 | 57 | """Output data as raw string (default)""" | ||
213 | 58 | if isinstance(output, (list, tuple)): | ||
214 | 59 | output = '\n'.join(map(str, output)) | ||
215 | 60 | self.outfile.write(str(output)) | ||
216 | 61 | |||
217 | 62 | def py(self, output): | ||
218 | 63 | """Output data as a nicely-formatted python data structure""" | ||
219 | 64 | import pprint | ||
220 | 65 | pprint.pprint(output, stream=self.outfile) | ||
221 | 66 | |||
222 | 67 | def json(self, output): | ||
223 | 68 | """Output data in JSON format""" | ||
224 | 69 | import json | ||
225 | 70 | json.dump(output, self.outfile) | ||
226 | 71 | |||
227 | 72 | def yaml(self, output): | ||
228 | 73 | """Output data in YAML format""" | ||
229 | 74 | import yaml | ||
230 | 75 | yaml.safe_dump(output, self.outfile) | ||
231 | 76 | |||
232 | 77 | def csv(self, output): | ||
233 | 78 | """Output data as excel-compatible CSV""" | ||
234 | 79 | import csv | ||
235 | 80 | csvwriter = csv.writer(self.outfile) | ||
236 | 81 | csvwriter.writerows(output) | ||
237 | 82 | |||
238 | 83 | def tab(self, output): | ||
239 | 84 | """Output data in excel-compatible tab-delimited format""" | ||
240 | 85 | import csv | ||
241 | 86 | csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab) | ||
242 | 87 | csvwriter.writerows(output) | ||
243 | 88 | |||
244 | 89 | def format_output(self, output, fmt='raw'): | ||
245 | 90 | fmtfunc = getattr(self, fmt) | ||
246 | 91 | fmtfunc(output) | ||
247 | 92 | |||
248 | 93 | |||
249 | 94 | class CommandLine(object): | ||
250 | 95 | argument_parser = None | ||
251 | 96 | subparsers = None | ||
252 | 97 | formatter = None | ||
253 | 98 | exit_code = 0 | ||
254 | 99 | |||
255 | 100 | def __init__(self): | ||
256 | 101 | if not self.argument_parser: | ||
257 | 102 | self.argument_parser = argparse.ArgumentParser(description='Perform common charm tasks') | ||
258 | 103 | if not self.formatter: | ||
259 | 104 | self.formatter = OutputFormatter() | ||
260 | 105 | self.formatter.add_arguments(self.argument_parser) | ||
261 | 106 | if not self.subparsers: | ||
262 | 107 | self.subparsers = self.argument_parser.add_subparsers(help='Commands') | ||
263 | 108 | |||
264 | 109 | def subcommand(self, command_name=None): | ||
265 | 110 | """ | ||
266 | 111 | Decorate a function as a subcommand. Use its arguments as the | ||
267 | 112 | command-line arguments""" | ||
268 | 113 | def wrapper(decorated): | ||
269 | 114 | cmd_name = command_name or decorated.__name__ | ||
270 | 115 | subparser = self.subparsers.add_parser(cmd_name, | ||
271 | 116 | description=decorated.__doc__) | ||
272 | 117 | for args, kwargs in describe_arguments(decorated): | ||
273 | 118 | subparser.add_argument(*args, **kwargs) | ||
274 | 119 | subparser.set_defaults(func=decorated) | ||
275 | 120 | return decorated | ||
276 | 121 | return wrapper | ||
277 | 122 | |||
278 | 123 | def test_command(self, decorated): | ||
279 | 124 | """ | ||
280 | 125 | Subcommand is a boolean test function, so bool return values should be | ||
281 | 126 | converted to a 0/1 exit code. | ||
282 | 127 | """ | ||
283 | 128 | decorated._cli_test_command = True | ||
284 | 129 | return decorated | ||
285 | 130 | |||
286 | 131 | def no_output(self, decorated): | ||
287 | 132 | """ | ||
288 | 133 | Subcommand is not expected to return a value, so don't print a spurious None. | ||
289 | 134 | """ | ||
290 | 135 | decorated._cli_no_output = True | ||
291 | 136 | return decorated | ||
292 | 137 | |||
293 | 138 | def subcommand_builder(self, command_name, description=None): | ||
294 | 139 | """ | ||
295 | 140 | Decorate a function that builds a subcommand. Builders should accept a | ||
296 | 141 | single argument (the subparser instance) and return the function to be | ||
297 | 142 | run as the command.""" | ||
298 | 143 | def wrapper(decorated): | ||
299 | 144 | subparser = self.subparsers.add_parser(command_name) | ||
300 | 145 | func = decorated(subparser) | ||
301 | 146 | subparser.set_defaults(func=func) | ||
302 | 147 | subparser.description = description or func.__doc__ | ||
303 | 148 | return wrapper | ||
304 | 149 | |||
305 | 150 | def run(self): | ||
306 | 151 | "Run cli, processing arguments and executing subcommands." | ||
307 | 152 | arguments = self.argument_parser.parse_args() | ||
308 | 153 | argspec = inspect.getargspec(arguments.func) | ||
309 | 154 | vargs = [] | ||
310 | 155 | for arg in argspec.args: | ||
311 | 156 | vargs.append(getattr(arguments, arg)) | ||
312 | 157 | if argspec.varargs: | ||
313 | 158 | vargs.extend(getattr(arguments, argspec.varargs)) | ||
314 | 159 | output = arguments.func(*vargs) | ||
315 | 160 | if getattr(arguments.func, '_cli_test_command', False): | ||
316 | 161 | self.exit_code = 0 if output else 1 | ||
317 | 162 | output = '' | ||
318 | 163 | if getattr(arguments.func, '_cli_no_output', False): | ||
319 | 164 | output = '' | ||
320 | 165 | self.formatter.format_output(output, arguments.format) | ||
321 | 166 | if charmhelpers.core.unitdata._KV: | ||
322 | 167 | charmhelpers.core.unitdata._KV.flush() | ||
323 | 168 | |||
324 | 169 | |||
325 | 170 | cmdline = CommandLine() | ||
326 | 171 | |||
327 | 172 | |||
328 | 173 | def describe_arguments(func): | ||
329 | 174 | """ | ||
330 | 175 | Analyze a function's signature and return a data structure suitable for | ||
331 | 176 | passing in as arguments to an argparse parser's add_argument() method.""" | ||
332 | 177 | |||
333 | 178 | argspec = inspect.getargspec(func) | ||
334 | 179 | # we should probably raise an exception somewhere if func includes **kwargs | ||
335 | 180 | if argspec.defaults: | ||
336 | 181 | positional_args = argspec.args[:-len(argspec.defaults)] | ||
337 | 182 | keyword_names = argspec.args[-len(argspec.defaults):] | ||
338 | 183 | for arg, default in zip(keyword_names, argspec.defaults): | ||
339 | 184 | yield ('--{}'.format(arg),), {'default': default} | ||
340 | 185 | else: | ||
341 | 186 | positional_args = argspec.args | ||
342 | 187 | |||
343 | 188 | for arg in positional_args: | ||
344 | 189 | yield (arg,), {} | ||
345 | 190 | if argspec.varargs: | ||
346 | 191 | yield (argspec.varargs,), {'nargs': '*'} | ||
347 | 0 | 192 | ||
348 | === removed file 'hooks/charmhelpers/cli/__init__.py' | |||
349 | --- hooks/charmhelpers/cli/__init__.py 2015-08-18 17:34:36 +0000 | |||
350 | +++ hooks/charmhelpers/cli/__init__.py 1970-01-01 00:00:00 +0000 | |||
351 | @@ -1,191 +0,0 @@ | |||
352 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
353 | 2 | # | ||
354 | 3 | # This file is part of charm-helpers. | ||
355 | 4 | # | ||
356 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
357 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
358 | 7 | # published by the Free Software Foundation. | ||
359 | 8 | # | ||
360 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
361 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
362 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
363 | 12 | # GNU Lesser General Public License for more details. | ||
364 | 13 | # | ||
365 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
366 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
367 | 16 | |||
368 | 17 | import inspect | ||
369 | 18 | import argparse | ||
370 | 19 | import sys | ||
371 | 20 | |||
372 | 21 | from six.moves import zip | ||
373 | 22 | |||
374 | 23 | from charmhelpers.core import unitdata | ||
375 | 24 | |||
376 | 25 | |||
377 | 26 | class OutputFormatter(object): | ||
378 | 27 | def __init__(self, outfile=sys.stdout): | ||
379 | 28 | self.formats = ( | ||
380 | 29 | "raw", | ||
381 | 30 | "json", | ||
382 | 31 | "py", | ||
383 | 32 | "yaml", | ||
384 | 33 | "csv", | ||
385 | 34 | "tab", | ||
386 | 35 | ) | ||
387 | 36 | self.outfile = outfile | ||
388 | 37 | |||
389 | 38 | def add_arguments(self, argument_parser): | ||
390 | 39 | formatgroup = argument_parser.add_mutually_exclusive_group() | ||
391 | 40 | choices = self.supported_formats | ||
392 | 41 | formatgroup.add_argument("--format", metavar='FMT', | ||
393 | 42 | help="Select output format for returned data, " | ||
394 | 43 | "where FMT is one of: {}".format(choices), | ||
395 | 44 | choices=choices, default='raw') | ||
396 | 45 | for fmt in self.formats: | ||
397 | 46 | fmtfunc = getattr(self, fmt) | ||
398 | 47 | formatgroup.add_argument("-{}".format(fmt[0]), | ||
399 | 48 | "--{}".format(fmt), action='store_const', | ||
400 | 49 | const=fmt, dest='format', | ||
401 | 50 | help=fmtfunc.__doc__) | ||
402 | 51 | |||
403 | 52 | @property | ||
404 | 53 | def supported_formats(self): | ||
405 | 54 | return self.formats | ||
406 | 55 | |||
407 | 56 | def raw(self, output): | ||
408 | 57 | """Output data as raw string (default)""" | ||
409 | 58 | if isinstance(output, (list, tuple)): | ||
410 | 59 | output = '\n'.join(map(str, output)) | ||
411 | 60 | self.outfile.write(str(output)) | ||
412 | 61 | |||
413 | 62 | def py(self, output): | ||
414 | 63 | """Output data as a nicely-formatted python data structure""" | ||
415 | 64 | import pprint | ||
416 | 65 | pprint.pprint(output, stream=self.outfile) | ||
417 | 66 | |||
418 | 67 | def json(self, output): | ||
419 | 68 | """Output data in JSON format""" | ||
420 | 69 | import json | ||
421 | 70 | json.dump(output, self.outfile) | ||
422 | 71 | |||
423 | 72 | def yaml(self, output): | ||
424 | 73 | """Output data in YAML format""" | ||
425 | 74 | import yaml | ||
426 | 75 | yaml.safe_dump(output, self.outfile) | ||
427 | 76 | |||
428 | 77 | def csv(self, output): | ||
429 | 78 | """Output data as excel-compatible CSV""" | ||
430 | 79 | import csv | ||
431 | 80 | csvwriter = csv.writer(self.outfile) | ||
432 | 81 | csvwriter.writerows(output) | ||
433 | 82 | |||
434 | 83 | def tab(self, output): | ||
435 | 84 | """Output data in excel-compatible tab-delimited format""" | ||
436 | 85 | import csv | ||
437 | 86 | csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab) | ||
438 | 87 | csvwriter.writerows(output) | ||
439 | 88 | |||
440 | 89 | def format_output(self, output, fmt='raw'): | ||
441 | 90 | fmtfunc = getattr(self, fmt) | ||
442 | 91 | fmtfunc(output) | ||
443 | 92 | |||
444 | 93 | |||
445 | 94 | class CommandLine(object): | ||
446 | 95 | argument_parser = None | ||
447 | 96 | subparsers = None | ||
448 | 97 | formatter = None | ||
449 | 98 | exit_code = 0 | ||
450 | 99 | |||
451 | 100 | def __init__(self): | ||
452 | 101 | if not self.argument_parser: | ||
453 | 102 | self.argument_parser = argparse.ArgumentParser(description='Perform common charm tasks') | ||
454 | 103 | if not self.formatter: | ||
455 | 104 | self.formatter = OutputFormatter() | ||
456 | 105 | self.formatter.add_arguments(self.argument_parser) | ||
457 | 106 | if not self.subparsers: | ||
458 | 107 | self.subparsers = self.argument_parser.add_subparsers(help='Commands') | ||
459 | 108 | |||
460 | 109 | def subcommand(self, command_name=None): | ||
461 | 110 | """ | ||
462 | 111 | Decorate a function as a subcommand. Use its arguments as the | ||
463 | 112 | command-line arguments""" | ||
464 | 113 | def wrapper(decorated): | ||
465 | 114 | cmd_name = command_name or decorated.__name__ | ||
466 | 115 | subparser = self.subparsers.add_parser(cmd_name, | ||
467 | 116 | description=decorated.__doc__) | ||
468 | 117 | for args, kwargs in describe_arguments(decorated): | ||
469 | 118 | subparser.add_argument(*args, **kwargs) | ||
470 | 119 | subparser.set_defaults(func=decorated) | ||
471 | 120 | return decorated | ||
472 | 121 | return wrapper | ||
473 | 122 | |||
474 | 123 | def test_command(self, decorated): | ||
475 | 124 | """ | ||
476 | 125 | Subcommand is a boolean test function, so bool return values should be | ||
477 | 126 | converted to a 0/1 exit code. | ||
478 | 127 | """ | ||
479 | 128 | decorated._cli_test_command = True | ||
480 | 129 | return decorated | ||
481 | 130 | |||
482 | 131 | def no_output(self, decorated): | ||
483 | 132 | """ | ||
484 | 133 | Subcommand is not expected to return a value, so don't print a spurious None. | ||
485 | 134 | """ | ||
486 | 135 | decorated._cli_no_output = True | ||
487 | 136 | return decorated | ||
488 | 137 | |||
489 | 138 | def subcommand_builder(self, command_name, description=None): | ||
490 | 139 | """ | ||
491 | 140 | Decorate a function that builds a subcommand. Builders should accept a | ||
492 | 141 | single argument (the subparser instance) and return the function to be | ||
493 | 142 | run as the command.""" | ||
494 | 143 | def wrapper(decorated): | ||
495 | 144 | subparser = self.subparsers.add_parser(command_name) | ||
496 | 145 | func = decorated(subparser) | ||
497 | 146 | subparser.set_defaults(func=func) | ||
498 | 147 | subparser.description = description or func.__doc__ | ||
499 | 148 | return wrapper | ||
500 | 149 | |||
501 | 150 | def run(self): | ||
502 | 151 | "Run cli, processing arguments and executing subcommands." | ||
503 | 152 | arguments = self.argument_parser.parse_args() | ||
504 | 153 | argspec = inspect.getargspec(arguments.func) | ||
505 | 154 | vargs = [] | ||
506 | 155 | for arg in argspec.args: | ||
507 | 156 | vargs.append(getattr(arguments, arg)) | ||
508 | 157 | if argspec.varargs: | ||
509 | 158 | vargs.extend(getattr(arguments, argspec.varargs)) | ||
510 | 159 | output = arguments.func(*vargs) | ||
511 | 160 | if getattr(arguments.func, '_cli_test_command', False): | ||
512 | 161 | self.exit_code = 0 if output else 1 | ||
513 | 162 | output = '' | ||
514 | 163 | if getattr(arguments.func, '_cli_no_output', False): | ||
515 | 164 | output = '' | ||
516 | 165 | self.formatter.format_output(output, arguments.format) | ||
517 | 166 | if unitdata._KV: | ||
518 | 167 | unitdata._KV.flush() | ||
519 | 168 | |||
520 | 169 | |||
521 | 170 | cmdline = CommandLine() | ||
522 | 171 | |||
523 | 172 | |||
524 | 173 | def describe_arguments(func): | ||
525 | 174 | """ | ||
526 | 175 | Analyze a function's signature and return a data structure suitable for | ||
527 | 176 | passing in as arguments to an argparse parser's add_argument() method.""" | ||
528 | 177 | |||
529 | 178 | argspec = inspect.getargspec(func) | ||
530 | 179 | # we should probably raise an exception somewhere if func includes **kwargs | ||
531 | 180 | if argspec.defaults: | ||
532 | 181 | positional_args = argspec.args[:-len(argspec.defaults)] | ||
533 | 182 | keyword_names = argspec.args[-len(argspec.defaults):] | ||
534 | 183 | for arg, default in zip(keyword_names, argspec.defaults): | ||
535 | 184 | yield ('--{}'.format(arg),), {'default': default} | ||
536 | 185 | else: | ||
537 | 186 | positional_args = argspec.args | ||
538 | 187 | |||
539 | 188 | for arg in positional_args: | ||
540 | 189 | yield (arg,), {} | ||
541 | 190 | if argspec.varargs: | ||
542 | 191 | yield (argspec.varargs,), {'nargs': '*'} | ||
543 | 192 | 0 | ||
544 | === added file 'hooks/charmhelpers/cli/benchmark.py' | |||
545 | --- hooks/charmhelpers/cli/benchmark.py 1970-01-01 00:00:00 +0000 | |||
546 | +++ hooks/charmhelpers/cli/benchmark.py 2016-02-18 14:28:13 +0000 | |||
547 | @@ -0,0 +1,36 @@ | |||
548 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
549 | 2 | # | ||
550 | 3 | # This file is part of charm-helpers. | ||
551 | 4 | # | ||
552 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
553 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
554 | 7 | # published by the Free Software Foundation. | ||
555 | 8 | # | ||
556 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
557 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
558 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
559 | 12 | # GNU Lesser General Public License for more details. | ||
560 | 13 | # | ||
561 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
562 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
563 | 16 | |||
564 | 17 | from . import cmdline | ||
565 | 18 | from charmhelpers.contrib.benchmark import Benchmark | ||
566 | 19 | |||
567 | 20 | |||
568 | 21 | @cmdline.subcommand(command_name='benchmark-start') | ||
569 | 22 | def start(): | ||
570 | 23 | Benchmark.start() | ||
571 | 24 | |||
572 | 25 | |||
573 | 26 | @cmdline.subcommand(command_name='benchmark-finish') | ||
574 | 27 | def finish(): | ||
575 | 28 | Benchmark.finish() | ||
576 | 29 | |||
577 | 30 | |||
578 | 31 | @cmdline.subcommand_builder('benchmark-composite', description="Set the benchmark composite score") | ||
579 | 32 | def service(subparser): | ||
580 | 33 | subparser.add_argument("value", help="The composite score.") | ||
581 | 34 | subparser.add_argument("units", help="The units the composite score represents, i.e., 'reads/sec'.") | ||
582 | 35 | subparser.add_argument("direction", help="'asc' if a lower score is better, 'desc' if a higher score is better.") | ||
583 | 36 | return Benchmark.set_composite_score | ||
584 | 0 | 37 | ||
585 | === removed file 'hooks/charmhelpers/cli/benchmark.py' | |||
586 | --- hooks/charmhelpers/cli/benchmark.py 2015-07-31 13:11:17 +0000 | |||
587 | +++ hooks/charmhelpers/cli/benchmark.py 1970-01-01 00:00:00 +0000 | |||
588 | @@ -1,36 +0,0 @@ | |||
589 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
590 | 2 | # | ||
591 | 3 | # This file is part of charm-helpers. | ||
592 | 4 | # | ||
593 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
594 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
595 | 7 | # published by the Free Software Foundation. | ||
596 | 8 | # | ||
597 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
598 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
599 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
600 | 12 | # GNU Lesser General Public License for more details. | ||
601 | 13 | # | ||
602 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
603 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
604 | 16 | |||
605 | 17 | from . import cmdline | ||
606 | 18 | from charmhelpers.contrib.benchmark import Benchmark | ||
607 | 19 | |||
608 | 20 | |||
609 | 21 | @cmdline.subcommand(command_name='benchmark-start') | ||
610 | 22 | def start(): | ||
611 | 23 | Benchmark.start() | ||
612 | 24 | |||
613 | 25 | |||
614 | 26 | @cmdline.subcommand(command_name='benchmark-finish') | ||
615 | 27 | def finish(): | ||
616 | 28 | Benchmark.finish() | ||
617 | 29 | |||
618 | 30 | |||
619 | 31 | @cmdline.subcommand_builder('benchmark-composite', description="Set the benchmark composite score") | ||
620 | 32 | def service(subparser): | ||
621 | 33 | subparser.add_argument("value", help="The composite score.") | ||
622 | 34 | subparser.add_argument("units", help="The units the composite score represents, i.e., 'reads/sec'.") | ||
623 | 35 | subparser.add_argument("direction", help="'asc' if a lower score is better, 'desc' if a higher score is better.") | ||
624 | 36 | return Benchmark.set_composite_score | ||
625 | 37 | 0 | ||
626 | === added file 'hooks/charmhelpers/cli/commands.py' | |||
627 | --- hooks/charmhelpers/cli/commands.py 1970-01-01 00:00:00 +0000 | |||
628 | +++ hooks/charmhelpers/cli/commands.py 2016-02-18 14:28:13 +0000 | |||
629 | @@ -0,0 +1,32 @@ | |||
630 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
631 | 2 | # | ||
632 | 3 | # This file is part of charm-helpers. | ||
633 | 4 | # | ||
634 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
635 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
636 | 7 | # published by the Free Software Foundation. | ||
637 | 8 | # | ||
638 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
639 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
640 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
641 | 12 | # GNU Lesser General Public License for more details. | ||
642 | 13 | # | ||
643 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
644 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
645 | 16 | |||
646 | 17 | """ | ||
647 | 18 | This module loads sub-modules into the python runtime so they can be | ||
648 | 19 | discovered via the inspect module. In order to prevent flake8 from (rightfully) | ||
649 | 20 | telling us these are unused modules, throw a ' # noqa' at the end of each import | ||
650 | 21 | so that the warning is suppressed. | ||
651 | 22 | """ | ||
652 | 23 | |||
653 | 24 | from . import CommandLine # noqa | ||
654 | 25 | |||
655 | 26 | """ | ||
656 | 27 | Import the sub-modules which have decorated subcommands to register with chlp. | ||
657 | 28 | """ | ||
658 | 29 | from . import host # noqa | ||
659 | 30 | from . import benchmark # noqa | ||
660 | 31 | from . import unitdata # noqa | ||
661 | 32 | from . import hookenv # noqa | ||
662 | 0 | 33 | ||
663 | === removed file 'hooks/charmhelpers/cli/commands.py' | |||
664 | --- hooks/charmhelpers/cli/commands.py 2015-08-18 17:34:36 +0000 | |||
665 | +++ hooks/charmhelpers/cli/commands.py 1970-01-01 00:00:00 +0000 | |||
666 | @@ -1,32 +0,0 @@ | |||
667 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
668 | 2 | # | ||
669 | 3 | # This file is part of charm-helpers. | ||
670 | 4 | # | ||
671 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
672 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
673 | 7 | # published by the Free Software Foundation. | ||
674 | 8 | # | ||
675 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
676 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
677 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
678 | 12 | # GNU Lesser General Public License for more details. | ||
679 | 13 | # | ||
680 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
681 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
682 | 16 | |||
683 | 17 | """ | ||
684 | 18 | This module loads sub-modules into the python runtime so they can be | ||
685 | 19 | discovered via the inspect module. In order to prevent flake8 from (rightfully) | ||
686 | 20 | telling us these are unused modules, throw a ' # noqa' at the end of each import | ||
687 | 21 | so that the warning is suppressed. | ||
688 | 22 | """ | ||
689 | 23 | |||
690 | 24 | from . import CommandLine # noqa | ||
691 | 25 | |||
692 | 26 | """ | ||
693 | 27 | Import the sub-modules which have decorated subcommands to register with chlp. | ||
694 | 28 | """ | ||
695 | 29 | from . import host # noqa | ||
696 | 30 | from . import benchmark # noqa | ||
697 | 31 | from . import unitdata # noqa | ||
698 | 32 | from . import hookenv # noqa | ||
699 | 33 | 0 | ||
700 | === added file 'hooks/charmhelpers/cli/hookenv.py' | |||
701 | --- hooks/charmhelpers/cli/hookenv.py 1970-01-01 00:00:00 +0000 | |||
702 | +++ hooks/charmhelpers/cli/hookenv.py 2016-02-18 14:28:13 +0000 | |||
703 | @@ -0,0 +1,23 @@ | |||
704 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
705 | 2 | # | ||
706 | 3 | # This file is part of charm-helpers. | ||
707 | 4 | # | ||
708 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
709 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
710 | 7 | # published by the Free Software Foundation. | ||
711 | 8 | # | ||
712 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
713 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
714 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
715 | 12 | # GNU Lesser General Public License for more details. | ||
716 | 13 | # | ||
717 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
718 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
719 | 16 | |||
720 | 17 | from . import cmdline | ||
721 | 18 | from charmhelpers.core import hookenv | ||
722 | 19 | |||
723 | 20 | |||
724 | 21 | cmdline.subcommand('relation-id')(hookenv.relation_id._wrapped) | ||
725 | 22 | cmdline.subcommand('service-name')(hookenv.service_name) | ||
726 | 23 | cmdline.subcommand('remote-service-name')(hookenv.remote_service_name._wrapped) | ||
727 | 0 | 24 | ||
728 | === removed file 'hooks/charmhelpers/cli/hookenv.py' | |||
729 | --- hooks/charmhelpers/cli/hookenv.py 2015-08-18 17:34:36 +0000 | |||
730 | +++ hooks/charmhelpers/cli/hookenv.py 1970-01-01 00:00:00 +0000 | |||
731 | @@ -1,23 +0,0 @@ | |||
732 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
733 | 2 | # | ||
734 | 3 | # This file is part of charm-helpers. | ||
735 | 4 | # | ||
736 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
737 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
738 | 7 | # published by the Free Software Foundation. | ||
739 | 8 | # | ||
740 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
741 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
742 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
743 | 12 | # GNU Lesser General Public License for more details. | ||
744 | 13 | # | ||
745 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
746 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
747 | 16 | |||
748 | 17 | from . import cmdline | ||
749 | 18 | from charmhelpers.core import hookenv | ||
750 | 19 | |||
751 | 20 | |||
752 | 21 | cmdline.subcommand('relation-id')(hookenv.relation_id._wrapped) | ||
753 | 22 | cmdline.subcommand('service-name')(hookenv.service_name) | ||
754 | 23 | cmdline.subcommand('remote-service-name')(hookenv.remote_service_name._wrapped) | ||
755 | 24 | 0 | ||
756 | === added file 'hooks/charmhelpers/cli/host.py' | |||
757 | --- hooks/charmhelpers/cli/host.py 1970-01-01 00:00:00 +0000 | |||
758 | +++ hooks/charmhelpers/cli/host.py 2016-02-18 14:28:13 +0000 | |||
759 | @@ -0,0 +1,31 @@ | |||
760 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
761 | 2 | # | ||
762 | 3 | # This file is part of charm-helpers. | ||
763 | 4 | # | ||
764 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
765 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
766 | 7 | # published by the Free Software Foundation. | ||
767 | 8 | # | ||
768 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
769 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
770 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
771 | 12 | # GNU Lesser General Public License for more details. | ||
772 | 13 | # | ||
773 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
774 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
775 | 16 | |||
776 | 17 | from . import cmdline | ||
777 | 18 | from charmhelpers.core import host | ||
778 | 19 | |||
779 | 20 | |||
780 | 21 | @cmdline.subcommand() | ||
781 | 22 | def mounts(): | ||
782 | 23 | "List mounts" | ||
783 | 24 | return host.mounts() | ||
784 | 25 | |||
785 | 26 | |||
786 | 27 | @cmdline.subcommand_builder('service', description="Control system services") | ||
787 | 28 | def service(subparser): | ||
788 | 29 | subparser.add_argument("action", help="The action to perform (start, stop, etc...)") | ||
789 | 30 | subparser.add_argument("service_name", help="Name of the service to control") | ||
790 | 31 | return host.service | ||
791 | 0 | 32 | ||
792 | === removed file 'hooks/charmhelpers/cli/host.py' | |||
793 | --- hooks/charmhelpers/cli/host.py 2015-07-31 13:11:17 +0000 | |||
794 | +++ hooks/charmhelpers/cli/host.py 1970-01-01 00:00:00 +0000 | |||
795 | @@ -1,31 +0,0 @@ | |||
796 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
797 | 2 | # | ||
798 | 3 | # This file is part of charm-helpers. | ||
799 | 4 | # | ||
800 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
801 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
802 | 7 | # published by the Free Software Foundation. | ||
803 | 8 | # | ||
804 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
805 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
806 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
807 | 12 | # GNU Lesser General Public License for more details. | ||
808 | 13 | # | ||
809 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
810 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
811 | 16 | |||
812 | 17 | from . import cmdline | ||
813 | 18 | from charmhelpers.core import host | ||
814 | 19 | |||
815 | 20 | |||
816 | 21 | @cmdline.subcommand() | ||
817 | 22 | def mounts(): | ||
818 | 23 | "List mounts" | ||
819 | 24 | return host.mounts() | ||
820 | 25 | |||
821 | 26 | |||
822 | 27 | @cmdline.subcommand_builder('service', description="Control system services") | ||
823 | 28 | def service(subparser): | ||
824 | 29 | subparser.add_argument("action", help="The action to perform (start, stop, etc...)") | ||
825 | 30 | subparser.add_argument("service_name", help="Name of the service to control") | ||
826 | 31 | return host.service | ||
827 | 32 | 0 | ||
828 | === added file 'hooks/charmhelpers/cli/unitdata.py' | |||
829 | --- hooks/charmhelpers/cli/unitdata.py 1970-01-01 00:00:00 +0000 | |||
830 | +++ hooks/charmhelpers/cli/unitdata.py 2016-02-18 14:28:13 +0000 | |||
831 | @@ -0,0 +1,39 @@ | |||
832 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
833 | 2 | # | ||
834 | 3 | # This file is part of charm-helpers. | ||
835 | 4 | # | ||
836 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
837 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
838 | 7 | # published by the Free Software Foundation. | ||
839 | 8 | # | ||
840 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
841 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
842 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
843 | 12 | # GNU Lesser General Public License for more details. | ||
844 | 13 | # | ||
845 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
846 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
847 | 16 | |||
848 | 17 | from . import cmdline | ||
849 | 18 | from charmhelpers.core import unitdata | ||
850 | 19 | |||
851 | 20 | |||
852 | 21 | @cmdline.subcommand_builder('unitdata', description="Store and retrieve data") | ||
853 | 22 | def unitdata_cmd(subparser): | ||
854 | 23 | nested = subparser.add_subparsers() | ||
855 | 24 | get_cmd = nested.add_parser('get', help='Retrieve data') | ||
856 | 25 | get_cmd.add_argument('key', help='Key to retrieve the value of') | ||
857 | 26 | get_cmd.set_defaults(action='get', value=None) | ||
858 | 27 | set_cmd = nested.add_parser('set', help='Store data') | ||
859 | 28 | set_cmd.add_argument('key', help='Key to set') | ||
860 | 29 | set_cmd.add_argument('value', help='Value to store') | ||
861 | 30 | set_cmd.set_defaults(action='set') | ||
862 | 31 | |||
863 | 32 | def _unitdata_cmd(action, key, value): | ||
864 | 33 | if action == 'get': | ||
865 | 34 | return unitdata.kv().get(key) | ||
866 | 35 | elif action == 'set': | ||
867 | 36 | unitdata.kv().set(key, value) | ||
868 | 37 | unitdata.kv().flush() | ||
869 | 38 | return '' | ||
870 | 39 | return _unitdata_cmd | ||
871 | 0 | 40 | ||
872 | === removed file 'hooks/charmhelpers/cli/unitdata.py' | |||
873 | --- hooks/charmhelpers/cli/unitdata.py 2015-07-31 13:11:17 +0000 | |||
874 | +++ hooks/charmhelpers/cli/unitdata.py 1970-01-01 00:00:00 +0000 | |||
875 | @@ -1,39 +0,0 @@ | |||
876 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
877 | 2 | # | ||
878 | 3 | # This file is part of charm-helpers. | ||
879 | 4 | # | ||
880 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
881 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
882 | 7 | # published by the Free Software Foundation. | ||
883 | 8 | # | ||
884 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
885 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
886 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
887 | 12 | # GNU Lesser General Public License for more details. | ||
888 | 13 | # | ||
889 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
890 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
891 | 16 | |||
892 | 17 | from . import cmdline | ||
893 | 18 | from charmhelpers.core import unitdata | ||
894 | 19 | |||
895 | 20 | |||
896 | 21 | @cmdline.subcommand_builder('unitdata', description="Store and retrieve data") | ||
897 | 22 | def unitdata_cmd(subparser): | ||
898 | 23 | nested = subparser.add_subparsers() | ||
899 | 24 | get_cmd = nested.add_parser('get', help='Retrieve data') | ||
900 | 25 | get_cmd.add_argument('key', help='Key to retrieve the value of') | ||
901 | 26 | get_cmd.set_defaults(action='get', value=None) | ||
902 | 27 | set_cmd = nested.add_parser('set', help='Store data') | ||
903 | 28 | set_cmd.add_argument('key', help='Key to set') | ||
904 | 29 | set_cmd.add_argument('value', help='Value to store') | ||
905 | 30 | set_cmd.set_defaults(action='set') | ||
906 | 31 | |||
907 | 32 | def _unitdata_cmd(action, key, value): | ||
908 | 33 | if action == 'get': | ||
909 | 34 | return unitdata.kv().get(key) | ||
910 | 35 | elif action == 'set': | ||
911 | 36 | unitdata.kv().set(key, value) | ||
912 | 37 | unitdata.kv().flush() | ||
913 | 38 | return '' | ||
914 | 39 | return _unitdata_cmd | ||
915 | 40 | 0 | ||
916 | === modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py' | |||
917 | --- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2015-04-19 09:02:03 +0000 | |||
918 | +++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2016-02-18 14:28:13 +0000 | |||
919 | @@ -148,6 +148,13 @@ | |||
920 | 148 | self.description = description | 148 | self.description = description |
921 | 149 | self.check_cmd = self._locate_cmd(check_cmd) | 149 | self.check_cmd = self._locate_cmd(check_cmd) |
922 | 150 | 150 | ||
923 | 151 | def _get_check_filename(self): | ||
924 | 152 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) | ||
925 | 153 | |||
926 | 154 | def _get_service_filename(self, hostname): | ||
927 | 155 | return os.path.join(NRPE.nagios_exportdir, | ||
928 | 156 | 'service__{}_{}.cfg'.format(hostname, self.command)) | ||
929 | 157 | |||
930 | 151 | def _locate_cmd(self, check_cmd): | 158 | def _locate_cmd(self, check_cmd): |
931 | 152 | search_path = ( | 159 | search_path = ( |
932 | 153 | '/usr/lib/nagios/plugins', | 160 | '/usr/lib/nagios/plugins', |
933 | @@ -163,9 +170,21 @@ | |||
934 | 163 | log('Check command not found: {}'.format(parts[0])) | 170 | log('Check command not found: {}'.format(parts[0])) |
935 | 164 | return '' | 171 | return '' |
936 | 165 | 172 | ||
937 | 173 | def _remove_service_files(self): | ||
938 | 174 | if not os.path.exists(NRPE.nagios_exportdir): | ||
939 | 175 | return | ||
940 | 176 | for f in os.listdir(NRPE.nagios_exportdir): | ||
941 | 177 | if f.endswith('_{}.cfg'.format(self.command)): | ||
942 | 178 | os.remove(os.path.join(NRPE.nagios_exportdir, f)) | ||
943 | 179 | |||
944 | 180 | def remove(self, hostname): | ||
945 | 181 | nrpe_check_file = self._get_check_filename() | ||
946 | 182 | if os.path.exists(nrpe_check_file): | ||
947 | 183 | os.remove(nrpe_check_file) | ||
948 | 184 | self._remove_service_files() | ||
949 | 185 | |||
950 | 166 | def write(self, nagios_context, hostname, nagios_servicegroups): | 186 | def write(self, nagios_context, hostname, nagios_servicegroups): |
953 | 167 | nrpe_check_file = '/etc/nagios/nrpe.d/{}.cfg'.format( | 187 | nrpe_check_file = self._get_check_filename() |
952 | 168 | self.command) | ||
954 | 169 | with open(nrpe_check_file, 'w') as nrpe_check_config: | 188 | with open(nrpe_check_file, 'w') as nrpe_check_config: |
955 | 170 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | 189 | nrpe_check_config.write("# check {}\n".format(self.shortname)) |
956 | 171 | nrpe_check_config.write("command[{}]={}\n".format( | 190 | nrpe_check_config.write("command[{}]={}\n".format( |
957 | @@ -180,9 +199,7 @@ | |||
958 | 180 | 199 | ||
959 | 181 | def write_service_config(self, nagios_context, hostname, | 200 | def write_service_config(self, nagios_context, hostname, |
960 | 182 | nagios_servicegroups): | 201 | nagios_servicegroups): |
964 | 183 | for f in os.listdir(NRPE.nagios_exportdir): | 202 | self._remove_service_files() |
962 | 184 | if re.search('.*{}.cfg'.format(self.command), f): | ||
963 | 185 | os.remove(os.path.join(NRPE.nagios_exportdir, f)) | ||
965 | 186 | 203 | ||
966 | 187 | templ_vars = { | 204 | templ_vars = { |
967 | 188 | 'nagios_hostname': hostname, | 205 | 'nagios_hostname': hostname, |
968 | @@ -192,8 +209,7 @@ | |||
969 | 192 | 'command': self.command, | 209 | 'command': self.command, |
970 | 193 | } | 210 | } |
971 | 194 | nrpe_service_text = Check.service_template.format(**templ_vars) | 211 | nrpe_service_text = Check.service_template.format(**templ_vars) |
974 | 195 | nrpe_service_file = '{}/service__{}_{}.cfg'.format( | 212 | nrpe_service_file = self._get_service_filename(hostname) |
973 | 196 | NRPE.nagios_exportdir, hostname, self.command) | ||
975 | 197 | with open(nrpe_service_file, 'w') as nrpe_service_config: | 213 | with open(nrpe_service_file, 'w') as nrpe_service_config: |
976 | 198 | nrpe_service_config.write(str(nrpe_service_text)) | 214 | nrpe_service_config.write(str(nrpe_service_text)) |
977 | 199 | 215 | ||
978 | @@ -218,12 +234,32 @@ | |||
979 | 218 | if hostname: | 234 | if hostname: |
980 | 219 | self.hostname = hostname | 235 | self.hostname = hostname |
981 | 220 | else: | 236 | else: |
983 | 221 | self.hostname = "{}-{}".format(self.nagios_context, self.unit_name) | 237 | nagios_hostname = get_nagios_hostname() |
984 | 238 | if nagios_hostname: | ||
985 | 239 | self.hostname = nagios_hostname | ||
986 | 240 | else: | ||
987 | 241 | self.hostname = "{}-{}".format(self.nagios_context, self.unit_name) | ||
988 | 222 | self.checks = [] | 242 | self.checks = [] |
989 | 223 | 243 | ||
990 | 224 | def add_check(self, *args, **kwargs): | 244 | def add_check(self, *args, **kwargs): |
991 | 225 | self.checks.append(Check(*args, **kwargs)) | 245 | self.checks.append(Check(*args, **kwargs)) |
992 | 226 | 246 | ||
993 | 247 | def remove_check(self, *args, **kwargs): | ||
994 | 248 | if kwargs.get('shortname') is None: | ||
995 | 249 | raise ValueError('shortname of check must be specified') | ||
996 | 250 | |||
997 | 251 | # Use sensible defaults if they're not specified - these are not | ||
998 | 252 | # actually used during removal, but they're required for constructing | ||
999 | 253 | # the Check object; check_disk is chosen because it's part of the | ||
1000 | 254 | # nagios-plugins-basic package. | ||
1001 | 255 | if kwargs.get('check_cmd') is None: | ||
1002 | 256 | kwargs['check_cmd'] = 'check_disk' | ||
1003 | 257 | if kwargs.get('description') is None: | ||
1004 | 258 | kwargs['description'] = '' | ||
1005 | 259 | |||
1006 | 260 | check = Check(*args, **kwargs) | ||
1007 | 261 | check.remove(self.hostname) | ||
1008 | 262 | |||
1009 | 227 | def write(self): | 263 | def write(self): |
1010 | 228 | try: | 264 | try: |
1011 | 229 | nagios_uid = pwd.getpwnam('nagios').pw_uid | 265 | nagios_uid = pwd.getpwnam('nagios').pw_uid |
1012 | @@ -260,7 +296,7 @@ | |||
1013 | 260 | :param str relation_name: Name of relation nrpe sub joined to | 296 | :param str relation_name: Name of relation nrpe sub joined to |
1014 | 261 | """ | 297 | """ |
1015 | 262 | for rel in relations_of_type(relation_name): | 298 | for rel in relations_of_type(relation_name): |
1017 | 263 | if 'nagios_hostname' in rel: | 299 | if 'nagios_host_context' in rel: |
1018 | 264 | return rel['nagios_host_context'] | 300 | return rel['nagios_host_context'] |
1019 | 265 | 301 | ||
1020 | 266 | 302 | ||
1021 | @@ -301,11 +337,13 @@ | |||
1022 | 301 | upstart_init = '/etc/init/%s.conf' % svc | 337 | upstart_init = '/etc/init/%s.conf' % svc |
1023 | 302 | sysv_init = '/etc/init.d/%s' % svc | 338 | sysv_init = '/etc/init.d/%s' % svc |
1024 | 303 | if os.path.exists(upstart_init): | 339 | if os.path.exists(upstart_init): |
1030 | 304 | nrpe.add_check( | 340 | # Don't add a check for these services from neutron-gateway |
1031 | 305 | shortname=svc, | 341 | if svc not in ['ext-port', 'os-charm-phy-nic-mtu']: |
1032 | 306 | description='process check {%s}' % unit_name, | 342 | nrpe.add_check( |
1033 | 307 | check_cmd='check_upstart_job %s' % svc | 343 | shortname=svc, |
1034 | 308 | ) | 344 | description='process check {%s}' % unit_name, |
1035 | 345 | check_cmd='check_upstart_job %s' % svc | ||
1036 | 346 | ) | ||
1037 | 309 | elif os.path.exists(sysv_init): | 347 | elif os.path.exists(sysv_init): |
1038 | 310 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc | 348 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc |
1039 | 311 | cron_file = ('*/5 * * * * root ' | 349 | cron_file = ('*/5 * * * * root ' |
1040 | 312 | 350 | ||
1041 | === modified file 'hooks/charmhelpers/contrib/network/ip.py' | |||
1042 | --- hooks/charmhelpers/contrib/network/ip.py 2015-09-03 09:42:35 +0000 | |||
1043 | +++ hooks/charmhelpers/contrib/network/ip.py 2016-02-18 14:28:13 +0000 | |||
1044 | @@ -23,7 +23,7 @@ | |||
1045 | 23 | from functools import partial | 23 | from functools import partial |
1046 | 24 | 24 | ||
1047 | 25 | from charmhelpers.core.hookenv import unit_get | 25 | from charmhelpers.core.hookenv import unit_get |
1049 | 26 | from charmhelpers.fetch import apt_install | 26 | from charmhelpers.fetch import apt_install, apt_update |
1050 | 27 | from charmhelpers.core.hookenv import ( | 27 | from charmhelpers.core.hookenv import ( |
1051 | 28 | log, | 28 | log, |
1052 | 29 | WARNING, | 29 | WARNING, |
1053 | @@ -32,13 +32,15 @@ | |||
1054 | 32 | try: | 32 | try: |
1055 | 33 | import netifaces | 33 | import netifaces |
1056 | 34 | except ImportError: | 34 | except ImportError: |
1058 | 35 | apt_install('python-netifaces') | 35 | apt_update(fatal=True) |
1059 | 36 | apt_install('python-netifaces', fatal=True) | ||
1060 | 36 | import netifaces | 37 | import netifaces |
1061 | 37 | 38 | ||
1062 | 38 | try: | 39 | try: |
1063 | 39 | import netaddr | 40 | import netaddr |
1064 | 40 | except ImportError: | 41 | except ImportError: |
1066 | 41 | apt_install('python-netaddr') | 42 | apt_update(fatal=True) |
1067 | 43 | apt_install('python-netaddr', fatal=True) | ||
1068 | 42 | import netaddr | 44 | import netaddr |
1069 | 43 | 45 | ||
1070 | 44 | 46 | ||
1071 | @@ -51,7 +53,7 @@ | |||
1072 | 51 | 53 | ||
1073 | 52 | 54 | ||
1074 | 53 | def no_ip_found_error_out(network): | 55 | def no_ip_found_error_out(network): |
1076 | 54 | errmsg = ("No IP address found in network: %s" % network) | 56 | errmsg = ("No IP address found in network(s): %s" % network) |
1077 | 55 | raise ValueError(errmsg) | 57 | raise ValueError(errmsg) |
1078 | 56 | 58 | ||
1079 | 57 | 59 | ||
1080 | @@ -59,7 +61,7 @@ | |||
1081 | 59 | """Get an IPv4 or IPv6 address within the network from the host. | 61 | """Get an IPv4 or IPv6 address within the network from the host. |
1082 | 60 | 62 | ||
1083 | 61 | :param network (str): CIDR presentation format. For example, | 63 | :param network (str): CIDR presentation format. For example, |
1085 | 62 | '192.168.1.0/24'. | 64 | '192.168.1.0/24'. Supports multiple networks as a space-delimited list. |
1086 | 63 | :param fallback (str): If no address is found, return fallback. | 65 | :param fallback (str): If no address is found, return fallback. |
1087 | 64 | :param fatal (boolean): If no address is found, fallback is not | 66 | :param fatal (boolean): If no address is found, fallback is not |
1088 | 65 | set and fatal is True then exit(1). | 67 | set and fatal is True then exit(1). |
1089 | @@ -73,24 +75,26 @@ | |||
1090 | 73 | else: | 75 | else: |
1091 | 74 | return None | 76 | return None |
1092 | 75 | 77 | ||
1103 | 76 | _validate_cidr(network) | 78 | networks = network.split() or [network] |
1104 | 77 | network = netaddr.IPNetwork(network) | 79 | for network in networks: |
1105 | 78 | for iface in netifaces.interfaces(): | 80 | _validate_cidr(network) |
1106 | 79 | addresses = netifaces.ifaddresses(iface) | 81 | network = netaddr.IPNetwork(network) |
1107 | 80 | if network.version == 4 and netifaces.AF_INET in addresses: | 82 | for iface in netifaces.interfaces(): |
1108 | 81 | addr = addresses[netifaces.AF_INET][0]['addr'] | 83 | addresses = netifaces.ifaddresses(iface) |
1109 | 82 | netmask = addresses[netifaces.AF_INET][0]['netmask'] | 84 | if network.version == 4 and netifaces.AF_INET in addresses: |
1110 | 83 | cidr = netaddr.IPNetwork("%s/%s" % (addr, netmask)) | 85 | addr = addresses[netifaces.AF_INET][0]['addr'] |
1111 | 84 | if cidr in network: | 86 | netmask = addresses[netifaces.AF_INET][0]['netmask'] |
1112 | 85 | return str(cidr.ip) | 87 | cidr = netaddr.IPNetwork("%s/%s" % (addr, netmask)) |
1113 | 88 | if cidr in network: | ||
1114 | 89 | return str(cidr.ip) | ||
1115 | 86 | 90 | ||
1123 | 87 | if network.version == 6 and netifaces.AF_INET6 in addresses: | 91 | if network.version == 6 and netifaces.AF_INET6 in addresses: |
1124 | 88 | for addr in addresses[netifaces.AF_INET6]: | 92 | for addr in addresses[netifaces.AF_INET6]: |
1125 | 89 | if not addr['addr'].startswith('fe80'): | 93 | if not addr['addr'].startswith('fe80'): |
1126 | 90 | cidr = netaddr.IPNetwork("%s/%s" % (addr['addr'], | 94 | cidr = netaddr.IPNetwork("%s/%s" % (addr['addr'], |
1127 | 91 | addr['netmask'])) | 95 | addr['netmask'])) |
1128 | 92 | if cidr in network: | 96 | if cidr in network: |
1129 | 93 | return str(cidr.ip) | 97 | return str(cidr.ip) |
1130 | 94 | 98 | ||
1131 | 95 | if fallback is not None: | 99 | if fallback is not None: |
1132 | 96 | return fallback | 100 | return fallback |
1133 | 97 | 101 | ||
1134 | === modified file 'hooks/charmhelpers/contrib/openstack/amulet/deployment.py' | |||
1135 | --- hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2015-08-18 17:34:36 +0000 | |||
1136 | +++ hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2016-02-18 14:28:13 +0000 | |||
1137 | @@ -14,12 +14,18 @@ | |||
1138 | 14 | # You should have received a copy of the GNU Lesser General Public License | 14 | # You should have received a copy of the GNU Lesser General Public License |
1139 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1140 | 16 | 16 | ||
1141 | 17 | import logging | ||
1142 | 18 | import re | ||
1143 | 19 | import sys | ||
1144 | 17 | import six | 20 | import six |
1145 | 18 | from collections import OrderedDict | 21 | from collections import OrderedDict |
1146 | 19 | from charmhelpers.contrib.amulet.deployment import ( | 22 | from charmhelpers.contrib.amulet.deployment import ( |
1147 | 20 | AmuletDeployment | 23 | AmuletDeployment |
1148 | 21 | ) | 24 | ) |
1149 | 22 | 25 | ||
1150 | 26 | DEBUG = logging.DEBUG | ||
1151 | 27 | ERROR = logging.ERROR | ||
1152 | 28 | |||
1153 | 23 | 29 | ||
1154 | 24 | class OpenStackAmuletDeployment(AmuletDeployment): | 30 | class OpenStackAmuletDeployment(AmuletDeployment): |
1155 | 25 | """OpenStack amulet deployment. | 31 | """OpenStack amulet deployment. |
1156 | @@ -28,9 +34,12 @@ | |||
1157 | 28 | that is specifically for use by OpenStack charms. | 34 | that is specifically for use by OpenStack charms. |
1158 | 29 | """ | 35 | """ |
1159 | 30 | 36 | ||
1161 | 31 | def __init__(self, series=None, openstack=None, source=None, stable=True): | 37 | def __init__(self, series=None, openstack=None, source=None, |
1162 | 38 | stable=True, log_level=DEBUG): | ||
1163 | 32 | """Initialize the deployment environment.""" | 39 | """Initialize the deployment environment.""" |
1164 | 33 | super(OpenStackAmuletDeployment, self).__init__(series) | 40 | super(OpenStackAmuletDeployment, self).__init__(series) |
1165 | 41 | self.log = self.get_logger(level=log_level) | ||
1166 | 42 | self.log.info('OpenStackAmuletDeployment: init') | ||
1167 | 34 | self.openstack = openstack | 43 | self.openstack = openstack |
1168 | 35 | self.source = source | 44 | self.source = source |
1169 | 36 | self.stable = stable | 45 | self.stable = stable |
1170 | @@ -38,26 +47,55 @@ | |||
1171 | 38 | # out. | 47 | # out. |
1172 | 39 | self.current_next = "trusty" | 48 | self.current_next = "trusty" |
1173 | 40 | 49 | ||
1174 | 50 | def get_logger(self, name="deployment-logger", level=logging.DEBUG): | ||
1175 | 51 | """Get a logger object that will log to stdout.""" | ||
1176 | 52 | log = logging | ||
1177 | 53 | logger = log.getLogger(name) | ||
1178 | 54 | fmt = log.Formatter("%(asctime)s %(funcName)s " | ||
1179 | 55 | "%(levelname)s: %(message)s") | ||
1180 | 56 | |||
1181 | 57 | handler = log.StreamHandler(stream=sys.stdout) | ||
1182 | 58 | handler.setLevel(level) | ||
1183 | 59 | handler.setFormatter(fmt) | ||
1184 | 60 | |||
1185 | 61 | logger.addHandler(handler) | ||
1186 | 62 | logger.setLevel(level) | ||
1187 | 63 | |||
1188 | 64 | return logger | ||
1189 | 65 | |||
1190 | 41 | def _determine_branch_locations(self, other_services): | 66 | def _determine_branch_locations(self, other_services): |
1191 | 42 | """Determine the branch locations for the other services. | 67 | """Determine the branch locations for the other services. |
1192 | 43 | 68 | ||
1193 | 44 | Determine if the local branch being tested is derived from its | 69 | Determine if the local branch being tested is derived from its |
1194 | 45 | stable or next (dev) branch, and based on this, use the corresonding | 70 | stable or next (dev) branch, and based on this, use the corresonding |
1195 | 46 | stable or next branches for the other_services.""" | 71 | stable or next branches for the other_services.""" |
1196 | 72 | |||
1197 | 73 | self.log.info('OpenStackAmuletDeployment: determine branch locations') | ||
1198 | 74 | |||
1199 | 75 | # Charms outside the lp:~openstack-charmers namespace | ||
1200 | 47 | base_charms = ['mysql', 'mongodb', 'nrpe'] | 76 | base_charms = ['mysql', 'mongodb', 'nrpe'] |
1201 | 48 | 77 | ||
1202 | 78 | # Force these charms to current series even when using an older series. | ||
1203 | 79 | # ie. Use trusty/nrpe even when series is precise, as the P charm | ||
1204 | 80 | # does not possess the necessary external master config and hooks. | ||
1205 | 81 | force_series_current = ['nrpe'] | ||
1206 | 82 | |||
1207 | 49 | if self.series in ['precise', 'trusty']: | 83 | if self.series in ['precise', 'trusty']: |
1208 | 50 | base_series = self.series | 84 | base_series = self.series |
1209 | 51 | else: | 85 | else: |
1210 | 52 | base_series = self.current_next | 86 | base_series = self.current_next |
1211 | 53 | 87 | ||
1214 | 54 | if self.stable: | 88 | for svc in other_services: |
1215 | 55 | for svc in other_services: | 89 | if svc['name'] in force_series_current: |
1216 | 90 | base_series = self.current_next | ||
1217 | 91 | # If a location has been explicitly set, use it | ||
1218 | 92 | if svc.get('location'): | ||
1219 | 93 | continue | ||
1220 | 94 | if self.stable: | ||
1221 | 56 | temp = 'lp:charms/{}/{}' | 95 | temp = 'lp:charms/{}/{}' |
1222 | 57 | svc['location'] = temp.format(base_series, | 96 | svc['location'] = temp.format(base_series, |
1223 | 58 | svc['name']) | 97 | svc['name']) |
1226 | 59 | else: | 98 | else: |
1225 | 60 | for svc in other_services: | ||
1227 | 61 | if svc['name'] in base_charms: | 99 | if svc['name'] in base_charms: |
1228 | 62 | temp = 'lp:charms/{}/{}' | 100 | temp = 'lp:charms/{}/{}' |
1229 | 63 | svc['location'] = temp.format(base_series, | 101 | svc['location'] = temp.format(base_series, |
1230 | @@ -66,10 +104,13 @@ | |||
1231 | 66 | temp = 'lp:~openstack-charmers/charms/{}/{}/next' | 104 | temp = 'lp:~openstack-charmers/charms/{}/{}/next' |
1232 | 67 | svc['location'] = temp.format(self.current_next, | 105 | svc['location'] = temp.format(self.current_next, |
1233 | 68 | svc['name']) | 106 | svc['name']) |
1234 | 107 | |||
1235 | 69 | return other_services | 108 | return other_services |
1236 | 70 | 109 | ||
1237 | 71 | def _add_services(self, this_service, other_services): | 110 | def _add_services(self, this_service, other_services): |
1238 | 72 | """Add services to the deployment and set openstack-origin/source.""" | 111 | """Add services to the deployment and set openstack-origin/source.""" |
1239 | 112 | self.log.info('OpenStackAmuletDeployment: adding services') | ||
1240 | 113 | |||
1241 | 73 | other_services = self._determine_branch_locations(other_services) | 114 | other_services = self._determine_branch_locations(other_services) |
1242 | 74 | 115 | ||
1243 | 75 | super(OpenStackAmuletDeployment, self)._add_services(this_service, | 116 | super(OpenStackAmuletDeployment, self)._add_services(this_service, |
1244 | @@ -77,29 +118,102 @@ | |||
1245 | 77 | 118 | ||
1246 | 78 | services = other_services | 119 | services = other_services |
1247 | 79 | services.append(this_service) | 120 | services.append(this_service) |
1248 | 121 | |||
1249 | 122 | # Charms which should use the source config option | ||
1250 | 80 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', | 123 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', |
1251 | 81 | 'ceph-osd', 'ceph-radosgw'] | 124 | 'ceph-osd', 'ceph-radosgw'] |
1255 | 82 | # Most OpenStack subordinate charms do not expose an origin option | 125 | |
1256 | 83 | # as that is controlled by the principle. | 126 | # Charms which can not use openstack-origin, ie. many subordinates |
1257 | 84 | ignore = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe'] | 127 | no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', |
1258 | 128 | 'openvswitch-odl', 'neutron-api-odl', 'odl-controller'] | ||
1259 | 85 | 129 | ||
1260 | 86 | if self.openstack: | 130 | if self.openstack: |
1261 | 87 | for svc in services: | 131 | for svc in services: |
1263 | 88 | if svc['name'] not in use_source + ignore: | 132 | if svc['name'] not in use_source + no_origin: |
1264 | 89 | config = {'openstack-origin': self.openstack} | 133 | config = {'openstack-origin': self.openstack} |
1265 | 90 | self.d.configure(svc['name'], config) | 134 | self.d.configure(svc['name'], config) |
1266 | 91 | 135 | ||
1267 | 92 | if self.source: | 136 | if self.source: |
1268 | 93 | for svc in services: | 137 | for svc in services: |
1270 | 94 | if svc['name'] in use_source and svc['name'] not in ignore: | 138 | if svc['name'] in use_source and svc['name'] not in no_origin: |
1271 | 95 | config = {'source': self.source} | 139 | config = {'source': self.source} |
1272 | 96 | self.d.configure(svc['name'], config) | 140 | self.d.configure(svc['name'], config) |
1273 | 97 | 141 | ||
1274 | 98 | def _configure_services(self, configs): | 142 | def _configure_services(self, configs): |
1275 | 99 | """Configure all of the services.""" | 143 | """Configure all of the services.""" |
1276 | 144 | self.log.info('OpenStackAmuletDeployment: configure services') | ||
1277 | 100 | for service, config in six.iteritems(configs): | 145 | for service, config in six.iteritems(configs): |
1278 | 101 | self.d.configure(service, config) | 146 | self.d.configure(service, config) |
1279 | 102 | 147 | ||
1280 | 148 | def _auto_wait_for_status(self, message=None, exclude_services=None, | ||
1281 | 149 | include_only=None, timeout=1800): | ||
1282 | 150 | """Wait for all units to have a specific extended status, except | ||
1283 | 151 | for any defined as excluded. Unless specified via message, any | ||
1284 | 152 | status containing any case of 'ready' will be considered a match. | ||
1285 | 153 | |||
1286 | 154 | Examples of message usage: | ||
1287 | 155 | |||
1288 | 156 | Wait for all unit status to CONTAIN any case of 'ready' or 'ok': | ||
1289 | 157 | message = re.compile('.*ready.*|.*ok.*', re.IGNORECASE) | ||
1290 | 158 | |||
1291 | 159 | Wait for all units to reach this status (exact match): | ||
1292 | 160 | message = re.compile('^Unit is ready and clustered$') | ||
1293 | 161 | |||
1294 | 162 | Wait for all units to reach any one of these (exact match): | ||
1295 | 163 | message = re.compile('Unit is ready|OK|Ready') | ||
1296 | 164 | |||
1297 | 165 | Wait for at least one unit to reach this status (exact match): | ||
1298 | 166 | message = {'ready'} | ||
1299 | 167 | |||
1300 | 168 | See Amulet's sentry.wait_for_messages() for message usage detail. | ||
1301 | 169 | https://github.com/juju/amulet/blob/master/amulet/sentry.py | ||
1302 | 170 | |||
1303 | 171 | :param message: Expected status match | ||
1304 | 172 | :param exclude_services: List of juju service names to ignore, | ||
1305 | 173 | not to be used in conjuction with include_only. | ||
1306 | 174 | :param include_only: List of juju service names to exclusively check, | ||
1307 | 175 | not to be used in conjuction with exclude_services. | ||
1308 | 176 | :param timeout: Maximum time in seconds to wait for status match | ||
1309 | 177 | :returns: None. Raises if timeout is hit. | ||
1310 | 178 | """ | ||
1311 | 179 | self.log.info('Waiting for extended status on units...') | ||
1312 | 180 | |||
1313 | 181 | all_services = self.d.services.keys() | ||
1314 | 182 | |||
1315 | 183 | if exclude_services and include_only: | ||
1316 | 184 | raise ValueError('exclude_services can not be used ' | ||
1317 | 185 | 'with include_only') | ||
1318 | 186 | |||
1319 | 187 | if message: | ||
1320 | 188 | if isinstance(message, re._pattern_type): | ||
1321 | 189 | match = message.pattern | ||
1322 | 190 | else: | ||
1323 | 191 | match = message | ||
1324 | 192 | |||
1325 | 193 | self.log.debug('Custom extended status wait match: ' | ||
1326 | 194 | '{}'.format(match)) | ||
1327 | 195 | else: | ||
1328 | 196 | self.log.debug('Default extended status wait match: contains ' | ||
1329 | 197 | 'READY (case-insensitive)') | ||
1330 | 198 | message = re.compile('.*ready.*', re.IGNORECASE) | ||
1331 | 199 | |||
1332 | 200 | if exclude_services: | ||
1333 | 201 | self.log.debug('Excluding services from extended status match: ' | ||
1334 | 202 | '{}'.format(exclude_services)) | ||
1335 | 203 | else: | ||
1336 | 204 | exclude_services = [] | ||
1337 | 205 | |||
1338 | 206 | if include_only: | ||
1339 | 207 | services = include_only | ||
1340 | 208 | else: | ||
1341 | 209 | services = list(set(all_services) - set(exclude_services)) | ||
1342 | 210 | |||
1343 | 211 | self.log.debug('Waiting up to {}s for extended status on services: ' | ||
1344 | 212 | '{}'.format(timeout, services)) | ||
1345 | 213 | service_messages = {service: message for service in services} | ||
1346 | 214 | self.d.sentry.wait_for_messages(service_messages, timeout=timeout) | ||
1347 | 215 | self.log.info('OK') | ||
1348 | 216 | |||
1349 | 103 | def _get_openstack_release(self): | 217 | def _get_openstack_release(self): |
1350 | 104 | """Get openstack release. | 218 | """Get openstack release. |
1351 | 105 | 219 | ||
1352 | @@ -111,7 +225,8 @@ | |||
1353 | 111 | self.precise_havana, self.precise_icehouse, | 225 | self.precise_havana, self.precise_icehouse, |
1354 | 112 | self.trusty_icehouse, self.trusty_juno, self.utopic_juno, | 226 | self.trusty_icehouse, self.trusty_juno, self.utopic_juno, |
1355 | 113 | self.trusty_kilo, self.vivid_kilo, self.trusty_liberty, | 227 | self.trusty_kilo, self.vivid_kilo, self.trusty_liberty, |
1357 | 114 | self.wily_liberty) = range(12) | 228 | self.wily_liberty, self.trusty_mitaka, |
1358 | 229 | self.xenial_mitaka) = range(14) | ||
1359 | 115 | 230 | ||
1360 | 116 | releases = { | 231 | releases = { |
1361 | 117 | ('precise', None): self.precise_essex, | 232 | ('precise', None): self.precise_essex, |
1362 | @@ -123,9 +238,11 @@ | |||
1363 | 123 | ('trusty', 'cloud:trusty-juno'): self.trusty_juno, | 238 | ('trusty', 'cloud:trusty-juno'): self.trusty_juno, |
1364 | 124 | ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo, | 239 | ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo, |
1365 | 125 | ('trusty', 'cloud:trusty-liberty'): self.trusty_liberty, | 240 | ('trusty', 'cloud:trusty-liberty'): self.trusty_liberty, |
1366 | 241 | ('trusty', 'cloud:trusty-mitaka'): self.trusty_mitaka, | ||
1367 | 126 | ('utopic', None): self.utopic_juno, | 242 | ('utopic', None): self.utopic_juno, |
1368 | 127 | ('vivid', None): self.vivid_kilo, | 243 | ('vivid', None): self.vivid_kilo, |
1370 | 128 | ('wily', None): self.wily_liberty} | 244 | ('wily', None): self.wily_liberty, |
1371 | 245 | ('xenial', None): self.xenial_mitaka} | ||
1372 | 129 | return releases[(self.series, self.openstack)] | 246 | return releases[(self.series, self.openstack)] |
1373 | 130 | 247 | ||
1374 | 131 | def _get_openstack_release_string(self): | 248 | def _get_openstack_release_string(self): |
1375 | @@ -142,6 +259,7 @@ | |||
1376 | 142 | ('utopic', 'juno'), | 259 | ('utopic', 'juno'), |
1377 | 143 | ('vivid', 'kilo'), | 260 | ('vivid', 'kilo'), |
1378 | 144 | ('wily', 'liberty'), | 261 | ('wily', 'liberty'), |
1379 | 262 | ('xenial', 'mitaka'), | ||
1380 | 145 | ]) | 263 | ]) |
1381 | 146 | if self.openstack: | 264 | if self.openstack: |
1382 | 147 | os_origin = self.openstack.split(':')[1] | 265 | os_origin = self.openstack.split(':')[1] |
1383 | 148 | 266 | ||
1384 | === modified file 'hooks/charmhelpers/contrib/openstack/amulet/utils.py' | |||
1385 | --- hooks/charmhelpers/contrib/openstack/amulet/utils.py 2015-07-17 13:24:05 +0000 | |||
1386 | +++ hooks/charmhelpers/contrib/openstack/amulet/utils.py 2016-02-18 14:28:13 +0000 | |||
1387 | @@ -18,6 +18,7 @@ | |||
1388 | 18 | import json | 18 | import json |
1389 | 19 | import logging | 19 | import logging |
1390 | 20 | import os | 20 | import os |
1391 | 21 | import re | ||
1392 | 21 | import six | 22 | import six |
1393 | 22 | import time | 23 | import time |
1394 | 23 | import urllib | 24 | import urllib |
1395 | @@ -27,6 +28,7 @@ | |||
1396 | 27 | import heatclient.v1.client as heat_client | 28 | import heatclient.v1.client as heat_client |
1397 | 28 | import keystoneclient.v2_0 as keystone_client | 29 | import keystoneclient.v2_0 as keystone_client |
1398 | 29 | import novaclient.v1_1.client as nova_client | 30 | import novaclient.v1_1.client as nova_client |
1399 | 31 | import pika | ||
1400 | 30 | import swiftclient | 32 | import swiftclient |
1401 | 31 | 33 | ||
1402 | 32 | from charmhelpers.contrib.amulet.utils import ( | 34 | from charmhelpers.contrib.amulet.utils import ( |
1403 | @@ -602,3 +604,382 @@ | |||
1404 | 602 | self.log.debug('Ceph {} samples (OK): ' | 604 | self.log.debug('Ceph {} samples (OK): ' |
1405 | 603 | '{}'.format(sample_type, samples)) | 605 | '{}'.format(sample_type, samples)) |
1406 | 604 | return None | 606 | return None |
1407 | 607 | |||
1408 | 608 | # rabbitmq/amqp specific helpers: | ||
1409 | 609 | |||
1410 | 610 | def rmq_wait_for_cluster(self, deployment, init_sleep=15, timeout=1200): | ||
1411 | 611 | """Wait for rmq units extended status to show cluster readiness, | ||
1412 | 612 | after an optional initial sleep period. Initial sleep is likely | ||
1413 | 613 | necessary to be effective following a config change, as status | ||
1414 | 614 | message may not instantly update to non-ready.""" | ||
1415 | 615 | |||
1416 | 616 | if init_sleep: | ||
1417 | 617 | time.sleep(init_sleep) | ||
1418 | 618 | |||
1419 | 619 | message = re.compile('^Unit is ready and clustered$') | ||
1420 | 620 | deployment._auto_wait_for_status(message=message, | ||
1421 | 621 | timeout=timeout, | ||
1422 | 622 | include_only=['rabbitmq-server']) | ||
1423 | 623 | |||
1424 | 624 | def add_rmq_test_user(self, sentry_units, | ||
1425 | 625 | username="testuser1", password="changeme"): | ||
1426 | 626 | """Add a test user via the first rmq juju unit, check connection as | ||
1427 | 627 | the new user against all sentry units. | ||
1428 | 628 | |||
1429 | 629 | :param sentry_units: list of sentry unit pointers | ||
1430 | 630 | :param username: amqp user name, default to testuser1 | ||
1431 | 631 | :param password: amqp user password | ||
1432 | 632 | :returns: None if successful. Raise on error. | ||
1433 | 633 | """ | ||
1434 | 634 | self.log.debug('Adding rmq user ({})...'.format(username)) | ||
1435 | 635 | |||
1436 | 636 | # Check that user does not already exist | ||
1437 | 637 | cmd_user_list = 'rabbitmqctl list_users' | ||
1438 | 638 | output, _ = self.run_cmd_unit(sentry_units[0], cmd_user_list) | ||
1439 | 639 | if username in output: | ||
1440 | 640 | self.log.warning('User ({}) already exists, returning ' | ||
1441 | 641 | 'gracefully.'.format(username)) | ||
1442 | 642 | return | ||
1443 | 643 | |||
1444 | 644 | perms = '".*" ".*" ".*"' | ||
1445 | 645 | cmds = ['rabbitmqctl add_user {} {}'.format(username, password), | ||
1446 | 646 | 'rabbitmqctl set_permissions {} {}'.format(username, perms)] | ||
1447 | 647 | |||
1448 | 648 | # Add user via first unit | ||
1449 | 649 | for cmd in cmds: | ||
1450 | 650 | output, _ = self.run_cmd_unit(sentry_units[0], cmd) | ||
1451 | 651 | |||
1452 | 652 | # Check connection against the other sentry_units | ||
1453 | 653 | self.log.debug('Checking user connect against units...') | ||
1454 | 654 | for sentry_unit in sentry_units: | ||
1455 | 655 | connection = self.connect_amqp_by_unit(sentry_unit, ssl=False, | ||
1456 | 656 | username=username, | ||
1457 | 657 | password=password) | ||
1458 | 658 | connection.close() | ||
1459 | 659 | |||
1460 | 660 | def delete_rmq_test_user(self, sentry_units, username="testuser1"): | ||
1461 | 661 | """Delete a rabbitmq user via the first rmq juju unit. | ||
1462 | 662 | |||
1463 | 663 | :param sentry_units: list of sentry unit pointers | ||
1464 | 664 | :param username: amqp user name, default to testuser1 | ||
1465 | 665 | :param password: amqp user password | ||
1466 | 666 | :returns: None if successful or no such user. | ||
1467 | 667 | """ | ||
1468 | 668 | self.log.debug('Deleting rmq user ({})...'.format(username)) | ||
1469 | 669 | |||
1470 | 670 | # Check that the user exists | ||
1471 | 671 | cmd_user_list = 'rabbitmqctl list_users' | ||
1472 | 672 | output, _ = self.run_cmd_unit(sentry_units[0], cmd_user_list) | ||
1473 | 673 | |||
1474 | 674 | if username not in output: | ||
1475 | 675 | self.log.warning('User ({}) does not exist, returning ' | ||
1476 | 676 | 'gracefully.'.format(username)) | ||
1477 | 677 | return | ||
1478 | 678 | |||
1479 | 679 | # Delete the user | ||
1480 | 680 | cmd_user_del = 'rabbitmqctl delete_user {}'.format(username) | ||
1481 | 681 | output, _ = self.run_cmd_unit(sentry_units[0], cmd_user_del) | ||
1482 | 682 | |||
1483 | 683 | def get_rmq_cluster_status(self, sentry_unit): | ||
1484 | 684 | """Execute rabbitmq cluster status command on a unit and return | ||
1485 | 685 | the full output. | ||
1486 | 686 | |||
1487 | 687 | :param unit: sentry unit | ||
1488 | 688 | :returns: String containing console output of cluster status command | ||
1489 | 689 | """ | ||
1490 | 690 | cmd = 'rabbitmqctl cluster_status' | ||
1491 | 691 | output, _ = self.run_cmd_unit(sentry_unit, cmd) | ||
1492 | 692 | self.log.debug('{} cluster_status:\n{}'.format( | ||
1493 | 693 | sentry_unit.info['unit_name'], output)) | ||
1494 | 694 | return str(output) | ||
1495 | 695 | |||
1496 | 696 | def get_rmq_cluster_running_nodes(self, sentry_unit): | ||
1497 | 697 | """Parse rabbitmqctl cluster_status output string, return list of | ||
1498 | 698 | running rabbitmq cluster nodes. | ||
1499 | 699 | |||
1500 | 700 | :param unit: sentry unit | ||
1501 | 701 | :returns: List containing node names of running nodes | ||
1502 | 702 | """ | ||
1503 | 703 | # NOTE(beisner): rabbitmqctl cluster_status output is not | ||
1504 | 704 | # json-parsable, do string chop foo, then json.loads that. | ||
1505 | 705 | str_stat = self.get_rmq_cluster_status(sentry_unit) | ||
1506 | 706 | if 'running_nodes' in str_stat: | ||
1507 | 707 | pos_start = str_stat.find("{running_nodes,") + 15 | ||
1508 | 708 | pos_end = str_stat.find("]},", pos_start) + 1 | ||
1509 | 709 | str_run_nodes = str_stat[pos_start:pos_end].replace("'", '"') | ||
1510 | 710 | run_nodes = json.loads(str_run_nodes) | ||
1511 | 711 | return run_nodes | ||
1512 | 712 | else: | ||
1513 | 713 | return [] | ||
1514 | 714 | |||
1515 | 715 | def validate_rmq_cluster_running_nodes(self, sentry_units): | ||
1516 | 716 | """Check that all rmq unit hostnames are represented in the | ||
1517 | 717 | cluster_status output of all units. | ||
1518 | 718 | |||
1519 | 719 | :param host_names: dict of juju unit names to host names | ||
1520 | 720 | :param units: list of sentry unit pointers (all rmq units) | ||
1521 | 721 | :returns: None if successful, otherwise return error message | ||
1522 | 722 | """ | ||
1523 | 723 | host_names = self.get_unit_hostnames(sentry_units) | ||
1524 | 724 | errors = [] | ||
1525 | 725 | |||
1526 | 726 | # Query every unit for cluster_status running nodes | ||
1527 | 727 | for query_unit in sentry_units: | ||
1528 | 728 | query_unit_name = query_unit.info['unit_name'] | ||
1529 | 729 | running_nodes = self.get_rmq_cluster_running_nodes(query_unit) | ||
1530 | 730 | |||
1531 | 731 | # Confirm that every unit is represented in the queried unit's | ||
1532 | 732 | # cluster_status running nodes output. | ||
1533 | 733 | for validate_unit in sentry_units: | ||
1534 | 734 | val_host_name = host_names[validate_unit.info['unit_name']] | ||
1535 | 735 | val_node_name = 'rabbit@{}'.format(val_host_name) | ||
1536 | 736 | |||
1537 | 737 | if val_node_name not in running_nodes: | ||
1538 | 738 | errors.append('Cluster member check failed on {}: {} not ' | ||
1539 | 739 | 'in {}\n'.format(query_unit_name, | ||
1540 | 740 | val_node_name, | ||
1541 | 741 | running_nodes)) | ||
1542 | 742 | if errors: | ||
1543 | 743 | return ''.join(errors) | ||
1544 | 744 | |||
1545 | 745 | def rmq_ssl_is_enabled_on_unit(self, sentry_unit, port=None): | ||
1546 | 746 | """Check a single juju rmq unit for ssl and port in the config file.""" | ||
1547 | 747 | host = sentry_unit.info['public-address'] | ||
1548 | 748 | unit_name = sentry_unit.info['unit_name'] | ||
1549 | 749 | |||
1550 | 750 | conf_file = '/etc/rabbitmq/rabbitmq.config' | ||
1551 | 751 | conf_contents = str(self.file_contents_safe(sentry_unit, | ||
1552 | 752 | conf_file, max_wait=16)) | ||
1553 | 753 | # Checks | ||
1554 | 754 | conf_ssl = 'ssl' in conf_contents | ||
1555 | 755 | conf_port = str(port) in conf_contents | ||
1556 | 756 | |||
1557 | 757 | # Port explicitly checked in config | ||
1558 | 758 | if port and conf_port and conf_ssl: | ||
1559 | 759 | self.log.debug('SSL is enabled @{}:{} ' | ||
1560 | 760 | '({})'.format(host, port, unit_name)) | ||
1561 | 761 | return True | ||
1562 | 762 | elif port and not conf_port and conf_ssl: | ||
1563 | 763 | self.log.debug('SSL is enabled @{} but not on port {} ' | ||
1564 | 764 | '({})'.format(host, port, unit_name)) | ||
1565 | 765 | return False | ||
1566 | 766 | # Port not checked (useful when checking that ssl is disabled) | ||
1567 | 767 | elif not port and conf_ssl: | ||
1568 | 768 | self.log.debug('SSL is enabled @{}:{} ' | ||
1569 | 769 | '({})'.format(host, port, unit_name)) | ||
1570 | 770 | return True | ||
1571 | 771 | elif not conf_ssl: | ||
1572 | 772 | self.log.debug('SSL not enabled @{}:{} ' | ||
1573 | 773 | '({})'.format(host, port, unit_name)) | ||
1574 | 774 | return False | ||
1575 | 775 | else: | ||
1576 | 776 | msg = ('Unknown condition when checking SSL status @{}:{} ' | ||
1577 | 777 | '({})'.format(host, port, unit_name)) | ||
1578 | 778 | amulet.raise_status(amulet.FAIL, msg) | ||
1579 | 779 | |||
1580 | 780 | def validate_rmq_ssl_enabled_units(self, sentry_units, port=None): | ||
1581 | 781 | """Check that ssl is enabled on rmq juju sentry units. | ||
1582 | 782 | |||
1583 | 783 | :param sentry_units: list of all rmq sentry units | ||
1584 | 784 | :param port: optional ssl port override to validate | ||
1585 | 785 | :returns: None if successful, otherwise return error message | ||
1586 | 786 | """ | ||
1587 | 787 | for sentry_unit in sentry_units: | ||
1588 | 788 | if not self.rmq_ssl_is_enabled_on_unit(sentry_unit, port=port): | ||
1589 | 789 | return ('Unexpected condition: ssl is disabled on unit ' | ||
1590 | 790 | '({})'.format(sentry_unit.info['unit_name'])) | ||
1591 | 791 | return None | ||
1592 | 792 | |||
1593 | 793 | def validate_rmq_ssl_disabled_units(self, sentry_units): | ||
1594 | 794 | """Check that ssl is enabled on listed rmq juju sentry units. | ||
1595 | 795 | |||
1596 | 796 | :param sentry_units: list of all rmq sentry units | ||
1597 | 797 | :returns: True if successful. Raise on error. | ||
1598 | 798 | """ | ||
1599 | 799 | for sentry_unit in sentry_units: | ||
1600 | 800 | if self.rmq_ssl_is_enabled_on_unit(sentry_unit): | ||
1601 | 801 | return ('Unexpected condition: ssl is enabled on unit ' | ||
1602 | 802 | '({})'.format(sentry_unit.info['unit_name'])) | ||
1603 | 803 | return None | ||
1604 | 804 | |||
1605 | 805 | def configure_rmq_ssl_on(self, sentry_units, deployment, | ||
1606 | 806 | port=None, max_wait=60): | ||
1607 | 807 | """Turn ssl charm config option on, with optional non-default | ||
1608 | 808 | ssl port specification. Confirm that it is enabled on every | ||
1609 | 809 | unit. | ||
1610 | 810 | |||
1611 | 811 | :param sentry_units: list of sentry units | ||
1612 | 812 | :param deployment: amulet deployment object pointer | ||
1613 | 813 | :param port: amqp port, use defaults if None | ||
1614 | 814 | :param max_wait: maximum time to wait in seconds to confirm | ||
1615 | 815 | :returns: None if successful. Raise on error. | ||
1616 | 816 | """ | ||
1617 | 817 | self.log.debug('Setting ssl charm config option: on') | ||
1618 | 818 | |||
1619 | 819 | # Enable RMQ SSL | ||
1620 | 820 | config = {'ssl': 'on'} | ||
1621 | 821 | if port: | ||
1622 | 822 | config['ssl_port'] = port | ||
1623 | 823 | |||
1624 | 824 | deployment.d.configure('rabbitmq-server', config) | ||
1625 | 825 | |||
1626 | 826 | # Wait for unit status | ||
1627 | 827 | self.rmq_wait_for_cluster(deployment) | ||
1628 | 828 | |||
1629 | 829 | # Confirm | ||
1630 | 830 | tries = 0 | ||
1631 | 831 | ret = self.validate_rmq_ssl_enabled_units(sentry_units, port=port) | ||
1632 | 832 | while ret and tries < (max_wait / 4): | ||
1633 | 833 | time.sleep(4) | ||
1634 | 834 | self.log.debug('Attempt {}: {}'.format(tries, ret)) | ||
1635 | 835 | ret = self.validate_rmq_ssl_enabled_units(sentry_units, port=port) | ||
1636 | 836 | tries += 1 | ||
1637 | 837 | |||
1638 | 838 | if ret: | ||
1639 | 839 | amulet.raise_status(amulet.FAIL, ret) | ||
1640 | 840 | |||
1641 | 841 | def configure_rmq_ssl_off(self, sentry_units, deployment, max_wait=60): | ||
1642 | 842 | """Turn ssl charm config option off, confirm that it is disabled | ||
1643 | 843 | on every unit. | ||
1644 | 844 | |||
1645 | 845 | :param sentry_units: list of sentry units | ||
1646 | 846 | :param deployment: amulet deployment object pointer | ||
1647 | 847 | :param max_wait: maximum time to wait in seconds to confirm | ||
1648 | 848 | :returns: None if successful. Raise on error. | ||
1649 | 849 | """ | ||
1650 | 850 | self.log.debug('Setting ssl charm config option: off') | ||
1651 | 851 | |||
1652 | 852 | # Disable RMQ SSL | ||
1653 | 853 | config = {'ssl': 'off'} | ||
1654 | 854 | deployment.d.configure('rabbitmq-server', config) | ||
1655 | 855 | |||
1656 | 856 | # Wait for unit status | ||
1657 | 857 | self.rmq_wait_for_cluster(deployment) | ||
1658 | 858 | |||
1659 | 859 | # Confirm | ||
1660 | 860 | tries = 0 | ||
1661 | 861 | ret = self.validate_rmq_ssl_disabled_units(sentry_units) | ||
1662 | 862 | while ret and tries < (max_wait / 4): | ||
1663 | 863 | time.sleep(4) | ||
1664 | 864 | self.log.debug('Attempt {}: {}'.format(tries, ret)) | ||
1665 | 865 | ret = self.validate_rmq_ssl_disabled_units(sentry_units) | ||
1666 | 866 | tries += 1 | ||
1667 | 867 | |||
1668 | 868 | if ret: | ||
1669 | 869 | amulet.raise_status(amulet.FAIL, ret) | ||
1670 | 870 | |||
1671 | 871 | def connect_amqp_by_unit(self, sentry_unit, ssl=False, | ||
1672 | 872 | port=None, fatal=True, | ||
1673 | 873 | username="testuser1", password="changeme"): | ||
1674 | 874 | """Establish and return a pika amqp connection to the rabbitmq service | ||
1675 | 875 | running on a rmq juju unit. | ||
1676 | 876 | |||
1677 | 877 | :param sentry_unit: sentry unit pointer | ||
1678 | 878 | :param ssl: boolean, default to False | ||
1679 | 879 | :param port: amqp port, use defaults if None | ||
1680 | 880 | :param fatal: boolean, default to True (raises on connect error) | ||
1681 | 881 | :param username: amqp user name, default to testuser1 | ||
1682 | 882 | :param password: amqp user password | ||
1683 | 883 | :returns: pika amqp connection pointer or None if failed and non-fatal | ||
1684 | 884 | """ | ||
1685 | 885 | host = sentry_unit.info['public-address'] | ||
1686 | 886 | unit_name = sentry_unit.info['unit_name'] | ||
1687 | 887 | |||
1688 | 888 | # Default port logic if port is not specified | ||
1689 | 889 | if ssl and not port: | ||
1690 | 890 | port = 5671 | ||
1691 | 891 | elif not ssl and not port: | ||
1692 | 892 | port = 5672 | ||
1693 | 893 | |||
1694 | 894 | self.log.debug('Connecting to amqp on {}:{} ({}) as ' | ||
1695 | 895 | '{}...'.format(host, port, unit_name, username)) | ||
1696 | 896 | |||
1697 | 897 | try: | ||
1698 | 898 | credentials = pika.PlainCredentials(username, password) | ||
1699 | 899 | parameters = pika.ConnectionParameters(host=host, port=port, | ||
1700 | 900 | credentials=credentials, | ||
1701 | 901 | ssl=ssl, | ||
1702 | 902 | connection_attempts=3, | ||
1703 | 903 | retry_delay=5, | ||
1704 | 904 | socket_timeout=1) | ||
1705 | 905 | connection = pika.BlockingConnection(parameters) | ||
1706 | 906 | assert connection.server_properties['product'] == 'RabbitMQ' | ||
1707 | 907 | self.log.debug('Connect OK') | ||
1708 | 908 | return connection | ||
1709 | 909 | except Exception as e: | ||
1710 | 910 | msg = ('amqp connection failed to {}:{} as ' | ||
1711 | 911 | '{} ({})'.format(host, port, username, str(e))) | ||
1712 | 912 | if fatal: | ||
1713 | 913 | amulet.raise_status(amulet.FAIL, msg) | ||
1714 | 914 | else: | ||
1715 | 915 | self.log.warn(msg) | ||
1716 | 916 | return None | ||
1717 | 917 | |||
1718 | 918 | def publish_amqp_message_by_unit(self, sentry_unit, message, | ||
1719 | 919 | queue="test", ssl=False, | ||
1720 | 920 | username="testuser1", | ||
1721 | 921 | password="changeme", | ||
1722 | 922 | port=None): | ||
1723 | 923 | """Publish an amqp message to a rmq juju unit. | ||
1724 | 924 | |||
1725 | 925 | :param sentry_unit: sentry unit pointer | ||
1726 | 926 | :param message: amqp message string | ||
1727 | 927 | :param queue: message queue, default to test | ||
1728 | 928 | :param username: amqp user name, default to testuser1 | ||
1729 | 929 | :param password: amqp user password | ||
1730 | 930 | :param ssl: boolean, default to False | ||
1731 | 931 | :param port: amqp port, use defaults if None | ||
1732 | 932 | :returns: None. Raises exception if publish failed. | ||
1733 | 933 | """ | ||
1734 | 934 | self.log.debug('Publishing message to {} queue:\n{}'.format(queue, | ||
1735 | 935 | message)) | ||
1736 | 936 | connection = self.connect_amqp_by_unit(sentry_unit, ssl=ssl, | ||
1737 | 937 | port=port, | ||
1738 | 938 | username=username, | ||
1739 | 939 | password=password) | ||
1740 | 940 | |||
1741 | 941 | # NOTE(beisner): extra debug here re: pika hang potential: | ||
1742 | 942 | # https://github.com/pika/pika/issues/297 | ||
1743 | 943 | # https://groups.google.com/forum/#!topic/rabbitmq-users/Ja0iyfF0Szw | ||
1744 | 944 | self.log.debug('Defining channel...') | ||
1745 | 945 | channel = connection.channel() | ||
1746 | 946 | self.log.debug('Declaring queue...') | ||
1747 | 947 | channel.queue_declare(queue=queue, auto_delete=False, durable=True) | ||
1748 | 948 | self.log.debug('Publishing message...') | ||
1749 | 949 | channel.basic_publish(exchange='', routing_key=queue, body=message) | ||
1750 | 950 | self.log.debug('Closing channel...') | ||
1751 | 951 | channel.close() | ||
1752 | 952 | self.log.debug('Closing connection...') | ||
1753 | 953 | connection.close() | ||
1754 | 954 | |||
1755 | 955 | def get_amqp_message_by_unit(self, sentry_unit, queue="test", | ||
1756 | 956 | username="testuser1", | ||
1757 | 957 | password="changeme", | ||
1758 | 958 | ssl=False, port=None): | ||
1759 | 959 | """Get an amqp message from a rmq juju unit. | ||
1760 | 960 | |||
1761 | 961 | :param sentry_unit: sentry unit pointer | ||
1762 | 962 | :param queue: message queue, default to test | ||
1763 | 963 | :param username: amqp user name, default to testuser1 | ||
1764 | 964 | :param password: amqp user password | ||
1765 | 965 | :param ssl: boolean, default to False | ||
1766 | 966 | :param port: amqp port, use defaults if None | ||
1767 | 967 | :returns: amqp message body as string. Raise if get fails. | ||
1768 | 968 | """ | ||
1769 | 969 | connection = self.connect_amqp_by_unit(sentry_unit, ssl=ssl, | ||
1770 | 970 | port=port, | ||
1771 | 971 | username=username, | ||
1772 | 972 | password=password) | ||
1773 | 973 | channel = connection.channel() | ||
1774 | 974 | method_frame, _, body = channel.basic_get(queue) | ||
1775 | 975 | |||
1776 | 976 | if method_frame: | ||
1777 | 977 | self.log.debug('Retreived message from {} queue:\n{}'.format(queue, | ||
1778 | 978 | body)) | ||
1779 | 979 | channel.basic_ack(method_frame.delivery_tag) | ||
1780 | 980 | channel.close() | ||
1781 | 981 | connection.close() | ||
1782 | 982 | return body | ||
1783 | 983 | else: | ||
1784 | 984 | msg = 'No message retrieved.' | ||
1785 | 985 | amulet.raise_status(amulet.FAIL, msg) | ||
1786 | 605 | 986 | ||
1787 | === modified file 'hooks/charmhelpers/contrib/openstack/context.py' | |||
1788 | --- hooks/charmhelpers/contrib/openstack/context.py 2015-09-12 10:58:20 +0000 | |||
1789 | +++ hooks/charmhelpers/contrib/openstack/context.py 2016-02-18 14:28:13 +0000 | |||
1790 | @@ -14,6 +14,7 @@ | |||
1791 | 14 | # You should have received a copy of the GNU Lesser General Public License | 14 | # You should have received a copy of the GNU Lesser General Public License |
1792 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1793 | 16 | 16 | ||
1794 | 17 | import glob | ||
1795 | 17 | import json | 18 | import json |
1796 | 18 | import os | 19 | import os |
1797 | 19 | import re | 20 | import re |
1798 | @@ -56,6 +57,7 @@ | |||
1799 | 56 | get_nic_hwaddr, | 57 | get_nic_hwaddr, |
1800 | 57 | mkdir, | 58 | mkdir, |
1801 | 58 | write_file, | 59 | write_file, |
1802 | 60 | pwgen, | ||
1803 | 59 | ) | 61 | ) |
1804 | 60 | from charmhelpers.contrib.hahelpers.cluster import ( | 62 | from charmhelpers.contrib.hahelpers.cluster import ( |
1805 | 61 | determine_apache_port, | 63 | determine_apache_port, |
1806 | @@ -86,6 +88,8 @@ | |||
1807 | 86 | is_bridge_member, | 88 | is_bridge_member, |
1808 | 87 | ) | 89 | ) |
1809 | 88 | from charmhelpers.contrib.openstack.utils import get_host_ip | 90 | from charmhelpers.contrib.openstack.utils import get_host_ip |
1810 | 91 | from charmhelpers.core.unitdata import kv | ||
1811 | 92 | |||
1812 | 89 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' | 93 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
1813 | 90 | ADDRESS_TYPES = ['admin', 'internal', 'public'] | 94 | ADDRESS_TYPES = ['admin', 'internal', 'public'] |
1814 | 91 | 95 | ||
1815 | @@ -194,10 +198,50 @@ | |||
1816 | 194 | class OSContextGenerator(object): | 198 | class OSContextGenerator(object): |
1817 | 195 | """Base class for all context generators.""" | 199 | """Base class for all context generators.""" |
1818 | 196 | interfaces = [] | 200 | interfaces = [] |
1819 | 201 | related = False | ||
1820 | 202 | complete = False | ||
1821 | 203 | missing_data = [] | ||
1822 | 197 | 204 | ||
1823 | 198 | def __call__(self): | 205 | def __call__(self): |
1824 | 199 | raise NotImplementedError | 206 | raise NotImplementedError |
1825 | 200 | 207 | ||
1826 | 208 | def context_complete(self, ctxt): | ||
1827 | 209 | """Check for missing data for the required context data. | ||
1828 | 210 | Set self.missing_data if it exists and return False. | ||
1829 | 211 | Set self.complete if no missing data and return True. | ||
1830 | 212 | """ | ||
1831 | 213 | # Fresh start | ||
1832 | 214 | self.complete = False | ||
1833 | 215 | self.missing_data = [] | ||
1834 | 216 | for k, v in six.iteritems(ctxt): | ||
1835 | 217 | if v is None or v == '': | ||
1836 | 218 | if k not in self.missing_data: | ||
1837 | 219 | self.missing_data.append(k) | ||
1838 | 220 | |||
1839 | 221 | if self.missing_data: | ||
1840 | 222 | self.complete = False | ||
1841 | 223 | log('Missing required data: %s' % ' '.join(self.missing_data), level=INFO) | ||
1842 | 224 | else: | ||
1843 | 225 | self.complete = True | ||
1844 | 226 | return self.complete | ||
1845 | 227 | |||
1846 | 228 | def get_related(self): | ||
1847 | 229 | """Check if any of the context interfaces have relation ids. | ||
1848 | 230 | Set self.related and return True if one of the interfaces | ||
1849 | 231 | has relation ids. | ||
1850 | 232 | """ | ||
1851 | 233 | # Fresh start | ||
1852 | 234 | self.related = False | ||
1853 | 235 | try: | ||
1854 | 236 | for interface in self.interfaces: | ||
1855 | 237 | if relation_ids(interface): | ||
1856 | 238 | self.related = True | ||
1857 | 239 | return self.related | ||
1858 | 240 | except AttributeError as e: | ||
1859 | 241 | log("{} {}" | ||
1860 | 242 | "".format(self, e), 'INFO') | ||
1861 | 243 | return self.related | ||
1862 | 244 | |||
1863 | 201 | 245 | ||
1864 | 202 | class SharedDBContext(OSContextGenerator): | 246 | class SharedDBContext(OSContextGenerator): |
1865 | 203 | interfaces = ['shared-db'] | 247 | interfaces = ['shared-db'] |
1866 | @@ -213,6 +257,7 @@ | |||
1867 | 213 | self.database = database | 257 | self.database = database |
1868 | 214 | self.user = user | 258 | self.user = user |
1869 | 215 | self.ssl_dir = ssl_dir | 259 | self.ssl_dir = ssl_dir |
1870 | 260 | self.rel_name = self.interfaces[0] | ||
1871 | 216 | 261 | ||
1872 | 217 | def __call__(self): | 262 | def __call__(self): |
1873 | 218 | self.database = self.database or config('database') | 263 | self.database = self.database or config('database') |
1874 | @@ -246,6 +291,7 @@ | |||
1875 | 246 | password_setting = self.relation_prefix + '_password' | 291 | password_setting = self.relation_prefix + '_password' |
1876 | 247 | 292 | ||
1877 | 248 | for rid in relation_ids(self.interfaces[0]): | 293 | for rid in relation_ids(self.interfaces[0]): |
1878 | 294 | self.related = True | ||
1879 | 249 | for unit in related_units(rid): | 295 | for unit in related_units(rid): |
1880 | 250 | rdata = relation_get(rid=rid, unit=unit) | 296 | rdata = relation_get(rid=rid, unit=unit) |
1881 | 251 | host = rdata.get('db_host') | 297 | host = rdata.get('db_host') |
1882 | @@ -257,7 +303,7 @@ | |||
1883 | 257 | 'database_password': rdata.get(password_setting), | 303 | 'database_password': rdata.get(password_setting), |
1884 | 258 | 'database_type': 'mysql' | 304 | 'database_type': 'mysql' |
1885 | 259 | } | 305 | } |
1887 | 260 | if context_complete(ctxt): | 306 | if self.context_complete(ctxt): |
1888 | 261 | db_ssl(rdata, ctxt, self.ssl_dir) | 307 | db_ssl(rdata, ctxt, self.ssl_dir) |
1889 | 262 | return ctxt | 308 | return ctxt |
1890 | 263 | return {} | 309 | return {} |
1891 | @@ -278,6 +324,7 @@ | |||
1892 | 278 | 324 | ||
1893 | 279 | ctxt = {} | 325 | ctxt = {} |
1894 | 280 | for rid in relation_ids(self.interfaces[0]): | 326 | for rid in relation_ids(self.interfaces[0]): |
1895 | 327 | self.related = True | ||
1896 | 281 | for unit in related_units(rid): | 328 | for unit in related_units(rid): |
1897 | 282 | rel_host = relation_get('host', rid=rid, unit=unit) | 329 | rel_host = relation_get('host', rid=rid, unit=unit) |
1898 | 283 | rel_user = relation_get('user', rid=rid, unit=unit) | 330 | rel_user = relation_get('user', rid=rid, unit=unit) |
1899 | @@ -287,7 +334,7 @@ | |||
1900 | 287 | 'database_user': rel_user, | 334 | 'database_user': rel_user, |
1901 | 288 | 'database_password': rel_passwd, | 335 | 'database_password': rel_passwd, |
1902 | 289 | 'database_type': 'postgresql'} | 336 | 'database_type': 'postgresql'} |
1904 | 290 | if context_complete(ctxt): | 337 | if self.context_complete(ctxt): |
1905 | 291 | return ctxt | 338 | return ctxt |
1906 | 292 | 339 | ||
1907 | 293 | return {} | 340 | return {} |
1908 | @@ -348,6 +395,7 @@ | |||
1909 | 348 | ctxt['signing_dir'] = cachedir | 395 | ctxt['signing_dir'] = cachedir |
1910 | 349 | 396 | ||
1911 | 350 | for rid in relation_ids(self.rel_name): | 397 | for rid in relation_ids(self.rel_name): |
1912 | 398 | self.related = True | ||
1913 | 351 | for unit in related_units(rid): | 399 | for unit in related_units(rid): |
1914 | 352 | rdata = relation_get(rid=rid, unit=unit) | 400 | rdata = relation_get(rid=rid, unit=unit) |
1915 | 353 | serv_host = rdata.get('service_host') | 401 | serv_host = rdata.get('service_host') |
1916 | @@ -366,7 +414,7 @@ | |||
1917 | 366 | 'service_protocol': svc_protocol, | 414 | 'service_protocol': svc_protocol, |
1918 | 367 | 'auth_protocol': auth_protocol}) | 415 | 'auth_protocol': auth_protocol}) |
1919 | 368 | 416 | ||
1921 | 369 | if context_complete(ctxt): | 417 | if self.context_complete(ctxt): |
1922 | 370 | # NOTE(jamespage) this is required for >= icehouse | 418 | # NOTE(jamespage) this is required for >= icehouse |
1923 | 371 | # so a missing value just indicates keystone needs | 419 | # so a missing value just indicates keystone needs |
1924 | 372 | # upgrading | 420 | # upgrading |
1925 | @@ -405,6 +453,7 @@ | |||
1926 | 405 | ctxt = {} | 453 | ctxt = {} |
1927 | 406 | for rid in relation_ids(self.rel_name): | 454 | for rid in relation_ids(self.rel_name): |
1928 | 407 | ha_vip_only = False | 455 | ha_vip_only = False |
1929 | 456 | self.related = True | ||
1930 | 408 | for unit in related_units(rid): | 457 | for unit in related_units(rid): |
1931 | 409 | if relation_get('clustered', rid=rid, unit=unit): | 458 | if relation_get('clustered', rid=rid, unit=unit): |
1932 | 410 | ctxt['clustered'] = True | 459 | ctxt['clustered'] = True |
1933 | @@ -437,7 +486,7 @@ | |||
1934 | 437 | ha_vip_only = relation_get('ha-vip-only', | 486 | ha_vip_only = relation_get('ha-vip-only', |
1935 | 438 | rid=rid, unit=unit) is not None | 487 | rid=rid, unit=unit) is not None |
1936 | 439 | 488 | ||
1938 | 440 | if context_complete(ctxt): | 489 | if self.context_complete(ctxt): |
1939 | 441 | if 'rabbit_ssl_ca' in ctxt: | 490 | if 'rabbit_ssl_ca' in ctxt: |
1940 | 442 | if not self.ssl_dir: | 491 | if not self.ssl_dir: |
1941 | 443 | log("Charm not setup for ssl support but ssl ca " | 492 | log("Charm not setup for ssl support but ssl ca " |
1942 | @@ -469,7 +518,7 @@ | |||
1943 | 469 | ctxt['oslo_messaging_flags'] = config_flags_parser( | 518 | ctxt['oslo_messaging_flags'] = config_flags_parser( |
1944 | 470 | oslo_messaging_flags) | 519 | oslo_messaging_flags) |
1945 | 471 | 520 | ||
1947 | 472 | if not context_complete(ctxt): | 521 | if not self.complete: |
1948 | 473 | return {} | 522 | return {} |
1949 | 474 | 523 | ||
1950 | 475 | return ctxt | 524 | return ctxt |
1951 | @@ -485,13 +534,15 @@ | |||
1952 | 485 | 534 | ||
1953 | 486 | log('Generating template context for ceph', level=DEBUG) | 535 | log('Generating template context for ceph', level=DEBUG) |
1954 | 487 | mon_hosts = [] | 536 | mon_hosts = [] |
1958 | 488 | auth = None | 537 | ctxt = { |
1959 | 489 | key = None | 538 | 'use_syslog': str(config('use-syslog')).lower() |
1960 | 490 | use_syslog = str(config('use-syslog')).lower() | 539 | } |
1961 | 491 | for rid in relation_ids('ceph'): | 540 | for rid in relation_ids('ceph'): |
1962 | 492 | for unit in related_units(rid): | 541 | for unit in related_units(rid): |
1965 | 493 | auth = relation_get('auth', rid=rid, unit=unit) | 542 | if not ctxt.get('auth'): |
1966 | 494 | key = relation_get('key', rid=rid, unit=unit) | 543 | ctxt['auth'] = relation_get('auth', rid=rid, unit=unit) |
1967 | 544 | if not ctxt.get('key'): | ||
1968 | 545 | ctxt['key'] = relation_get('key', rid=rid, unit=unit) | ||
1969 | 495 | ceph_pub_addr = relation_get('ceph-public-address', rid=rid, | 546 | ceph_pub_addr = relation_get('ceph-public-address', rid=rid, |
1970 | 496 | unit=unit) | 547 | unit=unit) |
1971 | 497 | unit_priv_addr = relation_get('private-address', rid=rid, | 548 | unit_priv_addr = relation_get('private-address', rid=rid, |
1972 | @@ -500,15 +551,12 @@ | |||
1973 | 500 | ceph_addr = format_ipv6_addr(ceph_addr) or ceph_addr | 551 | ceph_addr = format_ipv6_addr(ceph_addr) or ceph_addr |
1974 | 501 | mon_hosts.append(ceph_addr) | 552 | mon_hosts.append(ceph_addr) |
1975 | 502 | 553 | ||
1980 | 503 | ctxt = {'mon_hosts': ' '.join(sorted(mon_hosts)), | 554 | ctxt['mon_hosts'] = ' '.join(sorted(mon_hosts)) |
1977 | 504 | 'auth': auth, | ||
1978 | 505 | 'key': key, | ||
1979 | 506 | 'use_syslog': use_syslog} | ||
1981 | 507 | 555 | ||
1982 | 508 | if not os.path.isdir('/etc/ceph'): | 556 | if not os.path.isdir('/etc/ceph'): |
1983 | 509 | os.mkdir('/etc/ceph') | 557 | os.mkdir('/etc/ceph') |
1984 | 510 | 558 | ||
1986 | 511 | if not context_complete(ctxt): | 559 | if not self.context_complete(ctxt): |
1987 | 512 | return {} | 560 | return {} |
1988 | 513 | 561 | ||
1989 | 514 | ensure_packages(['ceph-common']) | 562 | ensure_packages(['ceph-common']) |
1990 | @@ -581,15 +629,28 @@ | |||
1991 | 581 | if config('haproxy-client-timeout'): | 629 | if config('haproxy-client-timeout'): |
1992 | 582 | ctxt['haproxy_client_timeout'] = config('haproxy-client-timeout') | 630 | ctxt['haproxy_client_timeout'] = config('haproxy-client-timeout') |
1993 | 583 | 631 | ||
1994 | 632 | if config('haproxy-queue-timeout'): | ||
1995 | 633 | ctxt['haproxy_queue_timeout'] = config('haproxy-queue-timeout') | ||
1996 | 634 | |||
1997 | 635 | if config('haproxy-connect-timeout'): | ||
1998 | 636 | ctxt['haproxy_connect_timeout'] = config('haproxy-connect-timeout') | ||
1999 | 637 | |||
2000 | 584 | if config('prefer-ipv6'): | 638 | if config('prefer-ipv6'): |
2001 | 585 | ctxt['ipv6'] = True | 639 | ctxt['ipv6'] = True |
2002 | 586 | ctxt['local_host'] = 'ip6-localhost' | 640 | ctxt['local_host'] = 'ip6-localhost' |
2003 | 587 | ctxt['haproxy_host'] = '::' | 641 | ctxt['haproxy_host'] = '::' |
2004 | 588 | ctxt['stat_port'] = ':::8888' | ||
2005 | 589 | else: | 642 | else: |
2006 | 590 | ctxt['local_host'] = '127.0.0.1' | 643 | ctxt['local_host'] = '127.0.0.1' |
2007 | 591 | ctxt['haproxy_host'] = '0.0.0.0' | 644 | ctxt['haproxy_host'] = '0.0.0.0' |
2009 | 592 | ctxt['stat_port'] = ':8888' | 645 | |
2010 | 646 | ctxt['stat_port'] = '8888' | ||
2011 | 647 | |||
2012 | 648 | db = kv() | ||
2013 | 649 | ctxt['stat_password'] = db.get('stat-password') | ||
2014 | 650 | if not ctxt['stat_password']: | ||
2015 | 651 | ctxt['stat_password'] = db.set('stat-password', | ||
2016 | 652 | pwgen(32)) | ||
2017 | 653 | db.flush() | ||
2018 | 593 | 654 | ||
2019 | 594 | for frontend in cluster_hosts: | 655 | for frontend in cluster_hosts: |
2020 | 595 | if (len(cluster_hosts[frontend]['backends']) > 1 or | 656 | if (len(cluster_hosts[frontend]['backends']) > 1 or |
2021 | @@ -907,6 +968,19 @@ | |||
2022 | 907 | 'config': config} | 968 | 'config': config} |
2023 | 908 | return ovs_ctxt | 969 | return ovs_ctxt |
2024 | 909 | 970 | ||
2025 | 971 | def midonet_ctxt(self): | ||
2026 | 972 | driver = neutron_plugin_attribute(self.plugin, 'driver', | ||
2027 | 973 | self.network_manager) | ||
2028 | 974 | midonet_config = neutron_plugin_attribute(self.plugin, 'config', | ||
2029 | 975 | self.network_manager) | ||
2030 | 976 | mido_ctxt = {'core_plugin': driver, | ||
2031 | 977 | 'neutron_plugin': 'midonet', | ||
2032 | 978 | 'neutron_security_groups': self.neutron_security_groups, | ||
2033 | 979 | 'local_ip': unit_private_ip(), | ||
2034 | 980 | 'config': midonet_config} | ||
2035 | 981 | |||
2036 | 982 | return mido_ctxt | ||
2037 | 983 | |||
2038 | 910 | def __call__(self): | 984 | def __call__(self): |
2039 | 911 | if self.network_manager not in ['quantum', 'neutron']: | 985 | if self.network_manager not in ['quantum', 'neutron']: |
2040 | 912 | return {} | 986 | return {} |
2041 | @@ -928,6 +1002,8 @@ | |||
2042 | 928 | ctxt.update(self.nuage_ctxt()) | 1002 | ctxt.update(self.nuage_ctxt()) |
2043 | 929 | elif self.plugin == 'plumgrid': | 1003 | elif self.plugin == 'plumgrid': |
2044 | 930 | ctxt.update(self.pg_ctxt()) | 1004 | ctxt.update(self.pg_ctxt()) |
2045 | 1005 | elif self.plugin == 'midonet': | ||
2046 | 1006 | ctxt.update(self.midonet_ctxt()) | ||
2047 | 931 | 1007 | ||
2048 | 932 | alchemy_flags = config('neutron-alchemy-flags') | 1008 | alchemy_flags = config('neutron-alchemy-flags') |
2049 | 933 | if alchemy_flags: | 1009 | if alchemy_flags: |
2050 | @@ -1028,6 +1104,20 @@ | |||
2051 | 1028 | config_flags_parser(config_flags)} | 1104 | config_flags_parser(config_flags)} |
2052 | 1029 | 1105 | ||
2053 | 1030 | 1106 | ||
2054 | 1107 | class LibvirtConfigFlagsContext(OSContextGenerator): | ||
2055 | 1108 | """ | ||
2056 | 1109 | This context provides support for extending | ||
2057 | 1110 | the libvirt section through user-defined flags. | ||
2058 | 1111 | """ | ||
2059 | 1112 | def __call__(self): | ||
2060 | 1113 | ctxt = {} | ||
2061 | 1114 | libvirt_flags = config('libvirt-flags') | ||
2062 | 1115 | if libvirt_flags: | ||
2063 | 1116 | ctxt['libvirt_flags'] = config_flags_parser( | ||
2064 | 1117 | libvirt_flags) | ||
2065 | 1118 | return ctxt | ||
2066 | 1119 | |||
2067 | 1120 | |||
2068 | 1031 | class SubordinateConfigContext(OSContextGenerator): | 1121 | class SubordinateConfigContext(OSContextGenerator): |
2069 | 1032 | 1122 | ||
2070 | 1033 | """ | 1123 | """ |
2071 | @@ -1060,7 +1150,7 @@ | |||
2072 | 1060 | 1150 | ||
2073 | 1061 | ctxt = { | 1151 | ctxt = { |
2074 | 1062 | ... other context ... | 1152 | ... other context ... |
2076 | 1063 | 'subordinate_config': { | 1153 | 'subordinate_configuration': { |
2077 | 1064 | 'DEFAULT': { | 1154 | 'DEFAULT': { |
2078 | 1065 | 'key1': 'value1', | 1155 | 'key1': 'value1', |
2079 | 1066 | }, | 1156 | }, |
2080 | @@ -1101,22 +1191,23 @@ | |||
2081 | 1101 | try: | 1191 | try: |
2082 | 1102 | sub_config = json.loads(sub_config) | 1192 | sub_config = json.loads(sub_config) |
2083 | 1103 | except: | 1193 | except: |
2086 | 1104 | log('Could not parse JSON from subordinate_config ' | 1194 | log('Could not parse JSON from ' |
2087 | 1105 | 'setting from %s' % rid, level=ERROR) | 1195 | 'subordinate_configuration setting from %s' |
2088 | 1196 | % rid, level=ERROR) | ||
2089 | 1106 | continue | 1197 | continue |
2090 | 1107 | 1198 | ||
2091 | 1108 | for service in self.services: | 1199 | for service in self.services: |
2092 | 1109 | if service not in sub_config: | 1200 | if service not in sub_config: |
2096 | 1110 | log('Found subordinate_config on %s but it contained' | 1201 | log('Found subordinate_configuration on %s but it ' |
2097 | 1111 | 'nothing for %s service' % (rid, service), | 1202 | 'contained nothing for %s service' |
2098 | 1112 | level=INFO) | 1203 | % (rid, service), level=INFO) |
2099 | 1113 | continue | 1204 | continue |
2100 | 1114 | 1205 | ||
2101 | 1115 | sub_config = sub_config[service] | 1206 | sub_config = sub_config[service] |
2102 | 1116 | if self.config_file not in sub_config: | 1207 | if self.config_file not in sub_config: |
2106 | 1117 | log('Found subordinate_config on %s but it contained' | 1208 | log('Found subordinate_configuration on %s but it ' |
2107 | 1118 | 'nothing for %s' % (rid, self.config_file), | 1209 | 'contained nothing for %s' |
2108 | 1119 | level=INFO) | 1210 | % (rid, self.config_file), level=INFO) |
2109 | 1120 | continue | 1211 | continue |
2110 | 1121 | 1212 | ||
2111 | 1122 | sub_config = sub_config[self.config_file] | 1213 | sub_config = sub_config[self.config_file] |
2112 | @@ -1319,7 +1410,7 @@ | |||
2113 | 1319 | normalized.update({port: port for port in resolved | 1410 | normalized.update({port: port for port in resolved |
2114 | 1320 | if port in ports}) | 1411 | if port in ports}) |
2115 | 1321 | if resolved: | 1412 | if resolved: |
2117 | 1322 | return {bridge: normalized[port] for port, bridge in | 1413 | return {normalized[port]: bridge for port, bridge in |
2118 | 1323 | six.iteritems(portmap) if port in normalized.keys()} | 1414 | six.iteritems(portmap) if port in normalized.keys()} |
2119 | 1324 | 1415 | ||
2120 | 1325 | return None | 1416 | return None |
2121 | @@ -1330,12 +1421,22 @@ | |||
2122 | 1330 | def __call__(self): | 1421 | def __call__(self): |
2123 | 1331 | ctxt = {} | 1422 | ctxt = {} |
2124 | 1332 | mappings = super(PhyNICMTUContext, self).__call__() | 1423 | mappings = super(PhyNICMTUContext, self).__call__() |
2127 | 1333 | if mappings and mappings.values(): | 1424 | if mappings and mappings.keys(): |
2128 | 1334 | ports = mappings.values() | 1425 | ports = sorted(mappings.keys()) |
2129 | 1335 | napi_settings = NeutronAPIContext()() | 1426 | napi_settings = NeutronAPIContext()() |
2130 | 1336 | mtu = napi_settings.get('network_device_mtu') | 1427 | mtu = napi_settings.get('network_device_mtu') |
2131 | 1428 | all_ports = set() | ||
2132 | 1429 | # If any of ports is a vlan device, its underlying device must have | ||
2133 | 1430 | # mtu applied first. | ||
2134 | 1431 | for port in ports: | ||
2135 | 1432 | for lport in glob.glob("/sys/class/net/%s/lower_*" % port): | ||
2136 | 1433 | lport = os.path.basename(lport) | ||
2137 | 1434 | all_ports.add(lport.split('_')[1]) | ||
2138 | 1435 | |||
2139 | 1436 | all_ports = list(all_ports) | ||
2140 | 1437 | all_ports.extend(ports) | ||
2141 | 1337 | if mtu: | 1438 | if mtu: |
2143 | 1338 | ctxt["devs"] = '\\n'.join(ports) | 1439 | ctxt["devs"] = '\\n'.join(all_ports) |
2144 | 1339 | ctxt['mtu'] = mtu | 1440 | ctxt['mtu'] = mtu |
2145 | 1340 | 1441 | ||
2146 | 1341 | return ctxt | 1442 | return ctxt |
2147 | @@ -1367,6 +1468,6 @@ | |||
2148 | 1367 | 'auth_protocol': | 1468 | 'auth_protocol': |
2149 | 1368 | rdata.get('auth_protocol') or 'http', | 1469 | rdata.get('auth_protocol') or 'http', |
2150 | 1369 | } | 1470 | } |
2152 | 1370 | if context_complete(ctxt): | 1471 | if self.context_complete(ctxt): |
2153 | 1371 | return ctxt | 1472 | return ctxt |
2154 | 1372 | return {} | 1473 | return {} |
2155 | 1373 | 1474 | ||
2156 | === modified file 'hooks/charmhelpers/contrib/openstack/files/check_haproxy.sh' | |||
2157 | --- hooks/charmhelpers/contrib/openstack/files/check_haproxy.sh 2015-02-24 05:48:43 +0000 | |||
2158 | +++ hooks/charmhelpers/contrib/openstack/files/check_haproxy.sh 2016-02-18 14:28:13 +0000 | |||
2159 | @@ -9,15 +9,17 @@ | |||
2160 | 9 | CRITICAL=0 | 9 | CRITICAL=0 |
2161 | 10 | NOTACTIVE='' | 10 | NOTACTIVE='' |
2162 | 11 | LOGFILE=/var/log/nagios/check_haproxy.log | 11 | LOGFILE=/var/log/nagios/check_haproxy.log |
2164 | 12 | AUTH=$(grep -r "stats auth" /etc/haproxy | head -1 | awk '{print $4}') | 12 | AUTH=$(grep -r "stats auth" /etc/haproxy | awk 'NR=1{print $4}') |
2165 | 13 | 13 | ||
2167 | 14 | for appserver in $(grep ' server' /etc/haproxy/haproxy.cfg | awk '{print $2'}); | 14 | typeset -i N_INSTANCES=0 |
2168 | 15 | for appserver in $(awk '/^\s+server/{print $2}' /etc/haproxy/haproxy.cfg) | ||
2169 | 15 | do | 16 | do |
2171 | 16 | output=$(/usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 8888 --regex="class=\"(active|backup)(2|3).*${appserver}" -e ' 200 OK') | 17 | N_INSTANCES=N_INSTANCES+1 |
2172 | 18 | output=$(/usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 8888 -u '/;csv' --regex=",${appserver},.*,UP.*" -e ' 200 OK') | ||
2173 | 17 | if [ $? != 0 ]; then | 19 | if [ $? != 0 ]; then |
2174 | 18 | date >> $LOGFILE | 20 | date >> $LOGFILE |
2175 | 19 | echo $output >> $LOGFILE | 21 | echo $output >> $LOGFILE |
2177 | 20 | /usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 8888 -v | grep $appserver >> $LOGFILE 2>&1 | 22 | /usr/lib/nagios/plugins/check_http -a ${AUTH} -I 127.0.0.1 -p 8888 -u '/;csv' -v | grep ",${appserver}," >> $LOGFILE 2>&1 |
2178 | 21 | CRITICAL=1 | 23 | CRITICAL=1 |
2179 | 22 | NOTACTIVE="${NOTACTIVE} $appserver" | 24 | NOTACTIVE="${NOTACTIVE} $appserver" |
2180 | 23 | fi | 25 | fi |
2181 | @@ -28,5 +30,5 @@ | |||
2182 | 28 | exit 2 | 30 | exit 2 |
2183 | 29 | fi | 31 | fi |
2184 | 30 | 32 | ||
2186 | 31 | echo "OK: All haproxy instances looking good" | 33 | echo "OK: All haproxy instances ($N_INSTANCES) looking good" |
2187 | 32 | exit 0 | 34 | exit 0 |
2188 | 33 | 35 | ||
2189 | === modified file 'hooks/charmhelpers/contrib/openstack/neutron.py' | |||
2190 | --- hooks/charmhelpers/contrib/openstack/neutron.py 2015-09-03 09:42:35 +0000 | |||
2191 | +++ hooks/charmhelpers/contrib/openstack/neutron.py 2016-02-18 14:28:13 +0000 | |||
2192 | @@ -204,11 +204,25 @@ | |||
2193 | 204 | database=config('database'), | 204 | database=config('database'), |
2194 | 205 | ssl_dir=NEUTRON_CONF_DIR)], | 205 | ssl_dir=NEUTRON_CONF_DIR)], |
2195 | 206 | 'services': [], | 206 | 'services': [], |
2198 | 207 | 'packages': [['plumgrid-lxc'], | 207 | 'packages': ['plumgrid-lxc', |
2199 | 208 | ['iovisor-dkms']], | 208 | 'iovisor-dkms'], |
2200 | 209 | 'server_packages': ['neutron-server', | 209 | 'server_packages': ['neutron-server', |
2201 | 210 | 'neutron-plugin-plumgrid'], | 210 | 'neutron-plugin-plumgrid'], |
2202 | 211 | 'server_services': ['neutron-server'] | 211 | 'server_services': ['neutron-server'] |
2203 | 212 | }, | ||
2204 | 213 | 'midonet': { | ||
2205 | 214 | 'config': '/etc/neutron/plugins/midonet/midonet.ini', | ||
2206 | 215 | 'driver': 'midonet.neutron.plugin.MidonetPluginV2', | ||
2207 | 216 | 'contexts': [ | ||
2208 | 217 | context.SharedDBContext(user=config('neutron-database-user'), | ||
2209 | 218 | database=config('neutron-database'), | ||
2210 | 219 | relation_prefix='neutron', | ||
2211 | 220 | ssl_dir=NEUTRON_CONF_DIR)], | ||
2212 | 221 | 'services': [], | ||
2213 | 222 | 'packages': [[headers_package()] + determine_dkms_package()], | ||
2214 | 223 | 'server_packages': ['neutron-server', | ||
2215 | 224 | 'python-neutron-plugin-midonet'], | ||
2216 | 225 | 'server_services': ['neutron-server'] | ||
2217 | 212 | } | 226 | } |
2218 | 213 | } | 227 | } |
2219 | 214 | if release >= 'icehouse': | 228 | if release >= 'icehouse': |
2220 | @@ -310,10 +324,10 @@ | |||
2221 | 310 | def parse_data_port_mappings(mappings, default_bridge='br-data'): | 324 | def parse_data_port_mappings(mappings, default_bridge='br-data'): |
2222 | 311 | """Parse data port mappings. | 325 | """Parse data port mappings. |
2223 | 312 | 326 | ||
2225 | 313 | Mappings must be a space-delimited list of port:bridge mappings. | 327 | Mappings must be a space-delimited list of bridge:port. |
2226 | 314 | 328 | ||
2229 | 315 | Returns dict of the form {port:bridge} where port may be an mac address or | 329 | Returns dict of the form {port:bridge} where ports may be mac addresses or |
2230 | 316 | interface name. | 330 | interface names. |
2231 | 317 | """ | 331 | """ |
2232 | 318 | 332 | ||
2233 | 319 | # NOTE(dosaboy): we use rvalue for key to allow multiple values to be | 333 | # NOTE(dosaboy): we use rvalue for key to allow multiple values to be |
2234 | 320 | 334 | ||
2235 | === modified file 'hooks/charmhelpers/contrib/openstack/templates/ceph.conf' | |||
2236 | --- hooks/charmhelpers/contrib/openstack/templates/ceph.conf 2015-07-17 13:24:05 +0000 | |||
2237 | +++ hooks/charmhelpers/contrib/openstack/templates/ceph.conf 2016-02-18 14:28:13 +0000 | |||
2238 | @@ -13,3 +13,9 @@ | |||
2239 | 13 | err to syslog = {{ use_syslog }} | 13 | err to syslog = {{ use_syslog }} |
2240 | 14 | clog to syslog = {{ use_syslog }} | 14 | clog to syslog = {{ use_syslog }} |
2241 | 15 | 15 | ||
2242 | 16 | [client] | ||
2243 | 17 | {% if rbd_client_cache_settings -%} | ||
2244 | 18 | {% for key, value in rbd_client_cache_settings.iteritems() -%} | ||
2245 | 19 | {{ key }} = {{ value }} | ||
2246 | 20 | {% endfor -%} | ||
2247 | 21 | {%- endif %} | ||
2248 | 16 | \ No newline at end of file | 22 | \ No newline at end of file |
2249 | 17 | 23 | ||
2250 | === modified file 'hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg' | |||
2251 | --- hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2015-02-24 05:48:43 +0000 | |||
2252 | +++ hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2016-02-18 14:28:13 +0000 | |||
2253 | @@ -12,27 +12,35 @@ | |||
2254 | 12 | option tcplog | 12 | option tcplog |
2255 | 13 | option dontlognull | 13 | option dontlognull |
2256 | 14 | retries 3 | 14 | retries 3 |
2260 | 15 | timeout queue 1000 | 15 | {%- if haproxy_queue_timeout %} |
2261 | 16 | timeout connect 1000 | 16 | timeout queue {{ haproxy_queue_timeout }} |
2262 | 17 | {% if haproxy_client_timeout -%} | 17 | {%- else %} |
2263 | 18 | timeout queue 5000 | ||
2264 | 19 | {%- endif %} | ||
2265 | 20 | {%- if haproxy_connect_timeout %} | ||
2266 | 21 | timeout connect {{ haproxy_connect_timeout }} | ||
2267 | 22 | {%- else %} | ||
2268 | 23 | timeout connect 5000 | ||
2269 | 24 | {%- endif %} | ||
2270 | 25 | {%- if haproxy_client_timeout %} | ||
2271 | 18 | timeout client {{ haproxy_client_timeout }} | 26 | timeout client {{ haproxy_client_timeout }} |
2273 | 19 | {% else -%} | 27 | {%- else %} |
2274 | 20 | timeout client 30000 | 28 | timeout client 30000 |
2278 | 21 | {% endif -%} | 29 | {%- endif %} |
2279 | 22 | 30 | {%- if haproxy_server_timeout %} | |
2277 | 23 | {% if haproxy_server_timeout -%} | ||
2280 | 24 | timeout server {{ haproxy_server_timeout }} | 31 | timeout server {{ haproxy_server_timeout }} |
2282 | 25 | {% else -%} | 32 | {%- else %} |
2283 | 26 | timeout server 30000 | 33 | timeout server 30000 |
2285 | 27 | {% endif -%} | 34 | {%- endif %} |
2286 | 28 | 35 | ||
2288 | 29 | listen stats {{ stat_port }} | 36 | listen stats |
2289 | 37 | bind {{ local_host }}:{{ stat_port }} | ||
2290 | 30 | mode http | 38 | mode http |
2291 | 31 | stats enable | 39 | stats enable |
2292 | 32 | stats hide-version | 40 | stats hide-version |
2293 | 33 | stats realm Haproxy\ Statistics | 41 | stats realm Haproxy\ Statistics |
2294 | 34 | stats uri / | 42 | stats uri / |
2296 | 35 | stats auth admin:password | 43 | stats auth admin:{{ stat_password }} |
2297 | 36 | 44 | ||
2298 | 37 | {% if frontends -%} | 45 | {% if frontends -%} |
2299 | 38 | {% for service, ports in service_ports.items() -%} | 46 | {% for service, ports in service_ports.items() -%} |
2300 | 39 | 47 | ||
2301 | === modified file 'hooks/charmhelpers/contrib/openstack/templating.py' | |||
2302 | --- hooks/charmhelpers/contrib/openstack/templating.py 2015-08-27 15:02:34 +0000 | |||
2303 | +++ hooks/charmhelpers/contrib/openstack/templating.py 2016-02-18 14:28:13 +0000 | |||
2304 | @@ -18,7 +18,7 @@ | |||
2305 | 18 | 18 | ||
2306 | 19 | import six | 19 | import six |
2307 | 20 | 20 | ||
2309 | 21 | from charmhelpers.fetch import apt_install | 21 | from charmhelpers.fetch import apt_install, apt_update |
2310 | 22 | from charmhelpers.core.hookenv import ( | 22 | from charmhelpers.core.hookenv import ( |
2311 | 23 | log, | 23 | log, |
2312 | 24 | ERROR, | 24 | ERROR, |
2313 | @@ -29,6 +29,7 @@ | |||
2314 | 29 | try: | 29 | try: |
2315 | 30 | from jinja2 import FileSystemLoader, ChoiceLoader, Environment, exceptions | 30 | from jinja2 import FileSystemLoader, ChoiceLoader, Environment, exceptions |
2316 | 31 | except ImportError: | 31 | except ImportError: |
2317 | 32 | apt_update(fatal=True) | ||
2318 | 32 | apt_install('python-jinja2', fatal=True) | 33 | apt_install('python-jinja2', fatal=True) |
2319 | 33 | from jinja2 import FileSystemLoader, ChoiceLoader, Environment, exceptions | 34 | from jinja2 import FileSystemLoader, ChoiceLoader, Environment, exceptions |
2320 | 34 | 35 | ||
2321 | @@ -112,7 +113,7 @@ | |||
2322 | 112 | 113 | ||
2323 | 113 | def complete_contexts(self): | 114 | def complete_contexts(self): |
2324 | 114 | ''' | 115 | ''' |
2326 | 115 | Return a list of interfaces that have atisfied contexts. | 116 | Return a list of interfaces that have satisfied contexts. |
2327 | 116 | ''' | 117 | ''' |
2328 | 117 | if self._complete_contexts: | 118 | if self._complete_contexts: |
2329 | 118 | return self._complete_contexts | 119 | return self._complete_contexts |
2330 | @@ -293,3 +294,30 @@ | |||
2331 | 293 | [interfaces.extend(i.complete_contexts()) | 294 | [interfaces.extend(i.complete_contexts()) |
2332 | 294 | for i in six.itervalues(self.templates)] | 295 | for i in six.itervalues(self.templates)] |
2333 | 295 | return interfaces | 296 | return interfaces |
2334 | 297 | |||
2335 | 298 | def get_incomplete_context_data(self, interfaces): | ||
2336 | 299 | ''' | ||
2337 | 300 | Return dictionary of relation status of interfaces and any missing | ||
2338 | 301 | required context data. Example: | ||
2339 | 302 | {'amqp': {'missing_data': ['rabbitmq_password'], 'related': True}, | ||
2340 | 303 | 'zeromq-configuration': {'related': False}} | ||
2341 | 304 | ''' | ||
2342 | 305 | incomplete_context_data = {} | ||
2343 | 306 | |||
2344 | 307 | for i in six.itervalues(self.templates): | ||
2345 | 308 | for context in i.contexts: | ||
2346 | 309 | for interface in interfaces: | ||
2347 | 310 | related = False | ||
2348 | 311 | if interface in context.interfaces: | ||
2349 | 312 | related = context.get_related() | ||
2350 | 313 | missing_data = context.missing_data | ||
2351 | 314 | if missing_data: | ||
2352 | 315 | incomplete_context_data[interface] = {'missing_data': missing_data} | ||
2353 | 316 | if related: | ||
2354 | 317 | if incomplete_context_data.get(interface): | ||
2355 | 318 | incomplete_context_data[interface].update({'related': True}) | ||
2356 | 319 | else: | ||
2357 | 320 | incomplete_context_data[interface] = {'related': True} | ||
2358 | 321 | else: | ||
2359 | 322 | incomplete_context_data[interface] = {'related': False} | ||
2360 | 323 | return incomplete_context_data | ||
2361 | 296 | 324 | ||
2362 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' | |||
2363 | --- hooks/charmhelpers/contrib/openstack/utils.py 2015-09-14 20:23:58 +0000 | |||
2364 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2016-02-18 14:28:13 +0000 | |||
2365 | @@ -26,6 +26,7 @@ | |||
2366 | 26 | 26 | ||
2367 | 27 | import six | 27 | import six |
2368 | 28 | import traceback | 28 | import traceback |
2369 | 29 | import uuid | ||
2370 | 29 | import yaml | 30 | import yaml |
2371 | 30 | 31 | ||
2372 | 31 | from charmhelpers.contrib.network import ip | 32 | from charmhelpers.contrib.network import ip |
2373 | @@ -41,8 +42,11 @@ | |||
2374 | 41 | log as juju_log, | 42 | log as juju_log, |
2375 | 42 | charm_dir, | 43 | charm_dir, |
2376 | 43 | INFO, | 44 | INFO, |
2377 | 45 | related_units, | ||
2378 | 44 | relation_ids, | 46 | relation_ids, |
2380 | 45 | relation_set | 47 | relation_set, |
2381 | 48 | status_set, | ||
2382 | 49 | hook_name | ||
2383 | 46 | ) | 50 | ) |
2384 | 47 | 51 | ||
2385 | 48 | from charmhelpers.contrib.storage.linux.lvm import ( | 52 | from charmhelpers.contrib.storage.linux.lvm import ( |
2386 | @@ -52,7 +56,8 @@ | |||
2387 | 52 | ) | 56 | ) |
2388 | 53 | 57 | ||
2389 | 54 | from charmhelpers.contrib.network.ip import ( | 58 | from charmhelpers.contrib.network.ip import ( |
2391 | 55 | get_ipv6_addr | 59 | get_ipv6_addr, |
2392 | 60 | is_ipv6, | ||
2393 | 56 | ) | 61 | ) |
2394 | 57 | 62 | ||
2395 | 58 | from charmhelpers.contrib.python.packages import ( | 63 | from charmhelpers.contrib.python.packages import ( |
2396 | @@ -81,6 +86,7 @@ | |||
2397 | 81 | ('utopic', 'juno'), | 86 | ('utopic', 'juno'), |
2398 | 82 | ('vivid', 'kilo'), | 87 | ('vivid', 'kilo'), |
2399 | 83 | ('wily', 'liberty'), | 88 | ('wily', 'liberty'), |
2400 | 89 | ('xenial', 'mitaka'), | ||
2401 | 84 | ]) | 90 | ]) |
2402 | 85 | 91 | ||
2403 | 86 | 92 | ||
2404 | @@ -94,6 +100,7 @@ | |||
2405 | 94 | ('2014.2', 'juno'), | 100 | ('2014.2', 'juno'), |
2406 | 95 | ('2015.1', 'kilo'), | 101 | ('2015.1', 'kilo'), |
2407 | 96 | ('2015.2', 'liberty'), | 102 | ('2015.2', 'liberty'), |
2408 | 103 | ('2016.1', 'mitaka'), | ||
2409 | 97 | ]) | 104 | ]) |
2410 | 98 | 105 | ||
2411 | 99 | # The ugly duckling | 106 | # The ugly duckling |
2412 | @@ -118,36 +125,46 @@ | |||
2413 | 118 | ('2.2.2', 'kilo'), | 125 | ('2.2.2', 'kilo'), |
2414 | 119 | ('2.3.0', 'liberty'), | 126 | ('2.3.0', 'liberty'), |
2415 | 120 | ('2.4.0', 'liberty'), | 127 | ('2.4.0', 'liberty'), |
2416 | 128 | ('2.5.0', 'liberty'), | ||
2417 | 121 | ]) | 129 | ]) |
2418 | 122 | 130 | ||
2419 | 123 | # >= Liberty version->codename mapping | 131 | # >= Liberty version->codename mapping |
2420 | 124 | PACKAGE_CODENAMES = { | 132 | PACKAGE_CODENAMES = { |
2421 | 125 | 'nova-common': OrderedDict([ | 133 | 'nova-common': OrderedDict([ |
2423 | 126 | ('12.0.0', 'liberty'), | 134 | ('12.0', 'liberty'), |
2424 | 135 | ('13.0', 'mitaka'), | ||
2425 | 127 | ]), | 136 | ]), |
2426 | 128 | 'neutron-common': OrderedDict([ | 137 | 'neutron-common': OrderedDict([ |
2428 | 129 | ('7.0.0', 'liberty'), | 138 | ('7.0', 'liberty'), |
2429 | 139 | ('8.0', 'mitaka'), | ||
2430 | 130 | ]), | 140 | ]), |
2431 | 131 | 'cinder-common': OrderedDict([ | 141 | 'cinder-common': OrderedDict([ |
2433 | 132 | ('7.0.0', 'liberty'), | 142 | ('7.0', 'liberty'), |
2434 | 143 | ('8.0', 'mitaka'), | ||
2435 | 133 | ]), | 144 | ]), |
2436 | 134 | 'keystone': OrderedDict([ | 145 | 'keystone': OrderedDict([ |
2438 | 135 | ('8.0.0', 'liberty'), | 146 | ('8.0', 'liberty'), |
2439 | 147 | ('9.0', 'mitaka'), | ||
2440 | 136 | ]), | 148 | ]), |
2441 | 137 | 'horizon-common': OrderedDict([ | 149 | 'horizon-common': OrderedDict([ |
2443 | 138 | ('8.0.0', 'liberty'), | 150 | ('8.0', 'liberty'), |
2444 | 151 | ('9.0', 'mitaka'), | ||
2445 | 139 | ]), | 152 | ]), |
2446 | 140 | 'ceilometer-common': OrderedDict([ | 153 | 'ceilometer-common': OrderedDict([ |
2448 | 141 | ('5.0.0', 'liberty'), | 154 | ('5.0', 'liberty'), |
2449 | 155 | ('6.0', 'mitaka'), | ||
2450 | 142 | ]), | 156 | ]), |
2451 | 143 | 'heat-common': OrderedDict([ | 157 | 'heat-common': OrderedDict([ |
2453 | 144 | ('5.0.0', 'liberty'), | 158 | ('5.0', 'liberty'), |
2454 | 159 | ('6.0', 'mitaka'), | ||
2455 | 145 | ]), | 160 | ]), |
2456 | 146 | 'glance-common': OrderedDict([ | 161 | 'glance-common': OrderedDict([ |
2458 | 147 | ('11.0.0', 'liberty'), | 162 | ('11.0', 'liberty'), |
2459 | 163 | ('12.0', 'mitaka'), | ||
2460 | 148 | ]), | 164 | ]), |
2461 | 149 | 'openstack-dashboard': OrderedDict([ | 165 | 'openstack-dashboard': OrderedDict([ |
2463 | 150 | ('8.0.0', 'liberty'), | 166 | ('8.0', 'liberty'), |
2464 | 167 | ('9.0', 'mitaka'), | ||
2465 | 151 | ]), | 168 | ]), |
2466 | 152 | } | 169 | } |
2467 | 153 | 170 | ||
2468 | @@ -234,7 +251,14 @@ | |||
2469 | 234 | error_out(e) | 251 | error_out(e) |
2470 | 235 | 252 | ||
2471 | 236 | vers = apt.upstream_version(pkg.current_ver.ver_str) | 253 | vers = apt.upstream_version(pkg.current_ver.ver_str) |
2473 | 237 | match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) | 254 | if 'swift' in pkg.name: |
2474 | 255 | # Fully x.y.z match for swift versions | ||
2475 | 256 | match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) | ||
2476 | 257 | else: | ||
2477 | 258 | # x.y match only for 20XX.X | ||
2478 | 259 | # and ignore patch level for other packages | ||
2479 | 260 | match = re.match('^(\d+)\.(\d+)', vers) | ||
2480 | 261 | |||
2481 | 238 | if match: | 262 | if match: |
2482 | 239 | vers = match.group(0) | 263 | vers = match.group(0) |
2483 | 240 | 264 | ||
2484 | @@ -246,13 +270,8 @@ | |||
2485 | 246 | # < Liberty co-ordinated project versions | 270 | # < Liberty co-ordinated project versions |
2486 | 247 | try: | 271 | try: |
2487 | 248 | if 'swift' in pkg.name: | 272 | if 'swift' in pkg.name: |
2493 | 249 | swift_vers = vers[:5] | 273 | return SWIFT_CODENAMES[vers] |
2489 | 250 | if swift_vers not in SWIFT_CODENAMES: | ||
2490 | 251 | # Deal with 1.10.0 upward | ||
2491 | 252 | swift_vers = vers[:6] | ||
2492 | 253 | return SWIFT_CODENAMES[swift_vers] | ||
2494 | 254 | else: | 274 | else: |
2495 | 255 | vers = vers[:6] | ||
2496 | 256 | return OPENSTACK_CODENAMES[vers] | 275 | return OPENSTACK_CODENAMES[vers] |
2497 | 257 | except KeyError: | 276 | except KeyError: |
2498 | 258 | if not fatal: | 277 | if not fatal: |
2499 | @@ -371,6 +390,9 @@ | |||
2500 | 371 | 'liberty': 'trusty-updates/liberty', | 390 | 'liberty': 'trusty-updates/liberty', |
2501 | 372 | 'liberty/updates': 'trusty-updates/liberty', | 391 | 'liberty/updates': 'trusty-updates/liberty', |
2502 | 373 | 'liberty/proposed': 'trusty-proposed/liberty', | 392 | 'liberty/proposed': 'trusty-proposed/liberty', |
2503 | 393 | 'mitaka': 'trusty-updates/mitaka', | ||
2504 | 394 | 'mitaka/updates': 'trusty-updates/mitaka', | ||
2505 | 395 | 'mitaka/proposed': 'trusty-proposed/mitaka', | ||
2506 | 374 | } | 396 | } |
2507 | 375 | 397 | ||
2508 | 376 | try: | 398 | try: |
2509 | @@ -517,6 +539,12 @@ | |||
2510 | 517 | relation_prefix=None): | 539 | relation_prefix=None): |
2511 | 518 | hosts = get_ipv6_addr(dynamic_only=False) | 540 | hosts = get_ipv6_addr(dynamic_only=False) |
2512 | 519 | 541 | ||
2513 | 542 | if config('vip'): | ||
2514 | 543 | vips = config('vip').split() | ||
2515 | 544 | for vip in vips: | ||
2516 | 545 | if vip and is_ipv6(vip): | ||
2517 | 546 | hosts.append(vip) | ||
2518 | 547 | |||
2519 | 520 | kwargs = {'database': database, | 548 | kwargs = {'database': database, |
2520 | 521 | 'username': database_user, | 549 | 'username': database_user, |
2521 | 522 | 'hostname': json.dumps(hosts)} | 550 | 'hostname': json.dumps(hosts)} |
2522 | @@ -565,7 +593,7 @@ | |||
2523 | 565 | return yaml.load(projects_yaml) | 593 | return yaml.load(projects_yaml) |
2524 | 566 | 594 | ||
2525 | 567 | 595 | ||
2527 | 568 | def git_clone_and_install(projects_yaml, core_project, depth=1): | 596 | def git_clone_and_install(projects_yaml, core_project): |
2528 | 569 | """ | 597 | """ |
2529 | 570 | Clone/install all specified OpenStack repositories. | 598 | Clone/install all specified OpenStack repositories. |
2530 | 571 | 599 | ||
2531 | @@ -615,6 +643,9 @@ | |||
2532 | 615 | for p in projects['repositories']: | 643 | for p in projects['repositories']: |
2533 | 616 | repo = p['repository'] | 644 | repo = p['repository'] |
2534 | 617 | branch = p['branch'] | 645 | branch = p['branch'] |
2535 | 646 | depth = '1' | ||
2536 | 647 | if 'depth' in p.keys(): | ||
2537 | 648 | depth = p['depth'] | ||
2538 | 618 | if p['name'] == 'requirements': | 649 | if p['name'] == 'requirements': |
2539 | 619 | repo_dir = _git_clone_and_install_single(repo, branch, depth, | 650 | repo_dir = _git_clone_and_install_single(repo, branch, depth, |
2540 | 620 | parent_dir, http_proxy, | 651 | parent_dir, http_proxy, |
2541 | @@ -659,19 +690,13 @@ | |||
2542 | 659 | """ | 690 | """ |
2543 | 660 | Clone and install a single git repository. | 691 | Clone and install a single git repository. |
2544 | 661 | """ | 692 | """ |
2545 | 662 | dest_dir = os.path.join(parent_dir, os.path.basename(repo)) | ||
2546 | 663 | |||
2547 | 664 | if not os.path.exists(parent_dir): | 693 | if not os.path.exists(parent_dir): |
2548 | 665 | juju_log('Directory already exists at {}. ' | 694 | juju_log('Directory already exists at {}. ' |
2549 | 666 | 'No need to create directory.'.format(parent_dir)) | 695 | 'No need to create directory.'.format(parent_dir)) |
2550 | 667 | os.mkdir(parent_dir) | 696 | os.mkdir(parent_dir) |
2551 | 668 | 697 | ||
2558 | 669 | if not os.path.exists(dest_dir): | 698 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
2559 | 670 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) | 699 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth) |
2554 | 671 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch, | ||
2555 | 672 | depth=depth) | ||
2556 | 673 | else: | ||
2557 | 674 | repo_dir = dest_dir | ||
2560 | 675 | 700 | ||
2561 | 676 | venv = os.path.join(parent_dir, 'venv') | 701 | venv = os.path.join(parent_dir, 'venv') |
2562 | 677 | 702 | ||
2563 | @@ -754,6 +779,178 @@ | |||
2564 | 754 | return None | 779 | return None |
2565 | 755 | 780 | ||
2566 | 756 | 781 | ||
2567 | 782 | def os_workload_status(configs, required_interfaces, charm_func=None): | ||
2568 | 783 | """ | ||
2569 | 784 | Decorator to set workload status based on complete contexts | ||
2570 | 785 | """ | ||
2571 | 786 | def wrap(f): | ||
2572 | 787 | @wraps(f) | ||
2573 | 788 | def wrapped_f(*args, **kwargs): | ||
2574 | 789 | # Run the original function first | ||
2575 | 790 | f(*args, **kwargs) | ||
2576 | 791 | # Set workload status now that contexts have been | ||
2577 | 792 | # acted on | ||
2578 | 793 | set_os_workload_status(configs, required_interfaces, charm_func) | ||
2579 | 794 | return wrapped_f | ||
2580 | 795 | return wrap | ||
2581 | 796 | |||
2582 | 797 | |||
2583 | 798 | def set_os_workload_status(configs, required_interfaces, charm_func=None): | ||
2584 | 799 | """ | ||
2585 | 800 | Set workload status based on complete contexts. | ||
2586 | 801 | status-set missing or incomplete contexts | ||
2587 | 802 | and juju-log details of missing required data. | ||
2588 | 803 | charm_func is a charm specific function to run checking | ||
2589 | 804 | for charm specific requirements such as a VIP setting. | ||
2590 | 805 | """ | ||
2591 | 806 | incomplete_rel_data = incomplete_relation_data(configs, required_interfaces) | ||
2592 | 807 | state = 'active' | ||
2593 | 808 | missing_relations = [] | ||
2594 | 809 | incomplete_relations = [] | ||
2595 | 810 | message = None | ||
2596 | 811 | charm_state = None | ||
2597 | 812 | charm_message = None | ||
2598 | 813 | |||
2599 | 814 | for generic_interface in incomplete_rel_data.keys(): | ||
2600 | 815 | related_interface = None | ||
2601 | 816 | missing_data = {} | ||
2602 | 817 | # Related or not? | ||
2603 | 818 | for interface in incomplete_rel_data[generic_interface]: | ||
2604 | 819 | if incomplete_rel_data[generic_interface][interface].get('related'): | ||
2605 | 820 | related_interface = interface | ||
2606 | 821 | missing_data = incomplete_rel_data[generic_interface][interface].get('missing_data') | ||
2607 | 822 | # No relation ID for the generic_interface | ||
2608 | 823 | if not related_interface: | ||
2609 | 824 | juju_log("{} relation is missing and must be related for " | ||
2610 | 825 | "functionality. ".format(generic_interface), 'WARN') | ||
2611 | 826 | state = 'blocked' | ||
2612 | 827 | if generic_interface not in missing_relations: | ||
2613 | 828 | missing_relations.append(generic_interface) | ||
2614 | 829 | else: | ||
2615 | 830 | # Relation ID exists but no related unit | ||
2616 | 831 | if not missing_data: | ||
2617 | 832 | # Edge case relation ID exists but departing | ||
2618 | 833 | if ('departed' in hook_name() or 'broken' in hook_name()) \ | ||
2619 | 834 | and related_interface in hook_name(): | ||
2620 | 835 | state = 'blocked' | ||
2621 | 836 | if generic_interface not in missing_relations: | ||
2622 | 837 | missing_relations.append(generic_interface) | ||
2623 | 838 | juju_log("{} relation's interface, {}, " | ||
2624 | 839 | "relationship is departed or broken " | ||
2625 | 840 | "and is required for functionality." | ||
2626 | 841 | "".format(generic_interface, related_interface), "WARN") | ||
2627 | 842 | # Normal case relation ID exists but no related unit | ||
2628 | 843 | # (joining) | ||
2629 | 844 | else: | ||
2630 | 845 | juju_log("{} relations's interface, {}, is related but has " | ||
2631 | 846 | "no units in the relation." | ||
2632 | 847 | "".format(generic_interface, related_interface), "INFO") | ||
2633 | 848 | # Related unit exists and data missing on the relation | ||
2634 | 849 | else: | ||
2635 | 850 | juju_log("{} relation's interface, {}, is related awaiting " | ||
2636 | 851 | "the following data from the relationship: {}. " | ||
2637 | 852 | "".format(generic_interface, related_interface, | ||
2638 | 853 | ", ".join(missing_data)), "INFO") | ||
2639 | 854 | if state != 'blocked': | ||
2640 | 855 | state = 'waiting' | ||
2641 | 856 | if generic_interface not in incomplete_relations \ | ||
2642 | 857 | and generic_interface not in missing_relations: | ||
2643 | 858 | incomplete_relations.append(generic_interface) | ||
2644 | 859 | |||
2645 | 860 | if missing_relations: | ||
2646 | 861 | message = "Missing relations: {}".format(", ".join(missing_relations)) | ||
2647 | 862 | if incomplete_relations: | ||
2648 | 863 | message += "; incomplete relations: {}" \ | ||
2649 | 864 | "".format(", ".join(incomplete_relations)) | ||
2650 | 865 | state = 'blocked' | ||
2651 | 866 | elif incomplete_relations: | ||
2652 | 867 | message = "Incomplete relations: {}" \ | ||
2653 | 868 | "".format(", ".join(incomplete_relations)) | ||
2654 | 869 | state = 'waiting' | ||
2655 | 870 | |||
2656 | 871 | # Run charm specific checks | ||
2657 | 872 | if charm_func: | ||
2658 | 873 | charm_state, charm_message = charm_func(configs) | ||
2659 | 874 | if charm_state != 'active' and charm_state != 'unknown': | ||
2660 | 875 | state = workload_state_compare(state, charm_state) | ||
2661 | 876 | if message: | ||
2662 | 877 | charm_message = charm_message.replace("Incomplete relations: ", | ||
2663 | 878 | "") | ||
2664 | 879 | message = "{}, {}".format(message, charm_message) | ||
2665 | 880 | else: | ||
2666 | 881 | message = charm_message | ||
2667 | 882 | |||
2668 | 883 | # Set to active if all requirements have been met | ||
2669 | 884 | if state == 'active': | ||
2670 | 885 | message = "Unit is ready" | ||
2671 | 886 | juju_log(message, "INFO") | ||
2672 | 887 | |||
2673 | 888 | status_set(state, message) | ||
2674 | 889 | |||
2675 | 890 | |||
2676 | 891 | def workload_state_compare(current_workload_state, workload_state): | ||
2677 | 892 | """ Return highest priority of two states""" | ||
2678 | 893 | hierarchy = {'unknown': -1, | ||
2679 | 894 | 'active': 0, | ||
2680 | 895 | 'maintenance': 1, | ||
2681 | 896 | 'waiting': 2, | ||
2682 | 897 | 'blocked': 3, | ||
2683 | 898 | } | ||
2684 | 899 | |||
2685 | 900 | if hierarchy.get(workload_state) is None: | ||
2686 | 901 | workload_state = 'unknown' | ||
2687 | 902 | if hierarchy.get(current_workload_state) is None: | ||
2688 | 903 | current_workload_state = 'unknown' | ||
2689 | 904 | |||
2690 | 905 | # Set workload_state based on hierarchy of statuses | ||
2691 | 906 | if hierarchy.get(current_workload_state) > hierarchy.get(workload_state): | ||
2692 | 907 | return current_workload_state | ||
2693 | 908 | else: | ||
2694 | 909 | return workload_state | ||
2695 | 910 | |||
2696 | 911 | |||
2697 | 912 | def incomplete_relation_data(configs, required_interfaces): | ||
2698 | 913 | """ | ||
2699 | 914 | Check complete contexts against required_interfaces | ||
2700 | 915 | Return dictionary of incomplete relation data. | ||
2701 | 916 | |||
2702 | 917 | configs is an OSConfigRenderer object with configs registered | ||
2703 | 918 | |||
2704 | 919 | required_interfaces is a dictionary of required general interfaces | ||
2705 | 920 | with dictionary values of possible specific interfaces. | ||
2706 | 921 | Example: | ||
2707 | 922 | required_interfaces = {'database': ['shared-db', 'pgsql-db']} | ||
2708 | 923 | |||
2709 | 924 | The interface is said to be satisfied if anyone of the interfaces in the | ||
2710 | 925 | list has a complete context. | ||
2711 | 926 | |||
2712 | 927 | Return dictionary of incomplete or missing required contexts with relation | ||
2713 | 928 | status of interfaces and any missing data points. Example: | ||
2714 | 929 | {'message': | ||
2715 | 930 | {'amqp': {'missing_data': ['rabbitmq_password'], 'related': True}, | ||
2716 | 931 | 'zeromq-configuration': {'related': False}}, | ||
2717 | 932 | 'identity': | ||
2718 | 933 | {'identity-service': {'related': False}}, | ||
2719 | 934 | 'database': | ||
2720 | 935 | {'pgsql-db': {'related': False}, | ||
2721 | 936 | 'shared-db': {'related': True}}} | ||
2722 | 937 | """ | ||
2723 | 938 | complete_ctxts = configs.complete_contexts() | ||
2724 | 939 | incomplete_relations = [] | ||
2725 | 940 | for svc_type in required_interfaces.keys(): | ||
2726 | 941 | # Avoid duplicates | ||
2727 | 942 | found_ctxt = False | ||
2728 | 943 | for interface in required_interfaces[svc_type]: | ||
2729 | 944 | if interface in complete_ctxts: | ||
2730 | 945 | found_ctxt = True | ||
2731 | 946 | if not found_ctxt: | ||
2732 | 947 | incomplete_relations.append(svc_type) | ||
2733 | 948 | incomplete_context_data = {} | ||
2734 | 949 | for i in incomplete_relations: | ||
2735 | 950 | incomplete_context_data[i] = configs.get_incomplete_context_data(required_interfaces[i]) | ||
2736 | 951 | return incomplete_context_data | ||
2737 | 952 | |||
2738 | 953 | |||
2739 | 757 | def do_action_openstack_upgrade(package, upgrade_callback, configs): | 954 | def do_action_openstack_upgrade(package, upgrade_callback, configs): |
2740 | 758 | """Perform action-managed OpenStack upgrade. | 955 | """Perform action-managed OpenStack upgrade. |
2741 | 759 | 956 | ||
2742 | @@ -796,3 +993,19 @@ | |||
2743 | 796 | action_set({'outcome': 'no upgrade available.'}) | 993 | action_set({'outcome': 'no upgrade available.'}) |
2744 | 797 | 994 | ||
2745 | 798 | return ret | 995 | return ret |
2746 | 996 | |||
2747 | 997 | |||
2748 | 998 | def remote_restart(rel_name, remote_service=None): | ||
2749 | 999 | trigger = { | ||
2750 | 1000 | 'restart-trigger': str(uuid.uuid4()), | ||
2751 | 1001 | } | ||
2752 | 1002 | if remote_service: | ||
2753 | 1003 | trigger['remote-service'] = remote_service | ||
2754 | 1004 | for rid in relation_ids(rel_name): | ||
2755 | 1005 | # This subordinate can be related to two seperate services using | ||
2756 | 1006 | # different subordinate relations so only issue the restart if | ||
2757 | 1007 | # the principle is conencted down the relation we think it is | ||
2758 | 1008 | if related_units(relid=rid): | ||
2759 | 1009 | relation_set(relation_id=rid, | ||
2760 | 1010 | relation_settings=trigger, | ||
2761 | 1011 | ) | ||
2762 | 799 | 1012 | ||
2763 | === modified file 'hooks/charmhelpers/contrib/python/packages.py' | |||
2764 | --- hooks/charmhelpers/contrib/python/packages.py 2015-06-24 19:07:21 +0000 | |||
2765 | +++ hooks/charmhelpers/contrib/python/packages.py 2016-02-18 14:28:13 +0000 | |||
2766 | @@ -42,8 +42,12 @@ | |||
2767 | 42 | yield "--{0}={1}".format(key, value) | 42 | yield "--{0}={1}".format(key, value) |
2768 | 43 | 43 | ||
2769 | 44 | 44 | ||
2772 | 45 | def pip_install_requirements(requirements, **options): | 45 | def pip_install_requirements(requirements, constraints=None, **options): |
2773 | 46 | """Install a requirements file """ | 46 | """Install a requirements file. |
2774 | 47 | |||
2775 | 48 | :param constraints: Path to pip constraints file. | ||
2776 | 49 | http://pip.readthedocs.org/en/stable/user_guide/#constraints-files | ||
2777 | 50 | """ | ||
2778 | 47 | command = ["install"] | 51 | command = ["install"] |
2779 | 48 | 52 | ||
2780 | 49 | available_options = ('proxy', 'src', 'log', ) | 53 | available_options = ('proxy', 'src', 'log', ) |
2781 | @@ -51,8 +55,13 @@ | |||
2782 | 51 | command.append(option) | 55 | command.append(option) |
2783 | 52 | 56 | ||
2784 | 53 | command.append("-r {0}".format(requirements)) | 57 | command.append("-r {0}".format(requirements)) |
2787 | 54 | log("Installing from file: {} with options: {}".format(requirements, | 58 | if constraints: |
2788 | 55 | command)) | 59 | command.append("-c {0}".format(constraints)) |
2789 | 60 | log("Installing from file: {} with constraints {} " | ||
2790 | 61 | "and options: {}".format(requirements, constraints, command)) | ||
2791 | 62 | else: | ||
2792 | 63 | log("Installing from file: {} with options: {}".format(requirements, | ||
2793 | 64 | command)) | ||
2794 | 56 | pip_execute(command) | 65 | pip_execute(command) |
2795 | 57 | 66 | ||
2796 | 58 | 67 | ||
2797 | 59 | 68 | ||
2798 | === modified file 'hooks/charmhelpers/contrib/storage/linux/ceph.py' | |||
2799 | --- hooks/charmhelpers/contrib/storage/linux/ceph.py 2015-07-17 13:24:05 +0000 | |||
2800 | +++ hooks/charmhelpers/contrib/storage/linux/ceph.py 2016-02-18 14:28:13 +0000 | |||
2801 | @@ -23,11 +23,14 @@ | |||
2802 | 23 | # James Page <james.page@ubuntu.com> | 23 | # James Page <james.page@ubuntu.com> |
2803 | 24 | # Adam Gandelman <adamg@ubuntu.com> | 24 | # Adam Gandelman <adamg@ubuntu.com> |
2804 | 25 | # | 25 | # |
2805 | 26 | import bisect | ||
2806 | 27 | import six | ||
2807 | 26 | 28 | ||
2808 | 27 | import os | 29 | import os |
2809 | 28 | import shutil | 30 | import shutil |
2810 | 29 | import json | 31 | import json |
2811 | 30 | import time | 32 | import time |
2812 | 33 | import uuid | ||
2813 | 31 | 34 | ||
2814 | 32 | from subprocess import ( | 35 | from subprocess import ( |
2815 | 33 | check_call, | 36 | check_call, |
2816 | @@ -35,8 +38,10 @@ | |||
2817 | 35 | CalledProcessError, | 38 | CalledProcessError, |
2818 | 36 | ) | 39 | ) |
2819 | 37 | from charmhelpers.core.hookenv import ( | 40 | from charmhelpers.core.hookenv import ( |
2820 | 41 | local_unit, | ||
2821 | 38 | relation_get, | 42 | relation_get, |
2822 | 39 | relation_ids, | 43 | relation_ids, |
2823 | 44 | relation_set, | ||
2824 | 40 | related_units, | 45 | related_units, |
2825 | 41 | log, | 46 | log, |
2826 | 42 | DEBUG, | 47 | DEBUG, |
2827 | @@ -56,6 +61,8 @@ | |||
2828 | 56 | apt_install, | 61 | apt_install, |
2829 | 57 | ) | 62 | ) |
2830 | 58 | 63 | ||
2831 | 64 | from charmhelpers.core.kernel import modprobe | ||
2832 | 65 | |||
2833 | 59 | KEYRING = '/etc/ceph/ceph.client.{}.keyring' | 66 | KEYRING = '/etc/ceph/ceph.client.{}.keyring' |
2834 | 60 | KEYFILE = '/etc/ceph/ceph.client.{}.key' | 67 | KEYFILE = '/etc/ceph/ceph.client.{}.key' |
2835 | 61 | 68 | ||
2836 | @@ -67,6 +74,394 @@ | |||
2837 | 67 | err to syslog = {use_syslog} | 74 | err to syslog = {use_syslog} |
2838 | 68 | clog to syslog = {use_syslog} | 75 | clog to syslog = {use_syslog} |
2839 | 69 | """ | 76 | """ |
2840 | 77 | # For 50 < osds < 240,000 OSDs (Roughly 1 Exabyte at 6T OSDs) | ||
2841 | 78 | powers_of_two = [8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608] | ||
2842 | 79 | |||
2843 | 80 | |||
2844 | 81 | def validator(value, valid_type, valid_range=None): | ||
2845 | 82 | """ | ||
2846 | 83 | Used to validate these: http://docs.ceph.com/docs/master/rados/operations/pools/#set-pool-values | ||
2847 | 84 | Example input: | ||
2848 | 85 | validator(value=1, | ||
2849 | 86 | valid_type=int, | ||
2850 | 87 | valid_range=[0, 2]) | ||
2851 | 88 | This says I'm testing value=1. It must be an int inclusive in [0,2] | ||
2852 | 89 | |||
2853 | 90 | :param value: The value to validate | ||
2854 | 91 | :param valid_type: The type that value should be. | ||
2855 | 92 | :param valid_range: A range of values that value can assume. | ||
2856 | 93 | :return: | ||
2857 | 94 | """ | ||
2858 | 95 | assert isinstance(value, valid_type), "{} is not a {}".format( | ||
2859 | 96 | value, | ||
2860 | 97 | valid_type) | ||
2861 | 98 | if valid_range is not None: | ||
2862 | 99 | assert isinstance(valid_range, list), \ | ||
2863 | 100 | "valid_range must be a list, was given {}".format(valid_range) | ||
2864 | 101 | # If we're dealing with strings | ||
2865 | 102 | if valid_type is six.string_types: | ||
2866 | 103 | assert value in valid_range, \ | ||
2867 | 104 | "{} is not in the list {}".format(value, valid_range) | ||
2868 | 105 | # Integer, float should have a min and max | ||
2869 | 106 | else: | ||
2870 | 107 | if len(valid_range) != 2: | ||
2871 | 108 | raise ValueError( | ||
2872 | 109 | "Invalid valid_range list of {} for {}. " | ||
2873 | 110 | "List must be [min,max]".format(valid_range, value)) | ||
2874 | 111 | assert value >= valid_range[0], \ | ||
2875 | 112 | "{} is less than minimum allowed value of {}".format( | ||
2876 | 113 | value, valid_range[0]) | ||
2877 | 114 | assert value <= valid_range[1], \ | ||
2878 | 115 | "{} is greater than maximum allowed value of {}".format( | ||
2879 | 116 | value, valid_range[1]) | ||
2880 | 117 | |||
2881 | 118 | |||
2882 | 119 | class PoolCreationError(Exception): | ||
2883 | 120 | """ | ||
2884 | 121 | A custom error to inform the caller that a pool creation failed. Provides an error message | ||
2885 | 122 | """ | ||
2886 | 123 | def __init__(self, message): | ||
2887 | 124 | super(PoolCreationError, self).__init__(message) | ||
2888 | 125 | |||
2889 | 126 | |||
2890 | 127 | class Pool(object): | ||
2891 | 128 | """ | ||
2892 | 129 | An object oriented approach to Ceph pool creation. This base class is inherited by ReplicatedPool and ErasurePool. | ||
2893 | 130 | Do not call create() on this base class as it will not do anything. Instantiate a child class and call create(). | ||
2894 | 131 | """ | ||
2895 | 132 | def __init__(self, service, name): | ||
2896 | 133 | self.service = service | ||
2897 | 134 | self.name = name | ||
2898 | 135 | |||
2899 | 136 | # Create the pool if it doesn't exist already | ||
2900 | 137 | # To be implemented by subclasses | ||
2901 | 138 | def create(self): | ||
2902 | 139 | pass | ||
2903 | 140 | |||
2904 | 141 | def add_cache_tier(self, cache_pool, mode): | ||
2905 | 142 | """ | ||
2906 | 143 | Adds a new cache tier to an existing pool. | ||
2907 | 144 | :param cache_pool: six.string_types. The cache tier pool name to add. | ||
2908 | 145 | :param mode: six.string_types. The caching mode to use for this pool. valid range = ["readonly", "writeback"] | ||
2909 | 146 | :return: None | ||
2910 | 147 | """ | ||
2911 | 148 | # Check the input types and values | ||
2912 | 149 | validator(value=cache_pool, valid_type=six.string_types) | ||
2913 | 150 | validator(value=mode, valid_type=six.string_types, valid_range=["readonly", "writeback"]) | ||
2914 | 151 | |||
2915 | 152 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'add', self.name, cache_pool]) | ||
2916 | 153 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'cache-mode', cache_pool, mode]) | ||
2917 | 154 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'set-overlay', self.name, cache_pool]) | ||
2918 | 155 | check_call(['ceph', '--id', self.service, 'osd', 'pool', 'set', cache_pool, 'hit_set_type', 'bloom']) | ||
2919 | 156 | |||
2920 | 157 | def remove_cache_tier(self, cache_pool): | ||
2921 | 158 | """ | ||
2922 | 159 | Removes a cache tier from Ceph. Flushes all dirty objects from writeback pools and waits for that to complete. | ||
2923 | 160 | :param cache_pool: six.string_types. The cache tier pool name to remove. | ||
2924 | 161 | :return: None | ||
2925 | 162 | """ | ||
2926 | 163 | # read-only is easy, writeback is much harder | ||
2927 | 164 | mode = get_cache_mode(cache_pool) | ||
2928 | 165 | if mode == 'readonly': | ||
2929 | 166 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'cache-mode', cache_pool, 'none']) | ||
2930 | 167 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove', self.name, cache_pool]) | ||
2931 | 168 | |||
2932 | 169 | elif mode == 'writeback': | ||
2933 | 170 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'cache-mode', cache_pool, 'forward']) | ||
2934 | 171 | # Flush the cache and wait for it to return | ||
2935 | 172 | check_call(['ceph', '--id', self.service, '-p', cache_pool, 'cache-flush-evict-all']) | ||
2936 | 173 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove-overlay', self.name]) | ||
2937 | 174 | check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove', self.name, cache_pool]) | ||
2938 | 175 | |||
2939 | 176 | def get_pgs(self, pool_size): | ||
2940 | 177 | """ | ||
2941 | 178 | :param pool_size: int. pool_size is either the number of replicas for replicated pools or the K+M sum for | ||
2942 | 179 | erasure coded pools | ||
2943 | 180 | :return: int. The number of pgs to use. | ||
2944 | 181 | """ | ||
2945 | 182 | validator(value=pool_size, valid_type=int) | ||
2946 | 183 | osds = get_osds(self.service) | ||
2947 | 184 | if not osds: | ||
2948 | 185 | # NOTE(james-page): Default to 200 for older ceph versions | ||
2949 | 186 | # which don't support OSD query from cli | ||
2950 | 187 | return 200 | ||
2951 | 188 | |||
2952 | 189 | # Calculate based on Ceph best practices | ||
2953 | 190 | if osds < 5: | ||
2954 | 191 | return 128 | ||
2955 | 192 | elif 5 < osds < 10: | ||
2956 | 193 | return 512 | ||
2957 | 194 | elif 10 < osds < 50: | ||
2958 | 195 | return 4096 | ||
2959 | 196 | else: | ||
2960 | 197 | estimate = (osds * 100) / pool_size | ||
2961 | 198 | # Return the next nearest power of 2 | ||
2962 | 199 | index = bisect.bisect_right(powers_of_two, estimate) | ||
2963 | 200 | return powers_of_two[index] | ||
2964 | 201 | |||
2965 | 202 | |||
2966 | 203 | class ReplicatedPool(Pool): | ||
2967 | 204 | def __init__(self, service, name, replicas=2): | ||
2968 | 205 | super(ReplicatedPool, self).__init__(service=service, name=name) | ||
2969 | 206 | self.replicas = replicas | ||
2970 | 207 | |||
2971 | 208 | def create(self): | ||
2972 | 209 | if not pool_exists(self.service, self.name): | ||
2973 | 210 | # Create it | ||
2974 | 211 | pgs = self.get_pgs(self.replicas) | ||
2975 | 212 | cmd = ['ceph', '--id', self.service, 'osd', 'pool', 'create', self.name, str(pgs)] | ||
2976 | 213 | try: | ||
2977 | 214 | check_call(cmd) | ||
2978 | 215 | except CalledProcessError: | ||
2979 | 216 | raise | ||
2980 | 217 | |||
2981 | 218 | |||
2982 | 219 | # Default jerasure erasure coded pool | ||
2983 | 220 | class ErasurePool(Pool): | ||
2984 | 221 | def __init__(self, service, name, erasure_code_profile="default"): | ||
2985 | 222 | super(ErasurePool, self).__init__(service=service, name=name) | ||
2986 | 223 | self.erasure_code_profile = erasure_code_profile | ||
2987 | 224 | |||
2988 | 225 | def create(self): | ||
2989 | 226 | if not pool_exists(self.service, self.name): | ||
2990 | 227 | # Try to find the erasure profile information so we can properly size the pgs | ||
2991 | 228 | erasure_profile = get_erasure_profile(service=self.service, name=self.erasure_code_profile) | ||
2992 | 229 | |||
2993 | 230 | # Check for errors | ||
2994 | 231 | if erasure_profile is None: | ||
2995 | 232 | log(message='Failed to discover erasure_profile named={}'.format(self.erasure_code_profile), | ||
2996 | 233 | level=ERROR) | ||
2997 | 234 | raise PoolCreationError(message='unable to find erasure profile {}'.format(self.erasure_code_profile)) | ||
2998 | 235 | if 'k' not in erasure_profile or 'm' not in erasure_profile: | ||
2999 | 236 | # Error | ||
3000 | 237 | log(message='Unable to find k (data chunks) or m (coding chunks) in {}'.format(erasure_profile), | ||
3001 | 238 | level=ERROR) | ||
3002 | 239 | raise PoolCreationError( | ||
3003 | 240 | message='unable to find k (data chunks) or m (coding chunks) in {}'.format(erasure_profile)) | ||
3004 | 241 | |||
3005 | 242 | pgs = self.get_pgs(int(erasure_profile['k']) + int(erasure_profile['m'])) | ||
3006 | 243 | # Create it | ||
3007 | 244 | cmd = ['ceph', '--id', self.service, 'osd', 'pool', 'create', self.name, str(pgs), | ||
3008 | 245 | 'erasure', self.erasure_code_profile] | ||
3009 | 246 | try: | ||
3010 | 247 | check_call(cmd) | ||
3011 | 248 | except CalledProcessError: | ||
3012 | 249 | raise | ||
3013 | 250 | |||
3014 | 251 | """Get an existing erasure code profile if it already exists. | ||
3015 | 252 | Returns json formatted output""" | ||
3016 | 253 | |||
3017 | 254 | |||
3018 | 255 | def get_erasure_profile(service, name): | ||
3019 | 256 | """ | ||
3020 | 257 | :param service: six.string_types. The Ceph user name to run the command under | ||
3021 | 258 | :param name: | ||
3022 | 259 | :return: | ||
3023 | 260 | """ | ||
3024 | 261 | try: | ||
3025 | 262 | out = check_output(['ceph', '--id', service, | ||
3026 | 263 | 'osd', 'erasure-code-profile', 'get', | ||
3027 | 264 | name, '--format=json']) | ||
3028 | 265 | return json.loads(out) | ||
3029 | 266 | except (CalledProcessError, OSError, ValueError): | ||
3030 | 267 | return None | ||
3031 | 268 | |||
3032 | 269 | |||
3033 | 270 | def pool_set(service, pool_name, key, value): | ||
3034 | 271 | """ | ||
3035 | 272 | Sets a value for a RADOS pool in ceph. | ||
3036 | 273 | :param service: six.string_types. The Ceph user name to run the command under | ||
3037 | 274 | :param pool_name: six.string_types | ||
3038 | 275 | :param key: six.string_types | ||
3039 | 276 | :param value: | ||
3040 | 277 | :return: None. Can raise CalledProcessError | ||
3041 | 278 | """ | ||
3042 | 279 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'set', pool_name, key, value] | ||
3043 | 280 | try: | ||
3044 | 281 | check_call(cmd) | ||
3045 | 282 | except CalledProcessError: | ||
3046 | 283 | raise | ||
3047 | 284 | |||
3048 | 285 | |||
3049 | 286 | def snapshot_pool(service, pool_name, snapshot_name): | ||
3050 | 287 | """ | ||
3051 | 288 | Snapshots a RADOS pool in ceph. | ||
3052 | 289 | :param service: six.string_types. The Ceph user name to run the command under | ||
3053 | 290 | :param pool_name: six.string_types | ||
3054 | 291 | :param snapshot_name: six.string_types | ||
3055 | 292 | :return: None. Can raise CalledProcessError | ||
3056 | 293 | """ | ||
3057 | 294 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'mksnap', pool_name, snapshot_name] | ||
3058 | 295 | try: | ||
3059 | 296 | check_call(cmd) | ||
3060 | 297 | except CalledProcessError: | ||
3061 | 298 | raise | ||
3062 | 299 | |||
3063 | 300 | |||
3064 | 301 | def remove_pool_snapshot(service, pool_name, snapshot_name): | ||
3065 | 302 | """ | ||
3066 | 303 | Remove a snapshot from a RADOS pool in ceph. | ||
3067 | 304 | :param service: six.string_types. The Ceph user name to run the command under | ||
3068 | 305 | :param pool_name: six.string_types | ||
3069 | 306 | :param snapshot_name: six.string_types | ||
3070 | 307 | :return: None. Can raise CalledProcessError | ||
3071 | 308 | """ | ||
3072 | 309 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'rmsnap', pool_name, snapshot_name] | ||
3073 | 310 | try: | ||
3074 | 311 | check_call(cmd) | ||
3075 | 312 | except CalledProcessError: | ||
3076 | 313 | raise | ||
3077 | 314 | |||
3078 | 315 | |||
3079 | 316 | # max_bytes should be an int or long | ||
3080 | 317 | def set_pool_quota(service, pool_name, max_bytes): | ||
3081 | 318 | """ | ||
3082 | 319 | :param service: six.string_types. The Ceph user name to run the command under | ||
3083 | 320 | :param pool_name: six.string_types | ||
3084 | 321 | :param max_bytes: int or long | ||
3085 | 322 | :return: None. Can raise CalledProcessError | ||
3086 | 323 | """ | ||
3087 | 324 | # Set a byte quota on a RADOS pool in ceph. | ||
3088 | 325 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'set-quota', pool_name, 'max_bytes', max_bytes] | ||
3089 | 326 | try: | ||
3090 | 327 | check_call(cmd) | ||
3091 | 328 | except CalledProcessError: | ||
3092 | 329 | raise | ||
3093 | 330 | |||
3094 | 331 | |||
3095 | 332 | def remove_pool_quota(service, pool_name): | ||
3096 | 333 | """ | ||
3097 | 334 | Set a byte quota on a RADOS pool in ceph. | ||
3098 | 335 | :param service: six.string_types. The Ceph user name to run the command under | ||
3099 | 336 | :param pool_name: six.string_types | ||
3100 | 337 | :return: None. Can raise CalledProcessError | ||
3101 | 338 | """ | ||
3102 | 339 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'set-quota', pool_name, 'max_bytes', '0'] | ||
3103 | 340 | try: | ||
3104 | 341 | check_call(cmd) | ||
3105 | 342 | except CalledProcessError: | ||
3106 | 343 | raise | ||
3107 | 344 | |||
3108 | 345 | |||
3109 | 346 | def create_erasure_profile(service, profile_name, erasure_plugin_name='jerasure', failure_domain='host', | ||
3110 | 347 | data_chunks=2, coding_chunks=1, | ||
3111 | 348 | locality=None, durability_estimator=None): | ||
3112 | 349 | """ | ||
3113 | 350 | Create a new erasure code profile if one does not already exist for it. Updates | ||
3114 | 351 | the profile if it exists. Please see http://docs.ceph.com/docs/master/rados/operations/erasure-code-profile/ | ||
3115 | 352 | for more details | ||
3116 | 353 | :param service: six.string_types. The Ceph user name to run the command under | ||
3117 | 354 | :param profile_name: six.string_types | ||
3118 | 355 | :param erasure_plugin_name: six.string_types | ||
3119 | 356 | :param failure_domain: six.string_types. One of ['chassis', 'datacenter', 'host', 'osd', 'pdu', 'pod', 'rack', 'region', | ||
3120 | 357 | 'room', 'root', 'row']) | ||
3121 | 358 | :param data_chunks: int | ||
3122 | 359 | :param coding_chunks: int | ||
3123 | 360 | :param locality: int | ||
3124 | 361 | :param durability_estimator: int | ||
3125 | 362 | :return: None. Can raise CalledProcessError | ||
3126 | 363 | """ | ||
3127 | 364 | # Ensure this failure_domain is allowed by Ceph | ||
3128 | 365 | validator(failure_domain, six.string_types, | ||
3129 | 366 | ['chassis', 'datacenter', 'host', 'osd', 'pdu', 'pod', 'rack', 'region', 'room', 'root', 'row']) | ||
3130 | 367 | |||
3131 | 368 | cmd = ['ceph', '--id', service, 'osd', 'erasure-code-profile', 'set', profile_name, | ||
3132 | 369 | 'plugin=' + erasure_plugin_name, 'k=' + str(data_chunks), 'm=' + str(coding_chunks), | ||
3133 | 370 | 'ruleset_failure_domain=' + failure_domain] | ||
3134 | 371 | if locality is not None and durability_estimator is not None: | ||
3135 | 372 | raise ValueError("create_erasure_profile should be called with k, m and one of l or c but not both.") | ||
3136 | 373 | |||
3137 | 374 | # Add plugin specific information | ||
3138 | 375 | if locality is not None: | ||
3139 | 376 | # For local erasure codes | ||
3140 | 377 | cmd.append('l=' + str(locality)) | ||
3141 | 378 | if durability_estimator is not None: | ||
3142 | 379 | # For Shec erasure codes | ||
3143 | 380 | cmd.append('c=' + str(durability_estimator)) | ||
3144 | 381 | |||
3145 | 382 | if erasure_profile_exists(service, profile_name): | ||
3146 | 383 | cmd.append('--force') | ||
3147 | 384 | |||
3148 | 385 | try: | ||
3149 | 386 | check_call(cmd) | ||
3150 | 387 | except CalledProcessError: | ||
3151 | 388 | raise | ||
3152 | 389 | |||
3153 | 390 | |||
3154 | 391 | def rename_pool(service, old_name, new_name): | ||
3155 | 392 | """ | ||
3156 | 393 | Rename a Ceph pool from old_name to new_name | ||
3157 | 394 | :param service: six.string_types. The Ceph user name to run the command under | ||
3158 | 395 | :param old_name: six.string_types | ||
3159 | 396 | :param new_name: six.string_types | ||
3160 | 397 | :return: None | ||
3161 | 398 | """ | ||
3162 | 399 | validator(value=old_name, valid_type=six.string_types) | ||
3163 | 400 | validator(value=new_name, valid_type=six.string_types) | ||
3164 | 401 | |||
3165 | 402 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'rename', old_name, new_name] | ||
3166 | 403 | check_call(cmd) | ||
3167 | 404 | |||
3168 | 405 | |||
3169 | 406 | def erasure_profile_exists(service, name): | ||
3170 | 407 | """ | ||
3171 | 408 | Check to see if an Erasure code profile already exists. | ||
3172 | 409 | :param service: six.string_types. The Ceph user name to run the command under | ||
3173 | 410 | :param name: six.string_types | ||
3174 | 411 | :return: int or None | ||
3175 | 412 | """ | ||
3176 | 413 | validator(value=name, valid_type=six.string_types) | ||
3177 | 414 | try: | ||
3178 | 415 | check_call(['ceph', '--id', service, | ||
3179 | 416 | 'osd', 'erasure-code-profile', 'get', | ||
3180 | 417 | name]) | ||
3181 | 418 | return True | ||
3182 | 419 | except CalledProcessError: | ||
3183 | 420 | return False | ||
3184 | 421 | |||
3185 | 422 | |||
3186 | 423 | def get_cache_mode(service, pool_name): | ||
3187 | 424 | """ | ||
3188 | 425 | Find the current caching mode of the pool_name given. | ||
3189 | 426 | :param service: six.string_types. The Ceph user name to run the command under | ||
3190 | 427 | :param pool_name: six.string_types | ||
3191 | 428 | :return: int or None | ||
3192 | 429 | """ | ||
3193 | 430 | validator(value=service, valid_type=six.string_types) | ||
3194 | 431 | validator(value=pool_name, valid_type=six.string_types) | ||
3195 | 432 | out = check_output(['ceph', '--id', service, 'osd', 'dump', '--format=json']) | ||
3196 | 433 | try: | ||
3197 | 434 | osd_json = json.loads(out) | ||
3198 | 435 | for pool in osd_json['pools']: | ||
3199 | 436 | if pool['pool_name'] == pool_name: | ||
3200 | 437 | return pool['cache_mode'] | ||
3201 | 438 | return None | ||
3202 | 439 | except ValueError: | ||
3203 | 440 | raise | ||
3204 | 441 | |||
3205 | 442 | |||
3206 | 443 | def pool_exists(service, name): | ||
3207 | 444 | """Check to see if a RADOS pool already exists.""" | ||
3208 | 445 | try: | ||
3209 | 446 | out = check_output(['rados', '--id', service, | ||
3210 | 447 | 'lspools']).decode('UTF-8') | ||
3211 | 448 | except CalledProcessError: | ||
3212 | 449 | return False | ||
3213 | 450 | |||
3214 | 451 | return name in out | ||
3215 | 452 | |||
3216 | 453 | |||
3217 | 454 | def get_osds(service): | ||
3218 | 455 | """Return a list of all Ceph Object Storage Daemons currently in the | ||
3219 | 456 | cluster. | ||
3220 | 457 | """ | ||
3221 | 458 | version = ceph_version() | ||
3222 | 459 | if version and version >= '0.56': | ||
3223 | 460 | return json.loads(check_output(['ceph', '--id', service, | ||
3224 | 461 | 'osd', 'ls', | ||
3225 | 462 | '--format=json']).decode('UTF-8')) | ||
3226 | 463 | |||
3227 | 464 | return None | ||
3228 | 70 | 465 | ||
3229 | 71 | 466 | ||
3230 | 72 | def install(): | 467 | def install(): |
3231 | @@ -96,53 +491,37 @@ | |||
3232 | 96 | check_call(cmd) | 491 | check_call(cmd) |
3233 | 97 | 492 | ||
3234 | 98 | 493 | ||
3260 | 99 | def pool_exists(service, name): | 494 | def update_pool(client, pool, settings): |
3261 | 100 | """Check to see if a RADOS pool already exists.""" | 495 | cmd = ['ceph', '--id', client, 'osd', 'pool', 'set', pool] |
3262 | 101 | try: | 496 | for k, v in six.iteritems(settings): |
3263 | 102 | out = check_output(['rados', '--id', service, | 497 | cmd.append(k) |
3264 | 103 | 'lspools']).decode('UTF-8') | 498 | cmd.append(v) |
3265 | 104 | except CalledProcessError: | 499 | |
3266 | 105 | return False | 500 | check_call(cmd) |
3267 | 106 | 501 | ||
3268 | 107 | return name in out | 502 | |
3269 | 108 | 503 | def create_pool(service, name, replicas=3, pg_num=None): | |
3245 | 109 | |||
3246 | 110 | def get_osds(service): | ||
3247 | 111 | """Return a list of all Ceph Object Storage Daemons currently in the | ||
3248 | 112 | cluster. | ||
3249 | 113 | """ | ||
3250 | 114 | version = ceph_version() | ||
3251 | 115 | if version and version >= '0.56': | ||
3252 | 116 | return json.loads(check_output(['ceph', '--id', service, | ||
3253 | 117 | 'osd', 'ls', | ||
3254 | 118 | '--format=json']).decode('UTF-8')) | ||
3255 | 119 | |||
3256 | 120 | return None | ||
3257 | 121 | |||
3258 | 122 | |||
3259 | 123 | def create_pool(service, name, replicas=3): | ||
3270 | 124 | """Create a new RADOS pool.""" | 504 | """Create a new RADOS pool.""" |
3271 | 125 | if pool_exists(service, name): | 505 | if pool_exists(service, name): |
3272 | 126 | log("Ceph pool {} already exists, skipping creation".format(name), | 506 | log("Ceph pool {} already exists, skipping creation".format(name), |
3273 | 127 | level=WARNING) | 507 | level=WARNING) |
3274 | 128 | return | 508 | return |
3275 | 129 | 509 | ||
3292 | 130 | # Calculate the number of placement groups based | 510 | if not pg_num: |
3293 | 131 | # on upstream recommended best practices. | 511 | # Calculate the number of placement groups based |
3294 | 132 | osds = get_osds(service) | 512 | # on upstream recommended best practices. |
3295 | 133 | if osds: | 513 | osds = get_osds(service) |
3296 | 134 | pgnum = (len(osds) * 100 // replicas) | 514 | if osds: |
3297 | 135 | else: | 515 | pg_num = (len(osds) * 100 // replicas) |
3298 | 136 | # NOTE(james-page): Default to 200 for older ceph versions | 516 | else: |
3299 | 137 | # which don't support OSD query from cli | 517 | # NOTE(james-page): Default to 200 for older ceph versions |
3300 | 138 | pgnum = 200 | 518 | # which don't support OSD query from cli |
3301 | 139 | 519 | pg_num = 200 | |
3302 | 140 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'create', name, str(pgnum)] | 520 | |
3303 | 141 | check_call(cmd) | 521 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'create', name, str(pg_num)] |
3304 | 142 | 522 | check_call(cmd) | |
3305 | 143 | cmd = ['ceph', '--id', service, 'osd', 'pool', 'set', name, 'size', | 523 | |
3306 | 144 | str(replicas)] | 524 | update_pool(service, name, settings={'size': str(replicas)}) |
3291 | 145 | check_call(cmd) | ||
3307 | 146 | 525 | ||
3308 | 147 | 526 | ||
3309 | 148 | def delete_pool(service, name): | 527 | def delete_pool(service, name): |
3310 | @@ -197,10 +576,10 @@ | |||
3311 | 197 | log('Created new keyfile at %s.' % keyfile, level=INFO) | 576 | log('Created new keyfile at %s.' % keyfile, level=INFO) |
3312 | 198 | 577 | ||
3313 | 199 | 578 | ||
3316 | 200 | def get_ceph_nodes(): | 579 | def get_ceph_nodes(relation='ceph'): |
3317 | 201 | """Query named relation 'ceph' to determine current nodes.""" | 580 | """Query named relation to determine current nodes.""" |
3318 | 202 | hosts = [] | 581 | hosts = [] |
3320 | 203 | for r_id in relation_ids('ceph'): | 582 | for r_id in relation_ids(relation): |
3321 | 204 | for unit in related_units(r_id): | 583 | for unit in related_units(r_id): |
3322 | 205 | hosts.append(relation_get('private-address', unit=unit, rid=r_id)) | 584 | hosts.append(relation_get('private-address', unit=unit, rid=r_id)) |
3323 | 206 | 585 | ||
3324 | @@ -288,17 +667,6 @@ | |||
3325 | 288 | os.chown(data_src_dst, uid, gid) | 667 | os.chown(data_src_dst, uid, gid) |
3326 | 289 | 668 | ||
3327 | 290 | 669 | ||
3328 | 291 | # TODO: re-use | ||
3329 | 292 | def modprobe(module): | ||
3330 | 293 | """Load a kernel module and configure for auto-load on reboot.""" | ||
3331 | 294 | log('Loading kernel module', level=INFO) | ||
3332 | 295 | cmd = ['modprobe', module] | ||
3333 | 296 | check_call(cmd) | ||
3334 | 297 | with open('/etc/modules', 'r+') as modules: | ||
3335 | 298 | if module not in modules.read(): | ||
3336 | 299 | modules.write(module) | ||
3337 | 300 | |||
3338 | 301 | |||
3339 | 302 | def copy_files(src, dst, symlinks=False, ignore=None): | 670 | def copy_files(src, dst, symlinks=False, ignore=None): |
3340 | 303 | """Copy files from src to dst.""" | 671 | """Copy files from src to dst.""" |
3341 | 304 | for item in os.listdir(src): | 672 | for item in os.listdir(src): |
3342 | @@ -363,14 +731,14 @@ | |||
3343 | 363 | service_start(svc) | 731 | service_start(svc) |
3344 | 364 | 732 | ||
3345 | 365 | 733 | ||
3347 | 366 | def ensure_ceph_keyring(service, user=None, group=None): | 734 | def ensure_ceph_keyring(service, user=None, group=None, relation='ceph'): |
3348 | 367 | """Ensures a ceph keyring is created for a named service and optionally | 735 | """Ensures a ceph keyring is created for a named service and optionally |
3349 | 368 | ensures user and group ownership. | 736 | ensures user and group ownership. |
3350 | 369 | 737 | ||
3351 | 370 | Returns False if no ceph key is available in relation state. | 738 | Returns False if no ceph key is available in relation state. |
3352 | 371 | """ | 739 | """ |
3353 | 372 | key = None | 740 | key = None |
3355 | 373 | for rid in relation_ids('ceph'): | 741 | for rid in relation_ids(relation): |
3356 | 374 | for unit in related_units(rid): | 742 | for unit in related_units(rid): |
3357 | 375 | key = relation_get('key', rid=rid, unit=unit) | 743 | key = relation_get('key', rid=rid, unit=unit) |
3358 | 376 | if key: | 744 | if key: |
3359 | @@ -411,17 +779,60 @@ | |||
3360 | 411 | 779 | ||
3361 | 412 | The API is versioned and defaults to version 1. | 780 | The API is versioned and defaults to version 1. |
3362 | 413 | """ | 781 | """ |
3364 | 414 | def __init__(self, api_version=1): | 782 | |
3365 | 783 | def __init__(self, api_version=1, request_id=None): | ||
3366 | 415 | self.api_version = api_version | 784 | self.api_version = api_version |
3367 | 785 | if request_id: | ||
3368 | 786 | self.request_id = request_id | ||
3369 | 787 | else: | ||
3370 | 788 | self.request_id = str(uuid.uuid1()) | ||
3371 | 416 | self.ops = [] | 789 | self.ops = [] |
3372 | 417 | 790 | ||
3374 | 418 | def add_op_create_pool(self, name, replica_count=3): | 791 | def add_op_create_pool(self, name, replica_count=3, pg_num=None): |
3375 | 792 | """Adds an operation to create a pool. | ||
3376 | 793 | |||
3377 | 794 | @param pg_num setting: optional setting. If not provided, this value | ||
3378 | 795 | will be calculated by the broker based on how many OSDs are in the | ||
3379 | 796 | cluster at the time of creation. Note that, if provided, this value | ||
3380 | 797 | will be capped at the current available maximum. | ||
3381 | 798 | """ | ||
3382 | 419 | self.ops.append({'op': 'create-pool', 'name': name, | 799 | self.ops.append({'op': 'create-pool', 'name': name, |
3384 | 420 | 'replicas': replica_count}) | 800 | 'replicas': replica_count, 'pg_num': pg_num}) |
3385 | 801 | |||
3386 | 802 | def set_ops(self, ops): | ||
3387 | 803 | """Set request ops to provided value. | ||
3388 | 804 | |||
3389 | 805 | Useful for injecting ops that come from a previous request | ||
3390 | 806 | to allow comparisons to ensure validity. | ||
3391 | 807 | """ | ||
3392 | 808 | self.ops = ops | ||
3393 | 421 | 809 | ||
3394 | 422 | @property | 810 | @property |
3395 | 423 | def request(self): | 811 | def request(self): |
3397 | 424 | return json.dumps({'api-version': self.api_version, 'ops': self.ops}) | 812 | return json.dumps({'api-version': self.api_version, 'ops': self.ops, |
3398 | 813 | 'request-id': self.request_id}) | ||
3399 | 814 | |||
3400 | 815 | def _ops_equal(self, other): | ||
3401 | 816 | if len(self.ops) == len(other.ops): | ||
3402 | 817 | for req_no in range(0, len(self.ops)): | ||
3403 | 818 | for key in ['replicas', 'name', 'op', 'pg_num']: | ||
3404 | 819 | if self.ops[req_no].get(key) != other.ops[req_no].get(key): | ||
3405 | 820 | return False | ||
3406 | 821 | else: | ||
3407 | 822 | return False | ||
3408 | 823 | return True | ||
3409 | 824 | |||
3410 | 825 | def __eq__(self, other): | ||
3411 | 826 | if not isinstance(other, self.__class__): | ||
3412 | 827 | return False | ||
3413 | 828 | if self.api_version == other.api_version and \ | ||
3414 | 829 | self._ops_equal(other): | ||
3415 | 830 | return True | ||
3416 | 831 | else: | ||
3417 | 832 | return False | ||
3418 | 833 | |||
3419 | 834 | def __ne__(self, other): | ||
3420 | 835 | return not self.__eq__(other) | ||
3421 | 425 | 836 | ||
3422 | 426 | 837 | ||
3423 | 427 | class CephBrokerRsp(object): | 838 | class CephBrokerRsp(object): |
3424 | @@ -431,14 +842,198 @@ | |||
3425 | 431 | 842 | ||
3426 | 432 | The API is versioned and defaults to version 1. | 843 | The API is versioned and defaults to version 1. |
3427 | 433 | """ | 844 | """ |
3428 | 845 | |||
3429 | 434 | def __init__(self, encoded_rsp): | 846 | def __init__(self, encoded_rsp): |
3430 | 435 | self.api_version = None | 847 | self.api_version = None |
3431 | 436 | self.rsp = json.loads(encoded_rsp) | 848 | self.rsp = json.loads(encoded_rsp) |
3432 | 437 | 849 | ||
3433 | 438 | @property | 850 | @property |
3434 | 851 | def request_id(self): | ||
3435 | 852 | return self.rsp.get('request-id') | ||
3436 | 853 | |||
3437 | 854 | @property | ||
3438 | 439 | def exit_code(self): | 855 | def exit_code(self): |
3439 | 440 | return self.rsp.get('exit-code') | 856 | return self.rsp.get('exit-code') |
3440 | 441 | 857 | ||
3441 | 442 | @property | 858 | @property |
3442 | 443 | def exit_msg(self): | 859 | def exit_msg(self): |
3443 | 444 | return self.rsp.get('stderr') | 860 | return self.rsp.get('stderr') |
3444 | 861 | |||
3445 | 862 | |||
3446 | 863 | # Ceph Broker Conversation: | ||
3447 | 864 | # If a charm needs an action to be taken by ceph it can create a CephBrokerRq | ||
3448 | 865 | # and send that request to ceph via the ceph relation. The CephBrokerRq has a | ||
3449 | 866 | # unique id so that the client can identity which CephBrokerRsp is associated | ||
3450 | 867 | # with the request. Ceph will also respond to each client unit individually | ||
3451 | 868 | # creating a response key per client unit eg glance/0 will get a CephBrokerRsp | ||
3452 | 869 | # via key broker-rsp-glance-0 | ||
3453 | 870 | # | ||
3454 | 871 | # To use this the charm can just do something like: | ||
3455 | 872 | # | ||
3456 | 873 | # from charmhelpers.contrib.storage.linux.ceph import ( | ||
3457 | 874 | # send_request_if_needed, | ||
3458 | 875 | # is_request_complete, | ||
3459 | 876 | # CephBrokerRq, | ||
3460 | 877 | # ) | ||
3461 | 878 | # | ||
3462 | 879 | # @hooks.hook('ceph-relation-changed') | ||
3463 | 880 | # def ceph_changed(): | ||
3464 | 881 | # rq = CephBrokerRq() | ||
3465 | 882 | # rq.add_op_create_pool(name='poolname', replica_count=3) | ||
3466 | 883 | # | ||
3467 | 884 | # if is_request_complete(rq): | ||
3468 | 885 | # <Request complete actions> | ||
3469 | 886 | # else: | ||
3470 | 887 | # send_request_if_needed(get_ceph_request()) | ||
3471 | 888 | # | ||
3472 | 889 | # CephBrokerRq and CephBrokerRsp are serialized into JSON. Below is an example | ||
3473 | 890 | # of glance having sent a request to ceph which ceph has successfully processed | ||
3474 | 891 | # 'ceph:8': { | ||
3475 | 892 | # 'ceph/0': { | ||
3476 | 893 | # 'auth': 'cephx', | ||
3477 | 894 | # 'broker-rsp-glance-0': '{"request-id": "0bc7dc54", "exit-code": 0}', | ||
3478 | 895 | # 'broker_rsp': '{"request-id": "0da543b8", "exit-code": 0}', | ||
3479 | 896 | # 'ceph-public-address': '10.5.44.103', | ||
3480 | 897 | # 'key': 'AQCLDttVuHXINhAAvI144CB09dYchhHyTUY9BQ==', | ||
3481 | 898 | # 'private-address': '10.5.44.103', | ||
3482 | 899 | # }, | ||
3483 | 900 | # 'glance/0': { | ||
3484 | 901 | # 'broker_req': ('{"api-version": 1, "request-id": "0bc7dc54", ' | ||
3485 | 902 | # '"ops": [{"replicas": 3, "name": "glance", ' | ||
3486 | 903 | # '"op": "create-pool"}]}'), | ||
3487 | 904 | # 'private-address': '10.5.44.109', | ||
3488 | 905 | # }, | ||
3489 | 906 | # } | ||
3490 | 907 | |||
3491 | 908 | def get_previous_request(rid): | ||
3492 | 909 | """Return the last ceph broker request sent on a given relation | ||
3493 | 910 | |||
3494 | 911 | @param rid: Relation id to query for request | ||
3495 | 912 | """ | ||
3496 | 913 | request = None | ||
3497 | 914 | broker_req = relation_get(attribute='broker_req', rid=rid, | ||
3498 | 915 | unit=local_unit()) | ||
3499 | 916 | if broker_req: | ||
3500 | 917 | request_data = json.loads(broker_req) | ||
3501 | 918 | request = CephBrokerRq(api_version=request_data['api-version'], | ||
3502 | 919 | request_id=request_data['request-id']) | ||
3503 | 920 | request.set_ops(request_data['ops']) | ||
3504 | 921 | |||
3505 | 922 | return request | ||
3506 | 923 | |||
3507 | 924 | |||
3508 | 925 | def get_request_states(request, relation='ceph'): | ||
3509 | 926 | """Return a dict of requests per relation id with their corresponding | ||
3510 | 927 | completion state. | ||
3511 | 928 | |||
3512 | 929 | This allows a charm, which has a request for ceph, to see whether there is | ||
3513 | 930 | an equivalent request already being processed and if so what state that | ||
3514 | 931 | request is in. | ||
3515 | 932 | |||
3516 | 933 | @param request: A CephBrokerRq object | ||
3517 | 934 | """ | ||
3518 | 935 | complete = [] | ||
3519 | 936 | requests = {} | ||
3520 | 937 | for rid in relation_ids(relation): | ||
3521 | 938 | complete = False | ||
3522 | 939 | previous_request = get_previous_request(rid) | ||
3523 | 940 | if request == previous_request: | ||
3524 | 941 | sent = True | ||
3525 | 942 | complete = is_request_complete_for_rid(previous_request, rid) | ||
3526 | 943 | else: | ||
3527 | 944 | sent = False | ||
3528 | 945 | complete = False | ||
3529 | 946 | |||
3530 | 947 | requests[rid] = { | ||
3531 | 948 | 'sent': sent, | ||
3532 | 949 | 'complete': complete, | ||
3533 | 950 | } | ||
3534 | 951 | |||
3535 | 952 | return requests | ||
3536 | 953 | |||
3537 | 954 | |||
3538 | 955 | def is_request_sent(request, relation='ceph'): | ||
3539 | 956 | """Check to see if a functionally equivalent request has already been sent | ||
3540 | 957 | |||
3541 | 958 | Returns True if a similair request has been sent | ||
3542 | 959 | |||
3543 | 960 | @param request: A CephBrokerRq object | ||
3544 | 961 | """ | ||
3545 | 962 | states = get_request_states(request, relation=relation) | ||
3546 | 963 | for rid in states.keys(): | ||
3547 | 964 | if not states[rid]['sent']: | ||
3548 | 965 | return False | ||
3549 | 966 | |||
3550 | 967 | return True | ||
3551 | 968 | |||
3552 | 969 | |||
3553 | 970 | def is_request_complete(request, relation='ceph'): | ||
3554 | 971 | """Check to see if a functionally equivalent request has already been | ||
3555 | 972 | completed | ||
3556 | 973 | |||
3557 | 974 | Returns True if a similair request has been completed | ||
3558 | 975 | |||
3559 | 976 | @param request: A CephBrokerRq object | ||
3560 | 977 | """ | ||
3561 | 978 | states = get_request_states(request, relation=relation) | ||
3562 | 979 | for rid in states.keys(): | ||
3563 | 980 | if not states[rid]['complete']: | ||
3564 | 981 | return False | ||
3565 | 982 | |||
3566 | 983 | return True | ||
3567 | 984 | |||
3568 | 985 | |||
3569 | 986 | def is_request_complete_for_rid(request, rid): | ||
3570 | 987 | """Check if a given request has been completed on the given relation | ||
3571 | 988 | |||
3572 | 989 | @param request: A CephBrokerRq object | ||
3573 | 990 | @param rid: Relation ID | ||
3574 | 991 | """ | ||
3575 | 992 | broker_key = get_broker_rsp_key() | ||
3576 | 993 | for unit in related_units(rid): | ||
3577 | 994 | rdata = relation_get(rid=rid, unit=unit) | ||
3578 | 995 | if rdata.get(broker_key): | ||
3579 | 996 | rsp = CephBrokerRsp(rdata.get(broker_key)) | ||
3580 | 997 | if rsp.request_id == request.request_id: | ||
3581 | 998 | if not rsp.exit_code: | ||
3582 | 999 | return True | ||
3583 | 1000 | else: | ||
3584 | 1001 | # The remote unit sent no reply targeted at this unit so either the | ||
3585 | 1002 | # remote ceph cluster does not support unit targeted replies or it | ||
3586 | 1003 | # has not processed our request yet. | ||
3587 | 1004 | if rdata.get('broker_rsp'): | ||
3588 | 1005 | request_data = json.loads(rdata['broker_rsp']) | ||
3589 | 1006 | if request_data.get('request-id'): | ||
3590 | 1007 | log('Ignoring legacy broker_rsp without unit key as remote ' | ||
3591 | 1008 | 'service supports unit specific replies', level=DEBUG) | ||
3592 | 1009 | else: | ||
3593 | 1010 | log('Using legacy broker_rsp as remote service does not ' | ||
3594 | 1011 | 'supports unit specific replies', level=DEBUG) | ||
3595 | 1012 | rsp = CephBrokerRsp(rdata['broker_rsp']) | ||
3596 | 1013 | if not rsp.exit_code: | ||
3597 | 1014 | return True | ||
3598 | 1015 | |||
3599 | 1016 | return False | ||
3600 | 1017 | |||
3601 | 1018 | |||
3602 | 1019 | def get_broker_rsp_key(): | ||
3603 | 1020 | """Return broker response key for this unit | ||
3604 | 1021 | |||
3605 | 1022 | This is the key that ceph is going to use to pass request status | ||
3606 | 1023 | information back to this unit | ||
3607 | 1024 | """ | ||
3608 | 1025 | return 'broker-rsp-' + local_unit().replace('/', '-') | ||
3609 | 1026 | |||
3610 | 1027 | |||
3611 | 1028 | def send_request_if_needed(request, relation='ceph'): | ||
3612 | 1029 | """Send broker request if an equivalent request has not already been sent | ||
3613 | 1030 | |||
3614 | 1031 | @param request: A CephBrokerRq object | ||
3615 | 1032 | """ | ||
3616 | 1033 | if is_request_sent(request, relation=relation): | ||
3617 | 1034 | log('Request already sent but not complete, not sending new request', | ||
3618 | 1035 | level=DEBUG) | ||
3619 | 1036 | else: | ||
3620 | 1037 | for rid in relation_ids(relation): | ||
3621 | 1038 | log('Sending request {}'.format(request.request_id), level=DEBUG) | ||
3622 | 1039 | relation_set(relation_id=rid, broker_req=request.request) | ||
3623 | 445 | 1040 | ||
3624 | === modified file 'hooks/charmhelpers/contrib/storage/linux/loopback.py' | |||
3625 | --- hooks/charmhelpers/contrib/storage/linux/loopback.py 2015-01-26 09:46:38 +0000 | |||
3626 | +++ hooks/charmhelpers/contrib/storage/linux/loopback.py 2016-02-18 14:28:13 +0000 | |||
3627 | @@ -76,3 +76,13 @@ | |||
3628 | 76 | check_call(cmd) | 76 | check_call(cmd) |
3629 | 77 | 77 | ||
3630 | 78 | return create_loopback(path) | 78 | return create_loopback(path) |
3631 | 79 | |||
3632 | 80 | |||
3633 | 81 | def is_mapped_loopback_device(device): | ||
3634 | 82 | """ | ||
3635 | 83 | Checks if a given device name is an existing/mapped loopback device. | ||
3636 | 84 | :param device: str: Full path to the device (eg, /dev/loop1). | ||
3637 | 85 | :returns: str: Path to the backing file if is a loopback device | ||
3638 | 86 | empty string otherwise | ||
3639 | 87 | """ | ||
3640 | 88 | return loopback_devices().get(device, "") | ||
3641 | 79 | 89 | ||
3642 | === added file 'hooks/charmhelpers/core/files.py' | |||
3643 | --- hooks/charmhelpers/core/files.py 1970-01-01 00:00:00 +0000 | |||
3644 | +++ hooks/charmhelpers/core/files.py 2016-02-18 14:28:13 +0000 | |||
3645 | @@ -0,0 +1,45 @@ | |||
3646 | 1 | #!/usr/bin/env python | ||
3647 | 2 | # -*- coding: utf-8 -*- | ||
3648 | 3 | |||
3649 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
3650 | 5 | # | ||
3651 | 6 | # This file is part of charm-helpers. | ||
3652 | 7 | # | ||
3653 | 8 | # charm-helpers is free software: you can redistribute it and/or modify | ||
3654 | 9 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
3655 | 10 | # published by the Free Software Foundation. | ||
3656 | 11 | # | ||
3657 | 12 | # charm-helpers is distributed in the hope that it will be useful, | ||
3658 | 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
3659 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
3660 | 15 | # GNU Lesser General Public License for more details. | ||
3661 | 16 | # | ||
3662 | 17 | # You should have received a copy of the GNU Lesser General Public License | ||
3663 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
3664 | 19 | |||
3665 | 20 | __author__ = 'Jorge Niedbalski <niedbalski@ubuntu.com>' | ||
3666 | 21 | |||
3667 | 22 | import os | ||
3668 | 23 | import subprocess | ||
3669 | 24 | |||
3670 | 25 | |||
3671 | 26 | def sed(filename, before, after, flags='g'): | ||
3672 | 27 | """ | ||
3673 | 28 | Search and replaces the given pattern on filename. | ||
3674 | 29 | |||
3675 | 30 | :param filename: relative or absolute file path. | ||
3676 | 31 | :param before: expression to be replaced (see 'man sed') | ||
3677 | 32 | :param after: expression to replace with (see 'man sed') | ||
3678 | 33 | :param flags: sed-compatible regex flags in example, to make | ||
3679 | 34 | the search and replace case insensitive, specify ``flags="i"``. | ||
3680 | 35 | The ``g`` flag is always specified regardless, so you do not | ||
3681 | 36 | need to remember to include it when overriding this parameter. | ||
3682 | 37 | :returns: If the sed command exit code was zero then return, | ||
3683 | 38 | otherwise raise CalledProcessError. | ||
3684 | 39 | """ | ||
3685 | 40 | expression = r's/{0}/{1}/{2}'.format(before, | ||
3686 | 41 | after, flags) | ||
3687 | 42 | |||
3688 | 43 | return subprocess.check_call(["sed", "-i", "-r", "-e", | ||
3689 | 44 | expression, | ||
3690 | 45 | os.path.expanduser(filename)]) | ||
3691 | 0 | 46 | ||
3692 | === removed file 'hooks/charmhelpers/core/files.py' | |||
3693 | --- hooks/charmhelpers/core/files.py 2015-07-29 10:48:39 +0000 | |||
3694 | +++ hooks/charmhelpers/core/files.py 1970-01-01 00:00:00 +0000 | |||
3695 | @@ -1,45 +0,0 @@ | |||
3696 | 1 | #!/usr/bin/env python | ||
3697 | 2 | # -*- coding: utf-8 -*- | ||
3698 | 3 | |||
3699 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
3700 | 5 | # | ||
3701 | 6 | # This file is part of charm-helpers. | ||
3702 | 7 | # | ||
3703 | 8 | # charm-helpers is free software: you can redistribute it and/or modify | ||
3704 | 9 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
3705 | 10 | # published by the Free Software Foundation. | ||
3706 | 11 | # | ||
3707 | 12 | # charm-helpers is distributed in the hope that it will be useful, | ||
3708 | 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
3709 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
3710 | 15 | # GNU Lesser General Public License for more details. | ||
3711 | 16 | # | ||
3712 | 17 | # You should have received a copy of the GNU Lesser General Public License | ||
3713 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
3714 | 19 | |||
3715 | 20 | __author__ = 'Jorge Niedbalski <niedbalski@ubuntu.com>' | ||
3716 | 21 | |||
3717 | 22 | import os | ||
3718 | 23 | import subprocess | ||
3719 | 24 | |||
3720 | 25 | |||
3721 | 26 | def sed(filename, before, after, flags='g'): | ||
3722 | 27 | """ | ||
3723 | 28 | Search and replaces the given pattern on filename. | ||
3724 | 29 | |||
3725 | 30 | :param filename: relative or absolute file path. | ||
3726 | 31 | :param before: expression to be replaced (see 'man sed') | ||
3727 | 32 | :param after: expression to replace with (see 'man sed') | ||
3728 | 33 | :param flags: sed-compatible regex flags in example, to make | ||
3729 | 34 | the search and replace case insensitive, specify ``flags="i"``. | ||
3730 | 35 | The ``g`` flag is always specified regardless, so you do not | ||
3731 | 36 | need to remember to include it when overriding this parameter. | ||
3732 | 37 | :returns: If the sed command exit code was zero then return, | ||
3733 | 38 | otherwise raise CalledProcessError. | ||
3734 | 39 | """ | ||
3735 | 40 | expression = r's/{0}/{1}/{2}'.format(before, | ||
3736 | 41 | after, flags) | ||
3737 | 42 | |||
3738 | 43 | return subprocess.check_call(["sed", "-i", "-r", "-e", | ||
3739 | 44 | expression, | ||
3740 | 45 | os.path.expanduser(filename)]) | ||
3741 | 46 | 0 | ||
3742 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
3743 | --- hooks/charmhelpers/core/hookenv.py 2015-09-03 09:42:35 +0000 | |||
3744 | +++ hooks/charmhelpers/core/hookenv.py 2016-02-18 14:28:13 +0000 | |||
3745 | @@ -491,6 +491,19 @@ | |||
3746 | 491 | 491 | ||
3747 | 492 | 492 | ||
3748 | 493 | @cached | 493 | @cached |
3749 | 494 | def peer_relation_id(): | ||
3750 | 495 | '''Get the peers relation id if a peers relation has been joined, else None.''' | ||
3751 | 496 | md = metadata() | ||
3752 | 497 | section = md.get('peers') | ||
3753 | 498 | if section: | ||
3754 | 499 | for key in section: | ||
3755 | 500 | relids = relation_ids(key) | ||
3756 | 501 | if relids: | ||
3757 | 502 | return relids[0] | ||
3758 | 503 | return None | ||
3759 | 504 | |||
3760 | 505 | |||
3761 | 506 | @cached | ||
3762 | 494 | def relation_to_interface(relation_name): | 507 | def relation_to_interface(relation_name): |
3763 | 495 | """ | 508 | """ |
3764 | 496 | Given the name of a relation, return the interface that relation uses. | 509 | Given the name of a relation, return the interface that relation uses. |
3765 | @@ -504,12 +517,12 @@ | |||
3766 | 504 | def relation_to_role_and_interface(relation_name): | 517 | def relation_to_role_and_interface(relation_name): |
3767 | 505 | """ | 518 | """ |
3768 | 506 | Given the name of a relation, return the role and the name of the interface | 519 | Given the name of a relation, return the role and the name of the interface |
3770 | 507 | that relation uses (where role is one of ``provides``, ``requires``, or ``peer``). | 520 | that relation uses (where role is one of ``provides``, ``requires``, or ``peers``). |
3771 | 508 | 521 | ||
3772 | 509 | :returns: A tuple containing ``(role, interface)``, or ``(None, None)``. | 522 | :returns: A tuple containing ``(role, interface)``, or ``(None, None)``. |
3773 | 510 | """ | 523 | """ |
3774 | 511 | _metadata = metadata() | 524 | _metadata = metadata() |
3776 | 512 | for role in ('provides', 'requires', 'peer'): | 525 | for role in ('provides', 'requires', 'peers'): |
3777 | 513 | interface = _metadata.get(role, {}).get(relation_name, {}).get('interface') | 526 | interface = _metadata.get(role, {}).get(relation_name, {}).get('interface') |
3778 | 514 | if interface: | 527 | if interface: |
3779 | 515 | return role, interface | 528 | return role, interface |
3780 | @@ -521,7 +534,7 @@ | |||
3781 | 521 | """ | 534 | """ |
3782 | 522 | Given a role and interface name, return a list of relation names for the | 535 | Given a role and interface name, return a list of relation names for the |
3783 | 523 | current charm that use that interface under that role (where role is one | 536 | current charm that use that interface under that role (where role is one |
3785 | 524 | of ``provides``, ``requires``, or ``peer``). | 537 | of ``provides``, ``requires``, or ``peers``). |
3786 | 525 | 538 | ||
3787 | 526 | :returns: A list of relation names. | 539 | :returns: A list of relation names. |
3788 | 527 | """ | 540 | """ |
3789 | @@ -542,7 +555,7 @@ | |||
3790 | 542 | :returns: A list of relation names. | 555 | :returns: A list of relation names. |
3791 | 543 | """ | 556 | """ |
3792 | 544 | results = [] | 557 | results = [] |
3794 | 545 | for role in ('provides', 'requires', 'peer'): | 558 | for role in ('provides', 'requires', 'peers'): |
3795 | 546 | results.extend(role_and_interface_to_relations(role, interface_name)) | 559 | results.extend(role_and_interface_to_relations(role, interface_name)) |
3796 | 547 | return results | 560 | return results |
3797 | 548 | 561 | ||
3798 | @@ -623,6 +636,38 @@ | |||
3799 | 623 | return unit_get('private-address') | 636 | return unit_get('private-address') |
3800 | 624 | 637 | ||
3801 | 625 | 638 | ||
3802 | 639 | @cached | ||
3803 | 640 | def storage_get(attribute=None, storage_id=None): | ||
3804 | 641 | """Get storage attributes""" | ||
3805 | 642 | _args = ['storage-get', '--format=json'] | ||
3806 | 643 | if storage_id: | ||
3807 | 644 | _args.extend(('-s', storage_id)) | ||
3808 | 645 | if attribute: | ||
3809 | 646 | _args.append(attribute) | ||
3810 | 647 | try: | ||
3811 | 648 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
3812 | 649 | except ValueError: | ||
3813 | 650 | return None | ||
3814 | 651 | |||
3815 | 652 | |||
3816 | 653 | @cached | ||
3817 | 654 | def storage_list(storage_name=None): | ||
3818 | 655 | """List the storage IDs for the unit""" | ||
3819 | 656 | _args = ['storage-list', '--format=json'] | ||
3820 | 657 | if storage_name: | ||
3821 | 658 | _args.append(storage_name) | ||
3822 | 659 | try: | ||
3823 | 660 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
3824 | 661 | except ValueError: | ||
3825 | 662 | return None | ||
3826 | 663 | except OSError as e: | ||
3827 | 664 | import errno | ||
3828 | 665 | if e.errno == errno.ENOENT: | ||
3829 | 666 | # storage-list does not exist | ||
3830 | 667 | return [] | ||
3831 | 668 | raise | ||
3832 | 669 | |||
3833 | 670 | |||
3834 | 626 | class UnregisteredHookError(Exception): | 671 | class UnregisteredHookError(Exception): |
3835 | 627 | """Raised when an undefined hook is called""" | 672 | """Raised when an undefined hook is called""" |
3836 | 628 | pass | 673 | pass |
3837 | @@ -788,6 +833,7 @@ | |||
3838 | 788 | 833 | ||
3839 | 789 | def translate_exc(from_exc, to_exc): | 834 | def translate_exc(from_exc, to_exc): |
3840 | 790 | def inner_translate_exc1(f): | 835 | def inner_translate_exc1(f): |
3841 | 836 | @wraps(f) | ||
3842 | 791 | def inner_translate_exc2(*args, **kwargs): | 837 | def inner_translate_exc2(*args, **kwargs): |
3843 | 792 | try: | 838 | try: |
3844 | 793 | return f(*args, **kwargs) | 839 | return f(*args, **kwargs) |
3845 | @@ -832,6 +878,40 @@ | |||
3846 | 832 | subprocess.check_call(cmd) | 878 | subprocess.check_call(cmd) |
3847 | 833 | 879 | ||
3848 | 834 | 880 | ||
3849 | 881 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
3850 | 882 | def payload_register(ptype, klass, pid): | ||
3851 | 883 | """ is used while a hook is running to let Juju know that a | ||
3852 | 884 | payload has been started.""" | ||
3853 | 885 | cmd = ['payload-register'] | ||
3854 | 886 | for x in [ptype, klass, pid]: | ||
3855 | 887 | cmd.append(x) | ||
3856 | 888 | subprocess.check_call(cmd) | ||
3857 | 889 | |||
3858 | 890 | |||
3859 | 891 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
3860 | 892 | def payload_unregister(klass, pid): | ||
3861 | 893 | """ is used while a hook is running to let Juju know | ||
3862 | 894 | that a payload has been manually stopped. The <class> and <id> provided | ||
3863 | 895 | must match a payload that has been previously registered with juju using | ||
3864 | 896 | payload-register.""" | ||
3865 | 897 | cmd = ['payload-unregister'] | ||
3866 | 898 | for x in [klass, pid]: | ||
3867 | 899 | cmd.append(x) | ||
3868 | 900 | subprocess.check_call(cmd) | ||
3869 | 901 | |||
3870 | 902 | |||
3871 | 903 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
3872 | 904 | def payload_status_set(klass, pid, status): | ||
3873 | 905 | """is used to update the current status of a registered payload. | ||
3874 | 906 | The <class> and <id> provided must match a payload that has been previously | ||
3875 | 907 | registered with juju using payload-register. The <status> must be one of the | ||
3876 | 908 | follow: starting, started, stopping, stopped""" | ||
3877 | 909 | cmd = ['payload-status-set'] | ||
3878 | 910 | for x in [klass, pid, status]: | ||
3879 | 911 | cmd.append(x) | ||
3880 | 912 | subprocess.check_call(cmd) | ||
3881 | 913 | |||
3882 | 914 | |||
3883 | 835 | @cached | 915 | @cached |
3884 | 836 | def juju_version(): | 916 | def juju_version(): |
3885 | 837 | """Full version string (eg. '1.23.3.1-trusty-amd64')""" | 917 | """Full version string (eg. '1.23.3.1-trusty-amd64')""" |
3886 | 838 | 918 | ||
3887 | === modified file 'hooks/charmhelpers/core/host.py' | |||
3888 | --- hooks/charmhelpers/core/host.py 2015-08-27 15:02:34 +0000 | |||
3889 | +++ hooks/charmhelpers/core/host.py 2016-02-18 14:28:13 +0000 | |||
3890 | @@ -63,55 +63,85 @@ | |||
3891 | 63 | return service_result | 63 | return service_result |
3892 | 64 | 64 | ||
3893 | 65 | 65 | ||
3895 | 66 | def service_pause(service_name, init_dir=None): | 66 | def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"): |
3896 | 67 | """Pause a system service. | 67 | """Pause a system service. |
3897 | 68 | 68 | ||
3898 | 69 | Stop it, and prevent it from starting again at boot.""" | 69 | Stop it, and prevent it from starting again at boot.""" |
3907 | 70 | if init_dir is None: | 70 | stopped = True |
3908 | 71 | init_dir = "/etc/init" | 71 | if service_running(service_name): |
3909 | 72 | stopped = service_stop(service_name) | 72 | stopped = service_stop(service_name) |
3910 | 73 | # XXX: Support systemd too | 73 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
3911 | 74 | override_path = os.path.join( | 74 | sysv_file = os.path.join(initd_dir, service_name) |
3912 | 75 | init_dir, '{}.override'.format(service_name)) | 75 | if init_is_systemd(): |
3913 | 76 | with open(override_path, 'w') as fh: | 76 | service('disable', service_name) |
3914 | 77 | fh.write("manual\n") | 77 | elif os.path.exists(upstart_file): |
3915 | 78 | override_path = os.path.join( | ||
3916 | 79 | init_dir, '{}.override'.format(service_name)) | ||
3917 | 80 | with open(override_path, 'w') as fh: | ||
3918 | 81 | fh.write("manual\n") | ||
3919 | 82 | elif os.path.exists(sysv_file): | ||
3920 | 83 | subprocess.check_call(["update-rc.d", service_name, "disable"]) | ||
3921 | 84 | else: | ||
3922 | 85 | raise ValueError( | ||
3923 | 86 | "Unable to detect {0} as SystemD, Upstart {1} or" | ||
3924 | 87 | " SysV {2}".format( | ||
3925 | 88 | service_name, upstart_file, sysv_file)) | ||
3926 | 78 | return stopped | 89 | return stopped |
3927 | 79 | 90 | ||
3928 | 80 | 91 | ||
3930 | 81 | def service_resume(service_name, init_dir=None): | 92 | def service_resume(service_name, init_dir="/etc/init", |
3931 | 93 | initd_dir="/etc/init.d"): | ||
3932 | 82 | """Resume a system service. | 94 | """Resume a system service. |
3933 | 83 | 95 | ||
3934 | 84 | Reenable starting again at boot. Start the service""" | 96 | Reenable starting again at boot. Start the service""" |
3943 | 85 | # XXX: Support systemd too | 97 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
3944 | 86 | if init_dir is None: | 98 | sysv_file = os.path.join(initd_dir, service_name) |
3945 | 87 | init_dir = "/etc/init" | 99 | if init_is_systemd(): |
3946 | 88 | override_path = os.path.join( | 100 | service('enable', service_name) |
3947 | 89 | init_dir, '{}.override'.format(service_name)) | 101 | elif os.path.exists(upstart_file): |
3948 | 90 | if os.path.exists(override_path): | 102 | override_path = os.path.join( |
3949 | 91 | os.unlink(override_path) | 103 | init_dir, '{}.override'.format(service_name)) |
3950 | 92 | started = service_start(service_name) | 104 | if os.path.exists(override_path): |
3951 | 105 | os.unlink(override_path) | ||
3952 | 106 | elif os.path.exists(sysv_file): | ||
3953 | 107 | subprocess.check_call(["update-rc.d", service_name, "enable"]) | ||
3954 | 108 | else: | ||
3955 | 109 | raise ValueError( | ||
3956 | 110 | "Unable to detect {0} as SystemD, Upstart {1} or" | ||
3957 | 111 | " SysV {2}".format( | ||
3958 | 112 | service_name, upstart_file, sysv_file)) | ||
3959 | 113 | |||
3960 | 114 | started = service_running(service_name) | ||
3961 | 115 | if not started: | ||
3962 | 116 | started = service_start(service_name) | ||
3963 | 93 | return started | 117 | return started |
3964 | 94 | 118 | ||
3965 | 95 | 119 | ||
3966 | 96 | def service(action, service_name): | 120 | def service(action, service_name): |
3967 | 97 | """Control a system service""" | 121 | """Control a system service""" |
3969 | 98 | cmd = ['service', service_name, action] | 122 | if init_is_systemd(): |
3970 | 123 | cmd = ['systemctl', action, service_name] | ||
3971 | 124 | else: | ||
3972 | 125 | cmd = ['service', service_name, action] | ||
3973 | 99 | return subprocess.call(cmd) == 0 | 126 | return subprocess.call(cmd) == 0 |
3974 | 100 | 127 | ||
3975 | 101 | 128 | ||
3977 | 102 | def service_running(service): | 129 | def service_running(service_name): |
3978 | 103 | """Determine whether a system service is running""" | 130 | """Determine whether a system service is running""" |
3985 | 104 | try: | 131 | if init_is_systemd(): |
3986 | 105 | output = subprocess.check_output( | 132 | return service('is-active', service_name) |
3981 | 106 | ['service', service, 'status'], | ||
3982 | 107 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
3983 | 108 | except subprocess.CalledProcessError: | ||
3984 | 109 | return False | ||
3987 | 110 | else: | 133 | else: |
3991 | 111 | if ("start/running" in output or "is running" in output): | 134 | try: |
3992 | 112 | return True | 135 | output = subprocess.check_output( |
3993 | 113 | else: | 136 | ['service', service_name, 'status'], |
3994 | 137 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
3995 | 138 | except subprocess.CalledProcessError: | ||
3996 | 114 | return False | 139 | return False |
3997 | 140 | else: | ||
3998 | 141 | if ("start/running" in output or "is running" in output): | ||
3999 | 142 | return True | ||
4000 | 143 | else: | ||
4001 | 144 | return False | ||
4002 | 115 | 145 | ||
4003 | 116 | 146 | ||
4004 | 117 | def service_available(service_name): | 147 | def service_available(service_name): |
4005 | @@ -126,8 +156,29 @@ | |||
4006 | 126 | return True | 156 | return True |
4007 | 127 | 157 | ||
4008 | 128 | 158 | ||
4011 | 129 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | 159 | SYSTEMD_SYSTEM = '/run/systemd/system' |
4012 | 130 | """Add a user to the system""" | 160 | |
4013 | 161 | |||
4014 | 162 | def init_is_systemd(): | ||
4015 | 163 | return os.path.isdir(SYSTEMD_SYSTEM) | ||
4016 | 164 | |||
4017 | 165 | |||
4018 | 166 | def adduser(username, password=None, shell='/bin/bash', system_user=False, | ||
4019 | 167 | primary_group=None, secondary_groups=None): | ||
4020 | 168 | """ | ||
4021 | 169 | Add a user to the system. | ||
4022 | 170 | |||
4023 | 171 | Will log but otherwise succeed if the user already exists. | ||
4024 | 172 | |||
4025 | 173 | :param str username: Username to create | ||
4026 | 174 | :param str password: Password for user; if ``None``, create a system user | ||
4027 | 175 | :param str shell: The default shell for the user | ||
4028 | 176 | :param bool system_user: Whether to create a login or system user | ||
4029 | 177 | :param str primary_group: Primary group for user; defaults to their username | ||
4030 | 178 | :param list secondary_groups: Optional list of additional groups | ||
4031 | 179 | |||
4032 | 180 | :returns: The password database entry struct, as returned by `pwd.getpwnam` | ||
4033 | 181 | """ | ||
4034 | 131 | try: | 182 | try: |
4035 | 132 | user_info = pwd.getpwnam(username) | 183 | user_info = pwd.getpwnam(username) |
4036 | 133 | log('user {0} already exists!'.format(username)) | 184 | log('user {0} already exists!'.format(username)) |
4037 | @@ -142,6 +193,16 @@ | |||
4038 | 142 | '--shell', shell, | 193 | '--shell', shell, |
4039 | 143 | '--password', password, | 194 | '--password', password, |
4040 | 144 | ]) | 195 | ]) |
4041 | 196 | if not primary_group: | ||
4042 | 197 | try: | ||
4043 | 198 | grp.getgrnam(username) | ||
4044 | 199 | primary_group = username # avoid "group exists" error | ||
4045 | 200 | except KeyError: | ||
4046 | 201 | pass | ||
4047 | 202 | if primary_group: | ||
4048 | 203 | cmd.extend(['-g', primary_group]) | ||
4049 | 204 | if secondary_groups: | ||
4050 | 205 | cmd.extend(['-G', ','.join(secondary_groups)]) | ||
4051 | 145 | cmd.append(username) | 206 | cmd.append(username) |
4052 | 146 | subprocess.check_call(cmd) | 207 | subprocess.check_call(cmd) |
4053 | 147 | user_info = pwd.getpwnam(username) | 208 | user_info = pwd.getpwnam(username) |
4054 | @@ -550,7 +611,14 @@ | |||
4055 | 550 | os.chdir(cur) | 611 | os.chdir(cur) |
4056 | 551 | 612 | ||
4057 | 552 | 613 | ||
4059 | 553 | def chownr(path, owner, group, follow_links=True): | 614 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): |
4060 | 615 | """ | ||
4061 | 616 | Recursively change user and group ownership of files and directories | ||
4062 | 617 | in given path. Doesn't chown path itself by default, only its children. | ||
4063 | 618 | |||
4064 | 619 | :param bool follow_links: Also Chown links if True | ||
4065 | 620 | :param bool chowntopdir: Also chown path itself if True | ||
4066 | 621 | """ | ||
4067 | 554 | uid = pwd.getpwnam(owner).pw_uid | 622 | uid = pwd.getpwnam(owner).pw_uid |
4068 | 555 | gid = grp.getgrnam(group).gr_gid | 623 | gid = grp.getgrnam(group).gr_gid |
4069 | 556 | if follow_links: | 624 | if follow_links: |
4070 | @@ -558,6 +626,10 @@ | |||
4071 | 558 | else: | 626 | else: |
4072 | 559 | chown = os.lchown | 627 | chown = os.lchown |
4073 | 560 | 628 | ||
4074 | 629 | if chowntopdir: | ||
4075 | 630 | broken_symlink = os.path.lexists(path) and not os.path.exists(path) | ||
4076 | 631 | if not broken_symlink: | ||
4077 | 632 | chown(path, uid, gid) | ||
4078 | 561 | for root, dirs, files in os.walk(path): | 633 | for root, dirs, files in os.walk(path): |
4079 | 562 | for name in dirs + files: | 634 | for name in dirs + files: |
4080 | 563 | full = os.path.join(root, name) | 635 | full = os.path.join(root, name) |
4081 | @@ -568,3 +640,19 @@ | |||
4082 | 568 | 640 | ||
4083 | 569 | def lchownr(path, owner, group): | 641 | def lchownr(path, owner, group): |
4084 | 570 | chownr(path, owner, group, follow_links=False) | 642 | chownr(path, owner, group, follow_links=False) |
4085 | 643 | |||
4086 | 644 | |||
4087 | 645 | def get_total_ram(): | ||
4088 | 646 | '''The total amount of system RAM in bytes. | ||
4089 | 647 | |||
4090 | 648 | This is what is reported by the OS, and may be overcommitted when | ||
4091 | 649 | there are multiple containers hosted on the same machine. | ||
4092 | 650 | ''' | ||
4093 | 651 | with open('/proc/meminfo', 'r') as f: | ||
4094 | 652 | for line in f.readlines(): | ||
4095 | 653 | if line: | ||
4096 | 654 | key, value, unit = line.split() | ||
4097 | 655 | if key == 'MemTotal:': | ||
4098 | 656 | assert unit == 'kB', 'Unknown unit' | ||
4099 | 657 | return int(value) * 1024 # Classic, not KiB. | ||
4100 | 658 | raise NotImplementedError() | ||
4101 | 571 | 659 | ||
4102 | === added file 'hooks/charmhelpers/core/hugepage.py' | |||
4103 | --- hooks/charmhelpers/core/hugepage.py 1970-01-01 00:00:00 +0000 | |||
4104 | +++ hooks/charmhelpers/core/hugepage.py 2016-02-18 14:28:13 +0000 | |||
4105 | @@ -0,0 +1,71 @@ | |||
4106 | 1 | # -*- coding: utf-8 -*- | ||
4107 | 2 | |||
4108 | 3 | # Copyright 2014-2015 Canonical Limited. | ||
4109 | 4 | # | ||
4110 | 5 | # This file is part of charm-helpers. | ||
4111 | 6 | # | ||
4112 | 7 | # charm-helpers is free software: you can redistribute it and/or modify | ||
4113 | 8 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
4114 | 9 | # published by the Free Software Foundation. | ||
4115 | 10 | # | ||
4116 | 11 | # charm-helpers is distributed in the hope that it will be useful, | ||
4117 | 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
4118 | 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
4119 | 14 | # GNU Lesser General Public License for more details. | ||
4120 | 15 | # | ||
4121 | 16 | # You should have received a copy of the GNU Lesser General Public License | ||
4122 | 17 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
4123 | 18 | |||
4124 | 19 | import yaml | ||
4125 | 20 | from charmhelpers.core import fstab | ||
4126 | 21 | from charmhelpers.core import sysctl | ||
4127 | 22 | from charmhelpers.core.host import ( | ||
4128 | 23 | add_group, | ||
4129 | 24 | add_user_to_group, | ||
4130 | 25 | fstab_mount, | ||
4131 | 26 | mkdir, | ||
4132 | 27 | ) | ||
4133 | 28 | from charmhelpers.core.strutils import bytes_from_string | ||
4134 | 29 | from subprocess import check_output | ||
4135 | 30 | |||
4136 | 31 | |||
4137 | 32 | def hugepage_support(user, group='hugetlb', nr_hugepages=256, | ||
4138 | 33 | max_map_count=65536, mnt_point='/run/hugepages/kvm', | ||
4139 | 34 | pagesize='2MB', mount=True, set_shmmax=False): | ||
4140 | 35 | """Enable hugepages on system. | ||
4141 | 36 | |||
4142 | 37 | Args: | ||
4143 | 38 | user (str) -- Username to allow access to hugepages to | ||
4144 | 39 | group (str) -- Group name to own hugepages | ||
4145 | 40 | nr_hugepages (int) -- Number of pages to reserve | ||
4146 | 41 | max_map_count (int) -- Number of Virtual Memory Areas a process can own | ||
4147 | 42 | mnt_point (str) -- Directory to mount hugepages on | ||
4148 | 43 | pagesize (str) -- Size of hugepages | ||
4149 | 44 | mount (bool) -- Whether to Mount hugepages | ||
4150 | 45 | """ | ||
4151 | 46 | group_info = add_group(group) | ||
4152 | 47 | gid = group_info.gr_gid | ||
4153 | 48 | add_user_to_group(user, group) | ||
4154 | 49 | if max_map_count < 2 * nr_hugepages: | ||
4155 | 50 | max_map_count = 2 * nr_hugepages | ||
4156 | 51 | sysctl_settings = { | ||
4157 | 52 | 'vm.nr_hugepages': nr_hugepages, | ||
4158 | 53 | 'vm.max_map_count': max_map_count, | ||
4159 | 54 | 'vm.hugetlb_shm_group': gid, | ||
4160 | 55 | } | ||
4161 | 56 | if set_shmmax: | ||
4162 | 57 | shmmax_current = int(check_output(['sysctl', '-n', 'kernel.shmmax'])) | ||
4163 | 58 | shmmax_minsize = bytes_from_string(pagesize) * nr_hugepages | ||
4164 | 59 | if shmmax_minsize > shmmax_current: | ||
4165 | 60 | sysctl_settings['kernel.shmmax'] = shmmax_minsize | ||
4166 | 61 | sysctl.create(yaml.dump(sysctl_settings), '/etc/sysctl.d/10-hugepage.conf') | ||
4167 | 62 | mkdir(mnt_point, owner='root', group='root', perms=0o755, force=False) | ||
4168 | 63 | lfstab = fstab.Fstab() | ||
4169 | 64 | fstab_entry = lfstab.get_entry_by_attr('mountpoint', mnt_point) | ||
4170 | 65 | if fstab_entry: | ||
4171 | 66 | lfstab.remove_entry(fstab_entry) | ||
4172 | 67 | entry = lfstab.Entry('nodev', mnt_point, 'hugetlbfs', | ||
4173 | 68 | 'mode=1770,gid={},pagesize={}'.format(gid, pagesize), 0, 0) | ||
4174 | 69 | lfstab.add_entry(entry) | ||
4175 | 70 | if mount: | ||
4176 | 71 | fstab_mount(mnt_point) | ||
4177 | 0 | 72 | ||
4178 | === removed file 'hooks/charmhelpers/core/hugepage.py' | |||
4179 | --- hooks/charmhelpers/core/hugepage.py 2015-08-19 13:51:03 +0000 | |||
4180 | +++ hooks/charmhelpers/core/hugepage.py 1970-01-01 00:00:00 +0000 | |||
4181 | @@ -1,62 +0,0 @@ | |||
4182 | 1 | # -*- coding: utf-8 -*- | ||
4183 | 2 | |||
4184 | 3 | # Copyright 2014-2015 Canonical Limited. | ||
4185 | 4 | # | ||
4186 | 5 | # This file is part of charm-helpers. | ||
4187 | 6 | # | ||
4188 | 7 | # charm-helpers is free software: you can redistribute it and/or modify | ||
4189 | 8 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
4190 | 9 | # published by the Free Software Foundation. | ||
4191 | 10 | # | ||
4192 | 11 | # charm-helpers is distributed in the hope that it will be useful, | ||
4193 | 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
4194 | 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
4195 | 14 | # GNU Lesser General Public License for more details. | ||
4196 | 15 | # | ||
4197 | 16 | # You should have received a copy of the GNU Lesser General Public License | ||
4198 | 17 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
4199 | 18 | |||
4200 | 19 | import yaml | ||
4201 | 20 | from charmhelpers.core import fstab | ||
4202 | 21 | from charmhelpers.core import sysctl | ||
4203 | 22 | from charmhelpers.core.host import ( | ||
4204 | 23 | add_group, | ||
4205 | 24 | add_user_to_group, | ||
4206 | 25 | fstab_mount, | ||
4207 | 26 | mkdir, | ||
4208 | 27 | ) | ||
4209 | 28 | |||
4210 | 29 | |||
4211 | 30 | def hugepage_support(user, group='hugetlb', nr_hugepages=256, | ||
4212 | 31 | max_map_count=65536, mnt_point='/run/hugepages/kvm', | ||
4213 | 32 | pagesize='2MB', mount=True): | ||
4214 | 33 | """Enable hugepages on system. | ||
4215 | 34 | |||
4216 | 35 | Args: | ||
4217 | 36 | user (str) -- Username to allow access to hugepages to | ||
4218 | 37 | group (str) -- Group name to own hugepages | ||
4219 | 38 | nr_hugepages (int) -- Number of pages to reserve | ||
4220 | 39 | max_map_count (int) -- Number of Virtual Memory Areas a process can own | ||
4221 | 40 | mnt_point (str) -- Directory to mount hugepages on | ||
4222 | 41 | pagesize (str) -- Size of hugepages | ||
4223 | 42 | mount (bool) -- Whether to Mount hugepages | ||
4224 | 43 | """ | ||
4225 | 44 | group_info = add_group(group) | ||
4226 | 45 | gid = group_info.gr_gid | ||
4227 | 46 | add_user_to_group(user, group) | ||
4228 | 47 | sysctl_settings = { | ||
4229 | 48 | 'vm.nr_hugepages': nr_hugepages, | ||
4230 | 49 | 'vm.max_map_count': max_map_count, | ||
4231 | 50 | 'vm.hugetlb_shm_group': gid, | ||
4232 | 51 | } | ||
4233 | 52 | sysctl.create(yaml.dump(sysctl_settings), '/etc/sysctl.d/10-hugepage.conf') | ||
4234 | 53 | mkdir(mnt_point, owner='root', group='root', perms=0o755, force=False) | ||
4235 | 54 | lfstab = fstab.Fstab() | ||
4236 | 55 | fstab_entry = lfstab.get_entry_by_attr('mountpoint', mnt_point) | ||
4237 | 56 | if fstab_entry: | ||
4238 | 57 | lfstab.remove_entry(fstab_entry) | ||
4239 | 58 | entry = lfstab.Entry('nodev', mnt_point, 'hugetlbfs', | ||
4240 | 59 | 'mode=1770,gid={},pagesize={}'.format(gid, pagesize), 0, 0) | ||
4241 | 60 | lfstab.add_entry(entry) | ||
4242 | 61 | if mount: | ||
4243 | 62 | fstab_mount(mnt_point) | ||
4244 | 63 | 0 | ||
4245 | === added file 'hooks/charmhelpers/core/kernel.py' | |||
4246 | --- hooks/charmhelpers/core/kernel.py 1970-01-01 00:00:00 +0000 | |||
4247 | +++ hooks/charmhelpers/core/kernel.py 2016-02-18 14:28:13 +0000 | |||
4248 | @@ -0,0 +1,68 @@ | |||
4249 | 1 | #!/usr/bin/env python | ||
4250 | 2 | # -*- coding: utf-8 -*- | ||
4251 | 3 | |||
4252 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
4253 | 5 | # | ||
4254 | 6 | # This file is part of charm-helpers. | ||
4255 | 7 | # | ||
4256 | 8 | # charm-helpers is free software: you can redistribute it and/or modify | ||
4257 | 9 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
4258 | 10 | # published by the Free Software Foundation. | ||
4259 | 11 | # | ||
4260 | 12 | # charm-helpers is distributed in the hope that it will be useful, | ||
4261 | 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
4262 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
4263 | 15 | # GNU Lesser General Public License for more details. | ||
4264 | 16 | # | ||
4265 | 17 | # You should have received a copy of the GNU Lesser General Public License | ||
4266 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
4267 | 19 | |||
4268 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
4269 | 21 | |||
4270 | 22 | from charmhelpers.core.hookenv import ( | ||
4271 | 23 | log, | ||
4272 | 24 | INFO | ||
4273 | 25 | ) | ||
4274 | 26 | |||
4275 | 27 | from subprocess import check_call, check_output | ||
4276 | 28 | import re | ||
4277 | 29 | |||
4278 | 30 | |||
4279 | 31 | def modprobe(module, persist=True): | ||
4280 | 32 | """Load a kernel module and configure for auto-load on reboot.""" | ||
4281 | 33 | cmd = ['modprobe', module] | ||
4282 | 34 | |||
4283 | 35 | log('Loading kernel module %s' % module, level=INFO) | ||
4284 | 36 | |||
4285 | 37 | check_call(cmd) | ||
4286 | 38 | if persist: | ||
4287 | 39 | with open('/etc/modules', 'r+') as modules: | ||
4288 | 40 | if module not in modules.read(): | ||
4289 | 41 | modules.write(module) | ||
4290 | 42 | |||
4291 | 43 | |||
4292 | 44 | def rmmod(module, force=False): | ||
4293 | 45 | """Remove a module from the linux kernel""" | ||
4294 | 46 | cmd = ['rmmod'] | ||
4295 | 47 | if force: | ||
4296 | 48 | cmd.append('-f') | ||
4297 | 49 | cmd.append(module) | ||
4298 | 50 | log('Removing kernel module %s' % module, level=INFO) | ||
4299 | 51 | return check_call(cmd) | ||
4300 | 52 | |||
4301 | 53 | |||
4302 | 54 | def lsmod(): | ||
4303 | 55 | """Shows what kernel modules are currently loaded""" | ||
4304 | 56 | return check_output(['lsmod'], | ||
4305 | 57 | universal_newlines=True) | ||
4306 | 58 | |||
4307 | 59 | |||
4308 | 60 | def is_module_loaded(module): | ||
4309 | 61 | """Checks if a kernel module is already loaded""" | ||
4310 | 62 | matches = re.findall('^%s[ ]+' % module, lsmod(), re.M) | ||
4311 | 63 | return len(matches) > 0 | ||
4312 | 64 | |||
4313 | 65 | |||
4314 | 66 | def update_initramfs(version='all'): | ||
4315 | 67 | """Updates an initramfs image""" | ||
4316 | 68 | return check_call(["update-initramfs", "-k", version, "-u"]) | ||
4317 | 0 | 69 | ||
4318 | === modified file 'hooks/charmhelpers/core/services/helpers.py' | |||
4319 | --- hooks/charmhelpers/core/services/helpers.py 2015-08-18 17:34:36 +0000 | |||
4320 | +++ hooks/charmhelpers/core/services/helpers.py 2016-02-18 14:28:13 +0000 | |||
4321 | @@ -243,33 +243,40 @@ | |||
4322 | 243 | :param str source: The template source file, relative to | 243 | :param str source: The template source file, relative to |
4323 | 244 | `$CHARM_DIR/templates` | 244 | `$CHARM_DIR/templates` |
4324 | 245 | 245 | ||
4326 | 246 | :param str target: The target to write the rendered template to | 246 | :param str target: The target to write the rendered template to (or None) |
4327 | 247 | :param str owner: The owner of the rendered file | 247 | :param str owner: The owner of the rendered file |
4328 | 248 | :param str group: The group of the rendered file | 248 | :param str group: The group of the rendered file |
4329 | 249 | :param int perms: The permissions of the rendered file | 249 | :param int perms: The permissions of the rendered file |
4330 | 250 | :param partial on_change_action: functools partial to be executed when | 250 | :param partial on_change_action: functools partial to be executed when |
4331 | 251 | rendered file changes | 251 | rendered file changes |
4332 | 252 | :param jinja2 loader template_loader: A jinja2 template loader | ||
4333 | 253 | |||
4334 | 254 | :return str: The rendered template | ||
4335 | 252 | """ | 255 | """ |
4336 | 253 | def __init__(self, source, target, | 256 | def __init__(self, source, target, |
4337 | 254 | owner='root', group='root', perms=0o444, | 257 | owner='root', group='root', perms=0o444, |
4339 | 255 | on_change_action=None): | 258 | on_change_action=None, template_loader=None): |
4340 | 256 | self.source = source | 259 | self.source = source |
4341 | 257 | self.target = target | 260 | self.target = target |
4342 | 258 | self.owner = owner | 261 | self.owner = owner |
4343 | 259 | self.group = group | 262 | self.group = group |
4344 | 260 | self.perms = perms | 263 | self.perms = perms |
4345 | 261 | self.on_change_action = on_change_action | 264 | self.on_change_action = on_change_action |
4346 | 265 | self.template_loader = template_loader | ||
4347 | 262 | 266 | ||
4348 | 263 | def __call__(self, manager, service_name, event_name): | 267 | def __call__(self, manager, service_name, event_name): |
4349 | 264 | pre_checksum = '' | 268 | pre_checksum = '' |
4350 | 265 | if self.on_change_action and os.path.isfile(self.target): | 269 | if self.on_change_action and os.path.isfile(self.target): |
4351 | 266 | pre_checksum = host.file_hash(self.target) | 270 | pre_checksum = host.file_hash(self.target) |
4352 | 267 | service = manager.get_service(service_name) | 271 | service = manager.get_service(service_name) |
4354 | 268 | context = {} | 272 | context = {'ctx': {}} |
4355 | 269 | for ctx in service.get('required_data', []): | 273 | for ctx in service.get('required_data', []): |
4356 | 270 | context.update(ctx) | 274 | context.update(ctx) |
4359 | 271 | templating.render(self.source, self.target, context, | 275 | context['ctx'].update(ctx) |
4360 | 272 | self.owner, self.group, self.perms) | 276 | |
4361 | 277 | result = templating.render(self.source, self.target, context, | ||
4362 | 278 | self.owner, self.group, self.perms, | ||
4363 | 279 | template_loader=self.template_loader) | ||
4364 | 273 | if self.on_change_action: | 280 | if self.on_change_action: |
4365 | 274 | if pre_checksum == host.file_hash(self.target): | 281 | if pre_checksum == host.file_hash(self.target): |
4366 | 275 | hookenv.log( | 282 | hookenv.log( |
4367 | @@ -278,6 +285,8 @@ | |||
4368 | 278 | else: | 285 | else: |
4369 | 279 | self.on_change_action() | 286 | self.on_change_action() |
4370 | 280 | 287 | ||
4371 | 288 | return result | ||
4372 | 289 | |||
4373 | 281 | 290 | ||
4374 | 282 | # Convenience aliases for templates | 291 | # Convenience aliases for templates |
4375 | 283 | render_template = template = TemplateCallback | 292 | render_template = template = TemplateCallback |
4376 | 284 | 293 | ||
4377 | === modified file 'hooks/charmhelpers/core/strutils.py' | |||
4378 | --- hooks/charmhelpers/core/strutils.py 2015-04-16 20:24:28 +0000 | |||
4379 | +++ hooks/charmhelpers/core/strutils.py 2016-02-18 14:28:13 +0000 | |||
4380 | @@ -18,6 +18,7 @@ | |||
4381 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
4382 | 19 | 19 | ||
4383 | 20 | import six | 20 | import six |
4384 | 21 | import re | ||
4385 | 21 | 22 | ||
4386 | 22 | 23 | ||
4387 | 23 | def bool_from_string(value): | 24 | def bool_from_string(value): |
4388 | @@ -40,3 +41,32 @@ | |||
4389 | 40 | 41 | ||
4390 | 41 | msg = "Unable to interpret string value '%s' as boolean" % (value) | 42 | msg = "Unable to interpret string value '%s' as boolean" % (value) |
4391 | 42 | raise ValueError(msg) | 43 | raise ValueError(msg) |
4392 | 44 | |||
4393 | 45 | |||
4394 | 46 | def bytes_from_string(value): | ||
4395 | 47 | """Interpret human readable string value as bytes. | ||
4396 | 48 | |||
4397 | 49 | Returns int | ||
4398 | 50 | """ | ||
4399 | 51 | BYTE_POWER = { | ||
4400 | 52 | 'K': 1, | ||
4401 | 53 | 'KB': 1, | ||
4402 | 54 | 'M': 2, | ||
4403 | 55 | 'MB': 2, | ||
4404 | 56 | 'G': 3, | ||
4405 | 57 | 'GB': 3, | ||
4406 | 58 | 'T': 4, | ||
4407 | 59 | 'TB': 4, | ||
4408 | 60 | 'P': 5, | ||
4409 | 61 | 'PB': 5, | ||
4410 | 62 | } | ||
4411 | 63 | if isinstance(value, six.string_types): | ||
4412 | 64 | value = six.text_type(value) | ||
4413 | 65 | else: | ||
4414 | 66 | msg = "Unable to interpret non-string value '%s' as bytes" % (value) | ||
4415 | 67 | raise ValueError(msg) | ||
4416 | 68 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) | ||
4417 | 69 | if not matches: | ||
4418 | 70 | msg = "Unable to interpret string value '%s' as bytes" % (value) | ||
4419 | 71 | raise ValueError(msg) | ||
4420 | 72 | return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) | ||
4421 | 43 | 73 | ||
4422 | === modified file 'hooks/charmhelpers/core/templating.py' | |||
4423 | --- hooks/charmhelpers/core/templating.py 2015-02-26 10:11:26 +0000 | |||
4424 | +++ hooks/charmhelpers/core/templating.py 2016-02-18 14:28:13 +0000 | |||
4425 | @@ -21,13 +21,14 @@ | |||
4426 | 21 | 21 | ||
4427 | 22 | 22 | ||
4428 | 23 | def render(source, target, context, owner='root', group='root', | 23 | def render(source, target, context, owner='root', group='root', |
4430 | 24 | perms=0o444, templates_dir=None, encoding='UTF-8'): | 24 | perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): |
4431 | 25 | """ | 25 | """ |
4432 | 26 | Render a template. | 26 | Render a template. |
4433 | 27 | 27 | ||
4434 | 28 | The `source` path, if not absolute, is relative to the `templates_dir`. | 28 | The `source` path, if not absolute, is relative to the `templates_dir`. |
4435 | 29 | 29 | ||
4437 | 30 | The `target` path should be absolute. | 30 | The `target` path should be absolute. It can also be `None`, in which |
4438 | 31 | case no file will be written. | ||
4439 | 31 | 32 | ||
4440 | 32 | The context should be a dict containing the values to be replaced in the | 33 | The context should be a dict containing the values to be replaced in the |
4441 | 33 | template. | 34 | template. |
4442 | @@ -36,6 +37,9 @@ | |||
4443 | 36 | 37 | ||
4444 | 37 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | 38 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
4445 | 38 | 39 | ||
4446 | 40 | The rendered template will be written to the file as well as being returned | ||
4447 | 41 | as a string. | ||
4448 | 42 | |||
4449 | 39 | Note: Using this requires python-jinja2; if it is not installed, calling | 43 | Note: Using this requires python-jinja2; if it is not installed, calling |
4450 | 40 | this will attempt to use charmhelpers.fetch.apt_install to install it. | 44 | this will attempt to use charmhelpers.fetch.apt_install to install it. |
4451 | 41 | """ | 45 | """ |
4452 | @@ -52,17 +56,26 @@ | |||
4453 | 52 | apt_install('python-jinja2', fatal=True) | 56 | apt_install('python-jinja2', fatal=True) |
4454 | 53 | from jinja2 import FileSystemLoader, Environment, exceptions | 57 | from jinja2 import FileSystemLoader, Environment, exceptions |
4455 | 54 | 58 | ||
4459 | 55 | if templates_dir is None: | 59 | if template_loader: |
4460 | 56 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | 60 | template_env = Environment(loader=template_loader) |
4461 | 57 | loader = Environment(loader=FileSystemLoader(templates_dir)) | 61 | else: |
4462 | 62 | if templates_dir is None: | ||
4463 | 63 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | ||
4464 | 64 | template_env = Environment(loader=FileSystemLoader(templates_dir)) | ||
4465 | 58 | try: | 65 | try: |
4466 | 59 | source = source | 66 | source = source |
4468 | 60 | template = loader.get_template(source) | 67 | template = template_env.get_template(source) |
4469 | 61 | except exceptions.TemplateNotFound as e: | 68 | except exceptions.TemplateNotFound as e: |
4470 | 62 | hookenv.log('Could not load template %s from %s.' % | 69 | hookenv.log('Could not load template %s from %s.' % |
4471 | 63 | (source, templates_dir), | 70 | (source, templates_dir), |
4472 | 64 | level=hookenv.ERROR) | 71 | level=hookenv.ERROR) |
4473 | 65 | raise e | 72 | raise e |
4474 | 66 | content = template.render(context) | 73 | content = template.render(context) |
4477 | 67 | host.mkdir(os.path.dirname(target), owner, group, perms=0o755) | 74 | if target is not None: |
4478 | 68 | host.write_file(target, content.encode(encoding), owner, group, perms) | 75 | target_dir = os.path.dirname(target) |
4479 | 76 | if not os.path.exists(target_dir): | ||
4480 | 77 | # This is a terrible default directory permission, as the file | ||
4481 | 78 | # or its siblings will often contain secrets. | ||
4482 | 79 | host.mkdir(os.path.dirname(target), owner, group, perms=0o755) | ||
4483 | 80 | host.write_file(target, content.encode(encoding), owner, group, perms) | ||
4484 | 81 | return content | ||
4485 | 69 | 82 | ||
4486 | === modified file 'hooks/charmhelpers/fetch/__init__.py' | |||
4487 | --- hooks/charmhelpers/fetch/__init__.py 2015-08-18 17:34:36 +0000 | |||
4488 | +++ hooks/charmhelpers/fetch/__init__.py 2016-02-18 14:28:13 +0000 | |||
4489 | @@ -98,6 +98,14 @@ | |||
4490 | 98 | 'liberty/proposed': 'trusty-proposed/liberty', | 98 | 'liberty/proposed': 'trusty-proposed/liberty', |
4491 | 99 | 'trusty-liberty/proposed': 'trusty-proposed/liberty', | 99 | 'trusty-liberty/proposed': 'trusty-proposed/liberty', |
4492 | 100 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', | 100 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', |
4493 | 101 | # Mitaka | ||
4494 | 102 | 'mitaka': 'trusty-updates/mitaka', | ||
4495 | 103 | 'trusty-mitaka': 'trusty-updates/mitaka', | ||
4496 | 104 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', | ||
4497 | 105 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', | ||
4498 | 106 | 'mitaka/proposed': 'trusty-proposed/mitaka', | ||
4499 | 107 | 'trusty-mitaka/proposed': 'trusty-proposed/mitaka', | ||
4500 | 108 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', | ||
4501 | 101 | } | 109 | } |
4502 | 102 | 110 | ||
4503 | 103 | # The order of this list is very important. Handlers should be listed in from | 111 | # The order of this list is very important. Handlers should be listed in from |
4504 | @@ -225,12 +233,12 @@ | |||
4505 | 225 | 233 | ||
4506 | 226 | def apt_mark(packages, mark, fatal=False): | 234 | def apt_mark(packages, mark, fatal=False): |
4507 | 227 | """Flag one or more packages using apt-mark""" | 235 | """Flag one or more packages using apt-mark""" |
4508 | 236 | log("Marking {} as {}".format(packages, mark)) | ||
4509 | 228 | cmd = ['apt-mark', mark] | 237 | cmd = ['apt-mark', mark] |
4510 | 229 | if isinstance(packages, six.string_types): | 238 | if isinstance(packages, six.string_types): |
4511 | 230 | cmd.append(packages) | 239 | cmd.append(packages) |
4512 | 231 | else: | 240 | else: |
4513 | 232 | cmd.extend(packages) | 241 | cmd.extend(packages) |
4514 | 233 | log("Holding {}".format(packages)) | ||
4515 | 234 | 242 | ||
4516 | 235 | if fatal: | 243 | if fatal: |
4517 | 236 | subprocess.check_call(cmd, universal_newlines=True) | 244 | subprocess.check_call(cmd, universal_newlines=True) |
4518 | @@ -411,7 +419,7 @@ | |||
4519 | 411 | importlib.import_module(package), | 419 | importlib.import_module(package), |
4520 | 412 | classname) | 420 | classname) |
4521 | 413 | plugin_list.append(handler_class()) | 421 | plugin_list.append(handler_class()) |
4523 | 414 | except (ImportError, AttributeError): | 422 | except NotImplementedError: |
4524 | 415 | # Skip missing plugins so that they can be omitted from | 423 | # Skip missing plugins so that they can be omitted from |
4525 | 416 | # installation if desired | 424 | # installation if desired |
4526 | 417 | log("FetchHandler {} not found, skipping plugin".format( | 425 | log("FetchHandler {} not found, skipping plugin".format( |
4527 | 418 | 426 | ||
4528 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
4529 | --- hooks/charmhelpers/fetch/archiveurl.py 2015-07-17 13:24:05 +0000 | |||
4530 | +++ hooks/charmhelpers/fetch/archiveurl.py 2016-02-18 14:28:13 +0000 | |||
4531 | @@ -108,7 +108,7 @@ | |||
4532 | 108 | install_opener(opener) | 108 | install_opener(opener) |
4533 | 109 | response = urlopen(source) | 109 | response = urlopen(source) |
4534 | 110 | try: | 110 | try: |
4536 | 111 | with open(dest, 'w') as dest_file: | 111 | with open(dest, 'wb') as dest_file: |
4537 | 112 | dest_file.write(response.read()) | 112 | dest_file.write(response.read()) |
4538 | 113 | except Exception as e: | 113 | except Exception as e: |
4539 | 114 | if os.path.isfile(dest): | 114 | if os.path.isfile(dest): |
4540 | 115 | 115 | ||
4541 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
4542 | --- hooks/charmhelpers/fetch/bzrurl.py 2015-01-26 09:46:38 +0000 | |||
4543 | +++ hooks/charmhelpers/fetch/bzrurl.py 2016-02-18 14:28:13 +0000 | |||
4544 | @@ -15,60 +15,50 @@ | |||
4545 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
4546 | 16 | 16 | ||
4547 | 17 | import os | 17 | import os |
4548 | 18 | from subprocess import check_call | ||
4549 | 18 | from charmhelpers.fetch import ( | 19 | from charmhelpers.fetch import ( |
4550 | 19 | BaseFetchHandler, | 20 | BaseFetchHandler, |
4552 | 20 | UnhandledSource | 21 | UnhandledSource, |
4553 | 22 | filter_installed_packages, | ||
4554 | 23 | apt_install, | ||
4555 | 21 | ) | 24 | ) |
4556 | 22 | from charmhelpers.core.host import mkdir | 25 | from charmhelpers.core.host import mkdir |
4557 | 23 | 26 | ||
4558 | 24 | import six | ||
4559 | 25 | if six.PY3: | ||
4560 | 26 | raise ImportError('bzrlib does not support Python3') | ||
4561 | 27 | 27 | ||
4570 | 28 | try: | 28 | if filter_installed_packages(['bzr']) != []: |
4571 | 29 | from bzrlib.branch import Branch | 29 | apt_install(['bzr']) |
4572 | 30 | from bzrlib import bzrdir, workingtree, errors | 30 | if filter_installed_packages(['bzr']) != []: |
4573 | 31 | except ImportError: | 31 | raise NotImplementedError('Unable to install bzr') |
4566 | 32 | from charmhelpers.fetch import apt_install | ||
4567 | 33 | apt_install("python-bzrlib") | ||
4568 | 34 | from bzrlib.branch import Branch | ||
4569 | 35 | from bzrlib import bzrdir, workingtree, errors | ||
4574 | 36 | 32 | ||
4575 | 37 | 33 | ||
4576 | 38 | class BzrUrlFetchHandler(BaseFetchHandler): | 34 | class BzrUrlFetchHandler(BaseFetchHandler): |
4577 | 39 | """Handler for bazaar branches via generic and lp URLs""" | 35 | """Handler for bazaar branches via generic and lp URLs""" |
4578 | 40 | def can_handle(self, source): | 36 | def can_handle(self, source): |
4579 | 41 | url_parts = self.parse_url(source) | 37 | url_parts = self.parse_url(source) |
4581 | 42 | if url_parts.scheme not in ('bzr+ssh', 'lp'): | 38 | if url_parts.scheme not in ('bzr+ssh', 'lp', ''): |
4582 | 43 | return False | 39 | return False |
4583 | 40 | elif not url_parts.scheme: | ||
4584 | 41 | return os.path.exists(os.path.join(source, '.bzr')) | ||
4585 | 44 | else: | 42 | else: |
4586 | 45 | return True | 43 | return True |
4587 | 46 | 44 | ||
4588 | 47 | def branch(self, source, dest): | 45 | def branch(self, source, dest): |
4589 | 48 | url_parts = self.parse_url(source) | ||
4590 | 49 | # If we use lp:branchname scheme we need to load plugins | ||
4591 | 50 | if not self.can_handle(source): | 46 | if not self.can_handle(source): |
4592 | 51 | raise UnhandledSource("Cannot handle {}".format(source)) | 47 | raise UnhandledSource("Cannot handle {}".format(source)) |
4607 | 52 | if url_parts.scheme == "lp": | 48 | if os.path.exists(dest): |
4608 | 53 | from bzrlib.plugin import load_plugins | 49 | check_call(['bzr', 'pull', '--overwrite', '-d', dest, source]) |
4609 | 54 | load_plugins() | 50 | else: |
4610 | 55 | try: | 51 | check_call(['bzr', 'branch', source, dest]) |
4597 | 56 | local_branch = bzrdir.BzrDir.create_branch_convenience(dest) | ||
4598 | 57 | except errors.AlreadyControlDirError: | ||
4599 | 58 | local_branch = Branch.open(dest) | ||
4600 | 59 | try: | ||
4601 | 60 | remote_branch = Branch.open(source) | ||
4602 | 61 | remote_branch.push(local_branch) | ||
4603 | 62 | tree = workingtree.WorkingTree.open(dest) | ||
4604 | 63 | tree.update() | ||
4605 | 64 | except Exception as e: | ||
4606 | 65 | raise e | ||
4611 | 66 | 52 | ||
4613 | 67 | def install(self, source): | 53 | def install(self, source, dest=None): |
4614 | 68 | url_parts = self.parse_url(source) | 54 | url_parts = self.parse_url(source) |
4615 | 69 | branch_name = url_parts.path.strip("/").split("/")[-1] | 55 | branch_name = url_parts.path.strip("/").split("/")[-1] |
4618 | 70 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 56 | if dest: |
4619 | 71 | branch_name) | 57 | dest_dir = os.path.join(dest, branch_name) |
4620 | 58 | else: | ||
4621 | 59 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | ||
4622 | 60 | branch_name) | ||
4623 | 61 | |||
4624 | 72 | if not os.path.exists(dest_dir): | 62 | if not os.path.exists(dest_dir): |
4625 | 73 | mkdir(dest_dir, perms=0o755) | 63 | mkdir(dest_dir, perms=0o755) |
4626 | 74 | try: | 64 | try: |
4627 | 75 | 65 | ||
4628 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
4629 | --- hooks/charmhelpers/fetch/giturl.py 2015-07-17 13:24:05 +0000 | |||
4630 | +++ hooks/charmhelpers/fetch/giturl.py 2016-02-18 14:28:13 +0000 | |||
4631 | @@ -15,24 +15,18 @@ | |||
4632 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
4633 | 16 | 16 | ||
4634 | 17 | import os | 17 | import os |
4635 | 18 | from subprocess import check_call | ||
4636 | 18 | from charmhelpers.fetch import ( | 19 | from charmhelpers.fetch import ( |
4637 | 19 | BaseFetchHandler, | 20 | BaseFetchHandler, |
4639 | 20 | UnhandledSource | 21 | UnhandledSource, |
4640 | 22 | filter_installed_packages, | ||
4641 | 23 | apt_install, | ||
4642 | 21 | ) | 24 | ) |
4657 | 22 | from charmhelpers.core.host import mkdir | 25 | |
4658 | 23 | 26 | if filter_installed_packages(['git']) != []: | |
4659 | 24 | import six | 27 | apt_install(['git']) |
4660 | 25 | if six.PY3: | 28 | if filter_installed_packages(['git']) != []: |
4661 | 26 | raise ImportError('GitPython does not support Python 3') | 29 | raise NotImplementedError('Unable to install git') |
4648 | 27 | |||
4649 | 28 | try: | ||
4650 | 29 | from git import Repo | ||
4651 | 30 | except ImportError: | ||
4652 | 31 | from charmhelpers.fetch import apt_install | ||
4653 | 32 | apt_install("python-git") | ||
4654 | 33 | from git import Repo | ||
4655 | 34 | |||
4656 | 35 | from git.exc import GitCommandError # noqa E402 | ||
4662 | 36 | 30 | ||
4663 | 37 | 31 | ||
4664 | 38 | class GitUrlFetchHandler(BaseFetchHandler): | 32 | class GitUrlFetchHandler(BaseFetchHandler): |
4665 | @@ -40,19 +34,24 @@ | |||
4666 | 40 | def can_handle(self, source): | 34 | def can_handle(self, source): |
4667 | 41 | url_parts = self.parse_url(source) | 35 | url_parts = self.parse_url(source) |
4668 | 42 | # TODO (mattyw) no support for ssh git@ yet | 36 | # TODO (mattyw) no support for ssh git@ yet |
4670 | 43 | if url_parts.scheme not in ('http', 'https', 'git'): | 37 | if url_parts.scheme not in ('http', 'https', 'git', ''): |
4671 | 44 | return False | 38 | return False |
4672 | 39 | elif not url_parts.scheme: | ||
4673 | 40 | return os.path.exists(os.path.join(source, '.git')) | ||
4674 | 45 | else: | 41 | else: |
4675 | 46 | return True | 42 | return True |
4676 | 47 | 43 | ||
4678 | 48 | def clone(self, source, dest, branch, depth=None): | 44 | def clone(self, source, dest, branch="master", depth=None): |
4679 | 49 | if not self.can_handle(source): | 45 | if not self.can_handle(source): |
4680 | 50 | raise UnhandledSource("Cannot handle {}".format(source)) | 46 | raise UnhandledSource("Cannot handle {}".format(source)) |
4681 | 51 | 47 | ||
4682 | 48 | if os.path.exists(dest): | ||
4683 | 49 | cmd = ['git', '-C', dest, 'pull', source, branch] | ||
4684 | 50 | else: | ||
4685 | 51 | cmd = ['git', 'clone', source, dest, '--branch', branch] | ||
4686 | 52 | if depth: | 52 | if depth: |
4690 | 53 | Repo.clone_from(source, dest, branch=branch, depth=depth) | 53 | cmd.extend(['--depth', depth]) |
4691 | 54 | else: | 54 | check_call(cmd) |
4689 | 55 | Repo.clone_from(source, dest, branch=branch) | ||
4692 | 56 | 55 | ||
4693 | 57 | def install(self, source, branch="master", dest=None, depth=None): | 56 | def install(self, source, branch="master", dest=None, depth=None): |
4694 | 58 | url_parts = self.parse_url(source) | 57 | url_parts = self.parse_url(source) |
4695 | @@ -62,12 +61,8 @@ | |||
4696 | 62 | else: | 61 | else: |
4697 | 63 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 62 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
4698 | 64 | branch_name) | 63 | branch_name) |
4699 | 65 | if not os.path.exists(dest_dir): | ||
4700 | 66 | mkdir(dest_dir, perms=0o755) | ||
4701 | 67 | try: | 64 | try: |
4702 | 68 | self.clone(source, dest_dir, branch, depth) | 65 | self.clone(source, dest_dir, branch, depth) |
4703 | 69 | except GitCommandError as e: | ||
4704 | 70 | raise UnhandledSource(e) | ||
4705 | 71 | except OSError as e: | 66 | except OSError as e: |
4706 | 72 | raise UnhandledSource(e.strerror) | 67 | raise UnhandledSource(e.strerror) |
4707 | 73 | return dest_dir | 68 | return dest_dir |
4708 | 74 | 69 | ||
4709 | === added file 'hooks/charmhelpers/payload/archive.py' | |||
4710 | --- hooks/charmhelpers/payload/archive.py 1970-01-01 00:00:00 +0000 | |||
4711 | +++ hooks/charmhelpers/payload/archive.py 2016-02-18 14:28:13 +0000 | |||
4712 | @@ -0,0 +1,73 @@ | |||
4713 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
4714 | 2 | # | ||
4715 | 3 | # This file is part of charm-helpers. | ||
4716 | 4 | # | ||
4717 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
4718 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
4719 | 7 | # published by the Free Software Foundation. | ||
4720 | 8 | # | ||
4721 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
4722 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
4723 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
4724 | 12 | # GNU Lesser General Public License for more details. | ||
4725 | 13 | # | ||
4726 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
4727 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
4728 | 16 | |||
4729 | 17 | import os | ||
4730 | 18 | import tarfile | ||
4731 | 19 | import zipfile | ||
4732 | 20 | from charmhelpers.core import ( | ||
4733 | 21 | host, | ||
4734 | 22 | hookenv, | ||
4735 | 23 | ) | ||
4736 | 24 | |||
4737 | 25 | |||
4738 | 26 | class ArchiveError(Exception): | ||
4739 | 27 | pass | ||
4740 | 28 | |||
4741 | 29 | |||
4742 | 30 | def get_archive_handler(archive_name): | ||
4743 | 31 | if os.path.isfile(archive_name): | ||
4744 | 32 | if tarfile.is_tarfile(archive_name): | ||
4745 | 33 | return extract_tarfile | ||
4746 | 34 | elif zipfile.is_zipfile(archive_name): | ||
4747 | 35 | return extract_zipfile | ||
4748 | 36 | else: | ||
4749 | 37 | # look at the file name | ||
4750 | 38 | for ext in ('.tar', '.tar.gz', '.tgz', 'tar.bz2', '.tbz2', '.tbz'): | ||
4751 | 39 | if archive_name.endswith(ext): | ||
4752 | 40 | return extract_tarfile | ||
4753 | 41 | for ext in ('.zip', '.jar'): | ||
4754 | 42 | if archive_name.endswith(ext): | ||
4755 | 43 | return extract_zipfile | ||
4756 | 44 | |||
4757 | 45 | |||
4758 | 46 | def archive_dest_default(archive_name): | ||
4759 | 47 | archive_file = os.path.basename(archive_name) | ||
4760 | 48 | return os.path.join(hookenv.charm_dir(), "archives", archive_file) | ||
4761 | 49 | |||
4762 | 50 | |||
4763 | 51 | def extract(archive_name, destpath=None): | ||
4764 | 52 | handler = get_archive_handler(archive_name) | ||
4765 | 53 | if handler: | ||
4766 | 54 | if not destpath: | ||
4767 | 55 | destpath = archive_dest_default(archive_name) | ||
4768 | 56 | if not os.path.isdir(destpath): | ||
4769 | 57 | host.mkdir(destpath) | ||
4770 | 58 | handler(archive_name, destpath) | ||
4771 | 59 | return destpath | ||
4772 | 60 | else: | ||
4773 | 61 | raise ArchiveError("No handler for archive") | ||
4774 | 62 | |||
4775 | 63 | |||
4776 | 64 | def extract_tarfile(archive_name, destpath): | ||
4777 | 65 | "Unpack a tar archive, optionally compressed" | ||
4778 | 66 | archive = tarfile.open(archive_name) | ||
4779 | 67 | archive.extractall(destpath) | ||
4780 | 68 | |||
4781 | 69 | |||
4782 | 70 | def extract_zipfile(archive_name, destpath): | ||
4783 | 71 | "Unpack a zip file" | ||
4784 | 72 | archive = zipfile.ZipFile(archive_name) | ||
4785 | 73 | archive.extractall(destpath) | ||
4786 | 0 | 74 | ||
4787 | === added symlink 'hooks/dashboard-plugin-relation-changed' | |||
4788 | === target is u'horizon_hooks.py' | |||
4789 | === removed symlink 'hooks/dashboard-plugin-relation-changed' | |||
4790 | === target was u'horizon_hooks.py' | |||
4791 | === added symlink 'hooks/dashboard-plugin-relation-joined' | |||
4792 | === target is u'horizon_hooks.py' | |||
4793 | === removed symlink 'hooks/dashboard-plugin-relation-joined' | |||
4794 | === target was u'horizon_hooks.py' | |||
4795 | === modified file 'hooks/horizon_hooks.py' | |||
4796 | --- hooks/horizon_hooks.py 2015-09-28 19:15:37 +0000 | |||
4797 | +++ hooks/horizon_hooks.py 2016-02-18 14:28:13 +0000 | |||
4798 | @@ -10,7 +10,8 @@ | |||
4799 | 10 | relation_set, | 10 | relation_set, |
4800 | 11 | relation_get, | 11 | relation_get, |
4801 | 12 | relation_ids, | 12 | relation_ids, |
4803 | 13 | unit_get | 13 | unit_get, |
4804 | 14 | status_set, | ||
4805 | 14 | ) | 15 | ) |
4806 | 15 | from charmhelpers.fetch import ( | 16 | from charmhelpers.fetch import ( |
4807 | 16 | apt_update, apt_install, | 17 | apt_update, apt_install, |
4808 | @@ -27,7 +28,8 @@ | |||
4809 | 27 | git_pip_venv_dir, | 28 | git_pip_venv_dir, |
4810 | 28 | openstack_upgrade_available, | 29 | openstack_upgrade_available, |
4811 | 29 | os_release, | 30 | os_release, |
4813 | 30 | save_script_rc | 31 | save_script_rc, |
4814 | 32 | set_os_workload_status, | ||
4815 | 31 | ) | 33 | ) |
4816 | 32 | from horizon_utils import ( | 34 | from horizon_utils import ( |
4817 | 33 | determine_packages, | 35 | determine_packages, |
4818 | @@ -40,7 +42,8 @@ | |||
4819 | 40 | git_install, | 42 | git_install, |
4820 | 41 | git_post_install_late, | 43 | git_post_install_late, |
4821 | 42 | setup_ipv6, | 44 | setup_ipv6, |
4823 | 43 | INSTALL_DIR | 45 | INSTALL_DIR, |
4824 | 46 | REQUIRED_INTERFACES, | ||
4825 | 44 | ) | 47 | ) |
4826 | 45 | from charmhelpers.contrib.network.ip import ( | 48 | from charmhelpers.contrib.network.ip import ( |
4827 | 46 | get_iface_for_address, | 49 | get_iface_for_address, |
4828 | @@ -70,7 +73,10 @@ | |||
4829 | 70 | if lsb_release()['DISTRIB_CODENAME'] == 'precise': | 73 | if lsb_release()['DISTRIB_CODENAME'] == 'precise': |
4830 | 71 | # Explicitly upgrade python-six Bug#1420708 | 74 | # Explicitly upgrade python-six Bug#1420708 |
4831 | 72 | apt_install('python-six', fatal=True) | 75 | apt_install('python-six', fatal=True) |
4833 | 73 | apt_install(filter_installed_packages(packages), fatal=True) | 76 | packages = filter_installed_packages(packages) |
4834 | 77 | if packages: | ||
4835 | 78 | status_set('maintenance', 'Installing packages') | ||
4836 | 79 | apt_install(packages, fatal=True) | ||
4837 | 74 | 80 | ||
4838 | 75 | git_install(config('openstack-origin-git')) | 81 | git_install(config('openstack-origin-git')) |
4839 | 76 | 82 | ||
4840 | @@ -108,6 +114,7 @@ | |||
4841 | 108 | git_install(config('openstack-origin-git')) | 114 | git_install(config('openstack-origin-git')) |
4842 | 109 | elif not config('action-managed-upgrade'): | 115 | elif not config('action-managed-upgrade'): |
4843 | 110 | if openstack_upgrade_available('openstack-dashboard'): | 116 | if openstack_upgrade_available('openstack-dashboard'): |
4844 | 117 | status_set('maintenance', 'Upgrading to new OpenStack release') | ||
4845 | 111 | do_openstack_upgrade(configs=CONFIGS) | 118 | do_openstack_upgrade(configs=CONFIGS) |
4846 | 112 | 119 | ||
4847 | 113 | env_vars = { | 120 | env_vars = { |
4848 | @@ -265,10 +272,12 @@ | |||
4849 | 265 | 272 | ||
4850 | 266 | 273 | ||
4851 | 267 | def main(): | 274 | def main(): |
4852 | 275 | print sys.argv | ||
4853 | 268 | try: | 276 | try: |
4854 | 269 | hooks.execute(sys.argv) | 277 | hooks.execute(sys.argv) |
4855 | 270 | except UnregisteredHookError as e: | 278 | except UnregisteredHookError as e: |
4856 | 271 | log('Unknown hook {} - skipping.'.format(e)) | 279 | log('Unknown hook {} - skipping.'.format(e)) |
4857 | 280 | set_os_workload_status(CONFIGS, REQUIRED_INTERFACES) | ||
4858 | 272 | 281 | ||
4859 | 273 | 282 | ||
4860 | 274 | if __name__ == '__main__': | 283 | if __name__ == '__main__': |
4861 | 275 | 284 | ||
4862 | === modified file 'hooks/horizon_utils.py' | |||
4863 | --- hooks/horizon_utils.py 2015-09-28 19:15:37 +0000 | |||
4864 | +++ hooks/horizon_utils.py 2016-02-18 14:28:13 +0000 | |||
4865 | @@ -72,6 +72,9 @@ | |||
4866 | 72 | 'zlib1g-dev', | 72 | 'zlib1g-dev', |
4867 | 73 | ] | 73 | ] |
4868 | 74 | 74 | ||
4869 | 75 | REQUIRED_INTERFACES = { | ||
4870 | 76 | 'identity': ['identity-service'], | ||
4871 | 77 | } | ||
4872 | 75 | # ubuntu packages that should not be installed when deploying from git | 78 | # ubuntu packages that should not be installed when deploying from git |
4873 | 76 | GIT_PACKAGE_BLACKLIST = [ | 79 | GIT_PACKAGE_BLACKLIST = [ |
4874 | 77 | 'openstack-dashboard', | 80 | 'openstack-dashboard', |
4875 | 78 | 81 | ||
4876 | === added symlink 'hooks/identity-service-relation-departed' | |||
4877 | === target is u'horizon_hooks.py' | |||
4878 | === added symlink 'hooks/install.real' | |||
4879 | === target is u'horizon_hooks.py' | |||
4880 | === removed symlink 'hooks/install.real' | |||
4881 | === target was u'horizon_hooks.py' | |||
4882 | === modified file 'metadata.yaml' | |||
4883 | --- metadata.yaml 2015-09-30 13:56:20 +0000 | |||
4884 | +++ metadata.yaml 2016-02-18 14:28:13 +0000 | |||
4885 | @@ -1,6 +1,6 @@ | |||
4886 | 1 | name: openstack-dashboard | 1 | name: openstack-dashboard |
4889 | 2 | summary: a Django web interface to OpenStack | 2 | summary: Web dashboard for OpenStack |
4890 | 3 | maintainer: Adam Gandelman <adamg@canonical.com> | 3 | maintainer: OpenStack Charmers <openstack-charmers@lists.ubuntu.com> |
4891 | 4 | description: | | 4 | description: | |
4892 | 5 | The OpenStack Dashboard provides a full feature web interface for interacting | 5 | The OpenStack Dashboard provides a full feature web interface for interacting |
4893 | 6 | with instances, images, volumes and networks within an OpenStack deployment. | 6 | with instances, images, volumes and networks within an OpenStack deployment. |
4894 | 7 | 7 | ||
4895 | === modified file 'templates/icehouse/local_settings.py' | |||
4896 | --- templates/icehouse/local_settings.py 2015-09-25 02:05:05 +0000 | |||
4897 | +++ templates/icehouse/local_settings.py 2016-02-18 14:28:13 +0000 | |||
4898 | @@ -213,7 +213,7 @@ | |||
4899 | 213 | # external to the OpenStack environment. The default is 'publicURL'. | 213 | # external to the OpenStack environment. The default is 'publicURL'. |
4900 | 214 | #OPENSTACK_ENDPOINT_TYPE = "publicURL" | 214 | #OPENSTACK_ENDPOINT_TYPE = "publicURL" |
4901 | 215 | {% if primary_endpoint -%} | 215 | {% if primary_endpoint -%} |
4903 | 216 | OPENSTACK_ENDPOINT_TYPE = {{ primary_endpoint }} | 216 | OPENSTACK_ENDPOINT_TYPE = "{{ primary_endpoint }}" |
4904 | 217 | {% endif -%} | 217 | {% endif -%} |
4905 | 218 | 218 | ||
4906 | 219 | # SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the | 219 | # SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the |
4907 | @@ -223,7 +223,7 @@ | |||
4908 | 223 | # value should differ from OPENSTACK_ENDPOINT_TYPE if used. | 223 | # value should differ from OPENSTACK_ENDPOINT_TYPE if used. |
4909 | 224 | #SECONDARY_ENDPOINT_TYPE = "publicURL" | 224 | #SECONDARY_ENDPOINT_TYPE = "publicURL" |
4910 | 225 | {% if secondary_endpoint -%} | 225 | {% if secondary_endpoint -%} |
4912 | 226 | SECONDARY_ENDPOINT_TYPE = {{ secondary_endpoint }} | 226 | SECONDARY_ENDPOINT_TYPE = "{{ secondary_endpoint }}" |
4913 | 227 | {% endif -%} | 227 | {% endif -%} |
4914 | 228 | 228 | ||
4915 | 229 | # The number of objects (Swift containers/objects or images) to display | 229 | # The number of objects (Swift containers/objects or images) to display |
4916 | @@ -521,4 +521,4 @@ | |||
4917 | 521 | # see https://docs.djangoproject.com/en/dev/ref/settings/. | 521 | # see https://docs.djangoproject.com/en/dev/ref/settings/. |
4918 | 522 | ALLOWED_HOSTS = '*' | 522 | ALLOWED_HOSTS = '*' |
4919 | 523 | 523 | ||
4920 | 524 | {{ settings|join('\n\n') }} | ||
4921 | 525 | \ No newline at end of file | 524 | \ No newline at end of file |
4922 | 525 | {{ settings|join('\n\n') }} | ||
4923 | 526 | 526 | ||
4924 | === modified file 'templates/juno/local_settings.py' | |||
4925 | --- templates/juno/local_settings.py 2015-09-25 02:05:05 +0000 | |||
4926 | +++ templates/juno/local_settings.py 2016-02-18 14:28:13 +0000 | |||
4927 | @@ -251,7 +251,7 @@ | |||
4928 | 251 | # external to the OpenStack environment. The default is 'publicURL'. | 251 | # external to the OpenStack environment. The default is 'publicURL'. |
4929 | 252 | #OPENSTACK_ENDPOINT_TYPE = "publicURL" | 252 | #OPENSTACK_ENDPOINT_TYPE = "publicURL" |
4930 | 253 | {% if primary_endpoint -%} | 253 | {% if primary_endpoint -%} |
4932 | 254 | OPENSTACK_ENDPOINT_TYPE = {{ primary_endpoint }} | 254 | OPENSTACK_ENDPOINT_TYPE = "{{ primary_endpoint }}" |
4933 | 255 | {% endif -%} | 255 | {% endif -%} |
4934 | 256 | 256 | ||
4935 | 257 | # SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the | 257 | # SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the |
4936 | @@ -261,7 +261,7 @@ | |||
4937 | 261 | # value should differ from OPENSTACK_ENDPOINT_TYPE if used. | 261 | # value should differ from OPENSTACK_ENDPOINT_TYPE if used. |
4938 | 262 | #SECONDARY_ENDPOINT_TYPE = "publicURL" | 262 | #SECONDARY_ENDPOINT_TYPE = "publicURL" |
4939 | 263 | {% if secondary_endpoint -%} | 263 | {% if secondary_endpoint -%} |
4941 | 264 | SECONDARY_ENDPOINT_TYPE = {{ secondary_endpoint }} | 264 | SECONDARY_ENDPOINT_TYPE = "{{ secondary_endpoint }}" |
4942 | 265 | {% endif -%} | 265 | {% endif -%} |
4943 | 266 | 266 | ||
4944 | 267 | # The number of objects (Swift containers/objects or images) to display | 267 | # The number of objects (Swift containers/objects or images) to display |
4945 | @@ -626,4 +626,4 @@ | |||
4946 | 626 | # see https://docs.djangoproject.com/en/dev/ref/settings/. | 626 | # see https://docs.djangoproject.com/en/dev/ref/settings/. |
4947 | 627 | ALLOWED_HOSTS = '*' | 627 | ALLOWED_HOSTS = '*' |
4948 | 628 | 628 | ||
4949 | 629 | {{ settings|join('\n\n') }} | ||
4950 | 630 | \ No newline at end of file | 629 | \ No newline at end of file |
4951 | 630 | {{ settings|join('\n\n') }} | ||
4952 | 631 | 631 | ||
4953 | === added file 'tests/018-basic-trusty-liberty' | |||
4954 | --- tests/018-basic-trusty-liberty 1970-01-01 00:00:00 +0000 | |||
4955 | +++ tests/018-basic-trusty-liberty 2016-02-18 14:28:13 +0000 | |||
4956 | @@ -0,0 +1,11 @@ | |||
4957 | 1 | #!/usr/bin/python | ||
4958 | 2 | |||
4959 | 3 | """Amulet tests on a basic openstack-dashboard deployment on trusty-liberty.""" | ||
4960 | 4 | |||
4961 | 5 | from basic_deployment import OpenstackDashboardBasicDeployment | ||
4962 | 6 | |||
4963 | 7 | if __name__ == '__main__': | ||
4964 | 8 | deployment = OpenstackDashboardBasicDeployment(series='trusty', | ||
4965 | 9 | openstack='cloud:trusty-liberty', | ||
4966 | 10 | source='cloud:trusty-updates/liberty') | ||
4967 | 11 | deployment.run_tests() | ||
4968 | 0 | 12 | ||
4969 | === added file 'tests/019-basic-trusty-mitaka' | |||
4970 | --- tests/019-basic-trusty-mitaka 1970-01-01 00:00:00 +0000 | |||
4971 | +++ tests/019-basic-trusty-mitaka 2016-02-18 14:28:13 +0000 | |||
4972 | @@ -0,0 +1,11 @@ | |||
4973 | 1 | #!/usr/bin/python | ||
4974 | 2 | |||
4975 | 3 | """Amulet tests on a basic openstack-dashboard deployment on trusty-mitaka.""" | ||
4976 | 4 | |||
4977 | 5 | from basic_deployment import OpenstackDashboardBasicDeployment | ||
4978 | 6 | |||
4979 | 7 | if __name__ == '__main__': | ||
4980 | 8 | deployment = OpenstackDashboardBasicDeployment(series='trusty', | ||
4981 | 9 | openstack='cloud:trusty-mitaka', | ||
4982 | 10 | source='cloud:trusty-updates/mitaka') | ||
4983 | 11 | deployment.run_tests() | ||
4984 | 0 | 12 | ||
4985 | === added file 'tests/020-basic-wily-liberty' | |||
4986 | --- tests/020-basic-wily-liberty 1970-01-01 00:00:00 +0000 | |||
4987 | +++ tests/020-basic-wily-liberty 2016-02-18 14:28:13 +0000 | |||
4988 | @@ -0,0 +1,9 @@ | |||
4989 | 1 | #!/usr/bin/python | ||
4990 | 2 | |||
4991 | 3 | """Amulet tests on a basic openstack-dashboard deployment on wily-liberty.""" | ||
4992 | 4 | |||
4993 | 5 | from basic_deployment import OpenstackDashboardBasicDeployment | ||
4994 | 6 | |||
4995 | 7 | if __name__ == '__main__': | ||
4996 | 8 | deployment = OpenstackDashboardBasicDeployment(series='wily') | ||
4997 | 9 | deployment.run_tests() | ||
4998 | 0 | 10 | ||
4999 | === added file 'tests/021-basic-xenial-mitaka' | |||
5000 | --- tests/021-basic-xenial-mitaka 1970-01-01 00:00:00 +0000 |
The diff has been truncated for viewing.
LGTM overall, with one item to fix before landing: the new `print sys.argv` debug statement added at the top of `main()` (diff line 4852, hooks/horizon_hooks.py) looks like leftover debugging — it writes to stdout on every hook invocation and uses Python 2 print-statement syntax, so it should be removed (or replaced with a `log(...)` call) prior to merge.