Merge lp:~corey.bryant/charms/trusty/percona-cluster/render into lp:~openstack-charmers-archive/charms/trusty/percona-cluster/trunk
- Trusty Tahr (14.04)
- render
- Merge into trunk
Proposed by
Corey Bryant
Status: Merged
Merged at revision: 42
Proposed branch: | lp:~corey.bryant/charms/trusty/percona-cluster/render |
Merge into: | lp:~openstack-charmers-archive/charms/trusty/percona-cluster/trunk |
Diff against target: |
995 lines (+246/-128), 15 files modified:
- hooks/charmhelpers/__init__.py (+22/-0)
- hooks/charmhelpers/contrib/hahelpers/cluster.py (+16/-7)
- hooks/charmhelpers/contrib/network/ip.py (+2/-2)
- hooks/charmhelpers/contrib/peerstorage/__init__.py (+4/-3)
- hooks/charmhelpers/core/fstab.py (+10/-8)
- hooks/charmhelpers/core/hookenv.py (+36/-16)
- hooks/charmhelpers/core/host.py (+52/-24)
- hooks/charmhelpers/core/services/helpers.py (+9/-5)
- hooks/charmhelpers/core/templating.py (+3/-2)
- hooks/charmhelpers/fetch/__init__.py (+13/-11)
- hooks/charmhelpers/fetch/archiveurl.py (+53/-16)
- hooks/charmhelpers/fetch/bzrurl.py (+5/-1)
- hooks/charmhelpers/fetch/giturl.py (+12/-5)
- hooks/percona_hooks.py (+2/-4)
- hooks/percona_utils.py (+7/-24)
To merge this branch: | bzr merge lp:~corey.bryant/charms/trusty/percona-cluster/render |
Related bugs: |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Liam Young (community) | Approve | | |
| OpenStack Charmers | Pending | | |
Review via email: mp+245040@code.launchpad.net
Commit message
Description of the change
This is just simplifying the code and using render() from charm-helpers instead of the roll-your-own render_template().
To post a comment you must log in.
Revision history for this message
Corey Bryant (corey.bryant) wrote : | # |
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/__init__.py' | |||
2 | --- hooks/charmhelpers/__init__.py 2013-09-03 16:52:02 +0000 | |||
3 | +++ hooks/charmhelpers/__init__.py 2014-12-17 21:09:26 +0000 | |||
4 | @@ -0,0 +1,22 @@ | |||
5 | 1 | # Bootstrap charm-helpers, installing its dependencies if necessary using | ||
6 | 2 | # only standard libraries. | ||
7 | 3 | import subprocess | ||
8 | 4 | import sys | ||
9 | 5 | |||
10 | 6 | try: | ||
11 | 7 | import six # flake8: noqa | ||
12 | 8 | except ImportError: | ||
13 | 9 | if sys.version_info.major == 2: | ||
14 | 10 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | ||
15 | 11 | else: | ||
16 | 12 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | ||
17 | 13 | import six # flake8: noqa | ||
18 | 14 | |||
19 | 15 | try: | ||
20 | 16 | import yaml # flake8: noqa | ||
21 | 17 | except ImportError: | ||
22 | 18 | if sys.version_info.major == 2: | ||
23 | 19 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | ||
24 | 20 | else: | ||
25 | 21 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | ||
26 | 22 | import yaml # flake8: noqa | ||
27 | 0 | 23 | ||
28 | === modified file 'hooks/charmhelpers/contrib/hahelpers/cluster.py' | |||
29 | --- hooks/charmhelpers/contrib/hahelpers/cluster.py 2014-10-22 10:34:07 +0000 | |||
30 | +++ hooks/charmhelpers/contrib/hahelpers/cluster.py 2014-12-17 21:09:26 +0000 | |||
31 | @@ -13,9 +13,10 @@ | |||
32 | 13 | 13 | ||
33 | 14 | import subprocess | 14 | import subprocess |
34 | 15 | import os | 15 | import os |
35 | 16 | |||
36 | 17 | from socket import gethostname as get_unit_hostname | 16 | from socket import gethostname as get_unit_hostname |
37 | 18 | 17 | ||
38 | 18 | import six | ||
39 | 19 | |||
40 | 19 | from charmhelpers.core.hookenv import ( | 20 | from charmhelpers.core.hookenv import ( |
41 | 20 | log, | 21 | log, |
42 | 21 | relation_ids, | 22 | relation_ids, |
43 | @@ -77,7 +78,7 @@ | |||
44 | 77 | "show", resource | 78 | "show", resource |
45 | 78 | ] | 79 | ] |
46 | 79 | try: | 80 | try: |
48 | 80 | status = subprocess.check_output(cmd) | 81 | status = subprocess.check_output(cmd).decode('UTF-8') |
49 | 81 | except subprocess.CalledProcessError: | 82 | except subprocess.CalledProcessError: |
50 | 82 | return False | 83 | return False |
51 | 83 | else: | 84 | else: |
52 | @@ -150,34 +151,42 @@ | |||
53 | 150 | return False | 151 | return False |
54 | 151 | 152 | ||
55 | 152 | 153 | ||
57 | 153 | def determine_api_port(public_port): | 154 | def determine_api_port(public_port, singlenode_mode=False): |
58 | 154 | ''' | 155 | ''' |
59 | 155 | Determine correct API server listening port based on | 156 | Determine correct API server listening port based on |
60 | 156 | existence of HTTPS reverse proxy and/or haproxy. | 157 | existence of HTTPS reverse proxy and/or haproxy. |
61 | 157 | 158 | ||
62 | 158 | public_port: int: standard public port for given service | 159 | public_port: int: standard public port for given service |
63 | 159 | 160 | ||
64 | 161 | singlenode_mode: boolean: Shuffle ports when only a single unit is present | ||
65 | 162 | |||
66 | 160 | returns: int: the correct listening port for the API service | 163 | returns: int: the correct listening port for the API service |
67 | 161 | ''' | 164 | ''' |
68 | 162 | i = 0 | 165 | i = 0 |
70 | 163 | if len(peer_units()) > 0 or is_clustered(): | 166 | if singlenode_mode: |
71 | 167 | i += 1 | ||
72 | 168 | elif len(peer_units()) > 0 or is_clustered(): | ||
73 | 164 | i += 1 | 169 | i += 1 |
74 | 165 | if https(): | 170 | if https(): |
75 | 166 | i += 1 | 171 | i += 1 |
76 | 167 | return public_port - (i * 10) | 172 | return public_port - (i * 10) |
77 | 168 | 173 | ||
78 | 169 | 174 | ||
80 | 170 | def determine_apache_port(public_port): | 175 | def determine_apache_port(public_port, singlenode_mode=False): |
81 | 171 | ''' | 176 | ''' |
82 | 172 | Description: Determine correct apache listening port based on public IP + | 177 | Description: Determine correct apache listening port based on public IP + |
83 | 173 | state of the cluster. | 178 | state of the cluster. |
84 | 174 | 179 | ||
85 | 175 | public_port: int: standard public port for given service | 180 | public_port: int: standard public port for given service |
86 | 176 | 181 | ||
87 | 182 | singlenode_mode: boolean: Shuffle ports when only a single unit is present | ||
88 | 183 | |||
89 | 177 | returns: int: the correct listening port for the HAProxy service | 184 | returns: int: the correct listening port for the HAProxy service |
90 | 178 | ''' | 185 | ''' |
91 | 179 | i = 0 | 186 | i = 0 |
93 | 180 | if len(peer_units()) > 0 or is_clustered(): | 187 | if singlenode_mode: |
94 | 188 | i += 1 | ||
95 | 189 | elif len(peer_units()) > 0 or is_clustered(): | ||
96 | 181 | i += 1 | 190 | i += 1 |
97 | 182 | return public_port - (i * 10) | 191 | return public_port - (i * 10) |
98 | 183 | 192 | ||
99 | @@ -197,7 +206,7 @@ | |||
100 | 197 | for setting in settings: | 206 | for setting in settings: |
101 | 198 | conf[setting] = config_get(setting) | 207 | conf[setting] = config_get(setting) |
102 | 199 | missing = [] | 208 | missing = [] |
104 | 200 | [missing.append(s) for s, v in conf.iteritems() if v is None] | 209 | [missing.append(s) for s, v in six.iteritems(conf) if v is None] |
105 | 201 | if missing: | 210 | if missing: |
106 | 202 | log('Insufficient config data to configure hacluster.', level=ERROR) | 211 | log('Insufficient config data to configure hacluster.', level=ERROR) |
107 | 203 | raise HAIncompleteConfig | 212 | raise HAIncompleteConfig |
108 | 204 | 213 | ||
109 | === modified file 'hooks/charmhelpers/contrib/network/ip.py' | |||
110 | --- hooks/charmhelpers/contrib/network/ip.py 2014-11-21 19:25:26 +0000 | |||
111 | +++ hooks/charmhelpers/contrib/network/ip.py 2014-12-17 21:09:26 +0000 | |||
112 | @@ -228,7 +228,7 @@ | |||
113 | 228 | raise Exception("Interface '%s' doesn't have any %s addresses." % | 228 | raise Exception("Interface '%s' doesn't have any %s addresses." % |
114 | 229 | (iface, inet_type)) | 229 | (iface, inet_type)) |
115 | 230 | 230 | ||
117 | 231 | return addresses | 231 | return sorted(addresses) |
118 | 232 | 232 | ||
119 | 233 | 233 | ||
120 | 234 | get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET') | 234 | get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET') |
121 | @@ -302,7 +302,7 @@ | |||
122 | 302 | if global_addrs: | 302 | if global_addrs: |
123 | 303 | # Make sure any found global addresses are not temporary | 303 | # Make sure any found global addresses are not temporary |
124 | 304 | cmd = ['ip', 'addr', 'show', iface] | 304 | cmd = ['ip', 'addr', 'show', iface] |
126 | 305 | out = subprocess.check_output(cmd) | 305 | out = subprocess.check_output(cmd).decode('UTF-8') |
127 | 306 | if dynamic_only: | 306 | if dynamic_only: |
128 | 307 | key = re.compile("inet6 (.+)/[0-9]+ scope global dynamic.*") | 307 | key = re.compile("inet6 (.+)/[0-9]+ scope global dynamic.*") |
129 | 308 | else: | 308 | else: |
130 | 309 | 309 | ||
131 | === modified file 'hooks/charmhelpers/contrib/peerstorage/__init__.py' | |||
132 | --- hooks/charmhelpers/contrib/peerstorage/__init__.py 2014-10-22 10:34:07 +0000 | |||
133 | +++ hooks/charmhelpers/contrib/peerstorage/__init__.py 2014-12-17 21:09:26 +0000 | |||
134 | @@ -1,3 +1,4 @@ | |||
135 | 1 | import six | ||
136 | 1 | from charmhelpers.core.hookenv import relation_id as current_relation_id | 2 | from charmhelpers.core.hookenv import relation_id as current_relation_id |
137 | 2 | from charmhelpers.core.hookenv import ( | 3 | from charmhelpers.core.hookenv import ( |
138 | 3 | is_relation_made, | 4 | is_relation_made, |
139 | @@ -93,7 +94,7 @@ | |||
140 | 93 | if ex in echo_data: | 94 | if ex in echo_data: |
141 | 94 | echo_data.pop(ex) | 95 | echo_data.pop(ex) |
142 | 95 | else: | 96 | else: |
144 | 96 | for attribute, value in rdata.iteritems(): | 97 | for attribute, value in six.iteritems(rdata): |
145 | 97 | for include in includes: | 98 | for include in includes: |
146 | 98 | if include in attribute: | 99 | if include in attribute: |
147 | 99 | echo_data[attribute] = value | 100 | echo_data[attribute] = value |
148 | @@ -119,8 +120,8 @@ | |||
149 | 119 | relation_settings=relation_settings, | 120 | relation_settings=relation_settings, |
150 | 120 | **kwargs) | 121 | **kwargs) |
151 | 121 | if is_relation_made(peer_relation_name): | 122 | if is_relation_made(peer_relation_name): |
154 | 122 | for key, value in dict(kwargs.items() + | 123 | for key, value in six.iteritems(dict(list(kwargs.items()) + |
155 | 123 | relation_settings.items()).iteritems(): | 124 | list(relation_settings.items()))): |
156 | 124 | key_prefix = relation_id or current_relation_id() | 125 | key_prefix = relation_id or current_relation_id() |
157 | 125 | peer_store(key_prefix + delimiter + key, | 126 | peer_store(key_prefix + delimiter + key, |
158 | 126 | value, | 127 | value, |
159 | 127 | 128 | ||
160 | === modified file 'hooks/charmhelpers/core/fstab.py' | |||
161 | --- hooks/charmhelpers/core/fstab.py 2014-06-23 10:01:12 +0000 | |||
162 | +++ hooks/charmhelpers/core/fstab.py 2014-12-17 21:09:26 +0000 | |||
163 | @@ -3,10 +3,11 @@ | |||
164 | 3 | 3 | ||
165 | 4 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | 4 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
166 | 5 | 5 | ||
167 | 6 | import io | ||
168 | 6 | import os | 7 | import os |
169 | 7 | 8 | ||
170 | 8 | 9 | ||
172 | 9 | class Fstab(file): | 10 | class Fstab(io.FileIO): |
173 | 10 | """This class extends file in order to implement a file reader/writer | 11 | """This class extends file in order to implement a file reader/writer |
174 | 11 | for file `/etc/fstab` | 12 | for file `/etc/fstab` |
175 | 12 | """ | 13 | """ |
176 | @@ -24,8 +25,8 @@ | |||
177 | 24 | options = "defaults" | 25 | options = "defaults" |
178 | 25 | 26 | ||
179 | 26 | self.options = options | 27 | self.options = options |
182 | 27 | self.d = d | 28 | self.d = int(d) |
183 | 28 | self.p = p | 29 | self.p = int(p) |
184 | 29 | 30 | ||
185 | 30 | def __eq__(self, o): | 31 | def __eq__(self, o): |
186 | 31 | return str(self) == str(o) | 32 | return str(self) == str(o) |
187 | @@ -45,7 +46,7 @@ | |||
188 | 45 | self._path = path | 46 | self._path = path |
189 | 46 | else: | 47 | else: |
190 | 47 | self._path = self.DEFAULT_PATH | 48 | self._path = self.DEFAULT_PATH |
192 | 48 | file.__init__(self, self._path, 'r+') | 49 | super(Fstab, self).__init__(self._path, 'rb+') |
193 | 49 | 50 | ||
194 | 50 | def _hydrate_entry(self, line): | 51 | def _hydrate_entry(self, line): |
195 | 51 | # NOTE: use split with no arguments to split on any | 52 | # NOTE: use split with no arguments to split on any |
196 | @@ -58,8 +59,9 @@ | |||
197 | 58 | def entries(self): | 59 | def entries(self): |
198 | 59 | self.seek(0) | 60 | self.seek(0) |
199 | 60 | for line in self.readlines(): | 61 | for line in self.readlines(): |
200 | 62 | line = line.decode('us-ascii') | ||
201 | 61 | try: | 63 | try: |
203 | 62 | if not line.startswith("#"): | 64 | if line.strip() and not line.startswith("#"): |
204 | 63 | yield self._hydrate_entry(line) | 65 | yield self._hydrate_entry(line) |
205 | 64 | except ValueError: | 66 | except ValueError: |
206 | 65 | pass | 67 | pass |
207 | @@ -75,14 +77,14 @@ | |||
208 | 75 | if self.get_entry_by_attr('device', entry.device): | 77 | if self.get_entry_by_attr('device', entry.device): |
209 | 76 | return False | 78 | return False |
210 | 77 | 79 | ||
212 | 78 | self.write(str(entry) + '\n') | 80 | self.write((str(entry) + '\n').encode('us-ascii')) |
213 | 79 | self.truncate() | 81 | self.truncate() |
214 | 80 | return entry | 82 | return entry |
215 | 81 | 83 | ||
216 | 82 | def remove_entry(self, entry): | 84 | def remove_entry(self, entry): |
217 | 83 | self.seek(0) | 85 | self.seek(0) |
218 | 84 | 86 | ||
220 | 85 | lines = self.readlines() | 87 | lines = [l.decode('us-ascii') for l in self.readlines()] |
221 | 86 | 88 | ||
222 | 87 | found = False | 89 | found = False |
223 | 88 | for index, line in enumerate(lines): | 90 | for index, line in enumerate(lines): |
224 | @@ -97,7 +99,7 @@ | |||
225 | 97 | lines.remove(line) | 99 | lines.remove(line) |
226 | 98 | 100 | ||
227 | 99 | self.seek(0) | 101 | self.seek(0) |
229 | 100 | self.write(''.join(lines)) | 102 | self.write(''.join(lines).encode('us-ascii')) |
230 | 101 | self.truncate() | 103 | self.truncate() |
231 | 102 | return True | 104 | return True |
232 | 103 | 105 | ||
233 | 104 | 106 | ||
234 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
235 | --- hooks/charmhelpers/core/hookenv.py 2014-11-21 19:25:26 +0000 | |||
236 | +++ hooks/charmhelpers/core/hookenv.py 2014-12-17 21:09:26 +0000 | |||
237 | @@ -9,9 +9,14 @@ | |||
238 | 9 | import yaml | 9 | import yaml |
239 | 10 | import subprocess | 10 | import subprocess |
240 | 11 | import sys | 11 | import sys |
241 | 12 | import UserDict | ||
242 | 13 | from subprocess import CalledProcessError | 12 | from subprocess import CalledProcessError |
243 | 14 | 13 | ||
244 | 14 | import six | ||
245 | 15 | if not six.PY3: | ||
246 | 16 | from UserDict import UserDict | ||
247 | 17 | else: | ||
248 | 18 | from collections import UserDict | ||
249 | 19 | |||
250 | 15 | CRITICAL = "CRITICAL" | 20 | CRITICAL = "CRITICAL" |
251 | 16 | ERROR = "ERROR" | 21 | ERROR = "ERROR" |
252 | 17 | WARNING = "WARNING" | 22 | WARNING = "WARNING" |
253 | @@ -63,16 +68,18 @@ | |||
254 | 63 | command = ['juju-log'] | 68 | command = ['juju-log'] |
255 | 64 | if level: | 69 | if level: |
256 | 65 | command += ['-l', level] | 70 | command += ['-l', level] |
257 | 71 | if not isinstance(message, six.string_types): | ||
258 | 72 | message = repr(message) | ||
259 | 66 | command += [message] | 73 | command += [message] |
260 | 67 | subprocess.call(command) | 74 | subprocess.call(command) |
261 | 68 | 75 | ||
262 | 69 | 76 | ||
264 | 70 | class Serializable(UserDict.IterableUserDict): | 77 | class Serializable(UserDict): |
265 | 71 | """Wrapper, an object that can be serialized to yaml or json""" | 78 | """Wrapper, an object that can be serialized to yaml or json""" |
266 | 72 | 79 | ||
267 | 73 | def __init__(self, obj): | 80 | def __init__(self, obj): |
268 | 74 | # wrap the object | 81 | # wrap the object |
270 | 75 | UserDict.IterableUserDict.__init__(self) | 82 | UserDict.__init__(self) |
271 | 76 | self.data = obj | 83 | self.data = obj |
272 | 77 | 84 | ||
273 | 78 | def __getattr__(self, attr): | 85 | def __getattr__(self, attr): |
274 | @@ -218,7 +225,7 @@ | |||
275 | 218 | prev_keys = [] | 225 | prev_keys = [] |
276 | 219 | if self._prev_dict is not None: | 226 | if self._prev_dict is not None: |
277 | 220 | prev_keys = self._prev_dict.keys() | 227 | prev_keys = self._prev_dict.keys() |
279 | 221 | return list(set(prev_keys + dict.keys(self))) | 228 | return list(set(prev_keys + list(dict.keys(self)))) |
280 | 222 | 229 | ||
281 | 223 | def load_previous(self, path=None): | 230 | def load_previous(self, path=None): |
282 | 224 | """Load previous copy of config from disk. | 231 | """Load previous copy of config from disk. |
283 | @@ -269,7 +276,7 @@ | |||
284 | 269 | 276 | ||
285 | 270 | """ | 277 | """ |
286 | 271 | if self._prev_dict: | 278 | if self._prev_dict: |
288 | 272 | for k, v in self._prev_dict.iteritems(): | 279 | for k, v in six.iteritems(self._prev_dict): |
289 | 273 | if k not in self: | 280 | if k not in self: |
290 | 274 | self[k] = v | 281 | self[k] = v |
291 | 275 | with open(self.path, 'w') as f: | 282 | with open(self.path, 'w') as f: |
292 | @@ -284,7 +291,8 @@ | |||
293 | 284 | config_cmd_line.append(scope) | 291 | config_cmd_line.append(scope) |
294 | 285 | config_cmd_line.append('--format=json') | 292 | config_cmd_line.append('--format=json') |
295 | 286 | try: | 293 | try: |
297 | 287 | config_data = json.loads(subprocess.check_output(config_cmd_line)) | 294 | config_data = json.loads( |
298 | 295 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
299 | 288 | if scope is not None: | 296 | if scope is not None: |
300 | 289 | return config_data | 297 | return config_data |
301 | 290 | return Config(config_data) | 298 | return Config(config_data) |
302 | @@ -303,10 +311,10 @@ | |||
303 | 303 | if unit: | 311 | if unit: |
304 | 304 | _args.append(unit) | 312 | _args.append(unit) |
305 | 305 | try: | 313 | try: |
307 | 306 | return json.loads(subprocess.check_output(_args)) | 314 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
308 | 307 | except ValueError: | 315 | except ValueError: |
309 | 308 | return None | 316 | return None |
311 | 309 | except CalledProcessError, e: | 317 | except CalledProcessError as e: |
312 | 310 | if e.returncode == 2: | 318 | if e.returncode == 2: |
313 | 311 | return None | 319 | return None |
314 | 312 | raise | 320 | raise |
315 | @@ -318,7 +326,7 @@ | |||
316 | 318 | relation_cmd_line = ['relation-set'] | 326 | relation_cmd_line = ['relation-set'] |
317 | 319 | if relation_id is not None: | 327 | if relation_id is not None: |
318 | 320 | relation_cmd_line.extend(('-r', relation_id)) | 328 | relation_cmd_line.extend(('-r', relation_id)) |
320 | 321 | for k, v in (relation_settings.items() + kwargs.items()): | 329 | for k, v in (list(relation_settings.items()) + list(kwargs.items())): |
321 | 322 | if v is None: | 330 | if v is None: |
322 | 323 | relation_cmd_line.append('{}='.format(k)) | 331 | relation_cmd_line.append('{}='.format(k)) |
323 | 324 | else: | 332 | else: |
324 | @@ -335,7 +343,8 @@ | |||
325 | 335 | relid_cmd_line = ['relation-ids', '--format=json'] | 343 | relid_cmd_line = ['relation-ids', '--format=json'] |
326 | 336 | if reltype is not None: | 344 | if reltype is not None: |
327 | 337 | relid_cmd_line.append(reltype) | 345 | relid_cmd_line.append(reltype) |
329 | 338 | return json.loads(subprocess.check_output(relid_cmd_line)) or [] | 346 | return json.loads( |
330 | 347 | subprocess.check_output(relid_cmd_line).decode('UTF-8')) or [] | ||
331 | 339 | return [] | 348 | return [] |
332 | 340 | 349 | ||
333 | 341 | 350 | ||
334 | @@ -346,7 +355,8 @@ | |||
335 | 346 | units_cmd_line = ['relation-list', '--format=json'] | 355 | units_cmd_line = ['relation-list', '--format=json'] |
336 | 347 | if relid is not None: | 356 | if relid is not None: |
337 | 348 | units_cmd_line.extend(('-r', relid)) | 357 | units_cmd_line.extend(('-r', relid)) |
339 | 349 | return json.loads(subprocess.check_output(units_cmd_line)) or [] | 358 | return json.loads( |
340 | 359 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | ||
341 | 350 | 360 | ||
342 | 351 | 361 | ||
343 | 352 | @cached | 362 | @cached |
344 | @@ -386,21 +396,31 @@ | |||
345 | 386 | 396 | ||
346 | 387 | 397 | ||
347 | 388 | @cached | 398 | @cached |
348 | 399 | def metadata(): | ||
349 | 400 | """Get the current charm metadata.yaml contents as a python object""" | ||
350 | 401 | with open(os.path.join(charm_dir(), 'metadata.yaml')) as md: | ||
351 | 402 | return yaml.safe_load(md) | ||
352 | 403 | |||
353 | 404 | |||
354 | 405 | @cached | ||
355 | 389 | def relation_types(): | 406 | def relation_types(): |
356 | 390 | """Get a list of relation types supported by this charm""" | 407 | """Get a list of relation types supported by this charm""" |
357 | 391 | charmdir = os.environ.get('CHARM_DIR', '') | ||
358 | 392 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | ||
359 | 393 | md = yaml.safe_load(mdf) | ||
360 | 394 | rel_types = [] | 408 | rel_types = [] |
361 | 409 | md = metadata() | ||
362 | 395 | for key in ('provides', 'requires', 'peers'): | 410 | for key in ('provides', 'requires', 'peers'): |
363 | 396 | section = md.get(key) | 411 | section = md.get(key) |
364 | 397 | if section: | 412 | if section: |
365 | 398 | rel_types.extend(section.keys()) | 413 | rel_types.extend(section.keys()) |
366 | 399 | mdf.close() | ||
367 | 400 | return rel_types | 414 | return rel_types |
368 | 401 | 415 | ||
369 | 402 | 416 | ||
370 | 403 | @cached | 417 | @cached |
371 | 418 | def charm_name(): | ||
372 | 419 | """Get the name of the current charm as is specified on metadata.yaml""" | ||
373 | 420 | return metadata().get('name') | ||
374 | 421 | |||
375 | 422 | |||
376 | 423 | @cached | ||
377 | 404 | def relations(): | 424 | def relations(): |
378 | 405 | """Get a nested dictionary of relation data for all related units""" | 425 | """Get a nested dictionary of relation data for all related units""" |
379 | 406 | rels = {} | 426 | rels = {} |
380 | @@ -455,7 +475,7 @@ | |||
381 | 455 | """Get the unit ID for the remote unit""" | 475 | """Get the unit ID for the remote unit""" |
382 | 456 | _args = ['unit-get', '--format=json', attribute] | 476 | _args = ['unit-get', '--format=json', attribute] |
383 | 457 | try: | 477 | try: |
385 | 458 | return json.loads(subprocess.check_output(_args)) | 478 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
386 | 459 | except ValueError: | 479 | except ValueError: |
387 | 460 | return None | 480 | return None |
388 | 461 | 481 | ||
389 | 462 | 482 | ||
390 | === modified file 'hooks/charmhelpers/core/host.py' | |||
391 | --- hooks/charmhelpers/core/host.py 2014-11-21 19:25:26 +0000 | |||
392 | +++ hooks/charmhelpers/core/host.py 2014-12-17 21:09:26 +0000 | |||
393 | @@ -14,11 +14,12 @@ | |||
394 | 14 | import subprocess | 14 | import subprocess |
395 | 15 | import hashlib | 15 | import hashlib |
396 | 16 | from contextlib import contextmanager | 16 | from contextlib import contextmanager |
397 | 17 | |||
398 | 18 | from collections import OrderedDict | 17 | from collections import OrderedDict |
399 | 19 | 18 | ||
402 | 20 | from hookenv import log | 19 | import six |
403 | 21 | from fstab import Fstab | 20 | |
404 | 21 | from .hookenv import log | ||
405 | 22 | from .fstab import Fstab | ||
406 | 22 | 23 | ||
407 | 23 | 24 | ||
408 | 24 | def service_start(service_name): | 25 | def service_start(service_name): |
409 | @@ -54,7 +55,9 @@ | |||
410 | 54 | def service_running(service): | 55 | def service_running(service): |
411 | 55 | """Determine whether a system service is running""" | 56 | """Determine whether a system service is running""" |
412 | 56 | try: | 57 | try: |
414 | 57 | output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT) | 58 | output = subprocess.check_output( |
415 | 59 | ['service', service, 'status'], | ||
416 | 60 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
417 | 58 | except subprocess.CalledProcessError: | 61 | except subprocess.CalledProcessError: |
418 | 59 | return False | 62 | return False |
419 | 60 | else: | 63 | else: |
420 | @@ -67,7 +70,9 @@ | |||
421 | 67 | def service_available(service_name): | 70 | def service_available(service_name): |
422 | 68 | """Determine whether a system service is available""" | 71 | """Determine whether a system service is available""" |
423 | 69 | try: | 72 | try: |
425 | 70 | subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT) | 73 | subprocess.check_output( |
426 | 74 | ['service', service_name, 'status'], | ||
427 | 75 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
428 | 71 | except subprocess.CalledProcessError as e: | 76 | except subprocess.CalledProcessError as e: |
429 | 72 | return 'unrecognized service' not in e.output | 77 | return 'unrecognized service' not in e.output |
430 | 73 | else: | 78 | else: |
431 | @@ -96,6 +101,26 @@ | |||
432 | 96 | return user_info | 101 | return user_info |
433 | 97 | 102 | ||
434 | 98 | 103 | ||
435 | 104 | def add_group(group_name, system_group=False): | ||
436 | 105 | """Add a group to the system""" | ||
437 | 106 | try: | ||
438 | 107 | group_info = grp.getgrnam(group_name) | ||
439 | 108 | log('group {0} already exists!'.format(group_name)) | ||
440 | 109 | except KeyError: | ||
441 | 110 | log('creating group {0}'.format(group_name)) | ||
442 | 111 | cmd = ['addgroup'] | ||
443 | 112 | if system_group: | ||
444 | 113 | cmd.append('--system') | ||
445 | 114 | else: | ||
446 | 115 | cmd.extend([ | ||
447 | 116 | '--group', | ||
448 | 117 | ]) | ||
449 | 118 | cmd.append(group_name) | ||
450 | 119 | subprocess.check_call(cmd) | ||
451 | 120 | group_info = grp.getgrnam(group_name) | ||
452 | 121 | return group_info | ||
453 | 122 | |||
454 | 123 | |||
455 | 99 | def add_user_to_group(username, group): | 124 | def add_user_to_group(username, group): |
456 | 100 | """Add a user to a group""" | 125 | """Add a user to a group""" |
457 | 101 | cmd = [ | 126 | cmd = [ |
458 | @@ -115,7 +140,7 @@ | |||
459 | 115 | cmd.append(from_path) | 140 | cmd.append(from_path) |
460 | 116 | cmd.append(to_path) | 141 | cmd.append(to_path) |
461 | 117 | log(" ".join(cmd)) | 142 | log(" ".join(cmd)) |
463 | 118 | return subprocess.check_output(cmd).strip() | 143 | return subprocess.check_output(cmd).decode('UTF-8').strip() |
464 | 119 | 144 | ||
465 | 120 | 145 | ||
466 | 121 | def symlink(source, destination): | 146 | def symlink(source, destination): |
467 | @@ -130,23 +155,26 @@ | |||
468 | 130 | subprocess.check_call(cmd) | 155 | subprocess.check_call(cmd) |
469 | 131 | 156 | ||
470 | 132 | 157 | ||
472 | 133 | def mkdir(path, owner='root', group='root', perms=0555, force=False): | 158 | def mkdir(path, owner='root', group='root', perms=0o555, force=False): |
473 | 134 | """Create a directory""" | 159 | """Create a directory""" |
474 | 135 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | 160 | log("Making dir {} {}:{} {:o}".format(path, owner, group, |
475 | 136 | perms)) | 161 | perms)) |
476 | 137 | uid = pwd.getpwnam(owner).pw_uid | 162 | uid = pwd.getpwnam(owner).pw_uid |
477 | 138 | gid = grp.getgrnam(group).gr_gid | 163 | gid = grp.getgrnam(group).gr_gid |
478 | 139 | realpath = os.path.abspath(path) | 164 | realpath = os.path.abspath(path) |
481 | 140 | if os.path.exists(realpath): | 165 | path_exists = os.path.exists(realpath) |
482 | 141 | if force and not os.path.isdir(realpath): | 166 | if path_exists and force: |
483 | 167 | if not os.path.isdir(realpath): | ||
484 | 142 | log("Removing non-directory file {} prior to mkdir()".format(path)) | 168 | log("Removing non-directory file {} prior to mkdir()".format(path)) |
485 | 143 | os.unlink(realpath) | 169 | os.unlink(realpath) |
487 | 144 | else: | 170 | os.makedirs(realpath, perms) |
488 | 171 | os.chown(realpath, uid, gid) | ||
489 | 172 | elif not path_exists: | ||
490 | 145 | os.makedirs(realpath, perms) | 173 | os.makedirs(realpath, perms) |
495 | 146 | os.chown(realpath, uid, gid) | 174 | os.chown(realpath, uid, gid) |
496 | 147 | 175 | ||
497 | 148 | 176 | ||
498 | 149 | def write_file(path, content, owner='root', group='root', perms=0444): | 177 | def write_file(path, content, owner='root', group='root', perms=0o444): |
499 | 150 | """Create or overwrite a file with the contents of a string""" | 178 | """Create or overwrite a file with the contents of a string""" |
500 | 151 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | 179 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
501 | 152 | uid = pwd.getpwnam(owner).pw_uid | 180 | uid = pwd.getpwnam(owner).pw_uid |
502 | @@ -177,7 +205,7 @@ | |||
503 | 177 | cmd_args.extend([device, mountpoint]) | 205 | cmd_args.extend([device, mountpoint]) |
504 | 178 | try: | 206 | try: |
505 | 179 | subprocess.check_output(cmd_args) | 207 | subprocess.check_output(cmd_args) |
507 | 180 | except subprocess.CalledProcessError, e: | 208 | except subprocess.CalledProcessError as e: |
508 | 181 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | 209 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
509 | 182 | return False | 210 | return False |
510 | 183 | 211 | ||
511 | @@ -191,7 +219,7 @@ | |||
512 | 191 | cmd_args = ['umount', mountpoint] | 219 | cmd_args = ['umount', mountpoint] |
513 | 192 | try: | 220 | try: |
514 | 193 | subprocess.check_output(cmd_args) | 221 | subprocess.check_output(cmd_args) |
516 | 194 | except subprocess.CalledProcessError, e: | 222 | except subprocess.CalledProcessError as e: |
517 | 195 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | 223 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
518 | 196 | return False | 224 | return False |
519 | 197 | 225 | ||
520 | @@ -218,8 +246,8 @@ | |||
521 | 218 | """ | 246 | """ |
522 | 219 | if os.path.exists(path): | 247 | if os.path.exists(path): |
523 | 220 | h = getattr(hashlib, hash_type)() | 248 | h = getattr(hashlib, hash_type)() |
526 | 221 | with open(path, 'r') as source: | 249 | with open(path, 'rb') as source: |
527 | 222 | h.update(source.read()) # IGNORE:E1101 - it does have update | 250 | h.update(source.read()) |
528 | 223 | return h.hexdigest() | 251 | return h.hexdigest() |
529 | 224 | else: | 252 | else: |
530 | 225 | return None | 253 | return None |
531 | @@ -297,7 +325,7 @@ | |||
532 | 297 | if length is None: | 325 | if length is None: |
533 | 298 | length = random.choice(range(35, 45)) | 326 | length = random.choice(range(35, 45)) |
534 | 299 | alphanumeric_chars = [ | 327 | alphanumeric_chars = [ |
536 | 300 | l for l in (string.letters + string.digits) | 328 | l for l in (string.ascii_letters + string.digits) |
537 | 301 | if l not in 'l0QD1vAEIOUaeiou'] | 329 | if l not in 'l0QD1vAEIOUaeiou'] |
538 | 302 | random_chars = [ | 330 | random_chars = [ |
539 | 303 | random.choice(alphanumeric_chars) for _ in range(length)] | 331 | random.choice(alphanumeric_chars) for _ in range(length)] |
540 | @@ -306,14 +334,14 @@ | |||
541 | 306 | 334 | ||
542 | 307 | def list_nics(nic_type): | 335 | def list_nics(nic_type): |
543 | 308 | '''Return a list of nics of given type(s)''' | 336 | '''Return a list of nics of given type(s)''' |
545 | 309 | if isinstance(nic_type, basestring): | 337 | if isinstance(nic_type, six.string_types): |
546 | 310 | int_types = [nic_type] | 338 | int_types = [nic_type] |
547 | 311 | else: | 339 | else: |
548 | 312 | int_types = nic_type | 340 | int_types = nic_type |
549 | 313 | interfaces = [] | 341 | interfaces = [] |
550 | 314 | for int_type in int_types: | 342 | for int_type in int_types: |
551 | 315 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | 343 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] |
553 | 316 | ip_output = subprocess.check_output(cmd).split('\n') | 344 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
554 | 317 | ip_output = (line for line in ip_output if line) | 345 | ip_output = (line for line in ip_output if line) |
555 | 318 | for line in ip_output: | 346 | for line in ip_output: |
556 | 319 | if line.split()[1].startswith(int_type): | 347 | if line.split()[1].startswith(int_type): |
557 | @@ -335,7 +363,7 @@ | |||
558 | 335 | 363 | ||
559 | 336 | def get_nic_mtu(nic): | 364 | def get_nic_mtu(nic): |
560 | 337 | cmd = ['ip', 'addr', 'show', nic] | 365 | cmd = ['ip', 'addr', 'show', nic] |
562 | 338 | ip_output = subprocess.check_output(cmd).split('\n') | 366 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
563 | 339 | mtu = "" | 367 | mtu = "" |
564 | 340 | for line in ip_output: | 368 | for line in ip_output: |
565 | 341 | words = line.split() | 369 | words = line.split() |
566 | @@ -346,7 +374,7 @@ | |||
567 | 346 | 374 | ||
568 | 347 | def get_nic_hwaddr(nic): | 375 | def get_nic_hwaddr(nic): |
569 | 348 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | 376 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
571 | 349 | ip_output = subprocess.check_output(cmd) | 377 | ip_output = subprocess.check_output(cmd).decode('UTF-8') |
572 | 350 | hwaddr = "" | 378 | hwaddr = "" |
573 | 351 | words = ip_output.split() | 379 | words = ip_output.split() |
574 | 352 | if 'link/ether' in words: | 380 | if 'link/ether' in words: |
575 | @@ -363,8 +391,8 @@ | |||
576 | 363 | 391 | ||
577 | 364 | ''' | 392 | ''' |
578 | 365 | import apt_pkg | 393 | import apt_pkg |
579 | 366 | from charmhelpers.fetch import apt_cache | ||
580 | 367 | if not pkgcache: | 394 | if not pkgcache: |
581 | 395 | from charmhelpers.fetch import apt_cache | ||
582 | 368 | pkgcache = apt_cache() | 396 | pkgcache = apt_cache() |
583 | 369 | pkg = pkgcache[package] | 397 | pkg = pkgcache[package] |
584 | 370 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | 398 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
585 | 371 | 399 | ||
586 | === modified file 'hooks/charmhelpers/core/services/helpers.py' | |||
587 | --- hooks/charmhelpers/core/services/helpers.py 2014-10-22 10:34:07 +0000 | |||
588 | +++ hooks/charmhelpers/core/services/helpers.py 2014-12-17 21:09:26 +0000 | |||
589 | @@ -196,7 +196,7 @@ | |||
590 | 196 | if not os.path.isabs(file_name): | 196 | if not os.path.isabs(file_name): |
591 | 197 | file_name = os.path.join(hookenv.charm_dir(), file_name) | 197 | file_name = os.path.join(hookenv.charm_dir(), file_name) |
592 | 198 | with open(file_name, 'w') as file_stream: | 198 | with open(file_name, 'w') as file_stream: |
594 | 199 | os.fchmod(file_stream.fileno(), 0600) | 199 | os.fchmod(file_stream.fileno(), 0o600) |
595 | 200 | yaml.dump(config_data, file_stream) | 200 | yaml.dump(config_data, file_stream) |
596 | 201 | 201 | ||
597 | 202 | def read_context(self, file_name): | 202 | def read_context(self, file_name): |
598 | @@ -211,15 +211,19 @@ | |||
599 | 211 | 211 | ||
600 | 212 | class TemplateCallback(ManagerCallback): | 212 | class TemplateCallback(ManagerCallback): |
601 | 213 | """ | 213 | """ |
605 | 214 | Callback class that will render a Jinja2 template, for use as a ready action. | 214 | Callback class that will render a Jinja2 template, for use as a ready |
606 | 215 | 215 | action. | |
607 | 216 | :param str source: The template source file, relative to `$CHARM_DIR/templates` | 216 | |
608 | 217 | :param str source: The template source file, relative to | ||
609 | 218 | `$CHARM_DIR/templates` | ||
610 | 219 | |||
611 | 217 | :param str target: The target to write the rendered template to | 220 | :param str target: The target to write the rendered template to |
612 | 218 | :param str owner: The owner of the rendered file | 221 | :param str owner: The owner of the rendered file |
613 | 219 | :param str group: The group of the rendered file | 222 | :param str group: The group of the rendered file |
614 | 220 | :param int perms: The permissions of the rendered file | 223 | :param int perms: The permissions of the rendered file |
615 | 221 | """ | 224 | """ |
617 | 222 | def __init__(self, source, target, owner='root', group='root', perms=0444): | 225 | def __init__(self, source, target, |
618 | 226 | owner='root', group='root', perms=0o444): | ||
619 | 223 | self.source = source | 227 | self.source = source |
620 | 224 | self.target = target | 228 | self.target = target |
621 | 225 | self.owner = owner | 229 | self.owner = owner |
622 | 226 | 230 | ||
623 | === modified file 'hooks/charmhelpers/core/templating.py' | |||
624 | --- hooks/charmhelpers/core/templating.py 2014-08-18 12:39:26 +0000 | |||
625 | +++ hooks/charmhelpers/core/templating.py 2014-12-17 21:09:26 +0000 | |||
626 | @@ -4,7 +4,8 @@ | |||
627 | 4 | from charmhelpers.core import hookenv | 4 | from charmhelpers.core import hookenv |
628 | 5 | 5 | ||
629 | 6 | 6 | ||
631 | 7 | def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None): | 7 | def render(source, target, context, owner='root', group='root', |
632 | 8 | perms=0o444, templates_dir=None): | ||
633 | 8 | """ | 9 | """ |
634 | 9 | Render a template. | 10 | Render a template. |
635 | 10 | 11 | ||
636 | @@ -47,5 +48,5 @@ | |||
637 | 47 | level=hookenv.ERROR) | 48 | level=hookenv.ERROR) |
638 | 48 | raise e | 49 | raise e |
639 | 49 | content = template.render(context) | 50 | content = template.render(context) |
641 | 50 | host.mkdir(os.path.dirname(target)) | 51 | host.mkdir(os.path.dirname(target), owner, group) |
642 | 51 | host.write_file(target, content, owner, group, perms) | 52 | host.write_file(target, content, owner, group, perms) |
643 | 52 | 53 | ||
644 | === modified file 'hooks/charmhelpers/fetch/__init__.py' | |||
645 | --- hooks/charmhelpers/fetch/__init__.py 2014-11-21 19:25:26 +0000 | |||
646 | +++ hooks/charmhelpers/fetch/__init__.py 2014-12-17 21:09:26 +0000 | |||
647 | @@ -5,10 +5,6 @@ | |||
648 | 5 | from charmhelpers.core.host import ( | 5 | from charmhelpers.core.host import ( |
649 | 6 | lsb_release | 6 | lsb_release |
650 | 7 | ) | 7 | ) |
651 | 8 | from urlparse import ( | ||
652 | 9 | urlparse, | ||
653 | 10 | urlunparse, | ||
654 | 11 | ) | ||
655 | 12 | import subprocess | 8 | import subprocess |
656 | 13 | from charmhelpers.core.hookenv import ( | 9 | from charmhelpers.core.hookenv import ( |
657 | 14 | config, | 10 | config, |
658 | @@ -16,6 +12,12 @@ | |||
659 | 16 | ) | 12 | ) |
660 | 17 | import os | 13 | import os |
661 | 18 | 14 | ||
662 | 15 | import six | ||
663 | 16 | if six.PY3: | ||
664 | 17 | from urllib.parse import urlparse, urlunparse | ||
665 | 18 | else: | ||
666 | 19 | from urlparse import urlparse, urlunparse | ||
667 | 20 | |||
668 | 19 | 21 | ||
669 | 20 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | 22 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
670 | 21 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | 23 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
671 | @@ -149,7 +151,7 @@ | |||
672 | 149 | cmd = ['apt-get', '--assume-yes'] | 151 | cmd = ['apt-get', '--assume-yes'] |
673 | 150 | cmd.extend(options) | 152 | cmd.extend(options) |
674 | 151 | cmd.append('install') | 153 | cmd.append('install') |
676 | 152 | if isinstance(packages, basestring): | 154 | if isinstance(packages, six.string_types): |
677 | 153 | cmd.append(packages) | 155 | cmd.append(packages) |
678 | 154 | else: | 156 | else: |
679 | 155 | cmd.extend(packages) | 157 | cmd.extend(packages) |
680 | @@ -182,7 +184,7 @@ | |||
681 | 182 | def apt_purge(packages, fatal=False): | 184 | def apt_purge(packages, fatal=False): |
682 | 183 | """Purge one or more packages""" | 185 | """Purge one or more packages""" |
683 | 184 | cmd = ['apt-get', '--assume-yes', 'purge'] | 186 | cmd = ['apt-get', '--assume-yes', 'purge'] |
685 | 185 | if isinstance(packages, basestring): | 187 | if isinstance(packages, six.string_types): |
686 | 186 | cmd.append(packages) | 188 | cmd.append(packages) |
687 | 187 | else: | 189 | else: |
688 | 188 | cmd.extend(packages) | 190 | cmd.extend(packages) |
689 | @@ -193,7 +195,7 @@ | |||
690 | 193 | def apt_hold(packages, fatal=False): | 195 | def apt_hold(packages, fatal=False): |
691 | 194 | """Hold one or more packages""" | 196 | """Hold one or more packages""" |
692 | 195 | cmd = ['apt-mark', 'hold'] | 197 | cmd = ['apt-mark', 'hold'] |
694 | 196 | if isinstance(packages, basestring): | 198 | if isinstance(packages, six.string_types): |
695 | 197 | cmd.append(packages) | 199 | cmd.append(packages) |
696 | 198 | else: | 200 | else: |
697 | 199 | cmd.extend(packages) | 201 | cmd.extend(packages) |
698 | @@ -260,7 +262,7 @@ | |||
699 | 260 | 262 | ||
700 | 261 | if key: | 263 | if key: |
701 | 262 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: | 264 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: |
703 | 263 | with NamedTemporaryFile() as key_file: | 265 | with NamedTemporaryFile('w+') as key_file: |
704 | 264 | key_file.write(key) | 266 | key_file.write(key) |
705 | 265 | key_file.flush() | 267 | key_file.flush() |
706 | 266 | key_file.seek(0) | 268 | key_file.seek(0) |
707 | @@ -297,14 +299,14 @@ | |||
708 | 297 | sources = safe_load((config(sources_var) or '').strip()) or [] | 299 | sources = safe_load((config(sources_var) or '').strip()) or [] |
709 | 298 | keys = safe_load((config(keys_var) or '').strip()) or None | 300 | keys = safe_load((config(keys_var) or '').strip()) or None |
710 | 299 | 301 | ||
712 | 300 | if isinstance(sources, basestring): | 302 | if isinstance(sources, six.string_types): |
713 | 301 | sources = [sources] | 303 | sources = [sources] |
714 | 302 | 304 | ||
715 | 303 | if keys is None: | 305 | if keys is None: |
716 | 304 | for source in sources: | 306 | for source in sources: |
717 | 305 | add_source(source, None) | 307 | add_source(source, None) |
718 | 306 | else: | 308 | else: |
720 | 307 | if isinstance(keys, basestring): | 309 | if isinstance(keys, six.string_types): |
721 | 308 | keys = [keys] | 310 | keys = [keys] |
722 | 309 | 311 | ||
723 | 310 | if len(sources) != len(keys): | 312 | if len(sources) != len(keys): |
724 | @@ -401,7 +403,7 @@ | |||
725 | 401 | while result is None or result == APT_NO_LOCK: | 403 | while result is None or result == APT_NO_LOCK: |
726 | 402 | try: | 404 | try: |
727 | 403 | result = subprocess.check_call(cmd, env=env) | 405 | result = subprocess.check_call(cmd, env=env) |
729 | 404 | except subprocess.CalledProcessError, e: | 406 | except subprocess.CalledProcessError as e: |
730 | 405 | retry_count = retry_count + 1 | 407 | retry_count = retry_count + 1 |
731 | 406 | if retry_count > APT_NO_LOCK_RETRY_COUNT: | 408 | if retry_count > APT_NO_LOCK_RETRY_COUNT: |
732 | 407 | raise | 409 | raise |
733 | 408 | 410 | ||
734 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
735 | --- hooks/charmhelpers/fetch/archiveurl.py 2014-10-22 10:34:07 +0000 | |||
736 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-12-17 21:09:26 +0000 | |||
737 | @@ -1,8 +1,23 @@ | |||
738 | 1 | import os | 1 | import os |
739 | 2 | import urllib2 | ||
740 | 3 | from urllib import urlretrieve | ||
741 | 4 | import urlparse | ||
742 | 5 | import hashlib | 2 | import hashlib |
743 | 3 | import re | ||
744 | 4 | |||
745 | 5 | import six | ||
746 | 6 | if six.PY3: | ||
747 | 7 | from urllib.request import ( | ||
748 | 8 | build_opener, install_opener, urlopen, urlretrieve, | ||
749 | 9 | HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, | ||
750 | 10 | ) | ||
751 | 11 | from urllib.parse import urlparse, urlunparse, parse_qs | ||
752 | 12 | from urllib.error import URLError | ||
753 | 13 | else: | ||
754 | 14 | from urllib import urlretrieve | ||
755 | 15 | from urllib2 import ( | ||
756 | 16 | build_opener, install_opener, urlopen, | ||
757 | 17 | HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, | ||
758 | 18 | URLError | ||
759 | 19 | ) | ||
760 | 20 | from urlparse import urlparse, urlunparse, parse_qs | ||
761 | 6 | 21 | ||
762 | 7 | from charmhelpers.fetch import ( | 22 | from charmhelpers.fetch import ( |
763 | 8 | BaseFetchHandler, | 23 | BaseFetchHandler, |
764 | @@ -15,6 +30,24 @@ | |||
765 | 15 | from charmhelpers.core.host import mkdir, check_hash | 30 | from charmhelpers.core.host import mkdir, check_hash |
766 | 16 | 31 | ||
767 | 17 | 32 | ||
768 | 33 | def splituser(host): | ||
769 | 34 | '''urllib.splituser(), but six's support of this seems broken''' | ||
770 | 35 | _userprog = re.compile('^(.*)@(.*)$') | ||
771 | 36 | match = _userprog.match(host) | ||
772 | 37 | if match: | ||
773 | 38 | return match.group(1, 2) | ||
774 | 39 | return None, host | ||
775 | 40 | |||
776 | 41 | |||
777 | 42 | def splitpasswd(user): | ||
778 | 43 | '''urllib.splitpasswd(), but six's support of this is missing''' | ||
779 | 44 | _passwdprog = re.compile('^([^:]*):(.*)$', re.S) | ||
780 | 45 | match = _passwdprog.match(user) | ||
781 | 46 | if match: | ||
782 | 47 | return match.group(1, 2) | ||
783 | 48 | return user, None | ||
784 | 49 | |||
785 | 50 | |||
786 | 18 | class ArchiveUrlFetchHandler(BaseFetchHandler): | 51 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
787 | 19 | """ | 52 | """ |
788 | 20 | Handler to download archive files from arbitrary URLs. | 53 | Handler to download archive files from arbitrary URLs. |
789 | @@ -42,20 +75,20 @@ | |||
790 | 42 | """ | 75 | """ |
791 | 43 | # propogate all exceptions | 76 | # propogate all exceptions |
792 | 44 | # URLError, OSError, etc | 77 | # URLError, OSError, etc |
794 | 45 | proto, netloc, path, params, query, fragment = urlparse.urlparse(source) | 78 | proto, netloc, path, params, query, fragment = urlparse(source) |
795 | 46 | if proto in ('http', 'https'): | 79 | if proto in ('http', 'https'): |
797 | 47 | auth, barehost = urllib2.splituser(netloc) | 80 | auth, barehost = splituser(netloc) |
798 | 48 | if auth is not None: | 81 | if auth is not None: |
802 | 49 | source = urlparse.urlunparse((proto, barehost, path, params, query, fragment)) | 82 | source = urlunparse((proto, barehost, path, params, query, fragment)) |
803 | 50 | username, password = urllib2.splitpasswd(auth) | 83 | username, password = splitpasswd(auth) |
804 | 51 | passman = urllib2.HTTPPasswordMgrWithDefaultRealm() | 84 | passman = HTTPPasswordMgrWithDefaultRealm() |
805 | 52 | # Realm is set to None in add_password to force the username and password | 85 | # Realm is set to None in add_password to force the username and password |
806 | 53 | # to be used whatever the realm | 86 | # to be used whatever the realm |
807 | 54 | passman.add_password(None, source, username, password) | 87 | passman.add_password(None, source, username, password) |
812 | 55 | authhandler = urllib2.HTTPBasicAuthHandler(passman) | 88 | authhandler = HTTPBasicAuthHandler(passman) |
813 | 56 | opener = urllib2.build_opener(authhandler) | 89 | opener = build_opener(authhandler) |
814 | 57 | urllib2.install_opener(opener) | 90 | install_opener(opener) |
815 | 58 | response = urllib2.urlopen(source) | 91 | response = urlopen(source) |
816 | 59 | try: | 92 | try: |
817 | 60 | with open(dest, 'w') as dest_file: | 93 | with open(dest, 'w') as dest_file: |
818 | 61 | dest_file.write(response.read()) | 94 | dest_file.write(response.read()) |
819 | @@ -91,17 +124,21 @@ | |||
820 | 91 | url_parts = self.parse_url(source) | 124 | url_parts = self.parse_url(source) |
821 | 92 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') | 125 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') |
822 | 93 | if not os.path.exists(dest_dir): | 126 | if not os.path.exists(dest_dir): |
824 | 94 | mkdir(dest_dir, perms=0755) | 127 | mkdir(dest_dir, perms=0o755) |
825 | 95 | dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) | 128 | dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) |
826 | 96 | try: | 129 | try: |
827 | 97 | self.download(source, dld_file) | 130 | self.download(source, dld_file) |
829 | 98 | except urllib2.URLError as e: | 131 | except URLError as e: |
830 | 99 | raise UnhandledSource(e.reason) | 132 | raise UnhandledSource(e.reason) |
831 | 100 | except OSError as e: | 133 | except OSError as e: |
832 | 101 | raise UnhandledSource(e.strerror) | 134 | raise UnhandledSource(e.strerror) |
834 | 102 | options = urlparse.parse_qs(url_parts.fragment) | 135 | options = parse_qs(url_parts.fragment) |
835 | 103 | for key, value in options.items(): | 136 | for key, value in options.items(): |
837 | 104 | if key in hashlib.algorithms: | 137 | if not six.PY3: |
838 | 138 | algorithms = hashlib.algorithms | ||
839 | 139 | else: | ||
840 | 140 | algorithms = hashlib.algorithms_available | ||
841 | 141 | if key in algorithms: | ||
842 | 105 | check_hash(dld_file, value, key) | 142 | check_hash(dld_file, value, key) |
843 | 106 | if checksum: | 143 | if checksum: |
844 | 107 | check_hash(dld_file, checksum, hash_type) | 144 | check_hash(dld_file, checksum, hash_type) |
845 | 108 | 145 | ||
846 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
847 | --- hooks/charmhelpers/fetch/bzrurl.py 2014-06-23 09:47:35 +0000 | |||
848 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-12-17 21:09:26 +0000 | |||
849 | @@ -5,6 +5,10 @@ | |||
850 | 5 | ) | 5 | ) |
851 | 6 | from charmhelpers.core.host import mkdir | 6 | from charmhelpers.core.host import mkdir |
852 | 7 | 7 | ||
853 | 8 | import six | ||
854 | 9 | if six.PY3: | ||
855 | 10 | raise ImportError('bzrlib does not support Python3') | ||
856 | 11 | |||
857 | 8 | try: | 12 | try: |
858 | 9 | from bzrlib.branch import Branch | 13 | from bzrlib.branch import Branch |
859 | 10 | except ImportError: | 14 | except ImportError: |
860 | @@ -42,7 +46,7 @@ | |||
861 | 42 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 46 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
862 | 43 | branch_name) | 47 | branch_name) |
863 | 44 | if not os.path.exists(dest_dir): | 48 | if not os.path.exists(dest_dir): |
865 | 45 | mkdir(dest_dir, perms=0755) | 49 | mkdir(dest_dir, perms=0o755) |
866 | 46 | try: | 50 | try: |
867 | 47 | self.branch(source, dest_dir) | 51 | self.branch(source, dest_dir) |
868 | 48 | except OSError as e: | 52 | except OSError as e: |
869 | 49 | 53 | ||
870 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
871 | --- hooks/charmhelpers/fetch/giturl.py 2014-11-21 19:25:26 +0000 | |||
872 | +++ hooks/charmhelpers/fetch/giturl.py 2014-12-17 21:09:26 +0000 | |||
873 | @@ -5,6 +5,10 @@ | |||
874 | 5 | ) | 5 | ) |
875 | 6 | from charmhelpers.core.host import mkdir | 6 | from charmhelpers.core.host import mkdir |
876 | 7 | 7 | ||
877 | 8 | import six | ||
878 | 9 | if six.PY3: | ||
879 | 10 | raise ImportError('GitPython does not support Python 3') | ||
880 | 11 | |||
881 | 8 | try: | 12 | try: |
882 | 9 | from git import Repo | 13 | from git import Repo |
883 | 10 | except ImportError: | 14 | except ImportError: |
884 | @@ -17,7 +21,7 @@ | |||
885 | 17 | """Handler for git branches via generic and github URLs""" | 21 | """Handler for git branches via generic and github URLs""" |
886 | 18 | def can_handle(self, source): | 22 | def can_handle(self, source): |
887 | 19 | url_parts = self.parse_url(source) | 23 | url_parts = self.parse_url(source) |
889 | 20 | #TODO (mattyw) no support for ssh git@ yet | 24 | # TODO (mattyw) no support for ssh git@ yet |
890 | 21 | if url_parts.scheme not in ('http', 'https', 'git'): | 25 | if url_parts.scheme not in ('http', 'https', 'git'): |
891 | 22 | return False | 26 | return False |
892 | 23 | else: | 27 | else: |
893 | @@ -30,13 +34,16 @@ | |||
894 | 30 | repo = Repo.clone_from(source, dest) | 34 | repo = Repo.clone_from(source, dest) |
895 | 31 | repo.git.checkout(branch) | 35 | repo.git.checkout(branch) |
896 | 32 | 36 | ||
898 | 33 | def install(self, source, branch="master"): | 37 | def install(self, source, branch="master", dest=None): |
899 | 34 | url_parts = self.parse_url(source) | 38 | url_parts = self.parse_url(source) |
900 | 35 | branch_name = url_parts.path.strip("/").split("/")[-1] | 39 | branch_name = url_parts.path.strip("/").split("/")[-1] |
903 | 36 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 40 | if dest: |
904 | 37 | branch_name) | 41 | dest_dir = os.path.join(dest, branch_name) |
905 | 42 | else: | ||
906 | 43 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | ||
907 | 44 | branch_name) | ||
908 | 38 | if not os.path.exists(dest_dir): | 45 | if not os.path.exists(dest_dir): |
910 | 39 | mkdir(dest_dir, perms=0755) | 46 | mkdir(dest_dir, perms=0o755) |
911 | 40 | try: | 47 | try: |
912 | 41 | self.clone(source, dest_dir, branch) | 48 | self.clone(source, dest_dir, branch) |
913 | 42 | except OSError as e: | 49 | except OSError as e: |
914 | 43 | 50 | ||
915 | === modified file 'hooks/percona_hooks.py' | |||
916 | --- hooks/percona_hooks.py 2014-11-25 14:59:34 +0000 | |||
917 | +++ hooks/percona_hooks.py 2014-12-17 21:09:26 +0000 | |||
918 | @@ -26,6 +26,7 @@ | |||
919 | 26 | write_file, | 26 | write_file, |
920 | 27 | lsb_release, | 27 | lsb_release, |
921 | 28 | ) | 28 | ) |
922 | 29 | from charmhelpers.core.templating import render | ||
923 | 29 | from charmhelpers.fetch import ( | 30 | from charmhelpers.fetch import ( |
924 | 30 | apt_update, | 31 | apt_update, |
925 | 31 | apt_install, | 32 | apt_install, |
926 | @@ -40,7 +41,6 @@ | |||
927 | 40 | PACKAGES, | 41 | PACKAGES, |
928 | 41 | MY_CNF, | 42 | MY_CNF, |
929 | 42 | setup_percona_repo, | 43 | setup_percona_repo, |
930 | 43 | render_template, | ||
931 | 44 | get_host_ip, | 44 | get_host_ip, |
932 | 45 | get_cluster_hosts, | 45 | get_cluster_hosts, |
933 | 46 | configure_sstuser, | 46 | configure_sstuser, |
934 | @@ -123,9 +123,7 @@ | |||
935 | 123 | context['ipv6'] = False | 123 | context['ipv6'] = False |
936 | 124 | 124 | ||
937 | 125 | context.update(parse_config()) | 125 | context.update(parse_config()) |
941 | 126 | write_file(path=MY_CNF, | 126 | render(os.path.basename(MY_CNF), MY_CNF, context, perms=0o444) |
939 | 127 | content=render_template(os.path.basename(MY_CNF), context), | ||
940 | 128 | perms=0o444) | ||
942 | 129 | 127 | ||
943 | 130 | 128 | ||
944 | 131 | @hooks.hook('upgrade-charm') | 129 | @hooks.hook('upgrade-charm') |
945 | 132 | 130 | ||
946 | === modified file 'hooks/percona_utils.py' | |||
947 | --- hooks/percona_utils.py 2014-11-21 19:25:26 +0000 | |||
948 | +++ hooks/percona_utils.py 2014-12-17 21:09:26 +0000 | |||
949 | @@ -28,20 +28,6 @@ | |||
950 | 28 | from mysql import get_mysql_root_password, MySQLHelper | 28 | from mysql import get_mysql_root_password, MySQLHelper |
951 | 29 | 29 | ||
952 | 30 | 30 | ||
953 | 31 | try: | ||
954 | 32 | import jinja2 | ||
955 | 33 | except ImportError: | ||
956 | 34 | apt_install(filter_installed_packages(['python-jinja2']), | ||
957 | 35 | fatal=True) | ||
958 | 36 | import jinja2 | ||
959 | 37 | |||
960 | 38 | try: | ||
961 | 39 | import dns.resolver | ||
962 | 40 | except ImportError: | ||
963 | 41 | apt_install(filter_installed_packages(['python-dnspython']), | ||
964 | 42 | fatal=True) | ||
965 | 43 | import dns.resolver | ||
966 | 44 | |||
967 | 45 | PACKAGES = [ | 31 | PACKAGES = [ |
968 | 46 | 'percona-xtradb-cluster-server-5.5', | 32 | 'percona-xtradb-cluster-server-5.5', |
969 | 47 | 'percona-xtradb-cluster-client-5.5', | 33 | 'percona-xtradb-cluster-client-5.5', |
970 | @@ -72,18 +58,15 @@ | |||
971 | 72 | sources.write(REPO.format(release=lsb_release()['DISTRIB_CODENAME'])) | 58 | sources.write(REPO.format(release=lsb_release()['DISTRIB_CODENAME'])) |
972 | 73 | subprocess.check_call(['apt-key', 'add', KEY]) | 59 | subprocess.check_call(['apt-key', 'add', KEY]) |
973 | 74 | 60 | ||
974 | 75 | TEMPLATES_DIR = 'templates' | ||
975 | 76 | FILES_DIR = 'files' | ||
976 | 77 | |||
977 | 78 | |||
978 | 79 | def render_template(template_name, context, template_dir=TEMPLATES_DIR): | ||
979 | 80 | templates = jinja2.Environment( | ||
980 | 81 | loader=jinja2.FileSystemLoader(template_dir)) | ||
981 | 82 | template = templates.get_template(template_name) | ||
982 | 83 | return template.render(context) | ||
983 | 84 | |||
984 | 85 | 61 | ||
985 | 86 | def get_host_ip(hostname=None): | 62 | def get_host_ip(hostname=None): |
986 | 63 | try: | ||
987 | 64 | import dns.resolver | ||
988 | 65 | except ImportError: | ||
989 | 66 | apt_install(filter_installed_packages(['python-dnspython']), | ||
990 | 67 | fatal=True) | ||
991 | 68 | import dns.resolver | ||
992 | 69 | |||
993 | 87 | if config('prefer-ipv6'): | 70 | if config('prefer-ipv6'): |
994 | 88 | # Ensure we have a valid ipv6 address configured | 71 | # Ensure we have a valid ipv6 address configured |
995 | 89 | get_ipv6_addr(exc_list=[config('vip')], fatal=True)[0] | 72 | get_ipv6_addr(exc_list=[config('vip')], fatal=True)[0] |
This merge also fixes a pre-existing unit test failure.