Merge lp:~james-page/charms/precise/nvp-transport-node/tarball-url into lp:charms/nvp-transport-node
- Precise Pangolin (12.04)
- tarball-url
- Merge into trunk
Proposed by
James Page
Status: | Merged |
---|---|
Merged at revision: | 47 |
Proposed branch: | lp:~james-page/charms/precise/nvp-transport-node/tarball-url |
Merge into: | lp:charms/nvp-transport-node |
Diff against target: |
1002 lines (+642/-71) 12 files modified
charm-helpers-sync.yaml (+1/-0) config.yaml (+3/-0) hooks/charmhelpers/core/fstab.py (+114/-0) hooks/charmhelpers/core/hookenv.py (+124/-1) hooks/charmhelpers/core/host.py (+87/-9) hooks/charmhelpers/fetch/__init__.py (+180/-58) hooks/charmhelpers/fetch/archiveurl.py (+15/-0) hooks/charmhelpers/fetch/bzrurl.py (+3/-2) hooks/charmhelpers/payload/__init__.py (+1/-0) hooks/charmhelpers/payload/archive.py (+57/-0) hooks/charmhelpers/payload/execd.py (+50/-0) hooks/hooks.py (+7/-1) |
To merge this branch: | bzr merge lp:~james-page/charms/precise/nvp-transport-node/tarball-url |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
charmers | Pending | ||
Review via email: mp+223732@code.launchpad.net |
Commit message
Description of the change
Updates to support deploying the required OVS debs from an HTTP-accessible location.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'charm-helpers-sync.yaml' |
2 | --- charm-helpers-sync.yaml 2013-10-16 09:07:33 +0000 |
3 | +++ charm-helpers-sync.yaml 2014-06-19 12:54:16 +0000 |
4 | @@ -4,3 +4,4 @@ |
5 | - core |
6 | - fetch |
7 | - contrib.network.ovs |
8 | + - payload |
9 | |
10 | === modified file 'config.yaml' |
11 | --- config.yaml 2014-02-05 14:53:19 +0000 |
12 | +++ config.yaml 2014-06-19 12:54:16 +0000 |
13 | @@ -26,3 +26,6 @@ |
14 | type: string |
15 | default: "openvswitch-datapath-dkms openvswitch-switch nicira-ovs-hypervisor-node" |
16 | description: List of packages to install for NVP. |
17 | + ovs-tarball-url: |
18 | + type: string |
19 | + description: Optional URL to NSX OpenvSwitch tarball containing Debian packages. |
20 | |
21 | === added file 'hooks/charmhelpers/core/fstab.py' |
22 | --- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000 |
23 | +++ hooks/charmhelpers/core/fstab.py 2014-06-19 12:54:16 +0000 |
24 | @@ -0,0 +1,114 @@ |
25 | +#!/usr/bin/env python |
26 | +# -*- coding: utf-8 -*- |
27 | + |
28 | +__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
29 | + |
30 | +import os |
31 | + |
32 | + |
33 | +class Fstab(file): |
34 | + """This class extends file in order to implement a file reader/writer |
35 | + for file `/etc/fstab` |
36 | + """ |
37 | + |
38 | + class Entry(object): |
39 | + """Entry class represents a non-comment line on the `/etc/fstab` file |
40 | + """ |
41 | + def __init__(self, device, mountpoint, filesystem, |
42 | + options, d=0, p=0): |
43 | + self.device = device |
44 | + self.mountpoint = mountpoint |
45 | + self.filesystem = filesystem |
46 | + |
47 | + if not options: |
48 | + options = "defaults" |
49 | + |
50 | + self.options = options |
51 | + self.d = d |
52 | + self.p = p |
53 | + |
54 | + def __eq__(self, o): |
55 | + return str(self) == str(o) |
56 | + |
57 | + def __str__(self): |
58 | + return "{} {} {} {} {} {}".format(self.device, |
59 | + self.mountpoint, |
60 | + self.filesystem, |
61 | + self.options, |
62 | + self.d, |
63 | + self.p) |
64 | + |
65 | + DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab') |
66 | + |
67 | + def __init__(self, path=None): |
68 | + if path: |
69 | + self._path = path |
70 | + else: |
71 | + self._path = self.DEFAULT_PATH |
72 | + file.__init__(self, self._path, 'r+') |
73 | + |
74 | + def _hydrate_entry(self, line): |
75 | + return Fstab.Entry(*filter( |
76 | + lambda x: x not in ('', None), |
77 | + line.strip("\n").split(" "))) |
78 | + |
79 | + @property |
80 | + def entries(self): |
81 | + self.seek(0) |
82 | + for line in self.readlines(): |
83 | + try: |
84 | + if not line.startswith("#"): |
85 | + yield self._hydrate_entry(line) |
86 | + except ValueError: |
87 | + pass |
88 | + |
89 | + def get_entry_by_attr(self, attr, value): |
90 | + for entry in self.entries: |
91 | + e_attr = getattr(entry, attr) |
92 | + if e_attr == value: |
93 | + return entry |
94 | + return None |
95 | + |
96 | + def add_entry(self, entry): |
97 | + if self.get_entry_by_attr('device', entry.device): |
98 | + return False |
99 | + |
100 | + self.write(str(entry) + '\n') |
101 | + self.truncate() |
102 | + return entry |
103 | + |
104 | + def remove_entry(self, entry): |
105 | + self.seek(0) |
106 | + |
107 | + lines = self.readlines() |
108 | + |
109 | + found = False |
110 | + for index, line in enumerate(lines): |
111 | + if not line.startswith("#"): |
112 | + if self._hydrate_entry(line) == entry: |
113 | + found = True |
114 | + break |
115 | + |
116 | + if not found: |
117 | + return False |
118 | + |
119 | + lines.remove(line) |
120 | + |
121 | + self.seek(0) |
122 | + self.write(''.join(lines)) |
123 | + self.truncate() |
124 | + return True |
125 | + |
126 | + @classmethod |
127 | + def remove_by_mountpoint(cls, mountpoint, path=None): |
128 | + fstab = cls(path=path) |
129 | + entry = fstab.get_entry_by_attr('mountpoint', mountpoint) |
130 | + if entry: |
131 | + return fstab.remove_entry(entry) |
132 | + return False |
133 | + |
134 | + @classmethod |
135 | + def add(cls, device, mountpoint, filesystem, options=None, path=None): |
136 | + return cls(path=path).add_entry(Fstab.Entry(device, |
137 | + mountpoint, filesystem, |
138 | + options=options)) |
139 | |
140 | === modified file 'hooks/charmhelpers/core/hookenv.py' |
141 | --- hooks/charmhelpers/core/hookenv.py 2013-10-22 04:07:21 +0000 |
142 | +++ hooks/charmhelpers/core/hookenv.py 2014-06-19 12:54:16 +0000 |
143 | @@ -8,6 +8,7 @@ |
144 | import json |
145 | import yaml |
146 | import subprocess |
147 | +import sys |
148 | import UserDict |
149 | from subprocess import CalledProcessError |
150 | |
151 | @@ -149,6 +150,105 @@ |
152 | return local_unit().split('/')[0] |
153 | |
154 | |
155 | +def hook_name(): |
156 | + """The name of the currently executing hook""" |
157 | + return os.path.basename(sys.argv[0]) |
158 | + |
159 | + |
160 | +class Config(dict): |
161 | + """A Juju charm config dictionary that can write itself to |
162 | + disk (as json) and track which values have changed since |
163 | + the previous hook invocation. |
164 | + |
165 | + Do not instantiate this object directly - instead call |
166 | + ``hookenv.config()`` |
167 | + |
168 | + Example usage:: |
169 | + |
170 | + >>> # inside a hook |
171 | + >>> from charmhelpers.core import hookenv |
172 | + >>> config = hookenv.config() |
173 | + >>> config['foo'] |
174 | + 'bar' |
175 | + >>> config['mykey'] = 'myval' |
176 | + >>> config.save() |
177 | + |
178 | + |
179 | + >>> # user runs `juju set mycharm foo=baz` |
180 | + >>> # now we're inside subsequent config-changed hook |
181 | + >>> config = hookenv.config() |
182 | + >>> config['foo'] |
183 | + 'baz' |
184 | + >>> # test to see if this val has changed since last hook |
185 | + >>> config.changed('foo') |
186 | + True |
187 | + >>> # what was the previous value? |
188 | + >>> config.previous('foo') |
189 | + 'bar' |
190 | + >>> # keys/values that we add are preserved across hooks |
191 | + >>> config['mykey'] |
192 | + 'myval' |
193 | + >>> # don't forget to save at the end of hook! |
194 | + >>> config.save() |
195 | + |
196 | + """ |
197 | + CONFIG_FILE_NAME = '.juju-persistent-config' |
198 | + |
199 | + def __init__(self, *args, **kw): |
200 | + super(Config, self).__init__(*args, **kw) |
201 | + self._prev_dict = None |
202 | + self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
203 | + if os.path.exists(self.path): |
204 | + self.load_previous() |
205 | + |
206 | + def load_previous(self, path=None): |
207 | + """Load previous copy of config from disk so that current values |
208 | + can be compared to previous values. |
209 | + |
210 | + :param path: |
211 | + |
212 | + File path from which to load the previous config. If `None`, |
213 | + config is loaded from the default location. If `path` is |
214 | + specified, subsequent `save()` calls will write to the same |
215 | + path. |
216 | + |
217 | + """ |
218 | + self.path = path or self.path |
219 | + with open(self.path) as f: |
220 | + self._prev_dict = json.load(f) |
221 | + |
222 | + def changed(self, key): |
223 | + """Return true if the value for this key has changed since |
224 | + the last save. |
225 | + |
226 | + """ |
227 | + if self._prev_dict is None: |
228 | + return True |
229 | + return self.previous(key) != self.get(key) |
230 | + |
231 | + def previous(self, key): |
232 | + """Return previous value for this key, or None if there |
233 | + is no "previous" value. |
234 | + |
235 | + """ |
236 | + if self._prev_dict: |
237 | + return self._prev_dict.get(key) |
238 | + return None |
239 | + |
240 | + def save(self): |
241 | + """Save this config to disk. |
242 | + |
243 | + Preserves items in _prev_dict that do not exist in self. |
244 | + |
245 | + """ |
246 | + if self._prev_dict: |
247 | + for k, v in self._prev_dict.iteritems(): |
248 | + if k not in self: |
249 | + self[k] = v |
250 | + with open(self.path, 'w') as f: |
251 | + json.dump(self, f) |
252 | + |
253 | + |
254 | @cached |
255 | def config(scope=None): |
256 | """Juju charm configuration""" |
257 | @@ -157,7 +257,10 @@ |
258 | config_cmd_line.append(scope) |
259 | config_cmd_line.append('--format=json') |
260 | try: |
261 | - return json.loads(subprocess.check_output(config_cmd_line)) |
262 | + config_data = json.loads(subprocess.check_output(config_cmd_line)) |
263 | + if scope is not None: |
264 | + return config_data |
265 | + return Config(config_data) |
266 | except ValueError: |
267 | return None |
268 | |
269 | @@ -285,6 +388,26 @@ |
270 | return rels |
271 | |
272 | |
273 | +@cached |
274 | +def is_relation_made(relation, keys='private-address'): |
275 | + ''' |
276 | + Determine whether a relation is established by checking for |
277 | + presence of key(s). If a list of keys is provided, they |
278 | + must all be present for the relation to be identified as made |
279 | + ''' |
280 | + if isinstance(keys, str): |
281 | + keys = [keys] |
282 | + for r_id in relation_ids(relation): |
283 | + for unit in related_units(r_id): |
284 | + context = {} |
285 | + for k in keys: |
286 | + context[k] = relation_get(k, rid=r_id, |
287 | + unit=unit) |
288 | + if None not in context.values(): |
289 | + return True |
290 | + return False |
291 | + |
292 | + |
293 | def open_port(port, protocol="TCP"): |
294 | """Open a service network port""" |
295 | _args = ['open-port'] |
296 | |
297 | === modified file 'hooks/charmhelpers/core/host.py' |
298 | --- hooks/charmhelpers/core/host.py 2013-10-22 04:07:21 +0000 |
299 | +++ hooks/charmhelpers/core/host.py 2014-06-19 12:54:16 +0000 |
300 | @@ -16,6 +16,7 @@ |
301 | from collections import OrderedDict |
302 | |
303 | from hookenv import log |
304 | +from fstab import Fstab |
305 | |
306 | |
307 | def service_start(service_name): |
308 | @@ -34,7 +35,8 @@ |
309 | |
310 | |
311 | def service_reload(service_name, restart_on_failure=False): |
312 | - """Reload a system service, optionally falling back to restart if reload fails""" |
313 | + """Reload a system service, optionally falling back to restart if |
314 | + reload fails""" |
315 | service_result = service('reload', service_name) |
316 | if not service_result and restart_on_failure: |
317 | service_result = service('restart', service_name) |
318 | @@ -143,7 +145,19 @@ |
319 | target.write(content) |
320 | |
321 | |
322 | -def mount(device, mountpoint, options=None, persist=False): |
323 | +def fstab_remove(mp): |
324 | + """Remove the given mountpoint entry from /etc/fstab |
325 | + """ |
326 | + return Fstab.remove_by_mountpoint(mp) |
327 | + |
328 | + |
329 | +def fstab_add(dev, mp, fs, options=None): |
330 | + """Adds the given device entry to the /etc/fstab file |
331 | + """ |
332 | + return Fstab.add(dev, mp, fs, options=options) |
333 | + |
334 | + |
335 | +def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): |
336 | """Mount a filesystem at a particular mountpoint""" |
337 | cmd_args = ['mount'] |
338 | if options is not None: |
339 | @@ -154,9 +168,9 @@ |
340 | except subprocess.CalledProcessError, e: |
341 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
342 | return False |
343 | + |
344 | if persist: |
345 | - # TODO: update fstab |
346 | - pass |
347 | + return fstab_add(device, mountpoint, filesystem, options=options) |
348 | return True |
349 | |
350 | |
351 | @@ -168,9 +182,9 @@ |
352 | except subprocess.CalledProcessError, e: |
353 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
354 | return False |
355 | + |
356 | if persist: |
357 | - # TODO: update fstab |
358 | - pass |
359 | + return fstab_remove(mountpoint) |
360 | return True |
361 | |
362 | |
363 | @@ -194,7 +208,7 @@ |
364 | return None |
365 | |
366 | |
367 | -def restart_on_change(restart_map): |
368 | +def restart_on_change(restart_map, stopstart=False): |
369 | """Restart services based on configuration files changing |
370 | |
371 | This function is used a decorator, for example |
372 | @@ -219,8 +233,14 @@ |
373 | for path in restart_map: |
374 | if checksums[path] != file_hash(path): |
375 | restarts += restart_map[path] |
376 | - for service_name in list(OrderedDict.fromkeys(restarts)): |
377 | - service('restart', service_name) |
378 | + services_list = list(OrderedDict.fromkeys(restarts)) |
379 | + if not stopstart: |
380 | + for service_name in services_list: |
381 | + service('restart', service_name) |
382 | + else: |
383 | + for action in ['stop', 'start']: |
384 | + for service_name in services_list: |
385 | + service(action, service_name) |
386 | return wrapped_f |
387 | return wrap |
388 | |
389 | @@ -245,3 +265,61 @@ |
390 | random_chars = [ |
391 | random.choice(alphanumeric_chars) for _ in range(length)] |
392 | return(''.join(random_chars)) |
393 | + |
394 | + |
395 | +def list_nics(nic_type): |
396 | + '''Return a list of nics of given type(s)''' |
397 | + if isinstance(nic_type, basestring): |
398 | + int_types = [nic_type] |
399 | + else: |
400 | + int_types = nic_type |
401 | + interfaces = [] |
402 | + for int_type in int_types: |
403 | + cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] |
404 | + ip_output = subprocess.check_output(cmd).split('\n') |
405 | + ip_output = (line for line in ip_output if line) |
406 | + for line in ip_output: |
407 | + if line.split()[1].startswith(int_type): |
408 | + interfaces.append(line.split()[1].replace(":", "")) |
409 | + return interfaces |
410 | + |
411 | + |
412 | +def set_nic_mtu(nic, mtu): |
413 | + '''Set MTU on a network interface''' |
414 | + cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] |
415 | + subprocess.check_call(cmd) |
416 | + |
417 | + |
418 | +def get_nic_mtu(nic): |
419 | + cmd = ['ip', 'addr', 'show', nic] |
420 | + ip_output = subprocess.check_output(cmd).split('\n') |
421 | + mtu = "" |
422 | + for line in ip_output: |
423 | + words = line.split() |
424 | + if 'mtu' in words: |
425 | + mtu = words[words.index("mtu") + 1] |
426 | + return mtu |
427 | + |
428 | + |
429 | +def get_nic_hwaddr(nic): |
430 | + cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
431 | + ip_output = subprocess.check_output(cmd) |
432 | + hwaddr = "" |
433 | + words = ip_output.split() |
434 | + if 'link/ether' in words: |
435 | + hwaddr = words[words.index('link/ether') + 1] |
436 | + return hwaddr |
437 | + |
438 | + |
439 | +def cmp_pkgrevno(package, revno, pkgcache=None): |
440 | + '''Compare supplied revno with the revno of the installed package |
441 | + 1 => Installed revno is greater than supplied arg |
442 | + 0 => Installed revno is the same as supplied arg |
443 | + -1 => Installed revno is less than supplied arg |
444 | + ''' |
445 | + import apt_pkg |
446 | + if not pkgcache: |
447 | + apt_pkg.init() |
448 | + pkgcache = apt_pkg.Cache() |
449 | + pkg = pkgcache[package] |
450 | + return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
451 | |
452 | === modified file 'hooks/charmhelpers/fetch/__init__.py' |
453 | --- hooks/charmhelpers/fetch/__init__.py 2013-10-22 04:07:21 +0000 |
454 | +++ hooks/charmhelpers/fetch/__init__.py 2014-06-19 12:54:16 +0000 |
455 | @@ -1,4 +1,5 @@ |
456 | import importlib |
457 | +import time |
458 | from yaml import safe_load |
459 | from charmhelpers.core.host import ( |
460 | lsb_release |
461 | @@ -12,7 +13,8 @@ |
462 | config, |
463 | log, |
464 | ) |
465 | -import apt_pkg |
466 | +import os |
467 | + |
468 | |
469 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
470 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
471 | @@ -20,11 +22,107 @@ |
472 | PROPOSED_POCKET = """# Proposed |
473 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
474 | """ |
475 | +CLOUD_ARCHIVE_POCKETS = { |
476 | + # Folsom |
477 | + 'folsom': 'precise-updates/folsom', |
478 | + 'precise-folsom': 'precise-updates/folsom', |
479 | + 'precise-folsom/updates': 'precise-updates/folsom', |
480 | + 'precise-updates/folsom': 'precise-updates/folsom', |
481 | + 'folsom/proposed': 'precise-proposed/folsom', |
482 | + 'precise-folsom/proposed': 'precise-proposed/folsom', |
483 | + 'precise-proposed/folsom': 'precise-proposed/folsom', |
484 | + # Grizzly |
485 | + 'grizzly': 'precise-updates/grizzly', |
486 | + 'precise-grizzly': 'precise-updates/grizzly', |
487 | + 'precise-grizzly/updates': 'precise-updates/grizzly', |
488 | + 'precise-updates/grizzly': 'precise-updates/grizzly', |
489 | + 'grizzly/proposed': 'precise-proposed/grizzly', |
490 | + 'precise-grizzly/proposed': 'precise-proposed/grizzly', |
491 | + 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
492 | + # Havana |
493 | + 'havana': 'precise-updates/havana', |
494 | + 'precise-havana': 'precise-updates/havana', |
495 | + 'precise-havana/updates': 'precise-updates/havana', |
496 | + 'precise-updates/havana': 'precise-updates/havana', |
497 | + 'havana/proposed': 'precise-proposed/havana', |
498 | + 'precise-havana/proposed': 'precise-proposed/havana', |
499 | + 'precise-proposed/havana': 'precise-proposed/havana', |
500 | + # Icehouse |
501 | + 'icehouse': 'precise-updates/icehouse', |
502 | + 'precise-icehouse': 'precise-updates/icehouse', |
503 | + 'precise-icehouse/updates': 'precise-updates/icehouse', |
504 | + 'precise-updates/icehouse': 'precise-updates/icehouse', |
505 | + 'icehouse/proposed': 'precise-proposed/icehouse', |
506 | + 'precise-icehouse/proposed': 'precise-proposed/icehouse', |
507 | + 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
508 | + # Juno |
509 | + 'juno': 'trusty-updates/juno', |
510 | + 'trusty-juno': 'trusty-updates/juno', |
511 | + 'trusty-juno/updates': 'trusty-updates/juno', |
512 | + 'trusty-updates/juno': 'trusty-updates/juno', |
513 | + 'juno/proposed': 'trusty-proposed/juno', |
514 | + 'juno/proposed': 'trusty-proposed/juno', |
515 | + 'trusty-juno/proposed': 'trusty-proposed/juno', |
516 | + 'trusty-proposed/juno': 'trusty-proposed/juno', |
517 | +} |
518 | + |
519 | +# The order of this list is very important. Handlers should be listed in from |
520 | +# least- to most-specific URL matching. |
521 | +FETCH_HANDLERS = ( |
522 | + 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
523 | + 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
524 | +) |
525 | + |
526 | +APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
527 | +APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. |
528 | +APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
529 | + |
530 | + |
531 | +class SourceConfigError(Exception): |
532 | + pass |
533 | + |
534 | + |
535 | +class UnhandledSource(Exception): |
536 | + pass |
537 | + |
538 | + |
539 | +class AptLockError(Exception): |
540 | + pass |
541 | + |
542 | + |
543 | +class BaseFetchHandler(object): |
544 | + |
545 | + """Base class for FetchHandler implementations in fetch plugins""" |
546 | + |
547 | + def can_handle(self, source): |
548 | + """Returns True if the source can be handled. Otherwise returns |
549 | + a string explaining why it cannot""" |
550 | + return "Wrong source type" |
551 | + |
552 | + def install(self, source): |
553 | + """Try to download and unpack the source. Return the path to the |
554 | + unpacked files or raise UnhandledSource.""" |
555 | + raise UnhandledSource("Wrong source type {}".format(source)) |
556 | + |
557 | + def parse_url(self, url): |
558 | + return urlparse(url) |
559 | + |
560 | + def base_url(self, url): |
561 | + """Return url without querystring or fragment""" |
562 | + parts = list(self.parse_url(url)) |
563 | + parts[4:] = ['' for i in parts[4:]] |
564 | + return urlunparse(parts) |
565 | |
566 | |
567 | def filter_installed_packages(packages): |
568 | """Returns a list of packages that require installation""" |
569 | + import apt_pkg |
570 | apt_pkg.init() |
571 | + |
572 | + # Tell apt to build an in-memory cache to prevent race conditions (if |
573 | + # another process is already building the cache). |
574 | + apt_pkg.config.set("Dir::Cache::pkgcache", "") |
575 | + |
576 | cache = apt_pkg.Cache() |
577 | _pkgs = [] |
578 | for package in packages: |
579 | @@ -40,8 +138,10 @@ |
580 | |
581 | def apt_install(packages, options=None, fatal=False): |
582 | """Install one or more packages""" |
583 | - options = options or [] |
584 | - cmd = ['apt-get', '-y'] |
585 | + if options is None: |
586 | + options = ['--option=Dpkg::Options::=--force-confold'] |
587 | + |
588 | + cmd = ['apt-get', '--assume-yes'] |
589 | cmd.extend(options) |
590 | cmd.append('install') |
591 | if isinstance(packages, basestring): |
592 | @@ -50,33 +150,39 @@ |
593 | cmd.extend(packages) |
594 | log("Installing {} with options: {}".format(packages, |
595 | options)) |
596 | - if fatal: |
597 | - subprocess.check_call(cmd) |
598 | + _run_apt_command(cmd, fatal) |
599 | + |
600 | + |
601 | +def apt_upgrade(options=None, fatal=False, dist=False): |
602 | + """Upgrade all packages""" |
603 | + if options is None: |
604 | + options = ['--option=Dpkg::Options::=--force-confold'] |
605 | + |
606 | + cmd = ['apt-get', '--assume-yes'] |
607 | + cmd.extend(options) |
608 | + if dist: |
609 | + cmd.append('dist-upgrade') |
610 | else: |
611 | - subprocess.call(cmd) |
612 | + cmd.append('upgrade') |
613 | + log("Upgrading with options: {}".format(options)) |
614 | + _run_apt_command(cmd, fatal) |
615 | |
616 | |
617 | def apt_update(fatal=False): |
618 | """Update local apt cache""" |
619 | cmd = ['apt-get', 'update'] |
620 | - if fatal: |
621 | - subprocess.check_call(cmd) |
622 | - else: |
623 | - subprocess.call(cmd) |
624 | + _run_apt_command(cmd, fatal) |
625 | |
626 | |
627 | def apt_purge(packages, fatal=False): |
628 | """Purge one or more packages""" |
629 | - cmd = ['apt-get', '-y', 'purge'] |
630 | + cmd = ['apt-get', '--assume-yes', 'purge'] |
631 | if isinstance(packages, basestring): |
632 | cmd.append(packages) |
633 | else: |
634 | cmd.extend(packages) |
635 | log("Purging {}".format(packages)) |
636 | - if fatal: |
637 | - subprocess.check_call(cmd) |
638 | - else: |
639 | - subprocess.call(cmd) |
640 | + _run_apt_command(cmd, fatal) |
641 | |
642 | |
643 | def apt_hold(packages, fatal=False): |
644 | @@ -87,6 +193,7 @@ |
645 | else: |
646 | cmd.extend(packages) |
647 | log("Holding {}".format(packages)) |
648 | + |
649 | if fatal: |
650 | subprocess.check_call(cmd) |
651 | else: |
652 | @@ -94,26 +201,34 @@ |
653 | |
654 | |
655 | def add_source(source, key=None): |
656 | + if source is None: |
657 | + log('Source is not present. Skipping') |
658 | + return |
659 | + |
660 | if (source.startswith('ppa:') or |
661 | - source.startswith('http:') or |
662 | - source.startswith('deb ')): |
663 | + source.startswith('http') or |
664 | + source.startswith('deb ') or |
665 | + source.startswith('cloud-archive:')): |
666 | subprocess.check_call(['add-apt-repository', '--yes', source]) |
667 | elif source.startswith('cloud:'): |
668 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), |
669 | fatal=True) |
670 | pocket = source.split(':')[-1] |
671 | + if pocket not in CLOUD_ARCHIVE_POCKETS: |
672 | + raise SourceConfigError( |
673 | + 'Unsupported cloud: source option %s' % |
674 | + pocket) |
675 | + actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] |
676 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
677 | - apt.write(CLOUD_ARCHIVE.format(pocket)) |
678 | + apt.write(CLOUD_ARCHIVE.format(actual_pocket)) |
679 | elif source == 'proposed': |
680 | release = lsb_release()['DISTRIB_CODENAME'] |
681 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: |
682 | apt.write(PROPOSED_POCKET.format(release)) |
683 | if key: |
684 | - subprocess.check_call(['apt-key', 'import', key]) |
685 | - |
686 | - |
687 | -class SourceConfigError(Exception): |
688 | - pass |
689 | + subprocess.check_call(['apt-key', 'adv', '--keyserver', |
690 | + 'hkp://keyserver.ubuntu.com:80', '--recv', |
691 | + key]) |
692 | |
693 | |
694 | def configure_sources(update=False, |
695 | @@ -148,17 +263,6 @@ |
696 | if update: |
697 | apt_update(fatal=True) |
698 | |
699 | -# The order of this list is very important. Handlers should be listed in from |
700 | -# least- to most-specific URL matching. |
701 | -FETCH_HANDLERS = ( |
702 | - 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
703 | - 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
704 | -) |
705 | - |
706 | - |
707 | -class UnhandledSource(Exception): |
708 | - pass |
709 | - |
710 | |
711 | def install_remote(source): |
712 | """ |
713 | @@ -189,28 +293,6 @@ |
714 | return install_remote(source) |
715 | |
716 | |
717 | -class BaseFetchHandler(object): |
718 | - """Base class for FetchHandler implementations in fetch plugins""" |
719 | - def can_handle(self, source): |
720 | - """Returns True if the source can be handled. Otherwise returns |
721 | - a string explaining why it cannot""" |
722 | - return "Wrong source type" |
723 | - |
724 | - def install(self, source): |
725 | - """Try to download and unpack the source. Return the path to the |
726 | - unpacked files or raise UnhandledSource.""" |
727 | - raise UnhandledSource("Wrong source type {}".format(source)) |
728 | - |
729 | - def parse_url(self, url): |
730 | - return urlparse(url) |
731 | - |
732 | - def base_url(self, url): |
733 | - """Return url without querystring or fragment""" |
734 | - parts = list(self.parse_url(url)) |
735 | - parts[4:] = ['' for i in parts[4:]] |
736 | - return urlunparse(parts) |
737 | - |
738 | - |
739 | def plugins(fetch_handlers=None): |
740 | if not fetch_handlers: |
741 | fetch_handlers = FETCH_HANDLERS |
742 | @@ -218,10 +300,50 @@ |
743 | for handler_name in fetch_handlers: |
744 | package, classname = handler_name.rsplit('.', 1) |
745 | try: |
746 | - handler_class = getattr(importlib.import_module(package), classname) |
747 | + handler_class = getattr( |
748 | + importlib.import_module(package), |
749 | + classname) |
750 | plugin_list.append(handler_class()) |
751 | except (ImportError, AttributeError): |
752 | # Skip missing plugins so that they can be ommitted from |
753 | # installation if desired |
754 | - log("FetchHandler {} not found, skipping plugin".format(handler_name)) |
755 | + log("FetchHandler {} not found, skipping plugin".format( |
756 | + handler_name)) |
757 | return plugin_list |
758 | + |
759 | + |
760 | +def _run_apt_command(cmd, fatal=False): |
761 | + """ |
762 | + Run an APT command, checking output and retrying if the fatal flag is set |
763 | + to True. |
764 | + |
765 | + :param: cmd: str: The apt command to run. |
766 | + :param: fatal: bool: Whether the command's output should be checked and |
767 | + retried. |
768 | + """ |
769 | + env = os.environ.copy() |
770 | + |
771 | + if 'DEBIAN_FRONTEND' not in env: |
772 | + env['DEBIAN_FRONTEND'] = 'noninteractive' |
773 | + |
774 | + if fatal: |
775 | + retry_count = 0 |
776 | + result = None |
777 | + |
778 | + # If the command is considered "fatal", we need to retry if the apt |
779 | + # lock was not acquired. |
780 | + |
781 | + while result is None or result == APT_NO_LOCK: |
782 | + try: |
783 | + result = subprocess.check_call(cmd, env=env) |
784 | + except subprocess.CalledProcessError, e: |
785 | + retry_count = retry_count + 1 |
786 | + if retry_count > APT_NO_LOCK_RETRY_COUNT: |
787 | + raise |
788 | + result = e.returncode |
789 | + log("Couldn't acquire DPKG lock. Will retry in {} seconds." |
790 | + "".format(APT_NO_LOCK_RETRY_DELAY)) |
791 | + time.sleep(APT_NO_LOCK_RETRY_DELAY) |
792 | + |
793 | + else: |
794 | + subprocess.call(cmd, env=env) |
795 | |
796 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' |
797 | --- hooks/charmhelpers/fetch/archiveurl.py 2013-10-16 09:07:33 +0000 |
798 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-06-19 12:54:16 +0000 |
799 | @@ -1,5 +1,7 @@ |
800 | import os |
801 | import urllib2 |
802 | +import urlparse |
803 | + |
804 | from charmhelpers.fetch import ( |
805 | BaseFetchHandler, |
806 | UnhandledSource |
807 | @@ -24,6 +26,19 @@ |
808 | def download(self, source, dest): |
809 | # propogate all exceptions |
810 | # URLError, OSError, etc |
811 | + proto, netloc, path, params, query, fragment = urlparse.urlparse(source) |
812 | + if proto in ('http', 'https'): |
813 | + auth, barehost = urllib2.splituser(netloc) |
814 | + if auth is not None: |
815 | + source = urlparse.urlunparse((proto, barehost, path, params, query, fragment)) |
816 | + username, password = urllib2.splitpasswd(auth) |
817 | + passman = urllib2.HTTPPasswordMgrWithDefaultRealm() |
818 | + # Realm is set to None in add_password to force the username and password |
819 | + # to be used whatever the realm |
820 | + passman.add_password(None, source, username, password) |
821 | + authhandler = urllib2.HTTPBasicAuthHandler(passman) |
822 | + opener = urllib2.build_opener(authhandler) |
823 | + urllib2.install_opener(opener) |
824 | response = urllib2.urlopen(source) |
825 | try: |
826 | with open(dest, 'w') as dest_file: |
827 | |
828 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' |
829 | --- hooks/charmhelpers/fetch/bzrurl.py 2013-10-16 09:07:33 +0000 |
830 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-06-19 12:54:16 +0000 |
831 | @@ -12,6 +12,7 @@ |
832 | apt_install("python-bzrlib") |
833 | from bzrlib.branch import Branch |
834 | |
835 | + |
836 | class BzrUrlFetchHandler(BaseFetchHandler): |
837 | """Handler for bazaar branches via generic and lp URLs""" |
838 | def can_handle(self, source): |
839 | @@ -38,7 +39,8 @@ |
840 | def install(self, source): |
841 | url_parts = self.parse_url(source) |
842 | branch_name = url_parts.path.strip("/").split("/")[-1] |
843 | - dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) |
844 | + dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
845 | + branch_name) |
846 | if not os.path.exists(dest_dir): |
847 | mkdir(dest_dir, perms=0755) |
848 | try: |
849 | @@ -46,4 +48,3 @@ |
850 | except OSError as e: |
851 | raise UnhandledSource(e.strerror) |
852 | return dest_dir |
853 | - |
854 | |
855 | === added directory 'hooks/charmhelpers/payload' |
856 | === added file 'hooks/charmhelpers/payload/__init__.py' |
857 | --- hooks/charmhelpers/payload/__init__.py 1970-01-01 00:00:00 +0000 |
858 | +++ hooks/charmhelpers/payload/__init__.py 2014-06-19 12:54:16 +0000 |
859 | @@ -0,0 +1,1 @@ |
860 | +"Tools for working with files injected into a charm just before deployment." |
861 | |
862 | === added file 'hooks/charmhelpers/payload/archive.py' |
863 | --- hooks/charmhelpers/payload/archive.py 1970-01-01 00:00:00 +0000 |
864 | +++ hooks/charmhelpers/payload/archive.py 2014-06-19 12:54:16 +0000 |
865 | @@ -0,0 +1,57 @@ |
866 | +import os |
867 | +import tarfile |
868 | +import zipfile |
869 | +from charmhelpers.core import ( |
870 | + host, |
871 | + hookenv, |
872 | +) |
873 | + |
874 | + |
875 | +class ArchiveError(Exception): |
876 | + pass |
877 | + |
878 | + |
879 | +def get_archive_handler(archive_name): |
880 | + if os.path.isfile(archive_name): |
881 | + if tarfile.is_tarfile(archive_name): |
882 | + return extract_tarfile |
883 | + elif zipfile.is_zipfile(archive_name): |
884 | + return extract_zipfile |
885 | + else: |
886 | + # look at the file name |
887 | + for ext in ('.tar', '.tar.gz', '.tgz', 'tar.bz2', '.tbz2', '.tbz'): |
888 | + if archive_name.endswith(ext): |
889 | + return extract_tarfile |
890 | + for ext in ('.zip', '.jar'): |
891 | + if archive_name.endswith(ext): |
892 | + return extract_zipfile |
893 | + |
894 | + |
895 | +def archive_dest_default(archive_name): |
896 | + archive_file = os.path.basename(archive_name) |
897 | + return os.path.join(hookenv.charm_dir(), "archives", archive_file) |
898 | + |
899 | + |
900 | +def extract(archive_name, destpath=None): |
901 | + handler = get_archive_handler(archive_name) |
902 | + if handler: |
903 | + if not destpath: |
904 | + destpath = archive_dest_default(archive_name) |
905 | + if not os.path.isdir(destpath): |
906 | + host.mkdir(destpath) |
907 | + handler(archive_name, destpath) |
908 | + return destpath |
909 | + else: |
910 | + raise ArchiveError("No handler for archive") |
911 | + |
912 | + |
913 | +def extract_tarfile(archive_name, destpath): |
914 | + "Unpack a tar archive, optionally compressed" |
915 | + archive = tarfile.open(archive_name) |
916 | + archive.extractall(destpath) |
917 | + |
918 | + |
919 | +def extract_zipfile(archive_name, destpath): |
920 | + "Unpack a zip file" |
921 | + archive = zipfile.ZipFile(archive_name) |
922 | + archive.extractall(destpath) |
923 | |
924 | === added file 'hooks/charmhelpers/payload/execd.py' |
925 | --- hooks/charmhelpers/payload/execd.py 1970-01-01 00:00:00 +0000 |
926 | +++ hooks/charmhelpers/payload/execd.py 2014-06-19 12:54:16 +0000 |
927 | @@ -0,0 +1,50 @@ |
928 | +#!/usr/bin/env python |
929 | + |
930 | +import os |
931 | +import sys |
932 | +import subprocess |
933 | +from charmhelpers.core import hookenv |
934 | + |
935 | + |
936 | +def default_execd_dir(): |
937 | + return os.path.join(os.environ['CHARM_DIR'], 'exec.d') |
938 | + |
939 | + |
940 | +def execd_module_paths(execd_dir=None): |
941 | + """Generate a list of full paths to modules within execd_dir.""" |
942 | + if not execd_dir: |
943 | + execd_dir = default_execd_dir() |
944 | + |
945 | + if not os.path.exists(execd_dir): |
946 | + return |
947 | + |
948 | + for subpath in os.listdir(execd_dir): |
949 | + module = os.path.join(execd_dir, subpath) |
950 | + if os.path.isdir(module): |
951 | + yield module |
952 | + |
953 | + |
954 | +def execd_submodule_paths(command, execd_dir=None): |
955 | + """Generate a list of full paths to the specified command within exec_dir. |
956 | + """ |
957 | + for module_path in execd_module_paths(execd_dir): |
958 | + path = os.path.join(module_path, command) |
959 | + if os.access(path, os.X_OK) and os.path.isfile(path): |
960 | + yield path |
961 | + |
962 | + |
963 | +def execd_run(command, execd_dir=None, die_on_error=False, stderr=None): |
964 | + """Run command for each module within execd_dir which defines it.""" |
965 | + for submodule_path in execd_submodule_paths(command, execd_dir): |
966 | + try: |
967 | + subprocess.check_call(submodule_path, shell=True, stderr=stderr) |
968 | + except subprocess.CalledProcessError as e: |
969 | + hookenv.log("Error ({}) running {}. Output: {}".format( |
970 | + e.returncode, e.cmd, e.output)) |
971 | + if die_on_error: |
972 | + sys.exit(e.returncode) |
973 | + |
974 | + |
975 | +def execd_preinstall(execd_dir=None): |
976 | + """Run charm-pre-install for each module within execd_dir.""" |
977 | + execd_run('charm-pre-install', execd_dir=execd_dir) |
978 | |
979 | === modified file 'hooks/hooks.py' |
980 | --- hooks/hooks.py 2013-10-16 09:07:33 +0000 |
981 | +++ hooks/hooks.py 2014-06-19 12:54:16 +0000 |
982 | @@ -14,6 +14,7 @@ |
983 | from charmhelpers.fetch import ( |
984 | apt_update, apt_install, |
985 | ) |
986 | +from charmhelpers.fetch.archiveurl import ArchiveUrlFetchHandler |
987 | import charmhelpers.contrib.network.ovs as ovs |
988 | from utils import get_host_ip |
989 | from nvp import NVPHelper |
990 | @@ -32,7 +33,12 @@ |
991 | def install(): |
992 | archive.install() |
993 | archive.create_archive(ARCHIVE) |
994 | - archive.include_deb(ARCHIVE, 'payload') |
995 | + if config('ovs-tarball-url') is not None: |
996 | + handler = ArchiveUrlFetchHandler() |
997 | + path = handler.install(config('ovs-tarball-url')) |
998 | + archive.include_deb(ARCHIVE, path) |
999 | + else: |
1000 | + archive.include_deb(ARCHIVE, 'payload') |
1001 | archive.configure_local_source(ARCHIVE) |
1002 | apt_update(fatal=True) |
1003 |