Merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers into lp:charms/trusty/ntpmaster

Proposed by Chris Glass
Status: Merged
Merged at revision: 11
Proposed branch: lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers
Merge into: lp:charms/trusty/ntpmaster
Diff against target: 2155 lines (+1453/-133)
12 files modified
.bzrignore (+1/-0)
Makefile (+8/-2)
hooks/charmhelpers/core/fstab.py (+116/-0)
hooks/charmhelpers/core/hookenv.py (+215/-29)
hooks/charmhelpers/core/host.py (+167/-23)
hooks/charmhelpers/core/services/__init__.py (+2/-0)
hooks/charmhelpers/core/services/base.py (+313/-0)
hooks/charmhelpers/core/services/helpers.py (+239/-0)
hooks/charmhelpers/core/templating.py (+51/-0)
hooks/charmhelpers/fetch/__init__.py (+274/-73)
hooks/charmhelpers/fetch/archiveurl.py (+64/-4)
hooks/charmhelpers/fetch/bzrurl.py (+3/-2)
To merge this branch: bzr merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers
Reviewer: David Britton (community)
Status: Approve
Review via email: mp+236071@code.launchpad.net

Description of the change

This branch resyncs charm-helpers to pull in a fix for the apt cache handling: the cache is now built in memory, which avoids the locking race conditions seen when the on-disk cache was used (see https://bugs.launchpad.net/charms/+source/ceph/+bug/1346489 for a charm with the same problem and fix).
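For reference, the relevant helper from the resynced charm-helpers (hooks/charmhelpers/fetch/__init__.py in the diff below) builds the apt cache in memory rather than writing the on-disk cache files. A simplified sketch of that helper, trimmed from the diff:

    import apt_pkg

    def apt_cache(in_memory=True):
        """Build and return an apt cache (in memory by default)."""
        apt_pkg.init()
        if in_memory:
            # Leave the on-disk pkgcache/srcpkgcache files untouched so
            # concurrent apt runs do not race on the cache lock.
            apt_pkg.config.set("Dir::Cache::pkgcache", "")
            apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
        return apt_pkg.Cache()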

David Britton (dpb) wrote:

Looks great, Chris, committed.

review: Approve

Preview Diff

1=== added file '.bzrignore'
2--- .bzrignore 1970-01-01 00:00:00 +0000
3+++ .bzrignore 2014-09-26 08:01:25 +0000
4@@ -0,0 +1,1 @@
5+bin/
6
7=== modified file 'Makefile'
8--- Makefile 2014-08-11 08:11:00 +0000
9+++ Makefile 2014-09-26 08:01:25 +0000
10@@ -1,11 +1,17 @@
11 #!/usr/bin/make
12+PYTHON := /usr/bin/env python
13
14 lint:
15 @flake8 --exclude hooks/charmhelpers hooks
16 @charm proof
17
18-sync:
19- @charm-helper-sync -c charm-helpers-sync.yaml
20+bin/charm_helpers_sync.py:
21+ @mkdir -p bin
22+ @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \
23+ > bin/charm_helpers_sync.py
24+
25+sync: bin/charm_helpers_sync.py
26+ $(PYTHON) bin/charm_helpers_sync.py -c charm-helpers-sync.yaml
27
28 publish: lint
29 bzr push lp:charms/ntpmaster
30
31=== added file 'hooks/charmhelpers/core/fstab.py'
32--- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000
33+++ hooks/charmhelpers/core/fstab.py 2014-09-26 08:01:25 +0000
34@@ -0,0 +1,116 @@
35+#!/usr/bin/env python
36+# -*- coding: utf-8 -*-
37+
38+__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
39+
40+import os
41+
42+
43+class Fstab(file):
44+ """This class extends file in order to implement a file reader/writer
45+ for file `/etc/fstab`
46+ """
47+
48+ class Entry(object):
49+ """Entry class represents a non-comment line on the `/etc/fstab` file
50+ """
51+ def __init__(self, device, mountpoint, filesystem,
52+ options, d=0, p=0):
53+ self.device = device
54+ self.mountpoint = mountpoint
55+ self.filesystem = filesystem
56+
57+ if not options:
58+ options = "defaults"
59+
60+ self.options = options
61+ self.d = d
62+ self.p = p
63+
64+ def __eq__(self, o):
65+ return str(self) == str(o)
66+
67+ def __str__(self):
68+ return "{} {} {} {} {} {}".format(self.device,
69+ self.mountpoint,
70+ self.filesystem,
71+ self.options,
72+ self.d,
73+ self.p)
74+
75+ DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab')
76+
77+ def __init__(self, path=None):
78+ if path:
79+ self._path = path
80+ else:
81+ self._path = self.DEFAULT_PATH
82+ file.__init__(self, self._path, 'r+')
83+
84+ def _hydrate_entry(self, line):
85+ # NOTE: use split with no arguments to split on any
86+ # whitespace including tabs
87+ return Fstab.Entry(*filter(
88+ lambda x: x not in ('', None),
89+ line.strip("\n").split()))
90+
91+ @property
92+ def entries(self):
93+ self.seek(0)
94+ for line in self.readlines():
95+ try:
96+ if not line.startswith("#"):
97+ yield self._hydrate_entry(line)
98+ except ValueError:
99+ pass
100+
101+ def get_entry_by_attr(self, attr, value):
102+ for entry in self.entries:
103+ e_attr = getattr(entry, attr)
104+ if e_attr == value:
105+ return entry
106+ return None
107+
108+ def add_entry(self, entry):
109+ if self.get_entry_by_attr('device', entry.device):
110+ return False
111+
112+ self.write(str(entry) + '\n')
113+ self.truncate()
114+ return entry
115+
116+ def remove_entry(self, entry):
117+ self.seek(0)
118+
119+ lines = self.readlines()
120+
121+ found = False
122+ for index, line in enumerate(lines):
123+ if not line.startswith("#"):
124+ if self._hydrate_entry(line) == entry:
125+ found = True
126+ break
127+
128+ if not found:
129+ return False
130+
131+ lines.remove(line)
132+
133+ self.seek(0)
134+ self.write(''.join(lines))
135+ self.truncate()
136+ return True
137+
138+ @classmethod
139+ def remove_by_mountpoint(cls, mountpoint, path=None):
140+ fstab = cls(path=path)
141+ entry = fstab.get_entry_by_attr('mountpoint', mountpoint)
142+ if entry:
143+ return fstab.remove_entry(entry)
144+ return False
145+
146+ @classmethod
147+ def add(cls, device, mountpoint, filesystem, options=None, path=None):
148+ return cls(path=path).add_entry(Fstab.Entry(device,
149+ mountpoint, filesystem,
150+ options=options))
151
152=== modified file 'hooks/charmhelpers/core/hookenv.py'
153--- hooks/charmhelpers/core/hookenv.py 2013-08-29 18:39:36 +0000
154+++ hooks/charmhelpers/core/hookenv.py 2014-09-26 08:01:25 +0000
155@@ -8,7 +8,9 @@
156 import json
157 import yaml
158 import subprocess
159+import sys
160 import UserDict
161+from subprocess import CalledProcessError
162
163 CRITICAL = "CRITICAL"
164 ERROR = "ERROR"
165@@ -21,9 +23,9 @@
166
167
168 def cached(func):
169- ''' Cache return values for multiple executions of func + args
170+ """Cache return values for multiple executions of func + args
171
172- For example:
173+ For example::
174
175 @cached
176 def unit_get(attribute):
177@@ -32,7 +34,7 @@
178 unit_get('test')
179
180 will cache the result of unit_get + 'test' for future calls.
181- '''
182+ """
183 def wrapper(*args, **kwargs):
184 global cache
185 key = str((func, args, kwargs))
186@@ -46,8 +48,8 @@
187
188
189 def flush(key):
190- ''' Flushes any entries from function cache where the
191- key is found in the function+args '''
192+ """Flushes any entries from function cache where the
193+ key is found in the function+args """
194 flush_list = []
195 for item in cache:
196 if key in item:
197@@ -57,7 +59,7 @@
198
199
200 def log(message, level=None):
201- "Write a message to the juju log"
202+ """Write a message to the juju log"""
203 command = ['juju-log']
204 if level:
205 command += ['-l', level]
206@@ -66,7 +68,7 @@
207
208
209 class Serializable(UserDict.IterableUserDict):
210- "Wrapper, an object that can be serialized to yaml or json"
211+ """Wrapper, an object that can be serialized to yaml or json"""
212
213 def __init__(self, obj):
214 # wrap the object
215@@ -96,11 +98,11 @@
216 self.data = state
217
218 def json(self):
219- "Serialize the object to json"
220+ """Serialize the object to json"""
221 return json.dumps(self.data)
222
223 def yaml(self):
224- "Serialize the object to yaml"
225+ """Serialize the object to yaml"""
226 return yaml.dump(self.data)
227
228
229@@ -119,50 +121,174 @@
230
231
232 def in_relation_hook():
233- "Determine whether we're running in a relation hook"
234+ """Determine whether we're running in a relation hook"""
235 return 'JUJU_RELATION' in os.environ
236
237
238 def relation_type():
239- "The scope for the current relation hook"
240+ """The scope for the current relation hook"""
241 return os.environ.get('JUJU_RELATION', None)
242
243
244 def relation_id():
245- "The relation ID for the current relation hook"
246+ """The relation ID for the current relation hook"""
247 return os.environ.get('JUJU_RELATION_ID', None)
248
249
250 def local_unit():
251- "Local unit ID"
252+ """Local unit ID"""
253 return os.environ['JUJU_UNIT_NAME']
254
255
256 def remote_unit():
257- "The remote unit for the current relation hook"
258+ """The remote unit for the current relation hook"""
259 return os.environ['JUJU_REMOTE_UNIT']
260
261
262 def service_name():
263- "The name service group this unit belongs to"
264+ """The name service group this unit belongs to"""
265 return local_unit().split('/')[0]
266
267
268+def hook_name():
269+ """The name of the currently executing hook"""
270+ return os.path.basename(sys.argv[0])
271+
272+
273+class Config(dict):
274+ """A dictionary representation of the charm's config.yaml, with some
275+ extra features:
276+
277+ - See which values in the dictionary have changed since the previous hook.
278+ - For values that have changed, see what the previous value was.
279+ - Store arbitrary data for use in a later hook.
280+
281+ NOTE: Do not instantiate this object directly - instead call
282+ ``hookenv.config()``, which will return an instance of :class:`Config`.
283+
284+ Example usage::
285+
286+ >>> # inside a hook
287+ >>> from charmhelpers.core import hookenv
288+ >>> config = hookenv.config()
289+ >>> config['foo']
290+ 'bar'
291+ >>> # store a new key/value for later use
292+ >>> config['mykey'] = 'myval'
293+
294+
295+ >>> # user runs `juju set mycharm foo=baz`
296+ >>> # now we're inside subsequent config-changed hook
297+ >>> config = hookenv.config()
298+ >>> config['foo']
299+ 'baz'
300+ >>> # test to see if this val has changed since last hook
301+ >>> config.changed('foo')
302+ True
303+ >>> # what was the previous value?
304+ >>> config.previous('foo')
305+ 'bar'
306+ >>> # keys/values that we add are preserved across hooks
307+ >>> config['mykey']
308+ 'myval'
309+
310+ """
311+ CONFIG_FILE_NAME = '.juju-persistent-config'
312+
313+ def __init__(self, *args, **kw):
314+ super(Config, self).__init__(*args, **kw)
315+ self.implicit_save = True
316+ self._prev_dict = None
317+ self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
318+ if os.path.exists(self.path):
319+ self.load_previous()
320+
321+ def __getitem__(self, key):
322+ """For regular dict lookups, check the current juju config first,
323+ then the previous (saved) copy. This ensures that user-saved values
324+ will be returned by a dict lookup.
325+
326+ """
327+ try:
328+ return dict.__getitem__(self, key)
329+ except KeyError:
330+ return (self._prev_dict or {})[key]
331+
332+ def load_previous(self, path=None):
333+ """Load previous copy of config from disk.
334+
335+ In normal usage you don't need to call this method directly - it
336+ is called automatically at object initialization.
337+
338+ :param path:
339+
340+ File path from which to load the previous config. If `None`,
341+ config is loaded from the default location. If `path` is
342+ specified, subsequent `save()` calls will write to the same
343+ path.
344+
345+ """
346+ self.path = path or self.path
347+ with open(self.path) as f:
348+ self._prev_dict = json.load(f)
349+
350+ def changed(self, key):
351+ """Return True if the current value for this key is different from
352+ the previous value.
353+
354+ """
355+ if self._prev_dict is None:
356+ return True
357+ return self.previous(key) != self.get(key)
358+
359+ def previous(self, key):
360+ """Return previous value for this key, or None if there
361+ is no previous value.
362+
363+ """
364+ if self._prev_dict:
365+ return self._prev_dict.get(key)
366+ return None
367+
368+ def save(self):
369+ """Save this config to disk.
370+
371+ If the charm is using the :mod:`Services Framework <services.base>`
372+ or :meth:'@hook <Hooks.hook>' decorator, this
373+ is called automatically at the end of successful hook execution.
374+ Otherwise, it should be called directly by user code.
375+
376+ To disable automatic saves, set ``implicit_save=False`` on this
377+ instance.
378+
379+ """
380+ if self._prev_dict:
381+ for k, v in self._prev_dict.iteritems():
382+ if k not in self:
383+ self[k] = v
384+ with open(self.path, 'w') as f:
385+ json.dump(self, f)
386+
387+
388 @cached
389 def config(scope=None):
390- "Juju charm configuration"
391+ """Juju charm configuration"""
392 config_cmd_line = ['config-get']
393 if scope is not None:
394 config_cmd_line.append(scope)
395 config_cmd_line.append('--format=json')
396 try:
397- return json.loads(subprocess.check_output(config_cmd_line))
398+ config_data = json.loads(subprocess.check_output(config_cmd_line))
399+ if scope is not None:
400+ return config_data
401+ return Config(config_data)
402 except ValueError:
403 return None
404
405
406 @cached
407 def relation_get(attribute=None, unit=None, rid=None):
408+ """Get relation information"""
409 _args = ['relation-get', '--format=json']
410 if rid:
411 _args.append('-r')
412@@ -174,9 +300,15 @@
413 return json.loads(subprocess.check_output(_args))
414 except ValueError:
415 return None
416-
417-
418-def relation_set(relation_id=None, relation_settings={}, **kwargs):
419+ except CalledProcessError, e:
420+ if e.returncode == 2:
421+ return None
422+ raise
423+
424+
425+def relation_set(relation_id=None, relation_settings=None, **kwargs):
426+ """Set relation information for the current unit"""
427+ relation_settings = relation_settings if relation_settings else {}
428 relation_cmd_line = ['relation-set']
429 if relation_id is not None:
430 relation_cmd_line.extend(('-r', relation_id))
431@@ -192,7 +324,7 @@
432
433 @cached
434 def relation_ids(reltype=None):
435- "A list of relation_ids"
436+ """A list of relation_ids"""
437 reltype = reltype or relation_type()
438 relid_cmd_line = ['relation-ids', '--format=json']
439 if reltype is not None:
440@@ -203,7 +335,7 @@
441
442 @cached
443 def related_units(relid=None):
444- "A list of related units"
445+ """A list of related units"""
446 relid = relid or relation_id()
447 units_cmd_line = ['relation-list', '--format=json']
448 if relid is not None:
449@@ -213,7 +345,7 @@
450
451 @cached
452 def relation_for_unit(unit=None, rid=None):
453- "Get the json represenation of a unit's relation"
454+ """Get the json represenation of a unit's relation"""
455 unit = unit or remote_unit()
456 relation = relation_get(unit=unit, rid=rid)
457 for key in relation:
458@@ -225,7 +357,7 @@
459
460 @cached
461 def relations_for_id(relid=None):
462- "Get relations of a specific relation ID"
463+ """Get relations of a specific relation ID"""
464 relation_data = []
465 relid = relid or relation_ids()
466 for unit in related_units(relid):
467@@ -237,7 +369,7 @@
468
469 @cached
470 def relations_of_type(reltype=None):
471- "Get relations of a specific type"
472+ """Get relations of a specific type"""
473 relation_data = []
474 reltype = reltype or relation_type()
475 for relid in relation_ids(reltype):
476@@ -249,7 +381,7 @@
477
478 @cached
479 def relation_types():
480- "Get a list of relation types supported by this charm"
481+ """Get a list of relation types supported by this charm"""
482 charmdir = os.environ.get('CHARM_DIR', '')
483 mdf = open(os.path.join(charmdir, 'metadata.yaml'))
484 md = yaml.safe_load(mdf)
485@@ -264,6 +396,7 @@
486
487 @cached
488 def relations():
489+ """Get a nested dictionary of relation data for all related units"""
490 rels = {}
491 for reltype in relation_types():
492 relids = {}
493@@ -277,15 +410,35 @@
494 return rels
495
496
497+@cached
498+def is_relation_made(relation, keys='private-address'):
499+ '''
500+ Determine whether a relation is established by checking for
501+ presence of key(s). If a list of keys is provided, they
502+ must all be present for the relation to be identified as made
503+ '''
504+ if isinstance(keys, str):
505+ keys = [keys]
506+ for r_id in relation_ids(relation):
507+ for unit in related_units(r_id):
508+ context = {}
509+ for k in keys:
510+ context[k] = relation_get(k, rid=r_id,
511+ unit=unit)
512+ if None not in context.values():
513+ return True
514+ return False
515+
516+
517 def open_port(port, protocol="TCP"):
518- "Open a service network port"
519+ """Open a service network port"""
520 _args = ['open-port']
521 _args.append('{}/{}'.format(port, protocol))
522 subprocess.check_call(_args)
523
524
525 def close_port(port, protocol="TCP"):
526- "Close a service network port"
527+ """Close a service network port"""
528 _args = ['close-port']
529 _args.append('{}/{}'.format(port, protocol))
530 subprocess.check_call(_args)
531@@ -293,6 +446,7 @@
532
533 @cached
534 def unit_get(attribute):
535+ """Get the unit ID for the remote unit"""
536 _args = ['unit-get', '--format=json', attribute]
537 try:
538 return json.loads(subprocess.check_output(_args))
539@@ -301,29 +455,60 @@
540
541
542 def unit_private_ip():
543+ """Get this unit's private IP address"""
544 return unit_get('private-address')
545
546
547 class UnregisteredHookError(Exception):
548+ """Raised when an undefined hook is called"""
549 pass
550
551
552 class Hooks(object):
553- def __init__(self):
554+ """A convenient handler for hook functions.
555+
556+ Example::
557+
558+ hooks = Hooks()
559+
560+ # register a hook, taking its name from the function name
561+ @hooks.hook()
562+ def install():
563+ pass # your code here
564+
565+ # register a hook, providing a custom hook name
566+ @hooks.hook("config-changed")
567+ def config_changed():
568+ pass # your code here
569+
570+ if __name__ == "__main__":
571+ # execute a hook based on the name the program is called by
572+ hooks.execute(sys.argv)
573+ """
574+
575+ def __init__(self, config_save=True):
576 super(Hooks, self).__init__()
577 self._hooks = {}
578+ self._config_save = config_save
579
580 def register(self, name, function):
581+ """Register a hook"""
582 self._hooks[name] = function
583
584 def execute(self, args):
585+ """Execute a registered hook based on args[0]"""
586 hook_name = os.path.basename(args[0])
587 if hook_name in self._hooks:
588 self._hooks[hook_name]()
589+ if self._config_save:
590+ cfg = config()
591+ if cfg.implicit_save:
592+ cfg.save()
593 else:
594 raise UnregisteredHookError(hook_name)
595
596 def hook(self, *hook_names):
597+ """Decorator, registering them as hooks"""
598 def wrapper(decorated):
599 for hook_name in hook_names:
600 self.register(hook_name, decorated)
601@@ -337,4 +522,5 @@
602
603
604 def charm_dir():
605+ """Return the root directory of the current charm"""
606 return os.environ.get('CHARM_DIR')
607
608=== modified file 'hooks/charmhelpers/core/host.py'
609--- hooks/charmhelpers/core/host.py 2013-08-29 18:39:36 +0000
610+++ hooks/charmhelpers/core/host.py 2014-09-26 08:01:25 +0000
611@@ -12,25 +12,33 @@
612 import string
613 import subprocess
614 import hashlib
615+import shutil
616+from contextlib import contextmanager
617
618 from collections import OrderedDict
619
620 from hookenv import log
621+from fstab import Fstab
622
623
624 def service_start(service_name):
625+ """Start a system service"""
626 return service('start', service_name)
627
628
629 def service_stop(service_name):
630+ """Stop a system service"""
631 return service('stop', service_name)
632
633
634 def service_restart(service_name):
635+ """Restart a system service"""
636 return service('restart', service_name)
637
638
639 def service_reload(service_name, restart_on_failure=False):
640+ """Reload a system service, optionally falling back to restart if
641+ reload fails"""
642 service_result = service('reload', service_name)
643 if not service_result and restart_on_failure:
644 service_result = service('restart', service_name)
645@@ -38,13 +46,15 @@
646
647
648 def service(action, service_name):
649+ """Control a system service"""
650 cmd = ['service', service_name, action]
651 return subprocess.call(cmd) == 0
652
653
654 def service_running(service):
655+ """Determine whether a system service is running"""
656 try:
657- output = subprocess.check_output(['service', service, 'status'])
658+ output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT)
659 except subprocess.CalledProcessError:
660 return False
661 else:
662@@ -54,8 +64,18 @@
663 return False
664
665
666+def service_available(service_name):
667+ """Determine whether a system service is available"""
668+ try:
669+ subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT)
670+ except subprocess.CalledProcessError as e:
671+ return 'unrecognized service' not in e.output
672+ else:
673+ return True
674+
675+
676 def adduser(username, password=None, shell='/bin/bash', system_user=False):
677- """Add a user"""
678+ """Add a user to the system"""
679 try:
680 user_info = pwd.getpwnam(username)
681 log('user {0} already exists!'.format(username))
682@@ -137,8 +157,20 @@
683 target.write(content)
684
685
686-def mount(device, mountpoint, options=None, persist=False):
687- '''Mount a filesystem'''
688+def fstab_remove(mp):
689+ """Remove the given mountpoint entry from /etc/fstab
690+ """
691+ return Fstab.remove_by_mountpoint(mp)
692+
693+
694+def fstab_add(dev, mp, fs, options=None):
695+ """Adds the given device entry to the /etc/fstab file
696+ """
697+ return Fstab.add(dev, mp, fs, options=options)
698+
699+
700+def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
701+ """Mount a filesystem at a particular mountpoint"""
702 cmd_args = ['mount']
703 if options is not None:
704 cmd_args.extend(['-o', options])
705@@ -148,28 +180,28 @@
706 except subprocess.CalledProcessError, e:
707 log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
708 return False
709+
710 if persist:
711- # TODO: update fstab
712- pass
713+ return fstab_add(device, mountpoint, filesystem, options=options)
714 return True
715
716
717 def umount(mountpoint, persist=False):
718- '''Unmount a filesystem'''
719+ """Unmount a filesystem"""
720 cmd_args = ['umount', mountpoint]
721 try:
722 subprocess.check_output(cmd_args)
723 except subprocess.CalledProcessError, e:
724 log('Error unmounting {}\n{}'.format(mountpoint, e.output))
725 return False
726+
727 if persist:
728- # TODO: update fstab
729- pass
730+ return fstab_remove(mountpoint)
731 return True
732
733
734 def mounts():
735- '''List of all mounted volumes as [[mountpoint,device],[...]]'''
736+ """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
737 with open('/proc/mounts') as f:
738 # [['/mount/point','/dev/path'],[...]]
739 system_mounts = [m[1::-1] for m in [l.strip().split()
740@@ -177,10 +209,15 @@
741 return system_mounts
742
743
744-def file_hash(path):
745- ''' Generate a md5 hash of the contents of 'path' or None if not found '''
746+def file_hash(path, hash_type='md5'):
747+ """
748+ Generate a hash checksum of the contents of 'path' or None if not found.
749+
750+ :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`,
751+ such as md5, sha1, sha256, sha512, etc.
752+ """
753 if os.path.exists(path):
754- h = hashlib.md5()
755+ h = getattr(hashlib, hash_type)()
756 with open(path, 'r') as source:
757 h.update(source.read()) # IGNORE:E1101 - it does have update
758 return h.hexdigest()
759@@ -188,21 +225,41 @@
760 return None
761
762
763-def restart_on_change(restart_map):
764- ''' Restart services based on configuration files changing
765-
766- This function is used a decorator, for example
767+def check_hash(path, checksum, hash_type='md5'):
768+ """
769+ Validate a file using a cryptographic checksum.
770+
771+ :param str checksum: Value of the checksum used to validate the file.
772+ :param str hash_type: Hash algorithm used to generate `checksum`.
773+ Can be any hash alrgorithm supported by :mod:`hashlib`,
774+ such as md5, sha1, sha256, sha512, etc.
775+ :raises ChecksumError: If the file fails the checksum
776+
777+ """
778+ actual_checksum = file_hash(path, hash_type)
779+ if checksum != actual_checksum:
780+ raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum))
781+
782+
783+class ChecksumError(ValueError):
784+ pass
785+
786+
787+def restart_on_change(restart_map, stopstart=False):
788+ """Restart services based on configuration files changing
789+
790+ This function is used a decorator, for example::
791
792 @restart_on_change({
793 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
794 })
795 def ceph_client_changed():
796- ...
797+ pass # your code here
798
799 In this example, the cinder-api and cinder-volume services
800 would be restarted if /etc/ceph/ceph.conf is changed by the
801 ceph_client_changed function.
802- '''
803+ """
804 def wrap(f):
805 def wrapped_f(*args):
806 checksums = {}
807@@ -213,14 +270,20 @@
808 for path in restart_map:
809 if checksums[path] != file_hash(path):
810 restarts += restart_map[path]
811- for service_name in list(OrderedDict.fromkeys(restarts)):
812- service('restart', service_name)
813+ services_list = list(OrderedDict.fromkeys(restarts))
814+ if not stopstart:
815+ for service_name in services_list:
816+ service('restart', service_name)
817+ else:
818+ for action in ['stop', 'start']:
819+ for service_name in services_list:
820+ service(action, service_name)
821 return wrapped_f
822 return wrap
823
824
825 def lsb_release():
826- '''Return /etc/lsb-release in a dict'''
827+ """Return /etc/lsb-release in a dict"""
828 d = {}
829 with open('/etc/lsb-release', 'r') as lsb:
830 for l in lsb:
831@@ -230,7 +293,7 @@
832
833
834 def pwgen(length=None):
835- '''Generate a random pasword.'''
836+ """Generate a random pasword."""
837 if length is None:
838 length = random.choice(range(35, 45))
839 alphanumeric_chars = [
840@@ -239,3 +302,84 @@
841 random_chars = [
842 random.choice(alphanumeric_chars) for _ in range(length)]
843 return(''.join(random_chars))
844+
845+
846+def list_nics(nic_type):
847+ '''Return a list of nics of given type(s)'''
848+ if isinstance(nic_type, basestring):
849+ int_types = [nic_type]
850+ else:
851+ int_types = nic_type
852+ interfaces = []
853+ for int_type in int_types:
854+ cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
855+ ip_output = subprocess.check_output(cmd).split('\n')
856+ ip_output = (line for line in ip_output if line)
857+ for line in ip_output:
858+ if line.split()[1].startswith(int_type):
859+ interfaces.append(line.split()[1].replace(":", ""))
860+ return interfaces
861+
862+
863+def set_nic_mtu(nic, mtu):
864+ '''Set MTU on a network interface'''
865+ cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
866+ subprocess.check_call(cmd)
867+
868+
869+def get_nic_mtu(nic):
870+ cmd = ['ip', 'addr', 'show', nic]
871+ ip_output = subprocess.check_output(cmd).split('\n')
872+ mtu = ""
873+ for line in ip_output:
874+ words = line.split()
875+ if 'mtu' in words:
876+ mtu = words[words.index("mtu") + 1]
877+ return mtu
878+
879+
880+def get_nic_hwaddr(nic):
881+ cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
882+ ip_output = subprocess.check_output(cmd)
883+ hwaddr = ""
884+ words = ip_output.split()
885+ if 'link/ether' in words:
886+ hwaddr = words[words.index('link/ether') + 1]
887+ return hwaddr
888+
889+
890+def cmp_pkgrevno(package, revno, pkgcache=None):
891+ '''Compare supplied revno with the revno of the installed package
892+
893+ * 1 => Installed revno is greater than supplied arg
894+ * 0 => Installed revno is the same as supplied arg
895+ * -1 => Installed revno is less than supplied arg
896+
897+ '''
898+ import apt_pkg
899+ from charmhelpers.fetch import apt_cache
900+ if not pkgcache:
901+ pkgcache = apt_cache()
902+ pkg = pkgcache[package]
903+ return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
904+
905+
906+@contextmanager
907+def chdir(d):
908+ cur = os.getcwd()
909+ try:
910+ yield os.chdir(d)
911+ finally:
912+ os.chdir(cur)
913+
914+
915+def chownr(path, owner, group):
916+ uid = pwd.getpwnam(owner).pw_uid
917+ gid = grp.getgrnam(group).gr_gid
918+
919+ for root, dirs, files in os.walk(path):
920+ for name in dirs + files:
921+ full = os.path.join(root, name)
922+ broken_symlink = os.path.lexists(full) and not os.path.exists(full)
923+ if not broken_symlink:
924+ os.chown(full, uid, gid)
925
926=== added directory 'hooks/charmhelpers/core/services'
927=== added file 'hooks/charmhelpers/core/services/__init__.py'
928--- hooks/charmhelpers/core/services/__init__.py 1970-01-01 00:00:00 +0000
929+++ hooks/charmhelpers/core/services/__init__.py 2014-09-26 08:01:25 +0000
930@@ -0,0 +1,2 @@
931+from .base import *
932+from .helpers import *
933
934=== added file 'hooks/charmhelpers/core/services/base.py'
935--- hooks/charmhelpers/core/services/base.py 1970-01-01 00:00:00 +0000
936+++ hooks/charmhelpers/core/services/base.py 2014-09-26 08:01:25 +0000
937@@ -0,0 +1,313 @@
938+import os
939+import re
940+import json
941+from collections import Iterable
942+
943+from charmhelpers.core import host
944+from charmhelpers.core import hookenv
945+
946+
947+__all__ = ['ServiceManager', 'ManagerCallback',
948+ 'PortManagerCallback', 'open_ports', 'close_ports', 'manage_ports',
949+ 'service_restart', 'service_stop']
950+
951+
952+class ServiceManager(object):
953+ def __init__(self, services=None):
954+ """
955+ Register a list of services, given their definitions.
956+
957+ Service definitions are dicts in the following formats (all keys except
958+ 'service' are optional)::
959+
960+ {
961+ "service": <service name>,
962+ "required_data": <list of required data contexts>,
963+ "provided_data": <list of provided data contexts>,
964+ "data_ready": <one or more callbacks>,
965+ "data_lost": <one or more callbacks>,
966+ "start": <one or more callbacks>,
967+ "stop": <one or more callbacks>,
968+ "ports": <list of ports to manage>,
969+ }
970+
971+ The 'required_data' list should contain dicts of required data (or
972+ dependency managers that act like dicts and know how to collect the data).
973+ Only when all items in the 'required_data' list are populated are the list
974+ of 'data_ready' and 'start' callbacks executed. See `is_ready()` for more
975+ information.
976+
977+ The 'provided_data' list should contain relation data providers, most likely
978+ a subclass of :class:`charmhelpers.core.services.helpers.RelationContext`,
979+ that will indicate a set of data to set on a given relation.
980+
981+ The 'data_ready' value should be either a single callback, or a list of
982+ callbacks, to be called when all items in 'required_data' pass `is_ready()`.
983+ Each callback will be called with the service name as the only parameter.
984+ After all of the 'data_ready' callbacks are called, the 'start' callbacks
985+ are fired.
986+
987+ The 'data_lost' value should be either a single callback, or a list of
988+ callbacks, to be called when a 'required_data' item no longer passes
989+ `is_ready()`. Each callback will be called with the service name as the
990+ only parameter. After all of the 'data_lost' callbacks are called,
991+ the 'stop' callbacks are fired.
992+
993+ The 'start' value should be either a single callback, or a list of
994+ callbacks, to be called when starting the service, after the 'data_ready'
995+ callbacks are complete. Each callback will be called with the service
996+ name as the only parameter. This defaults to
997+ `[host.service_start, services.open_ports]`.
998+
999+ The 'stop' value should be either a single callback, or a list of
1000+ callbacks, to be called when stopping the service. If the service is
1001+ being stopped because it no longer has all of its 'required_data', this
1002+ will be called after all of the 'data_lost' callbacks are complete.
1003+ Each callback will be called with the service name as the only parameter.
1004+ This defaults to `[services.close_ports, host.service_stop]`.
1005+
1006+ The 'ports' value should be a list of ports to manage. The default
1007+ 'start' handler will open the ports after the service is started,
1008+ and the default 'stop' handler will close the ports prior to stopping
1009+ the service.
1010+
1011+
1012+ Examples:
1013+
1014+ The following registers an Upstart service called bingod that depends on
1015+ a mongodb relation and which runs a custom `db_migrate` function prior to
1016+ restarting the service, and a Runit service called spadesd::
1017+
1018+ manager = services.ServiceManager([
1019+ {
1020+ 'service': 'bingod',
1021+ 'ports': [80, 443],
1022+ 'required_data': [MongoRelation(), config(), {'my': 'data'}],
1023+ 'data_ready': [
1024+ services.template(source='bingod.conf'),
1025+ services.template(source='bingod.ini',
1026+ target='/etc/bingod.ini',
1027+ owner='bingo', perms=0400),
1028+ ],
1029+ },
1030+ {
1031+ 'service': 'spadesd',
1032+ 'data_ready': services.template(source='spadesd_run.j2',
1033+ target='/etc/sv/spadesd/run',
1034+ perms=0555),
1035+ 'start': runit_start,
1036+ 'stop': runit_stop,
1037+ },
1038+ ])
1039+ manager.manage()
1040+ """
1041+ self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json')
1042+ self._ready = None
1043+ self.services = {}
1044+ for service in services or []:
1045+ service_name = service['service']
1046+ self.services[service_name] = service
1047+
1048+ def manage(self):
1049+ """
1050+ Handle the current hook by doing The Right Thing with the registered services.
1051+ """
1052+ hook_name = hookenv.hook_name()
1053+ if hook_name == 'stop':
1054+ self.stop_services()
1055+ else:
1056+ self.provide_data()
1057+ self.reconfigure_services()
1058+ cfg = hookenv.config()
1059+ if cfg.implicit_save:
1060+ cfg.save()
1061+
1062+ def provide_data(self):
1063+ """
1064+ Set the relation data for each provider in the ``provided_data`` list.
1065+
1066+ A provider must have a `name` attribute, which indicates which relation
1067+ to set data on, and a `provide_data()` method, which returns a dict of
1068+ data to set.
1069+ """
1070+ hook_name = hookenv.hook_name()
1071+ for service in self.services.values():
1072+ for provider in service.get('provided_data', []):
1073+ if re.match(r'{}-relation-(joined|changed)'.format(provider.name), hook_name):
1074+ data = provider.provide_data()
1075+ _ready = provider._is_ready(data) if hasattr(provider, '_is_ready') else data
1076+ if _ready:
1077+ hookenv.relation_set(None, data)
1078+
1079+ def reconfigure_services(self, *service_names):
1080+ """
1081+ Update all files for one or more registered services, and,
1082+ if ready, optionally restart them.
1083+
1084+ If no service names are given, reconfigures all registered services.
1085+ """
1086+ for service_name in service_names or self.services.keys():
1087+ if self.is_ready(service_name):
1088+ self.fire_event('data_ready', service_name)
1089+ self.fire_event('start', service_name, default=[
1090+ service_restart,
1091+ manage_ports])
1092+ self.save_ready(service_name)
1093+ else:
1094+ if self.was_ready(service_name):
1095+ self.fire_event('data_lost', service_name)
1096+ self.fire_event('stop', service_name, default=[
1097+ manage_ports,
1098+ service_stop])
1099+ self.save_lost(service_name)
1100+
1101+ def stop_services(self, *service_names):
1102+ """
1103+ Stop one or more registered services, by name.
1104+
1105+ If no service names are given, stops all registered services.
1106+ """
1107+ for service_name in service_names or self.services.keys():
1108+ self.fire_event('stop', service_name, default=[
1109+ manage_ports,
1110+ service_stop])
1111+
1112+ def get_service(self, service_name):
1113+ """
1114+ Given the name of a registered service, return its service definition.
1115+ """
1116+ service = self.services.get(service_name)
1117+ if not service:
1118+ raise KeyError('Service not registered: %s' % service_name)
1119+ return service
1120+
1121+ def fire_event(self, event_name, service_name, default=None):
1122+ """
1123+ Fire a data_ready, data_lost, start, or stop event on a given service.
1124+ """
1125+ service = self.get_service(service_name)
1126+ callbacks = service.get(event_name, default)
1127+ if not callbacks:
1128+ return
1129+ if not isinstance(callbacks, Iterable):
1130+ callbacks = [callbacks]
1131+ for callback in callbacks:
1132+ if isinstance(callback, ManagerCallback):
1133+ callback(self, service_name, event_name)
1134+ else:
1135+ callback(service_name)
1136+
1137+ def is_ready(self, service_name):
1138+ """
1139+ Determine if a registered service is ready, by checking its 'required_data'.
1140+
1141+ A 'required_data' item can be any mapping type, and is considered ready
1142+ if `bool(item)` evaluates as True.
1143+ """
1144+ service = self.get_service(service_name)
1145+ reqs = service.get('required_data', [])
1146+ return all(bool(req) for req in reqs)
1147+
1148+ def _load_ready_file(self):
1149+ if self._ready is not None:
1150+ return
1151+ if os.path.exists(self._ready_file):
1152+ with open(self._ready_file) as fp:
1153+ self._ready = set(json.load(fp))
1154+ else:
1155+ self._ready = set()
1156+
1157+ def _save_ready_file(self):
1158+ if self._ready is None:
1159+ return
1160+ with open(self._ready_file, 'w') as fp:
1161+ json.dump(list(self._ready), fp)
1162+
1163+ def save_ready(self, service_name):
1164+ """
1165+ Save an indicator that the given service is now data_ready.
1166+ """
1167+ self._load_ready_file()
1168+ self._ready.add(service_name)
1169+ self._save_ready_file()
1170+
1171+ def save_lost(self, service_name):
1172+ """
1173+ Save an indicator that the given service is no longer data_ready.
1174+ """
1175+ self._load_ready_file()
1176+ self._ready.discard(service_name)
1177+ self._save_ready_file()
1178+
1179+ def was_ready(self, service_name):
1180+ """
1181+ Determine if the given service was previously data_ready.
1182+ """
1183+ self._load_ready_file()
1184+ return service_name in self._ready
1185+
1186+
1187+class ManagerCallback(object):
1188+ """
1189+ Special case of a callback that takes the `ServiceManager` instance
1190+ in addition to the service name.
1191+
1192+ Subclasses should implement `__call__` which should accept three parameters:
1193+
1194+ * `manager` The `ServiceManager` instance
1195+ * `service_name` The name of the service it's being triggered for
1196+ * `event_name` The name of the event that this callback is handling
1197+ """
1198+ def __call__(self, manager, service_name, event_name):
1199+ raise NotImplementedError()
1200+
1201+
1202+class PortManagerCallback(ManagerCallback):
1203+ """
1204+ Callback class that will open or close ports, for use as either
1205+ a start or stop action.
1206+ """
1207+ def __call__(self, manager, service_name, event_name):
1208+ service = manager.get_service(service_name)
1209+ new_ports = service.get('ports', [])
1210+ port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
1211+ if os.path.exists(port_file):
1212+ with open(port_file) as fp:
1213+ old_ports = fp.read().split(',')
1214+ for old_port in old_ports:
1215+ if bool(old_port):
1216+ old_port = int(old_port)
1217+ if old_port not in new_ports:
1218+ hookenv.close_port(old_port)
1219+ with open(port_file, 'w') as fp:
1220+ fp.write(','.join(str(port) for port in new_ports))
1221+ for port in new_ports:
1222+ if event_name == 'start':
1223+ hookenv.open_port(port)
1224+ elif event_name == 'stop':
1225+ hookenv.close_port(port)
1226+
1227+
1228+def service_stop(service_name):
1229+ """
1230+ Wrapper around host.service_stop to prevent spurious "unknown service"
1231+ messages in the logs.
1232+ """
1233+ if host.service_running(service_name):
1234+ host.service_stop(service_name)
1235+
1236+
1237+def service_restart(service_name):
1238+ """
1239+ Wrapper around host.service_restart to prevent spurious "unknown service"
1240+ messages in the logs.
1241+ """
1242+ if host.service_available(service_name):
1243+ if host.service_running(service_name):
1244+ host.service_restart(service_name)
1245+ else:
1246+ host.service_start(service_name)
1247+
1248+
1249+# Convenience aliases
1250+open_ports = close_ports = manage_ports = PortManagerCallback()
1251
1252=== added file 'hooks/charmhelpers/core/services/helpers.py'
1253--- hooks/charmhelpers/core/services/helpers.py 1970-01-01 00:00:00 +0000
1254+++ hooks/charmhelpers/core/services/helpers.py 2014-09-26 08:01:25 +0000
1255@@ -0,0 +1,239 @@
1256+import os
1257+import yaml
1258+from charmhelpers.core import hookenv
1259+from charmhelpers.core import templating
1260+
1261+from charmhelpers.core.services.base import ManagerCallback
1262+
1263+
1264+__all__ = ['RelationContext', 'TemplateCallback',
1265+ 'render_template', 'template']
1266+
1267+
1268+class RelationContext(dict):
1269+ """
1270+ Base class for a context generator that gets relation data from juju.
1271+
1272+ Subclasses must provide the attributes `name`, which is the name of the
1273+ interface of interest, `interface`, which is the type of the interface of
1274+ interest, and `required_keys`, which is the set of keys required for the
1275+ relation to be considered complete. The data for all interfaces matching
1276+ the `name` attribute that are complete will used to populate the dictionary
1277+ values (see `get_data`, below).
1278+
1279+ The generated context will be namespaced under the relation :attr:`name`,
1280+ to prevent potential naming conflicts.
1281+
1282+ :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
1283+ :param list additional_required_keys: Extend the list of :attr:`required_keys`
1284+ """
1285+ name = None
1286+ interface = None
1287+ required_keys = []
1288+
1289+ def __init__(self, name=None, additional_required_keys=None):
1290+ if name is not None:
1291+ self.name = name
1292+ if additional_required_keys is not None:
1293+ self.required_keys.extend(additional_required_keys)
1294+ self.get_data()
1295+
1296+ def __bool__(self):
1297+ """
1298+ Returns True if all of the required_keys are available.
1299+ """
1300+ return self.is_ready()
1301+
1302+ __nonzero__ = __bool__
1303+
1304+ def __repr__(self):
1305+ return super(RelationContext, self).__repr__()
1306+
1307+ def is_ready(self):
1308+ """
1309+ Returns True if all of the `required_keys` are available from any units.
1310+ """
1311+ ready = len(self.get(self.name, [])) > 0
1312+ if not ready:
1313+ hookenv.log('Incomplete relation: {}'.format(self.__class__.__name__), hookenv.DEBUG)
1314+ return ready
1315+
1316+ def _is_ready(self, unit_data):
1317+ """
1318+ Helper method that tests a set of relation data and returns True if
1319+ all of the `required_keys` are present.
1320+ """
1321+ return set(unit_data.keys()).issuperset(set(self.required_keys))
1322+
1323+ def get_data(self):
1324+ """
1325+ Retrieve the relation data for each unit involved in a relation and,
1326+ if complete, store it in a list under `self[self.name]`. This
1327+ is automatically called when the RelationContext is instantiated.
1328+
1329+ The units are sorted lexographically first by the service ID, then by
1330+ the unit ID. Thus, if an interface has two other services, 'db:1'
1331+ and 'db:2', with 'db:1' having two units, 'wordpress/0' and 'wordpress/1',
1332+ and 'db:2' having one unit, 'mediawiki/0', all of which have a complete
1333+ set of data, the relation data for the units will be stored in the
1334+ order: 'wordpress/0', 'wordpress/1', 'mediawiki/0'.
1335+
1336+ If you only care about a single unit on the relation, you can just
1337+ access it as `{{ interface[0]['key'] }}`. However, if you can at all
1338+ support multiple units on a relation, you should iterate over the list,
1339+ like::
1340+
1341+ {% for unit in interface -%}
1342+ {{ unit['key'] }}{% if not loop.last %},{% endif %}
1343+ {%- endfor %}
1344+
1345+ Note that since all sets of relation data from all related services and
1346+ units are in a single list, if you need to know which service or unit a
1347+ set of data came from, you'll need to extend this class to preserve
1348+ that information.
1349+ """
1350+ if not hookenv.relation_ids(self.name):
1351+ return
1352+
1353+ ns = self.setdefault(self.name, [])
1354+ for rid in sorted(hookenv.relation_ids(self.name)):
1355+ for unit in sorted(hookenv.related_units(rid)):
1356+ reldata = hookenv.relation_get(rid=rid, unit=unit)
1357+ if self._is_ready(reldata):
1358+ ns.append(reldata)
1359+
1360+ def provide_data(self):
1361+ """
1362+ Return data to be relation_set for this interface.
1363+ """
1364+ return {}
1365+
1366+
1367+class MysqlRelation(RelationContext):
1368+ """
1369+ Relation context for the `mysql` interface.
1370+
1371+ :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
1372+ :param list additional_required_keys: Extend the list of :attr:`required_keys`
1373+ """
1374+ name = 'db'
1375+ interface = 'mysql'
1376+ required_keys = ['host', 'user', 'password', 'database']
1377+
1378+
1379+class HttpRelation(RelationContext):
1380+ """
1381+ Relation context for the `http` interface.
1382+
1383+ :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
1384+ :param list additional_required_keys: Extend the list of :attr:`required_keys`
1385+ """
1386+ name = 'website'
1387+ interface = 'http'
1388+ required_keys = ['host', 'port']
1389+
1390+ def provide_data(self):
1391+ return {
1392+ 'host': hookenv.unit_get('private-address'),
1393+ 'port': 80,
1394+ }
1395+
1396+
1397+class RequiredConfig(dict):
1398+ """
1399+ Data context that loads config options with one or more mandatory options.
1400+
1401+ Once the required options have been changed from their default values, all
1402+ config options will be available, namespaced under `config` to prevent
1403+ potential naming conflicts (for example, between a config option and a
1404+ relation property).
1405+
1406+ :param list *args: List of options that must be changed from their default values.
1407+ """
1408+
1409+ def __init__(self, *args):
1410+ self.required_options = args
1411+ self['config'] = hookenv.config()
1412+ with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
1413+ self.config = yaml.load(fp).get('options', {})
1414+
1415+ def __bool__(self):
1416+ for option in self.required_options:
1417+ if option not in self['config']:
1418+ return False
1419+ current_value = self['config'][option]
1420+ default_value = self.config[option].get('default')
1421+ if current_value == default_value:
1422+ return False
1423+ if current_value in (None, '') and default_value in (None, ''):
1424+ return False
1425+ return True
1426+
1427+ def __nonzero__(self):
1428+ return self.__bool__()
1429+
1430+
1431+class StoredContext(dict):
1432+ """
1433+ A data context that always returns the data that it was first created with.
1434+
1435+ This is useful to do a one-time generation of things like passwords, that
1436+ will thereafter use the same value that was originally generated, instead
1437+ of generating a new value each time it is run.
1438+ """
1439+ def __init__(self, file_name, config_data):
1440+ """
1441+ If the file exists, populate `self` with the data from the file.
1442+ Otherwise, populate with the given data and persist it to the file.
1443+ """
1444+ if os.path.exists(file_name):
1445+ self.update(self.read_context(file_name))
1446+ else:
1447+ self.store_context(file_name, config_data)
1448+ self.update(config_data)
1449+
1450+ def store_context(self, file_name, config_data):
1451+ if not os.path.isabs(file_name):
1452+ file_name = os.path.join(hookenv.charm_dir(), file_name)
1453+ with open(file_name, 'w') as file_stream:
1454+ os.fchmod(file_stream.fileno(), 0600)
1455+ yaml.dump(config_data, file_stream)
1456+
1457+ def read_context(self, file_name):
1458+ if not os.path.isabs(file_name):
1459+ file_name = os.path.join(hookenv.charm_dir(), file_name)
1460+ with open(file_name, 'r') as file_stream:
1461+ data = yaml.load(file_stream)
1462+ if not data:
1463+ raise OSError("%s is empty" % file_name)
1464+ return data
1465+
1466+
1467+class TemplateCallback(ManagerCallback):
1468+ """
1469+ Callback class that will render a Jinja2 template, for use as a ready action.
1470+
1471+ :param str source: The template source file, relative to `$CHARM_DIR/templates`
1472+ :param str target: The target to write the rendered template to
1473+ :param str owner: The owner of the rendered file
1474+ :param str group: The group of the rendered file
1475+ :param int perms: The permissions of the rendered file
1476+ """
1477+ def __init__(self, source, target, owner='root', group='root', perms=0444):
1478+ self.source = source
1479+ self.target = target
1480+ self.owner = owner
1481+ self.group = group
1482+ self.perms = perms
1483+
1484+ def __call__(self, manager, service_name, event_name):
1485+ service = manager.get_service(service_name)
1486+ context = {}
1487+ for ctx in service.get('required_data', []):
1488+ context.update(ctx)
1489+ templating.render(self.source, self.target, context,
1490+ self.owner, self.group, self.perms)
1491+
1492+
1493+# Convenience aliases for templates
1494+render_template = template = TemplateCallback
1495
1496=== added file 'hooks/charmhelpers/core/templating.py'
1497--- hooks/charmhelpers/core/templating.py 1970-01-01 00:00:00 +0000
1498+++ hooks/charmhelpers/core/templating.py 2014-09-26 08:01:25 +0000
1499@@ -0,0 +1,51 @@
1500+import os
1501+
1502+from charmhelpers.core import host
1503+from charmhelpers.core import hookenv
1504+
1505+
1506+def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None):
1507+ """
1508+ Render a template.
1509+
1510+ The `source` path, if not absolute, is relative to the `templates_dir`.
1511+
1512+ The `target` path should be absolute.
1513+
1514+ The context should be a dict containing the values to be replaced in the
1515+ template.
1516+
1517+ The `owner`, `group`, and `perms` options will be passed to `write_file`.
1518+
1519+ If omitted, `templates_dir` defaults to the `templates` folder in the charm.
1520+
1521+ Note: Using this requires python-jinja2; if it is not installed, calling
1522+ this will attempt to use charmhelpers.fetch.apt_install to install it.
1523+ """
1524+ try:
1525+ from jinja2 import FileSystemLoader, Environment, exceptions
1526+ except ImportError:
1527+ try:
1528+ from charmhelpers.fetch import apt_install
1529+ except ImportError:
1530+ hookenv.log('Could not import jinja2, and could not import '
1531+ 'charmhelpers.fetch to install it',
1532+ level=hookenv.ERROR)
1533+ raise
1534+ apt_install('python-jinja2', fatal=True)
1535+ from jinja2 import FileSystemLoader, Environment, exceptions
1536+
1537+ if templates_dir is None:
1538+ templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
1539+ loader = Environment(loader=FileSystemLoader(templates_dir))
1540+ try:
1541+ source = source
1542+ template = loader.get_template(source)
1543+ except exceptions.TemplateNotFound as e:
1544+ hookenv.log('Could not load template %s from %s.' %
1545+ (source, templates_dir),
1546+ level=hookenv.ERROR)
1547+ raise e
1548+ content = template.render(context)
1549+ host.mkdir(os.path.dirname(target))
1550+ host.write_file(target, content, owner, group, perms)
1551
1552=== modified file 'hooks/charmhelpers/fetch/__init__.py'
1553--- hooks/charmhelpers/fetch/__init__.py 2013-08-29 18:41:54 +0000
1554+++ hooks/charmhelpers/fetch/__init__.py 2014-09-26 08:01:25 +0000
1555@@ -1,4 +1,6 @@
1556 import importlib
1557+from tempfile import NamedTemporaryFile
1558+import time
1559 from yaml import safe_load
1560 from charmhelpers.core.host import (
1561 lsb_release
1562@@ -12,7 +14,8 @@
1563 config,
1564 log,
1565 )
1566-import apt_pkg
1567+import os
1568+
1569
1570 CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
1571 deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
1572@@ -20,12 +23,101 @@
1573 PROPOSED_POCKET = """# Proposed
1574 deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
1575 """
1576+CLOUD_ARCHIVE_POCKETS = {
1577+ # Folsom
1578+ 'folsom': 'precise-updates/folsom',
1579+ 'precise-folsom': 'precise-updates/folsom',
1580+ 'precise-folsom/updates': 'precise-updates/folsom',
1581+ 'precise-updates/folsom': 'precise-updates/folsom',
1582+ 'folsom/proposed': 'precise-proposed/folsom',
1583+ 'precise-folsom/proposed': 'precise-proposed/folsom',
1584+ 'precise-proposed/folsom': 'precise-proposed/folsom',
1585+ # Grizzly
1586+ 'grizzly': 'precise-updates/grizzly',
1587+ 'precise-grizzly': 'precise-updates/grizzly',
1588+ 'precise-grizzly/updates': 'precise-updates/grizzly',
1589+ 'precise-updates/grizzly': 'precise-updates/grizzly',
1590+ 'grizzly/proposed': 'precise-proposed/grizzly',
1591+ 'precise-grizzly/proposed': 'precise-proposed/grizzly',
1592+ 'precise-proposed/grizzly': 'precise-proposed/grizzly',
1593+ # Havana
1594+ 'havana': 'precise-updates/havana',
1595+ 'precise-havana': 'precise-updates/havana',
1596+ 'precise-havana/updates': 'precise-updates/havana',
1597+ 'precise-updates/havana': 'precise-updates/havana',
1598+ 'havana/proposed': 'precise-proposed/havana',
1599+ 'precise-havana/proposed': 'precise-proposed/havana',
1600+ 'precise-proposed/havana': 'precise-proposed/havana',
1601+ # Icehouse
1602+ 'icehouse': 'precise-updates/icehouse',
1603+ 'precise-icehouse': 'precise-updates/icehouse',
1604+ 'precise-icehouse/updates': 'precise-updates/icehouse',
1605+ 'precise-updates/icehouse': 'precise-updates/icehouse',
1606+ 'icehouse/proposed': 'precise-proposed/icehouse',
1607+ 'precise-icehouse/proposed': 'precise-proposed/icehouse',
1608+ 'precise-proposed/icehouse': 'precise-proposed/icehouse',
1609+ # Juno
1610+ 'juno': 'trusty-updates/juno',
1611+ 'trusty-juno': 'trusty-updates/juno',
1612+ 'trusty-juno/updates': 'trusty-updates/juno',
1613+ 'trusty-updates/juno': 'trusty-updates/juno',
1614+ 'juno/proposed': 'trusty-proposed/juno',
1615+ 'juno/proposed': 'trusty-proposed/juno',
1616+ 'trusty-juno/proposed': 'trusty-proposed/juno',
1617+ 'trusty-proposed/juno': 'trusty-proposed/juno',
1618+}
1619+
1620+# The order of this list is very important. Handlers should be listed in from
1621+# least- to most-specific URL matching.
1622+FETCH_HANDLERS = (
1623+ 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
1624+ 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
1625+)
1626+
1627+APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
1628+APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
1629+APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
1630+
1631+
1632+class SourceConfigError(Exception):
1633+ pass
1634+
1635+
1636+class UnhandledSource(Exception):
1637+ pass
1638+
1639+
1640+class AptLockError(Exception):
1641+ pass
1642+
1643+
1644+class BaseFetchHandler(object):
1645+
1646+ """Base class for FetchHandler implementations in fetch plugins"""
1647+
1648+ def can_handle(self, source):
1649+ """Returns True if the source can be handled. Otherwise returns
1650+ a string explaining why it cannot"""
1651+ return "Wrong source type"
1652+
1653+ def install(self, source):
1654+ """Try to download and unpack the source. Return the path to the
1655+ unpacked files or raise UnhandledSource."""
1656+ raise UnhandledSource("Wrong source type {}".format(source))
1657+
1658+ def parse_url(self, url):
1659+ return urlparse(url)
1660+
1661+ def base_url(self, url):
1662+ """Return url without querystring or fragment"""
1663+ parts = list(self.parse_url(url))
1664+ parts[4:] = ['' for i in parts[4:]]
1665+ return urlunparse(parts)
1666
1667
1668 def filter_installed_packages(packages):
1669 """Returns a list of packages that require installation"""
1670- apt_pkg.init()
1671- cache = apt_pkg.Cache()
1672+ cache = apt_cache()
1673 _pkgs = []
1674 for package in packages:
1675 try:
1676@@ -38,10 +130,22 @@
1677 return _pkgs
1678
1679
1680+def apt_cache(in_memory=True):
1681+ """Build and return an apt cache"""
1682+ import apt_pkg
1683+ apt_pkg.init()
1684+ if in_memory:
1685+ apt_pkg.config.set("Dir::Cache::pkgcache", "")
1686+ apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
1687+ return apt_pkg.Cache()
1688+
1689+
1690 def apt_install(packages, options=None, fatal=False):
1691 """Install one or more packages"""
1692- options = options or []
1693- cmd = ['apt-get', '-y']
1694+ if options is None:
1695+ options = ['--option=Dpkg::Options::=--force-confold']
1696+
1697+ cmd = ['apt-get', '--assume-yes']
1698 cmd.extend(options)
1699 cmd.append('install')
1700 if isinstance(packages, basestring):
1701@@ -50,29 +154,50 @@
1702 cmd.extend(packages)
1703 log("Installing {} with options: {}".format(packages,
1704 options))
1705- if fatal:
1706- subprocess.check_call(cmd)
1707+ _run_apt_command(cmd, fatal)
1708+
1709+
1710+def apt_upgrade(options=None, fatal=False, dist=False):
1711+ """Upgrade all packages"""
1712+ if options is None:
1713+ options = ['--option=Dpkg::Options::=--force-confold']
1714+
1715+ cmd = ['apt-get', '--assume-yes']
1716+ cmd.extend(options)
1717+ if dist:
1718+ cmd.append('dist-upgrade')
1719 else:
1720- subprocess.call(cmd)
1721+ cmd.append('upgrade')
1722+ log("Upgrading with options: {}".format(options))
1723+ _run_apt_command(cmd, fatal)
1724
1725
1726 def apt_update(fatal=False):
1727 """Update local apt cache"""
1728 cmd = ['apt-get', 'update']
1729- if fatal:
1730- subprocess.check_call(cmd)
1731- else:
1732- subprocess.call(cmd)
1733+ _run_apt_command(cmd, fatal)
1734
1735
1736 def apt_purge(packages, fatal=False):
1737 """Purge one or more packages"""
1738- cmd = ['apt-get', '-y', 'purge']
1739+ cmd = ['apt-get', '--assume-yes', 'purge']
1740 if isinstance(packages, basestring):
1741 cmd.append(packages)
1742 else:
1743 cmd.extend(packages)
1744 log("Purging {}".format(packages))
1745+ _run_apt_command(cmd, fatal)
1746+
1747+
1748+def apt_hold(packages, fatal=False):
1749+ """Hold one or more packages"""
1750+ cmd = ['apt-mark', 'hold']
1751+ if isinstance(packages, basestring):
1752+ cmd.append(packages)
1753+ else:
1754+ cmd.extend(packages)
1755+ log("Holding {}".format(packages))
1756+
1757 if fatal:
1758 subprocess.check_call(cmd)
1759 else:
1760@@ -80,84 +205,142 @@
1761
1762
1763 def add_source(source, key=None):
1764- if ((source.startswith('ppa:') or
1765- source.startswith('http:'))):
1766+ """Add a package source to this system.
1767+
1768+ @param source: a URL or sources.list entry, as supported by
1769+ add-apt-repository(1). Examples::
1770+
1771+ ppa:charmers/example
1772+ deb https://stub:key@private.example.com/ubuntu trusty main
1773+
1774+ In addition:
1775+ 'proposed:' may be used to enable the standard 'proposed'
1776+ pocket for the release.
1777+ 'cloud:' may be used to activate official cloud archive pockets,
1778+ such as 'cloud:icehouse'
1779+
1780+ @param key: A key to be added to the system's APT keyring and used
1781+ to verify the signatures on packages. Ideally, this should be an
1782+ ASCII format GPG public key including the block headers. A GPG key
1783+ id may also be used, but be aware that only insecure protocols are
1784+ available to retrieve the actual public key from a public keyserver
1785+ placing your Juju environment at risk. ppa and cloud archive keys
1786+ are securely added automatically, so should not be provided.
1787+ """
1788+ if source is None:
1789+ log('Source is not present. Skipping')
1790+ return
1791+
1792+ if (source.startswith('ppa:') or
1793+ source.startswith('http') or
1794+ source.startswith('deb ') or
1795+ source.startswith('cloud-archive:')):
1796 subprocess.check_call(['add-apt-repository', '--yes', source])
1797 elif source.startswith('cloud:'):
1798 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
1799 fatal=True)
1800 pocket = source.split(':')[-1]
1801+ if pocket not in CLOUD_ARCHIVE_POCKETS:
1802+ raise SourceConfigError(
1803+ 'Unsupported cloud: source option %s' %
1804+ pocket)
1805+ actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
1806 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
1807- apt.write(CLOUD_ARCHIVE.format(pocket))
1808+ apt.write(CLOUD_ARCHIVE.format(actual_pocket))
1809 elif source == 'proposed':
1810 release = lsb_release()['DISTRIB_CODENAME']
1811 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
1812 apt.write(PROPOSED_POCKET.format(release))
1813+ else:
1814+ raise SourceConfigError("Unknown source: {!r}".format(source))
1815+
1816 if key:
1817- subprocess.check_call(['apt-key', 'import', key])
1818-
1819-
1820-class SourceConfigError(Exception):
1821- pass
1822+ if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
1823+ with NamedTemporaryFile() as key_file:
1824+ key_file.write(key)
1825+ key_file.flush()
1826+ key_file.seek(0)
1827+ subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
1828+ else:
1829+ # Note that hkp: is in no way a secure protocol. Using a
1830+ # GPG key id is pointless from a security POV unless you
1831+ # absolutely trust your network and DNS.
1832+ subprocess.check_call(['apt-key', 'adv', '--keyserver',
1833+ 'hkp://keyserver.ubuntu.com:80', '--recv',
1834+ key])
1835
1836
1837 def configure_sources(update=False,
1838 sources_var='install_sources',
1839 keys_var='install_keys'):
1840 """
1841- Configure multiple sources from charm configuration
1842+ Configure multiple sources from charm configuration.
1843+
1844+ The lists are encoded as yaml fragments in the configuration.
1845+ The fragment needs to be included as a string. Sources and their
1846+ corresponding keys are of the types supported by add_source().
1847
1848 Example config:
1849- install_sources:
1850+ install_sources: |
1851 - "ppa:foo"
1852 - "http://example.com/repo precise main"
1853- install_keys:
1854+ install_keys: |
1855 - null
1856 - "a1b2c3d4"
1857
1858 Note that 'null' (a.k.a. None) should not be quoted.
1859 """
1860- sources = safe_load(config(sources_var))
1861- keys = safe_load(config(keys_var))
1862- if isinstance(sources, basestring) and isinstance(keys, basestring):
1863- add_source(sources, keys)
1864+ sources = safe_load((config(sources_var) or '').strip()) or []
1865+ keys = safe_load((config(keys_var) or '').strip()) or None
1866+
1867+ if isinstance(sources, basestring):
1868+ sources = [sources]
1869+
1870+ if keys is None:
1871+ for source in sources:
1872+ add_source(source, None)
1873 else:
1874- if not len(sources) == len(keys):
1875- msg = 'Install sources and keys lists are different lengths'
1876- raise SourceConfigError(msg)
1877- for src_num in range(len(sources)):
1878- add_source(sources[src_num], keys[src_num])
1879+ if isinstance(keys, basestring):
1880+ keys = [keys]
1881+
1882+ if len(sources) != len(keys):
1883+ raise SourceConfigError(
1884+ 'Install sources and keys lists are different lengths')
1885+ for source, key in zip(sources, keys):
1886+ add_source(source, key)
1887 if update:
1888 apt_update(fatal=True)
1889
1890-# The order of this list is very important. Handlers should be listed in from
1891-# least- to most-specific URL matching.
1892-FETCH_HANDLERS = (
1893- 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
1894- 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
1895-)
1896-
1897-
1898-class UnhandledSource(Exception):
1899- pass
1900-
1901-
1902-def install_remote(source):
1903+
1904+def install_remote(source, *args, **kwargs):
1905 """
1906 Install a file tree from a remote source
1907
1908 The specified source should be a url of the form:
1909 scheme://[host]/path[#[option=value][&...]]
1910
1911- Schemes supported are based on this modules submodules
1912- Options supported are submodule-specific"""
1913+ Schemes supported are based on this module's submodules.
1914+ Options supported are submodule-specific.
1915+ Additional arguments are passed through to the submodule.
1916+
1917+ For example::
1918+
1919+ dest = install_remote('http://example.com/archive.tgz',
1920+ checksum='deadbeef',
1921+ hash_type='sha1')
1922+
1923+ This will download `archive.tgz`, validate it using SHA1 and, if
1924+ the file is ok, extract it and return the directory in which it
1925+ was extracted. If the checksum fails, it will raise
1926+ :class:`charmhelpers.core.host.ChecksumError`.
1927+ """
1928 # We ONLY check for True here because can_handle may return a string
1929 # explaining why it can't handle a given source.
1930 handlers = [h for h in plugins() if h.can_handle(source) is True]
1931 installed_to = None
1932 for handler in handlers:
1933 try:
1934- installed_to = handler.install(source)
1935+ installed_to = handler.install(source, *args, **kwargs)
1936 except UnhandledSource:
1937 pass
1938 if not installed_to:
1939@@ -171,28 +354,6 @@
1940 return install_remote(source)
1941
1942
1943-class BaseFetchHandler(object):
1944- """Base class for FetchHandler implementations in fetch plugins"""
1945- def can_handle(self, source):
1946- """Returns True if the source can be handled. Otherwise returns
1947- a string explaining why it cannot"""
1948- return "Wrong source type"
1949-
1950- def install(self, source):
1951- """Try to download and unpack the source. Return the path to the
1952- unpacked files or raise UnhandledSource."""
1953- raise UnhandledSource("Wrong source type {}".format(source))
1954-
1955- def parse_url(self, url):
1956- return urlparse(url)
1957-
1958- def base_url(self, url):
1959- """Return url without querystring or fragment"""
1960- parts = list(self.parse_url(url))
1961- parts[4:] = ['' for i in parts[4:]]
1962- return urlunparse(parts)
1963-
1964-
1965 def plugins(fetch_handlers=None):
1966 if not fetch_handlers:
1967 fetch_handlers = FETCH_HANDLERS
1968@@ -200,10 +361,50 @@
1969 for handler_name in fetch_handlers:
1970 package, classname = handler_name.rsplit('.', 1)
1971 try:
1972- handler_class = getattr(importlib.import_module(package), classname)
1973+ handler_class = getattr(
1974+ importlib.import_module(package),
1975+ classname)
1976 plugin_list.append(handler_class())
1977 except (ImportError, AttributeError):
1978 # Skip missing plugins so that they can be omitted from
1979 # installation if desired
1980- log("FetchHandler {} not found, skipping plugin".format(handler_name))
1981+ log("FetchHandler {} not found, skipping plugin".format(
1982+ handler_name))
1983 return plugin_list
1984+
1985+
1986+def _run_apt_command(cmd, fatal=False):
1987+ """
1988+ Run an APT command, checking its exit status and retrying while the apt
1989+ lock cannot be acquired if the fatal flag is set to True.
1990+
1991+ :param cmd: str: The apt command to run.
1992+ :param fatal: bool: Whether the command's exit status should be checked,
1993+ retrying while the apt lock is held by another process.
1994+ """
1995+ env = os.environ.copy()
1996+
1997+ if 'DEBIAN_FRONTEND' not in env:
1998+ env['DEBIAN_FRONTEND'] = 'noninteractive'
1999+
2000+ if fatal:
2001+ retry_count = 0
2002+ result = None
2003+
2004+ # If the command is considered "fatal", we need to retry if the apt
2005+ # lock was not acquired.
2006+
2007+ while result is None or result == APT_NO_LOCK:
2008+ try:
2009+ result = subprocess.check_call(cmd, env=env)
2010+ except subprocess.CalledProcessError, e:
2011+ retry_count = retry_count + 1
2012+ if retry_count > APT_NO_LOCK_RETRY_COUNT:
2013+ raise
2014+ result = e.returncode
2015+ log("Couldn't acquire DPKG lock. Will retry in {} seconds."
2016+ "".format(APT_NO_LOCK_RETRY_DELAY))
2017+ time.sleep(APT_NO_LOCK_RETRY_DELAY)
2018+
2019+ else:
2020+ subprocess.call(cmd, env=env)
2021
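
For reference, a minimal sketch of the in-memory cache behaviour described
above, assuming python-apt is installed and using 'ntp'/'ntpdate' purely as
example package names:

    from charmhelpers.fetch import apt_cache, apt_install, filter_installed_packages

    # apt_cache() assembles the cache entirely in memory (pkgcache and
    # srcpkgcache are disabled), so inspecting package state no longer
    # touches the on-disk cache files implicated in the locking races
    # this resync addresses.
    cache = apt_cache()
    # current_ver is None when the package is known but not installed;
    # an unknown package name raises KeyError.
    ntp_installed = cache['ntp'].current_ver is not None

    # filter_installed_packages() uses the same in-memory cache, so apt-get
    # is only invoked for packages that are actually missing.
    apt_install(filter_installed_packages(['ntp', 'ntpdate']), fatal=True)
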
2022=== modified file 'hooks/charmhelpers/fetch/archiveurl.py'
2023--- hooks/charmhelpers/fetch/archiveurl.py 2013-08-29 18:41:54 +0000
2024+++ hooks/charmhelpers/fetch/archiveurl.py 2014-09-26 08:01:25 +0000
2025@@ -1,5 +1,9 @@
2026 import os
2027 import urllib2
2028+from urllib import urlretrieve
2029+import urlparse
2030+import hashlib
2031+
2032 from charmhelpers.fetch import (
2033 BaseFetchHandler,
2034 UnhandledSource
2035@@ -8,11 +12,19 @@
2036 get_archive_handler,
2037 extract,
2038 )
2039-from charmhelpers.core.host import mkdir
2040+from charmhelpers.core.host import mkdir, check_hash
2041
2042
2043 class ArchiveUrlFetchHandler(BaseFetchHandler):
2044- """Handler for archives via generic URLs"""
2045+ """
2046+ Handler to download archive files from arbitrary URLs.
2047+
2048+ Can fetch from http, https, ftp, and file URLs.
2049+
2050+ Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.
2051+
2052+ Installs the contents of the archive in $CHARM_DIR/fetched/.
2053+ """
2054 def can_handle(self, source):
2055 url_parts = self.parse_url(source)
2056 if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
2057@@ -22,8 +34,27 @@
2058 return False
2059
2060 def download(self, source, dest):
2061+ """
2062+ Download an archive file.
2063+
2064+ :param str source: URL pointing to an archive file.
2065+ :param str dest: Local path location to download archive file to.
2066+ """
2067 # propagate all exceptions
2068 # URLError, OSError, etc
2069+ proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
2070+ if proto in ('http', 'https'):
2071+ auth, barehost = urllib2.splituser(netloc)
2072+ if auth is not None:
2073+ source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
2074+ username, password = urllib2.splitpasswd(auth)
2075+ passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
2076+ # Realm is set to None in add_password to force the username and password
2077+ # to be used regardless of the realm
2078+ passman.add_password(None, source, username, password)
2079+ authhandler = urllib2.HTTPBasicAuthHandler(passman)
2080+ opener = urllib2.build_opener(authhandler)
2081+ urllib2.install_opener(opener)
2082 response = urllib2.urlopen(source)
2083 try:
2084 with open(dest, 'w') as dest_file:
2085@@ -33,7 +64,30 @@
2086 os.unlink(dest)
2087 raise e
2088
2089- def install(self, source):
2090+ # Mandatory file validation via Sha1 or MD5 hashing.
2091+ def download_and_validate(self, url, hashsum, validate="sha1"):
2092+ tempfile, headers = urlretrieve(url)
2093+ check_hash(tempfile, hashsum, validate)
2094+ return tempfile
2095+
2096+ def install(self, source, dest=None, checksum=None, hash_type='sha1'):
2097+ """
2098+ Download and install an archive file, with optional checksum validation.
2099+
2100+ The checksum can also be given on the `source` URL's fragment.
2101+ For example::
2102+
2103+ handler.install('http://example.com/file.tgz#sha1=deadbeef')
2104+
2105+ :param str source: URL pointing to an archive file.
2106+ :param str dest: Local destination path to install to. If not given,
2107+ installs to `$CHARM_DIR/archives/archive_file_name`.
2108+ :param str checksum: If given, validate the archive file after download.
2109+ :param str hash_type: Algorithm used to generate `checksum`.
2110+ Can be any hash algorithm supported by :mod:`hashlib`,
2111+ such as md5, sha1, sha256, sha512, etc.
2112+
2113+ """
2114 url_parts = self.parse_url(source)
2115 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
2116 if not os.path.exists(dest_dir):
2117@@ -45,4 +99,10 @@
2118 raise UnhandledSource(e.reason)
2119 except OSError as e:
2120 raise UnhandledSource(e.strerror)
2121- return extract(dld_file)
2122+ options = urlparse.parse_qs(url_parts.fragment)
2123+ for key, value in options.items():
2124+ if key in hashlib.algorithms:
2125+ check_hash(dld_file, value, key)
2126+ if checksum:
2127+ check_hash(dld_file, checksum, hash_type)
2128+ return extract(dld_file, dest)
2129
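
For reference, a rough sketch of the new archive handling from a charm's point
of view; the URL, credentials and 'deadbeef' digest are placeholders, and
CHARM_DIR must be set as it is inside a hook:

    from charmhelpers.fetch import install_remote

    # The handler strips the user:password pair from the netloc, installs a
    # basic-auth opener for the download, verifies the given checksum, then
    # extracts the archive fetched under $CHARM_DIR/fetched/.
    path = install_remote(
        'https://user:secret@private.example.com/files/payload.tgz',
        checksum='deadbeef', hash_type='sha1')
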
2130=== modified file 'hooks/charmhelpers/fetch/bzrurl.py'
2131--- hooks/charmhelpers/fetch/bzrurl.py 2013-08-29 18:41:54 +0000
2132+++ hooks/charmhelpers/fetch/bzrurl.py 2014-09-26 08:01:25 +0000
2133@@ -12,6 +12,7 @@
2134 apt_install("python-bzrlib")
2135 from bzrlib.branch import Branch
2136
2137+
2138 class BzrUrlFetchHandler(BaseFetchHandler):
2139 """Handler for bazaar branches via generic and lp URLs"""
2140 def can_handle(self, source):
2141@@ -38,7 +39,8 @@
2142 def install(self, source):
2143 url_parts = self.parse_url(source)
2144 branch_name = url_parts.path.strip("/").split("/")[-1]
2145- dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)
2146+ dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
2147+ branch_name)
2148 if not os.path.exists(dest_dir):
2149 mkdir(dest_dir, perms=0755)
2150 try:
2151@@ -46,4 +48,3 @@
2152 except OSError as e:
2153 raise UnhandledSource(e.strerror)
2154 return dest_dir
2155-
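
Taken together, an install hook consuming this sync would look roughly like the
sketch below; it has to run inside a hook environment, and the package name is
illustrative:

    from charmhelpers.fetch import configure_sources, apt_install, apt_upgrade

    # Reads the install_sources/install_keys YAML fragments from the charm
    # config, feeds each pair to add_source(), then runs apt_update(fatal=True).
    configure_sources(update=True)

    # fatal=True routes through _run_apt_command(): while apt-get exits with
    # the "couldn't acquire lock" status (100), the call is retried every
    # APT_NO_LOCK_RETRY_DELAY seconds, up to APT_NO_LOCK_RETRY_COUNT times.
    apt_install(['ntp'], fatal=True)
    apt_upgrade(fatal=True, dist=True)
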
