Merge lp:~tribaal/charms/trusty/ntp/ntp-charm-helpers-sync into lp:charms/trusty/ntp

Proposed by Chris Glass
Status: Merged
Merged at revision: 14
Proposed branch: lp:~tribaal/charms/trusty/ntp/ntp-charm-helpers-sync
Merge into: lp:charms/trusty/ntp
Diff against target: 1240 lines (+637/-112)
6 files modified
hooks/charmhelpers/core/fstab.py (+116/-0)
hooks/charmhelpers/core/hookenv.py (+184/-25)
hooks/charmhelpers/core/host.py (+105/-19)
hooks/charmhelpers/fetch/__init__.py (+214/-66)
hooks/charmhelpers/fetch/archiveurl.py (+15/-0)
hooks/charmhelpers/fetch/bzrurl.py (+3/-2)
To merge this branch: bzr merge lp:~tribaal/charms/trusty/ntp/ntp-charm-helpers-sync
Reviewer: Liam Young (community)
Status: Approve
Review via email: mp+226962@code.launchpad.net

Description of the change

This branch updates charm-helpers to the latest version so the charm can make use of the recently introduced "retry" logic around the apt methods: should it fail to acquire the apt lock, the charm will now wait and retry instead of failing immediately.
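
For reviewers unfamiliar with the new helper, here is a condensed sketch of the retry behaviour introduced by _run_apt_command() in the hooks/charmhelpers/fetch/__init__.py hunk of the diff below. It is an illustration only, not a line-for-line copy: the name run_apt_with_retry is hypothetical, and the real helper only retries when its fatal flag is set (the path taken by apt_install(..., fatal=True) and friends).

    import os
    import subprocess
    import time

    # Constants mirror the synced charm-helpers code in the diff below.
    APT_NO_LOCK = 100             # apt-get exit code when the dpkg/apt lock is held
    APT_NO_LOCK_RETRY_DELAY = 10  # seconds to wait between attempts
    APT_NO_LOCK_RETRY_COUNT = 30  # give up after this many lock failures

    def run_apt_with_retry(cmd):
        """Run an apt command, waiting out a dpkg/apt lock held by another process."""
        env = os.environ.copy()
        env.setdefault('DEBIAN_FRONTEND', 'noninteractive')

        retries = 0
        while True:
            try:
                return subprocess.check_call(cmd, env=env)
            except subprocess.CalledProcessError as e:
                # Re-raise anything that is not a lock failure, or once the
                # retry budget is exhausted.
                if e.returncode != APT_NO_LOCK or retries >= APT_NO_LOCK_RETRY_COUNT:
                    raise
                retries += 1
                time.sleep(APT_NO_LOCK_RETRY_DELAY)

    if __name__ == '__main__':
        # Same command apt_install('ntp', fatal=True) builds via the helper.
        run_apt_with_retry(['apt-get', '--assume-yes', 'install', 'ntp'])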

Liam Young (gnuoy) wrote:

LGTM and amulet test passed

review: Approve

Preview Diff

1=== added file 'hooks/charmhelpers/core/fstab.py'
2--- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000
3+++ hooks/charmhelpers/core/fstab.py 2014-07-16 05:51:26 +0000
4@@ -0,0 +1,116 @@
5+#!/usr/bin/env python
6+# -*- coding: utf-8 -*-
7+
8+__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
9+
10+import os
11+
12+
13+class Fstab(file):
14+ """This class extends file in order to implement a file reader/writer
15+ for file `/etc/fstab`
16+ """
17+
18+ class Entry(object):
19+ """Entry class represents a non-comment line on the `/etc/fstab` file
20+ """
21+ def __init__(self, device, mountpoint, filesystem,
22+ options, d=0, p=0):
23+ self.device = device
24+ self.mountpoint = mountpoint
25+ self.filesystem = filesystem
26+
27+ if not options:
28+ options = "defaults"
29+
30+ self.options = options
31+ self.d = d
32+ self.p = p
33+
34+ def __eq__(self, o):
35+ return str(self) == str(o)
36+
37+ def __str__(self):
38+ return "{} {} {} {} {} {}".format(self.device,
39+ self.mountpoint,
40+ self.filesystem,
41+ self.options,
42+ self.d,
43+ self.p)
44+
45+ DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab')
46+
47+ def __init__(self, path=None):
48+ if path:
49+ self._path = path
50+ else:
51+ self._path = self.DEFAULT_PATH
52+ file.__init__(self, self._path, 'r+')
53+
54+ def _hydrate_entry(self, line):
55+ # NOTE: use split with no arguments to split on any
56+ # whitespace including tabs
57+ return Fstab.Entry(*filter(
58+ lambda x: x not in ('', None),
59+ line.strip("\n").split()))
60+
61+ @property
62+ def entries(self):
63+ self.seek(0)
64+ for line in self.readlines():
65+ try:
66+ if not line.startswith("#"):
67+ yield self._hydrate_entry(line)
68+ except ValueError:
69+ pass
70+
71+ def get_entry_by_attr(self, attr, value):
72+ for entry in self.entries:
73+ e_attr = getattr(entry, attr)
74+ if e_attr == value:
75+ return entry
76+ return None
77+
78+ def add_entry(self, entry):
79+ if self.get_entry_by_attr('device', entry.device):
80+ return False
81+
82+ self.write(str(entry) + '\n')
83+ self.truncate()
84+ return entry
85+
86+ def remove_entry(self, entry):
87+ self.seek(0)
88+
89+ lines = self.readlines()
90+
91+ found = False
92+ for index, line in enumerate(lines):
93+ if not line.startswith("#"):
94+ if self._hydrate_entry(line) == entry:
95+ found = True
96+ break
97+
98+ if not found:
99+ return False
100+
101+ lines.remove(line)
102+
103+ self.seek(0)
104+ self.write(''.join(lines))
105+ self.truncate()
106+ return True
107+
108+ @classmethod
109+ def remove_by_mountpoint(cls, mountpoint, path=None):
110+ fstab = cls(path=path)
111+ entry = fstab.get_entry_by_attr('mountpoint', mountpoint)
112+ if entry:
113+ return fstab.remove_entry(entry)
114+ return False
115+
116+ @classmethod
117+ def add(cls, device, mountpoint, filesystem, options=None, path=None):
118+ return cls(path=path).add_entry(Fstab.Entry(device,
119+ mountpoint, filesystem,
120+ options=options))
121
122=== modified file 'hooks/charmhelpers/core/hookenv.py'
123--- hooks/charmhelpers/core/hookenv.py 2013-08-29 18:41:41 +0000
124+++ hooks/charmhelpers/core/hookenv.py 2014-07-16 05:51:26 +0000
125@@ -8,7 +8,9 @@
126 import json
127 import yaml
128 import subprocess
129+import sys
130 import UserDict
131+from subprocess import CalledProcessError
132
133 CRITICAL = "CRITICAL"
134 ERROR = "ERROR"
135@@ -21,9 +23,9 @@
136
137
138 def cached(func):
139- ''' Cache return values for multiple executions of func + args
140+ """Cache return values for multiple executions of func + args
141
142- For example:
143+ For example::
144
145 @cached
146 def unit_get(attribute):
147@@ -32,7 +34,7 @@
148 unit_get('test')
149
150 will cache the result of unit_get + 'test' for future calls.
151- '''
152+ """
153 def wrapper(*args, **kwargs):
154 global cache
155 key = str((func, args, kwargs))
156@@ -46,8 +48,8 @@
157
158
159 def flush(key):
160- ''' Flushes any entries from function cache where the
161- key is found in the function+args '''
162+ """Flushes any entries from function cache where the
163+ key is found in the function+args """
164 flush_list = []
165 for item in cache:
166 if key in item:
167@@ -57,7 +59,7 @@
168
169
170 def log(message, level=None):
171- "Write a message to the juju log"
172+ """Write a message to the juju log"""
173 command = ['juju-log']
174 if level:
175 command += ['-l', level]
176@@ -66,7 +68,7 @@
177
178
179 class Serializable(UserDict.IterableUserDict):
180- "Wrapper, an object that can be serialized to yaml or json"
181+ """Wrapper, an object that can be serialized to yaml or json"""
182
183 def __init__(self, obj):
184 # wrap the object
185@@ -96,11 +98,11 @@
186 self.data = state
187
188 def json(self):
189- "Serialize the object to json"
190+ """Serialize the object to json"""
191 return json.dumps(self.data)
192
193 def yaml(self):
194- "Serialize the object to yaml"
195+ """Serialize the object to yaml"""
196 return yaml.dump(self.data)
197
198
199@@ -119,50 +121,153 @@
200
201
202 def in_relation_hook():
203- "Determine whether we're running in a relation hook"
204+ """Determine whether we're running in a relation hook"""
205 return 'JUJU_RELATION' in os.environ
206
207
208 def relation_type():
209- "The scope for the current relation hook"
210+ """The scope for the current relation hook"""
211 return os.environ.get('JUJU_RELATION', None)
212
213
214 def relation_id():
215- "The relation ID for the current relation hook"
216+ """The relation ID for the current relation hook"""
217 return os.environ.get('JUJU_RELATION_ID', None)
218
219
220 def local_unit():
221- "Local unit ID"
222+ """Local unit ID"""
223 return os.environ['JUJU_UNIT_NAME']
224
225
226 def remote_unit():
227- "The remote unit for the current relation hook"
228+ """The remote unit for the current relation hook"""
229 return os.environ['JUJU_REMOTE_UNIT']
230
231
232 def service_name():
233- "The name service group this unit belongs to"
234+ """The name service group this unit belongs to"""
235 return local_unit().split('/')[0]
236
237
238+def hook_name():
239+ """The name of the currently executing hook"""
240+ return os.path.basename(sys.argv[0])
241+
242+
243+class Config(dict):
244+ """A Juju charm config dictionary that can write itself to
245+ disk (as json) and track which values have changed since
246+ the previous hook invocation.
247+
248+ Do not instantiate this object directly - instead call
249+ ``hookenv.config()``
250+
251+ Example usage::
252+
253+ >>> # inside a hook
254+ >>> from charmhelpers.core import hookenv
255+ >>> config = hookenv.config()
256+ >>> config['foo']
257+ 'bar'
258+ >>> config['mykey'] = 'myval'
259+ >>> config.save()
260+
261+
262+ >>> # user runs `juju set mycharm foo=baz`
263+ >>> # now we're inside subsequent config-changed hook
264+ >>> config = hookenv.config()
265+ >>> config['foo']
266+ 'baz'
267+ >>> # test to see if this val has changed since last hook
268+ >>> config.changed('foo')
269+ True
270+ >>> # what was the previous value?
271+ >>> config.previous('foo')
272+ 'bar'
273+ >>> # keys/values that we add are preserved across hooks
274+ >>> config['mykey']
275+ 'myval'
276+ >>> # don't forget to save at the end of hook!
277+ >>> config.save()
278+
279+ """
280+ CONFIG_FILE_NAME = '.juju-persistent-config'
281+
282+ def __init__(self, *args, **kw):
283+ super(Config, self).__init__(*args, **kw)
284+ self._prev_dict = None
285+ self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
286+ if os.path.exists(self.path):
287+ self.load_previous()
288+
289+ def load_previous(self, path=None):
290+ """Load previous copy of config from disk so that current values
291+ can be compared to previous values.
292+
293+ :param path:
294+
295+ File path from which to load the previous config. If `None`,
296+ config is loaded from the default location. If `path` is
297+ specified, subsequent `save()` calls will write to the same
298+ path.
299+
300+ """
301+ self.path = path or self.path
302+ with open(self.path) as f:
303+ self._prev_dict = json.load(f)
304+
305+ def changed(self, key):
306+ """Return true if the value for this key has changed since
307+ the last save.
308+
309+ """
310+ if self._prev_dict is None:
311+ return True
312+ return self.previous(key) != self.get(key)
313+
314+ def previous(self, key):
315+ """Return previous value for this key, or None if there
316+ is no "previous" value.
317+
318+ """
319+ if self._prev_dict:
320+ return self._prev_dict.get(key)
321+ return None
322+
323+ def save(self):
324+ """Save this config to disk.
325+
326+ Preserves items in _prev_dict that do not exist in self.
327+
328+ """
329+ if self._prev_dict:
330+ for k, v in self._prev_dict.iteritems():
331+ if k not in self:
332+ self[k] = v
333+ with open(self.path, 'w') as f:
334+ json.dump(self, f)
335+
336+
337 @cached
338 def config(scope=None):
339- "Juju charm configuration"
340+ """Juju charm configuration"""
341 config_cmd_line = ['config-get']
342 if scope is not None:
343 config_cmd_line.append(scope)
344 config_cmd_line.append('--format=json')
345 try:
346- return json.loads(subprocess.check_output(config_cmd_line))
347+ config_data = json.loads(subprocess.check_output(config_cmd_line))
348+ if scope is not None:
349+ return config_data
350+ return Config(config_data)
351 except ValueError:
352 return None
353
354
355 @cached
356 def relation_get(attribute=None, unit=None, rid=None):
357+ """Get relation information"""
358 _args = ['relation-get', '--format=json']
359 if rid:
360 _args.append('-r')
361@@ -174,9 +279,14 @@
362 return json.loads(subprocess.check_output(_args))
363 except ValueError:
364 return None
365+ except CalledProcessError, e:
366+ if e.returncode == 2:
367+ return None
368+ raise
369
370
371 def relation_set(relation_id=None, relation_settings={}, **kwargs):
372+ """Set relation information for the current unit"""
373 relation_cmd_line = ['relation-set']
374 if relation_id is not None:
375 relation_cmd_line.extend(('-r', relation_id))
376@@ -192,7 +302,7 @@
377
378 @cached
379 def relation_ids(reltype=None):
380- "A list of relation_ids"
381+ """A list of relation_ids"""
382 reltype = reltype or relation_type()
383 relid_cmd_line = ['relation-ids', '--format=json']
384 if reltype is not None:
385@@ -203,7 +313,7 @@
386
387 @cached
388 def related_units(relid=None):
389- "A list of related units"
390+ """A list of related units"""
391 relid = relid or relation_id()
392 units_cmd_line = ['relation-list', '--format=json']
393 if relid is not None:
394@@ -213,7 +323,7 @@
395
396 @cached
397 def relation_for_unit(unit=None, rid=None):
398- "Get the json represenation of a unit's relation"
399+ """Get the json represenation of a unit's relation"""
400 unit = unit or remote_unit()
401 relation = relation_get(unit=unit, rid=rid)
402 for key in relation:
403@@ -225,7 +335,7 @@
404
405 @cached
406 def relations_for_id(relid=None):
407- "Get relations of a specific relation ID"
408+ """Get relations of a specific relation ID"""
409 relation_data = []
410 relid = relid or relation_ids()
411 for unit in related_units(relid):
412@@ -237,7 +347,7 @@
413
414 @cached
415 def relations_of_type(reltype=None):
416- "Get relations of a specific type"
417+ """Get relations of a specific type"""
418 relation_data = []
419 reltype = reltype or relation_type()
420 for relid in relation_ids(reltype):
421@@ -249,7 +359,7 @@
422
423 @cached
424 def relation_types():
425- "Get a list of relation types supported by this charm"
426+ """Get a list of relation types supported by this charm"""
427 charmdir = os.environ.get('CHARM_DIR', '')
428 mdf = open(os.path.join(charmdir, 'metadata.yaml'))
429 md = yaml.safe_load(mdf)
430@@ -264,6 +374,7 @@
431
432 @cached
433 def relations():
434+ """Get a nested dictionary of relation data for all related units"""
435 rels = {}
436 for reltype in relation_types():
437 relids = {}
438@@ -277,15 +388,35 @@
439 return rels
440
441
442+@cached
443+def is_relation_made(relation, keys='private-address'):
444+ '''
445+ Determine whether a relation is established by checking for
446+ presence of key(s). If a list of keys is provided, they
447+ must all be present for the relation to be identified as made
448+ '''
449+ if isinstance(keys, str):
450+ keys = [keys]
451+ for r_id in relation_ids(relation):
452+ for unit in related_units(r_id):
453+ context = {}
454+ for k in keys:
455+ context[k] = relation_get(k, rid=r_id,
456+ unit=unit)
457+ if None not in context.values():
458+ return True
459+ return False
460+
461+
462 def open_port(port, protocol="TCP"):
463- "Open a service network port"
464+ """Open a service network port"""
465 _args = ['open-port']
466 _args.append('{}/{}'.format(port, protocol))
467 subprocess.check_call(_args)
468
469
470 def close_port(port, protocol="TCP"):
471- "Close a service network port"
472+ """Close a service network port"""
473 _args = ['close-port']
474 _args.append('{}/{}'.format(port, protocol))
475 subprocess.check_call(_args)
476@@ -293,6 +424,7 @@
477
478 @cached
479 def unit_get(attribute):
480+ """Get the unit ID for the remote unit"""
481 _args = ['unit-get', '--format=json', attribute]
482 try:
483 return json.loads(subprocess.check_output(_args))
484@@ -301,22 +433,47 @@
485
486
487 def unit_private_ip():
488+ """Get this unit's private IP address"""
489 return unit_get('private-address')
490
491
492 class UnregisteredHookError(Exception):
493+ """Raised when an undefined hook is called"""
494 pass
495
496
497 class Hooks(object):
498+ """A convenient handler for hook functions.
499+
500+ Example::
501+
502+ hooks = Hooks()
503+
504+ # register a hook, taking its name from the function name
505+ @hooks.hook()
506+ def install():
507+ pass # your code here
508+
509+ # register a hook, providing a custom hook name
510+ @hooks.hook("config-changed")
511+ def config_changed():
512+ pass # your code here
513+
514+ if __name__ == "__main__":
515+ # execute a hook based on the name the program is called by
516+ hooks.execute(sys.argv)
517+ """
518+
519 def __init__(self):
520 super(Hooks, self).__init__()
521 self._hooks = {}
522
523 def register(self, name, function):
524+ """Register a hook"""
525 self._hooks[name] = function
526
527 def execute(self, args):
528+ """Execute a registered hook based on args[0]"""
529 hook_name = os.path.basename(args[0])
530 if hook_name in self._hooks:
531 self._hooks[hook_name]()
532@@ -324,6 +481,7 @@
533 raise UnregisteredHookError(hook_name)
534
535 def hook(self, *hook_names):
536+ """Decorator, registering them as hooks"""
537 def wrapper(decorated):
538 for hook_name in hook_names:
539 self.register(hook_name, decorated)
540@@ -337,4 +495,5 @@
541
542
543 def charm_dir():
544+ """Return the root directory of the current charm"""
545 return os.environ.get('CHARM_DIR')
546
547=== modified file 'hooks/charmhelpers/core/host.py'
548--- hooks/charmhelpers/core/host.py 2013-08-29 18:41:41 +0000
549+++ hooks/charmhelpers/core/host.py 2014-07-16 05:51:26 +0000
550@@ -16,21 +16,27 @@
551 from collections import OrderedDict
552
553 from hookenv import log
554+from fstab import Fstab
555
556
557 def service_start(service_name):
558+ """Start a system service"""
559 return service('start', service_name)
560
561
562 def service_stop(service_name):
563+ """Stop a system service"""
564 return service('stop', service_name)
565
566
567 def service_restart(service_name):
568+ """Restart a system service"""
569 return service('restart', service_name)
570
571
572 def service_reload(service_name, restart_on_failure=False):
573+ """Reload a system service, optionally falling back to restart if
574+ reload fails"""
575 service_result = service('reload', service_name)
576 if not service_result and restart_on_failure:
577 service_result = service('restart', service_name)
578@@ -38,11 +44,13 @@
579
580
581 def service(action, service_name):
582+ """Control a system service"""
583 cmd = ['service', service_name, action]
584 return subprocess.call(cmd) == 0
585
586
587 def service_running(service):
588+ """Determine whether a system service is running"""
589 try:
590 output = subprocess.check_output(['service', service, 'status'])
591 except subprocess.CalledProcessError:
592@@ -55,7 +63,7 @@
593
594
595 def adduser(username, password=None, shell='/bin/bash', system_user=False):
596- """Add a user"""
597+ """Add a user to the system"""
598 try:
599 user_info = pwd.getpwnam(username)
600 log('user {0} already exists!'.format(username))
601@@ -137,8 +145,20 @@
602 target.write(content)
603
604
605-def mount(device, mountpoint, options=None, persist=False):
606- '''Mount a filesystem'''
607+def fstab_remove(mp):
608+ """Remove the given mountpoint entry from /etc/fstab
609+ """
610+ return Fstab.remove_by_mountpoint(mp)
611+
612+
613+def fstab_add(dev, mp, fs, options=None):
614+ """Adds the given device entry to the /etc/fstab file
615+ """
616+ return Fstab.add(dev, mp, fs, options=options)
617+
618+
619+def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
620+ """Mount a filesystem at a particular mountpoint"""
621 cmd_args = ['mount']
622 if options is not None:
623 cmd_args.extend(['-o', options])
624@@ -148,28 +168,28 @@
625 except subprocess.CalledProcessError, e:
626 log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
627 return False
628+
629 if persist:
630- # TODO: update fstab
631- pass
632+ return fstab_add(device, mountpoint, filesystem, options=options)
633 return True
634
635
636 def umount(mountpoint, persist=False):
637- '''Unmount a filesystem'''
638+ """Unmount a filesystem"""
639 cmd_args = ['umount', mountpoint]
640 try:
641 subprocess.check_output(cmd_args)
642 except subprocess.CalledProcessError, e:
643 log('Error unmounting {}\n{}'.format(mountpoint, e.output))
644 return False
645+
646 if persist:
647- # TODO: update fstab
648- pass
649+ return fstab_remove(mountpoint)
650 return True
651
652
653 def mounts():
654- '''List of all mounted volumes as [[mountpoint,device],[...]]'''
655+ """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
656 with open('/proc/mounts') as f:
657 # [['/mount/point','/dev/path'],[...]]
658 system_mounts = [m[1::-1] for m in [l.strip().split()
659@@ -178,7 +198,7 @@
660
661
662 def file_hash(path):
663- ''' Generate a md5 hash of the contents of 'path' or None if not found '''
664+ """Generate a md5 hash of the contents of 'path' or None if not found """
665 if os.path.exists(path):
666 h = hashlib.md5()
667 with open(path, 'r') as source:
668@@ -188,21 +208,21 @@
669 return None
670
671
672-def restart_on_change(restart_map):
673- ''' Restart services based on configuration files changing
674+def restart_on_change(restart_map, stopstart=False):
675+ """Restart services based on configuration files changing
676
677- This function is used a decorator, for example
678+ This function is used a decorator, for example::
679
680 @restart_on_change({
681 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
682 })
683 def ceph_client_changed():
684- ...
685+ pass # your code here
686
687 In this example, the cinder-api and cinder-volume services
688 would be restarted if /etc/ceph/ceph.conf is changed by the
689 ceph_client_changed function.
690- '''
691+ """
692 def wrap(f):
693 def wrapped_f(*args):
694 checksums = {}
695@@ -213,14 +233,20 @@
696 for path in restart_map:
697 if checksums[path] != file_hash(path):
698 restarts += restart_map[path]
699- for service_name in list(OrderedDict.fromkeys(restarts)):
700- service('restart', service_name)
701+ services_list = list(OrderedDict.fromkeys(restarts))
702+ if not stopstart:
703+ for service_name in services_list:
704+ service('restart', service_name)
705+ else:
706+ for action in ['stop', 'start']:
707+ for service_name in services_list:
708+ service(action, service_name)
709 return wrapped_f
710 return wrap
711
712
713 def lsb_release():
714- '''Return /etc/lsb-release in a dict'''
715+ """Return /etc/lsb-release in a dict"""
716 d = {}
717 with open('/etc/lsb-release', 'r') as lsb:
718 for l in lsb:
719@@ -230,7 +256,7 @@
720
721
722 def pwgen(length=None):
723- '''Generate a random pasword.'''
724+ """Generate a random pasword."""
725 if length is None:
726 length = random.choice(range(35, 45))
727 alphanumeric_chars = [
728@@ -239,3 +265,63 @@
729 random_chars = [
730 random.choice(alphanumeric_chars) for _ in range(length)]
731 return(''.join(random_chars))
732+
733+
734+def list_nics(nic_type):
735+ '''Return a list of nics of given type(s)'''
736+ if isinstance(nic_type, basestring):
737+ int_types = [nic_type]
738+ else:
739+ int_types = nic_type
740+ interfaces = []
741+ for int_type in int_types:
742+ cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
743+ ip_output = subprocess.check_output(cmd).split('\n')
744+ ip_output = (line for line in ip_output if line)
745+ for line in ip_output:
746+ if line.split()[1].startswith(int_type):
747+ interfaces.append(line.split()[1].replace(":", ""))
748+ return interfaces
749+
750+
751+def set_nic_mtu(nic, mtu):
752+ '''Set MTU on a network interface'''
753+ cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
754+ subprocess.check_call(cmd)
755+
756+
757+def get_nic_mtu(nic):
758+ cmd = ['ip', 'addr', 'show', nic]
759+ ip_output = subprocess.check_output(cmd).split('\n')
760+ mtu = ""
761+ for line in ip_output:
762+ words = line.split()
763+ if 'mtu' in words:
764+ mtu = words[words.index("mtu") + 1]
765+ return mtu
766+
767+
768+def get_nic_hwaddr(nic):
769+ cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
770+ ip_output = subprocess.check_output(cmd)
771+ hwaddr = ""
772+ words = ip_output.split()
773+ if 'link/ether' in words:
774+ hwaddr = words[words.index('link/ether') + 1]
775+ return hwaddr
776+
777+
778+def cmp_pkgrevno(package, revno, pkgcache=None):
779+ '''Compare supplied revno with the revno of the installed package
780+
781+ * 1 => Installed revno is greater than supplied arg
782+ * 0 => Installed revno is the same as supplied arg
783+ * -1 => Installed revno is less than supplied arg
784+
785+ '''
786+ import apt_pkg
787+ if not pkgcache:
788+ apt_pkg.init()
789+ pkgcache = apt_pkg.Cache()
790+ pkg = pkgcache[package]
791+ return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
792
793=== modified file 'hooks/charmhelpers/fetch/__init__.py'
794--- hooks/charmhelpers/fetch/__init__.py 2013-08-29 18:41:41 +0000
795+++ hooks/charmhelpers/fetch/__init__.py 2014-07-16 05:51:26 +0000
796@@ -1,4 +1,5 @@
797 import importlib
798+import time
799 from yaml import safe_load
800 from charmhelpers.core.host import (
801 lsb_release
802@@ -12,7 +13,8 @@
803 config,
804 log,
805 )
806-import apt_pkg
807+import os
808+
809
810 CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
811 deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
812@@ -20,11 +22,107 @@
813 PROPOSED_POCKET = """# Proposed
814 deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
815 """
816+CLOUD_ARCHIVE_POCKETS = {
817+ # Folsom
818+ 'folsom': 'precise-updates/folsom',
819+ 'precise-folsom': 'precise-updates/folsom',
820+ 'precise-folsom/updates': 'precise-updates/folsom',
821+ 'precise-updates/folsom': 'precise-updates/folsom',
822+ 'folsom/proposed': 'precise-proposed/folsom',
823+ 'precise-folsom/proposed': 'precise-proposed/folsom',
824+ 'precise-proposed/folsom': 'precise-proposed/folsom',
825+ # Grizzly
826+ 'grizzly': 'precise-updates/grizzly',
827+ 'precise-grizzly': 'precise-updates/grizzly',
828+ 'precise-grizzly/updates': 'precise-updates/grizzly',
829+ 'precise-updates/grizzly': 'precise-updates/grizzly',
830+ 'grizzly/proposed': 'precise-proposed/grizzly',
831+ 'precise-grizzly/proposed': 'precise-proposed/grizzly',
832+ 'precise-proposed/grizzly': 'precise-proposed/grizzly',
833+ # Havana
834+ 'havana': 'precise-updates/havana',
835+ 'precise-havana': 'precise-updates/havana',
836+ 'precise-havana/updates': 'precise-updates/havana',
837+ 'precise-updates/havana': 'precise-updates/havana',
838+ 'havana/proposed': 'precise-proposed/havana',
839+ 'precise-havana/proposed': 'precise-proposed/havana',
840+ 'precise-proposed/havana': 'precise-proposed/havana',
841+ # Icehouse
842+ 'icehouse': 'precise-updates/icehouse',
843+ 'precise-icehouse': 'precise-updates/icehouse',
844+ 'precise-icehouse/updates': 'precise-updates/icehouse',
845+ 'precise-updates/icehouse': 'precise-updates/icehouse',
846+ 'icehouse/proposed': 'precise-proposed/icehouse',
847+ 'precise-icehouse/proposed': 'precise-proposed/icehouse',
848+ 'precise-proposed/icehouse': 'precise-proposed/icehouse',
849+ # Juno
850+ 'juno': 'trusty-updates/juno',
851+ 'trusty-juno': 'trusty-updates/juno',
852+ 'trusty-juno/updates': 'trusty-updates/juno',
853+ 'trusty-updates/juno': 'trusty-updates/juno',
854+ 'juno/proposed': 'trusty-proposed/juno',
855+ 'juno/proposed': 'trusty-proposed/juno',
856+ 'trusty-juno/proposed': 'trusty-proposed/juno',
857+ 'trusty-proposed/juno': 'trusty-proposed/juno',
858+}
859+
860+# The order of this list is very important. Handlers should be listed in from
861+# least- to most-specific URL matching.
862+FETCH_HANDLERS = (
863+ 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
864+ 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
865+)
866+
867+APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
868+APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
869+APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
870+
871+
872+class SourceConfigError(Exception):
873+ pass
874+
875+
876+class UnhandledSource(Exception):
877+ pass
878+
879+
880+class AptLockError(Exception):
881+ pass
882+
883+
884+class BaseFetchHandler(object):
885+
886+ """Base class for FetchHandler implementations in fetch plugins"""
887+
888+ def can_handle(self, source):
889+ """Returns True if the source can be handled. Otherwise returns
890+ a string explaining why it cannot"""
891+ return "Wrong source type"
892+
893+ def install(self, source):
894+ """Try to download and unpack the source. Return the path to the
895+ unpacked files or raise UnhandledSource."""
896+ raise UnhandledSource("Wrong source type {}".format(source))
897+
898+ def parse_url(self, url):
899+ return urlparse(url)
900+
901+ def base_url(self, url):
902+ """Return url without querystring or fragment"""
903+ parts = list(self.parse_url(url))
904+ parts[4:] = ['' for i in parts[4:]]
905+ return urlunparse(parts)
906
907
908 def filter_installed_packages(packages):
909 """Returns a list of packages that require installation"""
910+ import apt_pkg
911 apt_pkg.init()
912+
913+ # Tell apt to build an in-memory cache to prevent race conditions (if
914+ # another process is already building the cache).
915+ apt_pkg.config.set("Dir::Cache::pkgcache", "")
916+
917 cache = apt_pkg.Cache()
918 _pkgs = []
919 for package in packages:
920@@ -40,8 +138,10 @@
921
922 def apt_install(packages, options=None, fatal=False):
923 """Install one or more packages"""
924- options = options or []
925- cmd = ['apt-get', '-y']
926+ if options is None:
927+ options = ['--option=Dpkg::Options::=--force-confold']
928+
929+ cmd = ['apt-get', '--assume-yes']
930 cmd.extend(options)
931 cmd.append('install')
932 if isinstance(packages, basestring):
933@@ -50,29 +150,50 @@
934 cmd.extend(packages)
935 log("Installing {} with options: {}".format(packages,
936 options))
937- if fatal:
938- subprocess.check_call(cmd)
939+ _run_apt_command(cmd, fatal)
940+
941+
942+def apt_upgrade(options=None, fatal=False, dist=False):
943+ """Upgrade all packages"""
944+ if options is None:
945+ options = ['--option=Dpkg::Options::=--force-confold']
946+
947+ cmd = ['apt-get', '--assume-yes']
948+ cmd.extend(options)
949+ if dist:
950+ cmd.append('dist-upgrade')
951 else:
952- subprocess.call(cmd)
953+ cmd.append('upgrade')
954+ log("Upgrading with options: {}".format(options))
955+ _run_apt_command(cmd, fatal)
956
957
958 def apt_update(fatal=False):
959 """Update local apt cache"""
960 cmd = ['apt-get', 'update']
961- if fatal:
962- subprocess.check_call(cmd)
963- else:
964- subprocess.call(cmd)
965+ _run_apt_command(cmd, fatal)
966
967
968 def apt_purge(packages, fatal=False):
969 """Purge one or more packages"""
970- cmd = ['apt-get', '-y', 'purge']
971+ cmd = ['apt-get', '--assume-yes', 'purge']
972 if isinstance(packages, basestring):
973 cmd.append(packages)
974 else:
975 cmd.extend(packages)
976 log("Purging {}".format(packages))
977+ _run_apt_command(cmd, fatal)
978+
979+
980+def apt_hold(packages, fatal=False):
981+ """Hold one or more packages"""
982+ cmd = ['apt-mark', 'hold']
983+ if isinstance(packages, basestring):
984+ cmd.append(packages)
985+ else:
986+ cmd.extend(packages)
987+ log("Holding {}".format(packages))
988+
989 if fatal:
990 subprocess.check_call(cmd)
991 else:
992@@ -80,67 +201,76 @@
993
994
995 def add_source(source, key=None):
996- if ((source.startswith('ppa:') or
997- source.startswith('http:'))):
998+ if source is None:
999+ log('Source is not present. Skipping')
1000+ return
1001+
1002+ if (source.startswith('ppa:') or
1003+ source.startswith('http') or
1004+ source.startswith('deb ') or
1005+ source.startswith('cloud-archive:')):
1006 subprocess.check_call(['add-apt-repository', '--yes', source])
1007 elif source.startswith('cloud:'):
1008 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
1009 fatal=True)
1010 pocket = source.split(':')[-1]
1011+ if pocket not in CLOUD_ARCHIVE_POCKETS:
1012+ raise SourceConfigError(
1013+ 'Unsupported cloud: source option %s' %
1014+ pocket)
1015+ actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
1016 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
1017- apt.write(CLOUD_ARCHIVE.format(pocket))
1018+ apt.write(CLOUD_ARCHIVE.format(actual_pocket))
1019 elif source == 'proposed':
1020 release = lsb_release()['DISTRIB_CODENAME']
1021 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
1022 apt.write(PROPOSED_POCKET.format(release))
1023 if key:
1024- subprocess.check_call(['apt-key', 'import', key])
1025-
1026-
1027-class SourceConfigError(Exception):
1028- pass
1029+ subprocess.check_call(['apt-key', 'adv', '--keyserver',
1030+ 'hkp://keyserver.ubuntu.com:80', '--recv',
1031+ key])
1032
1033
1034 def configure_sources(update=False,
1035 sources_var='install_sources',
1036 keys_var='install_keys'):
1037 """
1038- Configure multiple sources from charm configuration
1039+ Configure multiple sources from charm configuration.
1040+
1041+ The lists are encoded as yaml fragments in the configuration.
1042+ The frament needs to be included as a string.
1043
1044 Example config:
1045- install_sources:
1046+ install_sources: |
1047 - "ppa:foo"
1048 - "http://example.com/repo precise main"
1049- install_keys:
1050+ install_keys: |
1051 - null
1052 - "a1b2c3d4"
1053
1054 Note that 'null' (a.k.a. None) should not be quoted.
1055 """
1056- sources = safe_load(config(sources_var))
1057- keys = safe_load(config(keys_var))
1058- if isinstance(sources, basestring) and isinstance(keys, basestring):
1059- add_source(sources, keys)
1060+ sources = safe_load((config(sources_var) or '').strip()) or []
1061+ keys = safe_load((config(keys_var) or '').strip()) or None
1062+
1063+ if isinstance(sources, basestring):
1064+ sources = [sources]
1065+
1066+ if keys is None:
1067+ for source in sources:
1068+ add_source(source, None)
1069 else:
1070- if not len(sources) == len(keys):
1071- msg = 'Install sources and keys lists are different lengths'
1072- raise SourceConfigError(msg)
1073- for src_num in range(len(sources)):
1074- add_source(sources[src_num], keys[src_num])
1075+ if isinstance(keys, basestring):
1076+ keys = [keys]
1077+
1078+ if len(sources) != len(keys):
1079+ raise SourceConfigError(
1080+ 'Install sources and keys lists are different lengths')
1081+ for source, key in zip(sources, keys):
1082+ add_source(source, key)
1083 if update:
1084 apt_update(fatal=True)
1085
1086-# The order of this list is very important. Handlers should be listed in from
1087-# least- to most-specific URL matching.
1088-FETCH_HANDLERS = (
1089- 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
1090- 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
1091-)
1092-
1093-
1094-class UnhandledSource(Exception):
1095- pass
1096-
1097
1098 def install_remote(source):
1099 """
1100@@ -171,28 +301,6 @@
1101 return install_remote(source)
1102
1103
1104-class BaseFetchHandler(object):
1105- """Base class for FetchHandler implementations in fetch plugins"""
1106- def can_handle(self, source):
1107- """Returns True if the source can be handled. Otherwise returns
1108- a string explaining why it cannot"""
1109- return "Wrong source type"
1110-
1111- def install(self, source):
1112- """Try to download and unpack the source. Return the path to the
1113- unpacked files or raise UnhandledSource."""
1114- raise UnhandledSource("Wrong source type {}".format(source))
1115-
1116- def parse_url(self, url):
1117- return urlparse(url)
1118-
1119- def base_url(self, url):
1120- """Return url without querystring or fragment"""
1121- parts = list(self.parse_url(url))
1122- parts[4:] = ['' for i in parts[4:]]
1123- return urlunparse(parts)
1124-
1125-
1126 def plugins(fetch_handlers=None):
1127 if not fetch_handlers:
1128 fetch_handlers = FETCH_HANDLERS
1129@@ -200,10 +308,50 @@
1130 for handler_name in fetch_handlers:
1131 package, classname = handler_name.rsplit('.', 1)
1132 try:
1133- handler_class = getattr(importlib.import_module(package), classname)
1134+ handler_class = getattr(
1135+ importlib.import_module(package),
1136+ classname)
1137 plugin_list.append(handler_class())
1138 except (ImportError, AttributeError):
1139 # Skip missing plugins so that they can be ommitted from
1140 # installation if desired
1141- log("FetchHandler {} not found, skipping plugin".format(handler_name))
1142+ log("FetchHandler {} not found, skipping plugin".format(
1143+ handler_name))
1144 return plugin_list
1145+
1146+
1147+def _run_apt_command(cmd, fatal=False):
1148+ """
1149+ Run an APT command, checking output and retrying if the fatal flag is set
1150+ to True.
1151+
1152+ :param: cmd: str: The apt command to run.
1153+ :param: fatal: bool: Whether the command's output should be checked and
1154+ retried.
1155+ """
1156+ env = os.environ.copy()
1157+
1158+ if 'DEBIAN_FRONTEND' not in env:
1159+ env['DEBIAN_FRONTEND'] = 'noninteractive'
1160+
1161+ if fatal:
1162+ retry_count = 0
1163+ result = None
1164+
1165+ # If the command is considered "fatal", we need to retry if the apt
1166+ # lock was not acquired.
1167+
1168+ while result is None or result == APT_NO_LOCK:
1169+ try:
1170+ result = subprocess.check_call(cmd, env=env)
1171+ except subprocess.CalledProcessError, e:
1172+ retry_count = retry_count + 1
1173+ if retry_count > APT_NO_LOCK_RETRY_COUNT:
1174+ raise
1175+ result = e.returncode
1176+ log("Couldn't acquire DPKG lock. Will retry in {} seconds."
1177+ "".format(APT_NO_LOCK_RETRY_DELAY))
1178+ time.sleep(APT_NO_LOCK_RETRY_DELAY)
1179+
1180+ else:
1181+ subprocess.call(cmd, env=env)
1182
1183=== modified file 'hooks/charmhelpers/fetch/archiveurl.py'
1184--- hooks/charmhelpers/fetch/archiveurl.py 2013-08-29 18:41:41 +0000
1185+++ hooks/charmhelpers/fetch/archiveurl.py 2014-07-16 05:51:26 +0000
1186@@ -1,5 +1,7 @@
1187 import os
1188 import urllib2
1189+import urlparse
1190+
1191 from charmhelpers.fetch import (
1192 BaseFetchHandler,
1193 UnhandledSource
1194@@ -24,6 +26,19 @@
1195 def download(self, source, dest):
1196 # propogate all exceptions
1197 # URLError, OSError, etc
1198+ proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
1199+ if proto in ('http', 'https'):
1200+ auth, barehost = urllib2.splituser(netloc)
1201+ if auth is not None:
1202+ source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
1203+ username, password = urllib2.splitpasswd(auth)
1204+ passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
1205+ # Realm is set to None in add_password to force the username and password
1206+ # to be used whatever the realm
1207+ passman.add_password(None, source, username, password)
1208+ authhandler = urllib2.HTTPBasicAuthHandler(passman)
1209+ opener = urllib2.build_opener(authhandler)
1210+ urllib2.install_opener(opener)
1211 response = urllib2.urlopen(source)
1212 try:
1213 with open(dest, 'w') as dest_file:
1214
1215=== modified file 'hooks/charmhelpers/fetch/bzrurl.py'
1216--- hooks/charmhelpers/fetch/bzrurl.py 2013-08-29 18:41:41 +0000
1217+++ hooks/charmhelpers/fetch/bzrurl.py 2014-07-16 05:51:26 +0000
1218@@ -12,6 +12,7 @@
1219 apt_install("python-bzrlib")
1220 from bzrlib.branch import Branch
1221
1222+
1223 class BzrUrlFetchHandler(BaseFetchHandler):
1224 """Handler for bazaar branches via generic and lp URLs"""
1225 def can_handle(self, source):
1226@@ -38,7 +39,8 @@
1227 def install(self, source):
1228 url_parts = self.parse_url(source)
1229 branch_name = url_parts.path.strip("/").split("/")[-1]
1230- dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)
1231+ dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
1232+ branch_name)
1233 if not os.path.exists(dest_dir):
1234 mkdir(dest_dir, perms=0755)
1235 try:
1236@@ -46,4 +48,3 @@
1237 except OSError as e:
1238 raise UnhandledSource(e.strerror)
1239 return dest_dir
1240-
