Merge lp:~hopem/charms/trusty/cinder-ceph/lp1535062 into lp:~openstack-charmers-archive/charms/trusty/cinder-ceph/next

Proposed by Edward Hope-Morley
Status: Merged
Merged at revision: 52
Proposed branch: lp:~hopem/charms/trusty/cinder-ceph/lp1535062
Merge into: lp:~openstack-charmers-archive/charms/trusty/cinder-ceph/next
Diff against target: 638 lines (+185/-114)
5 files modified
hooks/charmhelpers/contrib/openstack/context.py (+12/-2)
hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg (+3/-2)
hooks/charmhelpers/contrib/openstack/utils.py (+95/-63)
hooks/charmhelpers/core/host.py (+73/-42)
hooks/charmhelpers/fetch/giturl.py (+2/-5)
To merge this branch: bzr merge lp:~hopem/charms/trusty/cinder-ceph/lp1535062
Reviewer: OpenStack Charmers (status: Pending)
Review via email: mp+282880@code.launchpad.net
uosci-testing-bot (uosci-testing-bot) wrote:

charm_lint_check #17513 cinder-ceph-next for hopem mp282880
    LINT OK: passed

Build: http://10.245.162.77:8080/job/charm_lint_check/17513/

uosci-testing-bot (uosci-testing-bot) wrote:

charm_unit_test #16362 cinder-ceph-next for hopem mp282880
    UNIT OK: passed

Build: http://10.245.162.77:8080/job/charm_unit_test/16362/

uosci-testing-bot (uosci-testing-bot) wrote:

charm_amulet_test #8874 cinder-ceph-next for hopem mp282880
    AMULET OK: passed

Build: http://10.245.162.77:8080/job/charm_amulet_test/8874/

Preview Diff

1=== modified file 'hooks/charmhelpers/contrib/openstack/context.py'
2--- hooks/charmhelpers/contrib/openstack/context.py 2016-01-04 21:26:40 +0000
3+++ hooks/charmhelpers/contrib/openstack/context.py 2016-01-17 21:20:37 +0000
4@@ -57,6 +57,7 @@
5 get_nic_hwaddr,
6 mkdir,
7 write_file,
8+ pwgen,
9 )
10 from charmhelpers.contrib.hahelpers.cluster import (
11 determine_apache_port,
12@@ -87,6 +88,8 @@
13 is_bridge_member,
14 )
15 from charmhelpers.contrib.openstack.utils import get_host_ip
16+from charmhelpers.core.unitdata import kv
17+
18 CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
19 ADDRESS_TYPES = ['admin', 'internal', 'public']
20
21@@ -636,11 +639,18 @@
22 ctxt['ipv6'] = True
23 ctxt['local_host'] = 'ip6-localhost'
24 ctxt['haproxy_host'] = '::'
25- ctxt['stat_port'] = ':::8888'
26 else:
27 ctxt['local_host'] = '127.0.0.1'
28 ctxt['haproxy_host'] = '0.0.0.0'
29- ctxt['stat_port'] = ':8888'
30+
31+ ctxt['stat_port'] = '8888'
32+
33+ db = kv()
34+ ctxt['stat_password'] = db.get('stat-password')
35+ if not ctxt['stat_password']:
36+ ctxt['stat_password'] = db.set('stat-password',
37+ pwgen(32))
38+ db.flush()
39
40 for frontend in cluster_hosts:
41 if (len(cluster_hosts[frontend]['backends']) > 1 or
42
43=== modified file 'hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg'
44--- hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2016-01-04 21:26:40 +0000
45+++ hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2016-01-17 21:20:37 +0000
46@@ -33,13 +33,14 @@
47 timeout server 30000
48 {%- endif %}
49
50-listen stats {{ stat_port }}
51+listen stats
52+ bind {{ local_host }}:{{ stat_port }}
53 mode http
54 stats enable
55 stats hide-version
56 stats realm Haproxy\ Statistics
57 stats uri /
58- stats auth admin:password
59+ stats auth admin:{{ stat_password }}
60
61 {% if frontends -%}
62 {% for service, ports in service_ports.items() -%}
63
64=== modified file 'hooks/charmhelpers/contrib/openstack/utils.py'
65--- hooks/charmhelpers/contrib/openstack/utils.py 2016-01-04 21:26:40 +0000
66+++ hooks/charmhelpers/contrib/openstack/utils.py 2016-01-17 21:20:37 +0000
67@@ -103,68 +103,67 @@
68 ('2016.1', 'mitaka'),
69 ])
70
71-# The ugly duckling
72+# The ugly duckling - must list releases oldest to newest
73 SWIFT_CODENAMES = OrderedDict([
74- ('1.4.3', 'diablo'),
75- ('1.4.8', 'essex'),
76- ('1.7.4', 'folsom'),
77- ('1.8.0', 'grizzly'),
78- ('1.7.7', 'grizzly'),
79- ('1.7.6', 'grizzly'),
80- ('1.10.0', 'havana'),
81- ('1.9.1', 'havana'),
82- ('1.9.0', 'havana'),
83- ('1.13.1', 'icehouse'),
84- ('1.13.0', 'icehouse'),
85- ('1.12.0', 'icehouse'),
86- ('1.11.0', 'icehouse'),
87- ('2.0.0', 'juno'),
88- ('2.1.0', 'juno'),
89- ('2.2.0', 'juno'),
90- ('2.2.1', 'kilo'),
91- ('2.2.2', 'kilo'),
92- ('2.3.0', 'liberty'),
93- ('2.4.0', 'liberty'),
94- ('2.5.0', 'liberty'),
95+ ('diablo',
96+ ['1.4.3']),
97+ ('essex',
98+ ['1.4.8']),
99+ ('folsom',
100+ ['1.7.4']),
101+ ('grizzly',
102+ ['1.7.6', '1.7.7', '1.8.0']),
103+ ('havana',
104+ ['1.9.0', '1.9.1', '1.10.0']),
105+ ('icehouse',
106+ ['1.11.0', '1.12.0', '1.13.0', '1.13.1']),
107+ ('juno',
108+ ['2.0.0', '2.1.0', '2.2.0']),
109+ ('kilo',
110+ ['2.2.1', '2.2.2']),
111+ ('liberty',
112+ ['2.3.0', '2.4.0', '2.5.0']),
113+ ('mitaka',
114+ ['2.5.0']),
115 ])
116
117 # >= Liberty version->codename mapping
118 PACKAGE_CODENAMES = {
119 'nova-common': OrderedDict([
120- ('12.0.0', 'liberty'),
121- ('13.0.0', 'mitaka'),
122+ ('12.0', 'liberty'),
123+ ('13.0', 'mitaka'),
124 ]),
125 'neutron-common': OrderedDict([
126- ('7.0.0', 'liberty'),
127- ('8.0.0', 'mitaka'),
128+ ('7.0', 'liberty'),
129+ ('8.0', 'mitaka'),
130 ]),
131 'cinder-common': OrderedDict([
132- ('7.0.0', 'liberty'),
133- ('8.0.0', 'mitaka'),
134+ ('7.0', 'liberty'),
135+ ('8.0', 'mitaka'),
136 ]),
137 'keystone': OrderedDict([
138- ('8.0.0', 'liberty'),
139- ('9.0.0', 'mitaka'),
140+ ('8.0', 'liberty'),
141+ ('9.0', 'mitaka'),
142 ]),
143 'horizon-common': OrderedDict([
144- ('8.0.0', 'liberty'),
145- ('9.0.0', 'mitaka'),
146+ ('8.0', 'liberty'),
147+ ('9.0', 'mitaka'),
148 ]),
149 'ceilometer-common': OrderedDict([
150- ('5.0.0', 'liberty'),
151- ('6.0.0', 'mitaka'),
152+ ('5.0', 'liberty'),
153+ ('6.0', 'mitaka'),
154 ]),
155 'heat-common': OrderedDict([
156- ('5.0.0', 'liberty'),
157- ('6.0.0', 'mitaka'),
158+ ('5.0', 'liberty'),
159+ ('6.0', 'mitaka'),
160 ]),
161 'glance-common': OrderedDict([
162- ('11.0.0', 'liberty'),
163- ('12.0.0', 'mitaka'),
164+ ('11.0', 'liberty'),
165+ ('12.0', 'mitaka'),
166 ]),
167 'openstack-dashboard': OrderedDict([
168- ('8.0.0', 'liberty'),
169- ('9.0.0', 'mitaka'),
170+ ('8.0', 'liberty'),
171+ ('9.0', 'mitaka'),
172 ]),
173 }
174
175@@ -227,6 +226,33 @@
176 error_out(e)
177
178
179+def get_os_version_codename_swift(codename):
180+ '''Determine OpenStack version number of swift from codename.'''
181+ for k, v in six.iteritems(SWIFT_CODENAMES):
182+ if k == codename:
183+ return v[-1]
184+ e = 'Could not derive swift version for '\
185+ 'codename: %s' % codename
186+ error_out(e)
187+
188+
189+def get_swift_codename(version):
190+ '''Determine OpenStack codename that corresponds to swift version.'''
191+ codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v]
192+ if len(codenames) > 1:
193+ # If more than one release codename contains this version we determine
194+ # the actual codename based on the highest available install source.
195+ for codename in reversed(codenames):
196+ releases = UBUNTU_OPENSTACK_RELEASE
197+ release = [k for k, v in six.iteritems(releases) if codename in v]
198+ ret = subprocess.check_output(['apt-cache', 'policy', 'swift'])
199+ if codename in ret or release[0] in ret:
200+ return codename
201+ elif len(codenames) == 1:
202+ return codenames[0]
203+ return None
204+
205+
206 def get_os_codename_package(package, fatal=True):
207 '''Derive OpenStack release codename from an installed package.'''
208 import apt_pkg as apt
209@@ -251,7 +277,14 @@
210 error_out(e)
211
212 vers = apt.upstream_version(pkg.current_ver.ver_str)
213- match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
214+ if 'swift' in pkg.name:
215+ # Fully x.y.z match for swift versions
216+ match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
217+ else:
218+ # x.y match only for 20XX.X
219+ # and ignore patch level for other packages
220+ match = re.match('^(\d+)\.(\d+)', vers)
221+
222 if match:
223 vers = match.group(0)
224
225@@ -263,13 +296,8 @@
226 # < Liberty co-ordinated project versions
227 try:
228 if 'swift' in pkg.name:
229- swift_vers = vers[:5]
230- if swift_vers not in SWIFT_CODENAMES:
231- # Deal with 1.10.0 upward
232- swift_vers = vers[:6]
233- return SWIFT_CODENAMES[swift_vers]
234+ return get_swift_codename(vers)
235 else:
236- vers = vers[:6]
237 return OPENSTACK_CODENAMES[vers]
238 except KeyError:
239 if not fatal:
240@@ -287,12 +315,14 @@
241
242 if 'swift' in pkg:
243 vers_map = SWIFT_CODENAMES
244+ for cname, version in six.iteritems(vers_map):
245+ if cname == codename:
246+ return version[-1]
247 else:
248 vers_map = OPENSTACK_CODENAMES
249-
250- for version, cname in six.iteritems(vers_map):
251- if cname == codename:
252- return version
253+ for version, cname in six.iteritems(vers_map):
254+ if cname == codename:
255+ return version
256 # e = "Could not determine OpenStack version for package: %s" % pkg
257 # error_out(e)
258
259@@ -458,11 +488,16 @@
260 cur_vers = get_os_version_package(package)
261 if "swift" in package:
262 codename = get_os_codename_install_source(src)
263- available_vers = get_os_version_codename(codename, SWIFT_CODENAMES)
264+ avail_vers = get_os_version_codename_swift(codename)
265 else:
266- available_vers = get_os_version_install_source(src)
267+ avail_vers = get_os_version_install_source(src)
268 apt.init()
269- return apt.version_compare(available_vers, cur_vers) == 1
270+ if "swift" in package:
271+ major_cur_vers = cur_vers.split('.', 1)[0]
272+ major_avail_vers = avail_vers.split('.', 1)[0]
273+ major_diff = apt.version_compare(major_avail_vers, major_cur_vers)
274+ return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0)
275+ return apt.version_compare(avail_vers, cur_vers) == 1
276
277
278 def ensure_block_device(block_device):
279@@ -591,7 +626,7 @@
280 return yaml.load(projects_yaml)
281
282
283-def git_clone_and_install(projects_yaml, core_project, depth=1):
284+def git_clone_and_install(projects_yaml, core_project):
285 """
286 Clone/install all specified OpenStack repositories.
287
288@@ -641,6 +676,9 @@
289 for p in projects['repositories']:
290 repo = p['repository']
291 branch = p['branch']
292+ depth = '1'
293+ if 'depth' in p.keys():
294+ depth = p['depth']
295 if p['name'] == 'requirements':
296 repo_dir = _git_clone_and_install_single(repo, branch, depth,
297 parent_dir, http_proxy,
298@@ -685,19 +723,13 @@
299 """
300 Clone and install a single git repository.
301 """
302- dest_dir = os.path.join(parent_dir, os.path.basename(repo))
303-
304 if not os.path.exists(parent_dir):
305 juju_log('Directory already exists at {}. '
306 'No need to create directory.'.format(parent_dir))
307 os.mkdir(parent_dir)
308
309- if not os.path.exists(dest_dir):
310- juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
311- repo_dir = install_remote(repo, dest=parent_dir, branch=branch,
312- depth=depth)
313- else:
314- repo_dir = dest_dir
315+ juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
316+ repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth)
317
318 venv = os.path.join(parent_dir, 'venv')
319
320
321=== modified file 'hooks/charmhelpers/core/host.py'
322--- hooks/charmhelpers/core/host.py 2016-01-04 21:26:40 +0000
323+++ hooks/charmhelpers/core/host.py 2016-01-17 21:20:37 +0000
324@@ -72,7 +72,9 @@
325 stopped = service_stop(service_name)
326 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
327 sysv_file = os.path.join(initd_dir, service_name)
328- if os.path.exists(upstart_file):
329+ if init_is_systemd():
330+ service('disable', service_name)
331+ elif os.path.exists(upstart_file):
332 override_path = os.path.join(
333 init_dir, '{}.override'.format(service_name))
334 with open(override_path, 'w') as fh:
335@@ -80,9 +82,9 @@
336 elif os.path.exists(sysv_file):
337 subprocess.check_call(["update-rc.d", service_name, "disable"])
338 else:
339- # XXX: Support SystemD too
340 raise ValueError(
341- "Unable to detect {0} as either Upstart {1} or SysV {2}".format(
342+ "Unable to detect {0} as SystemD, Upstart {1} or"
343+ " SysV {2}".format(
344 service_name, upstart_file, sysv_file))
345 return stopped
346
347@@ -94,7 +96,9 @@
348 Reenable starting again at boot. Start the service"""
349 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
350 sysv_file = os.path.join(initd_dir, service_name)
351- if os.path.exists(upstart_file):
352+ if init_is_systemd():
353+ service('enable', service_name)
354+ elif os.path.exists(upstart_file):
355 override_path = os.path.join(
356 init_dir, '{}.override'.format(service_name))
357 if os.path.exists(override_path):
358@@ -102,9 +106,9 @@
359 elif os.path.exists(sysv_file):
360 subprocess.check_call(["update-rc.d", service_name, "enable"])
361 else:
362- # XXX: Support SystemD too
363 raise ValueError(
364- "Unable to detect {0} as either Upstart {1} or SysV {2}".format(
365+ "Unable to detect {0} as SystemD, Upstart {1} or"
366+ " SysV {2}".format(
367 service_name, upstart_file, sysv_file))
368
369 started = service_running(service_name)
370@@ -115,23 +119,29 @@
371
372 def service(action, service_name):
373 """Control a system service"""
374- cmd = ['service', service_name, action]
375+ if init_is_systemd():
376+ cmd = ['systemctl', action, service_name]
377+ else:
378+ cmd = ['service', service_name, action]
379 return subprocess.call(cmd) == 0
380
381
382-def service_running(service):
383+def service_running(service_name):
384 """Determine whether a system service is running"""
385- try:
386- output = subprocess.check_output(
387- ['service', service, 'status'],
388- stderr=subprocess.STDOUT).decode('UTF-8')
389- except subprocess.CalledProcessError:
390- return False
391+ if init_is_systemd():
392+ return service('is-active', service_name)
393 else:
394- if ("start/running" in output or "is running" in output):
395- return True
396- else:
397+ try:
398+ output = subprocess.check_output(
399+ ['service', service_name, 'status'],
400+ stderr=subprocess.STDOUT).decode('UTF-8')
401+ except subprocess.CalledProcessError:
402 return False
403+ else:
404+ if ("start/running" in output or "is running" in output):
405+ return True
406+ else:
407+ return False
408
409
410 def service_available(service_name):
411@@ -146,10 +156,17 @@
412 return True
413
414
415+SYSTEMD_SYSTEM = '/run/systemd/system'
416+
417+
418+def init_is_systemd():
419+ """Return True if the host system uses systemd, False otherwise."""
420+ return os.path.isdir(SYSTEMD_SYSTEM)
421+
422+
423 def adduser(username, password=None, shell='/bin/bash', system_user=False,
424 primary_group=None, secondary_groups=None):
425- """
426- Add a user to the system.
427+ """Add a user to the system.
428
429 Will log but otherwise succeed if the user already exists.
430
431@@ -157,7 +174,7 @@
432 :param str password: Password for user; if ``None``, create a system user
433 :param str shell: The default shell for the user
434 :param bool system_user: Whether to create a login or system user
435- :param str primary_group: Primary group for user; defaults to their username
436+ :param str primary_group: Primary group for user; defaults to username
437 :param list secondary_groups: Optional list of additional groups
438
439 :returns: The password database entry struct, as returned by `pwd.getpwnam`
440@@ -283,14 +300,12 @@
441
442
443 def fstab_remove(mp):
444- """Remove the given mountpoint entry from /etc/fstab
445- """
446+ """Remove the given mountpoint entry from /etc/fstab"""
447 return Fstab.remove_by_mountpoint(mp)
448
449
450 def fstab_add(dev, mp, fs, options=None):
451- """Adds the given device entry to the /etc/fstab file
452- """
453+ """Adds the given device entry to the /etc/fstab file"""
454 return Fstab.add(dev, mp, fs, options=options)
455
456
457@@ -346,8 +361,7 @@
458
459
460 def file_hash(path, hash_type='md5'):
461- """
462- Generate a hash checksum of the contents of 'path' or None if not found.
463+ """Generate a hash checksum of the contents of 'path' or None if not found.
464
465 :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`,
466 such as md5, sha1, sha256, sha512, etc.
467@@ -362,10 +376,9 @@
468
469
470 def path_hash(path):
471- """
472- Generate a hash checksum of all files matching 'path'. Standard wildcards
473- like '*' and '?' are supported, see documentation for the 'glob' module for
474- more information.
475+ """Generate a hash checksum of all files matching 'path'. Standard
476+ wildcards like '*' and '?' are supported, see documentation for the 'glob'
477+ module for more information.
478
479 :return: dict: A { filename: hash } dictionary for all matched files.
480 Empty if none found.
481@@ -377,8 +390,7 @@
482
483
484 def check_hash(path, checksum, hash_type='md5'):
485- """
486- Validate a file using a cryptographic checksum.
487+ """Validate a file using a cryptographic checksum.
488
489 :param str checksum: Value of the checksum used to validate the file.
490 :param str hash_type: Hash algorithm used to generate `checksum`.
491@@ -393,6 +405,7 @@
492
493
494 class ChecksumError(ValueError):
495+ """A class derived from Value error to indicate the checksum failed."""
496 pass
497
498
499@@ -498,7 +511,7 @@
500
501
502 def list_nics(nic_type=None):
503- '''Return a list of nics of given type(s)'''
504+ """Return a list of nics of given type(s)"""
505 if isinstance(nic_type, six.string_types):
506 int_types = [nic_type]
507 else:
508@@ -540,12 +553,13 @@
509
510
511 def set_nic_mtu(nic, mtu):
512- '''Set MTU on a network interface'''
513+ """Set the Maximum Transmission Unit (MTU) on a network interface."""
514 cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
515 subprocess.check_call(cmd)
516
517
518 def get_nic_mtu(nic):
519+ """Return the Maximum Transmission Unit (MTU) for a network interface."""
520 cmd = ['ip', 'addr', 'show', nic]
521 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
522 mtu = ""
523@@ -557,6 +571,7 @@
524
525
526 def get_nic_hwaddr(nic):
527+ """Return the Media Access Control (MAC) for a network interface."""
528 cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
529 ip_output = subprocess.check_output(cmd).decode('UTF-8')
530 hwaddr = ""
531@@ -567,7 +582,7 @@
532
533
534 def cmp_pkgrevno(package, revno, pkgcache=None):
535- '''Compare supplied revno with the revno of the installed package
536+ """Compare supplied revno with the revno of the installed package
537
538 * 1 => Installed revno is greater than supplied arg
539 * 0 => Installed revno is the same as supplied arg
540@@ -576,7 +591,7 @@
541 This function imports apt_cache function from charmhelpers.fetch if
542 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
543 you call this function, or pass an apt_pkg.Cache() instance.
544- '''
545+ """
546 import apt_pkg
547 if not pkgcache:
548 from charmhelpers.fetch import apt_cache
549@@ -586,19 +601,27 @@
550
551
552 @contextmanager
553-def chdir(d):
554+def chdir(directory):
555+ """Change the current working directory to a different directory for a code
556+ block and return the previous directory after the block exits. Useful to
557+ run commands from a specificed directory.
558+
559+ :param str directory: The directory path to change to for this context.
560+ """
561 cur = os.getcwd()
562 try:
563- yield os.chdir(d)
564+ yield os.chdir(directory)
565 finally:
566 os.chdir(cur)
567
568
569 def chownr(path, owner, group, follow_links=True, chowntopdir=False):
570- """
571- Recursively change user and group ownership of files and directories
572+ """Recursively change user and group ownership of files and directories
573 in given path. Doesn't chown path itself by default, only its children.
574
575+ :param str path: The string path to start changing ownership.
576+ :param str owner: The owner string to use when looking up the uid.
577+ :param str group: The group string to use when looking up the gid.
578 :param bool follow_links: Also Chown links if True
579 :param bool chowntopdir: Also chown path itself if True
580 """
581@@ -622,15 +645,23 @@
582
583
584 def lchownr(path, owner, group):
585+ """Recursively change user and group ownership of files and directories
586+ in a given path, not following symbolic links. See the documentation for
587+ 'os.lchown' for more information.
588+
589+ :param str path: The string path to start changing ownership.
590+ :param str owner: The owner string to use when looking up the uid.
591+ :param str group: The group string to use when looking up the gid.
592+ """
593 chownr(path, owner, group, follow_links=False)
594
595
596 def get_total_ram():
597- '''The total amount of system RAM in bytes.
598+ """The total amount of system RAM in bytes.
599
600 This is what is reported by the OS, and may be overcommitted when
601 there are multiple containers hosted on the same machine.
602- '''
603+ """
604 with open('/proc/meminfo', 'r') as f:
605 for line in f.readlines():
606 if line:
607
608=== modified file 'hooks/charmhelpers/fetch/giturl.py'
609--- hooks/charmhelpers/fetch/giturl.py 2016-01-04 21:26:40 +0000
610+++ hooks/charmhelpers/fetch/giturl.py 2016-01-17 21:20:37 +0000
611@@ -22,7 +22,6 @@
612 filter_installed_packages,
613 apt_install,
614 )
615-from charmhelpers.core.host import mkdir
616
617 if filter_installed_packages(['git']) != []:
618 apt_install(['git'])
619@@ -50,8 +49,8 @@
620 cmd = ['git', '-C', dest, 'pull', source, branch]
621 else:
622 cmd = ['git', 'clone', source, dest, '--branch', branch]
623- if depth:
624- cmd.extend(['--depth', depth])
625+ if depth:
626+ cmd.extend(['--depth', depth])
627 check_call(cmd)
628
629 def install(self, source, branch="master", dest=None, depth=None):
630@@ -62,8 +61,6 @@
631 else:
632 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
633 branch_name)
634- if not os.path.exists(dest_dir):
635- mkdir(dest_dir, perms=0o755)
636 try:
637 self.clone(source, dest_dir, branch, depth)
638 except OSError as e:
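
One of the larger pieces of this charm-helpers sync is the systemd awareness added to the service helpers in hooks/charmhelpers/core/host.py: service_pause, service_resume, service and service_running now detect systemd and drive systemctl instead of the Upstart/SysV tooling. A minimal standalone sketch of the resulting dispatch pattern, condensed from the hunks above (illustrative only, not the charm-helpers module verbatim):

    import os
    import subprocess

    SYSTEMD_SYSTEM = '/run/systemd/system'

    def init_is_systemd():
        """Return True if the host is running systemd."""
        return os.path.isdir(SYSTEMD_SYSTEM)

    def service(action, service_name):
        """Run a service action via systemctl or the legacy 'service' wrapper."""
        if init_is_systemd():
            cmd = ['systemctl', action, service_name]
        else:
            cmd = ['service', service_name, action]
        return subprocess.call(cmd) == 0

    def service_running(service_name):
        """Return True if the named service is currently running."""
        if init_is_systemd():
            # 'systemctl is-active' exits 0 only for an active unit.
            return service('is-active', service_name)
        try:
            output = subprocess.check_output(
                ['service', service_name, 'status'],
                stderr=subprocess.STDOUT).decode('UTF-8')
        except subprocess.CalledProcessError:
            return False
        return 'start/running' in output or 'is running' in output

For example, service_running('haproxy') on a trusty (Upstart) unit still parses the 'service haproxy status' output, while on a systemd host it simply checks the exit status of 'systemctl is-active haproxy'. Testing for the /run/systemd/system directory is the same check systemd's own sd_booted() performs, so it distinguishes systemd from Upstart/SysV hosts without shelling out.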
