Merge lp:~teknico/charm-helpers/lint-fixes into lp:charm-helpers
- lint-fixes
- Merge into devel
Proposed by
Nicola Larosa
Status: Merged
Merge reported by: James Page
Merged at revision: not available
Proposed branch: lp:~teknico/charm-helpers/lint-fixes
Merge into: lp:charm-helpers
Diff against target:
1224 lines (+172/-156) 22 files modified
README.test (+3/-0) bin/contrib/saltstack/salt-call (+3/-2) charmhelpers/cli/commands.py (+2/-2) charmhelpers/cli/host.py (+2/-1) charmhelpers/core/hookenv.py (+33/-32) charmhelpers/core/host.py (+19/-19) charmhelpers/fetch/__init__.py (+18/-16) charmhelpers/fetch/archiveurl.py (+5/-4) charmhelpers/fetch/bzrurl.py (+9/-7) charmhelpers/payload/__init__.py (+3/-1) charmhelpers/payload/archive.py (+3/-2) charmhelpers/payload/execd.py (+2/-1) tests/contrib/hahelpers/test_ceph_utils.py (+3/-3) tests/core/test_hookenv.py (+4/-4) tests/core/test_host.py (+7/-6) tests/fetch/test_archiveurl.py (+4/-2) tests/fetch/test_bzrurl.py (+3/-5) tests/fetch/test_fetch.py (+12/-14) tests/payload/test_archive.py (+6/-4) tests/payload/test_execd.py (+6/-4) tests/tools/test_charm_helper_sync.py (+19/-19) tools/charm_helpers_sync/charm_helpers_sync.py (+6/-8) |
To merge this branch: bzr merge lp:~teknico/charm-helpers/lint-fixes
Related bugs:
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| James Page | Abstain | | |
| Matthew Wedgwood | Pending | | |

Review via email: mp+181859@code.launchpad.net
Commit message
Description of the change
Lint and styling fixes. Sorry for the diff size.
To post a comment you must log in.
74. By Nicola Larosa

    Merge from trunk, one conflict resolved.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'README.test' | |||
2 | --- README.test 2013-05-22 21:11:01 +0000 | |||
3 | +++ README.test 2013-09-02 08:01:21 +0000 | |||
4 | @@ -1,7 +1,10 @@ | |||
5 | 1 | Required Packages for Running Tests | 1 | Required Packages for Running Tests |
6 | 2 | ----------------------------------- | 2 | ----------------------------------- |
7 | 3 | |||
8 | 3 | python-shelltoolbox | 4 | python-shelltoolbox |
9 | 4 | python-tempita | 5 | python-tempita |
10 | 5 | python-nose | 6 | python-nose |
11 | 6 | python-mock | 7 | python-mock |
12 | 7 | python-testtools | 8 | python-testtools |
13 | 9 | |||
14 | 10 | Also, install flake8 from PyPI. | ||
15 | 8 | 11 | ||
16 | === modified file 'bin/contrib/saltstack/salt-call' | |||
17 | --- bin/contrib/saltstack/salt-call 2013-06-19 09:54:19 +0000 | |||
18 | +++ bin/contrib/saltstack/salt-call 2013-09-02 08:01:21 +0000 | |||
19 | @@ -1,8 +1,9 @@ | |||
20 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python |
22 | 2 | ''' | 2 | |
23 | 3 | """ | ||
24 | 3 | Directly call a salt command in the modules, does not require a running salt | 4 | Directly call a salt command in the modules, does not require a running salt |
25 | 4 | minion to run. | 5 | minion to run. |
27 | 5 | ''' | 6 | """ |
28 | 6 | 7 | ||
29 | 7 | from salt.scripts import salt_call | 8 | from salt.scripts import salt_call |
30 | 8 | 9 | ||
31 | 9 | 10 | ||
32 | === modified file 'charmhelpers/cli/commands.py' | |||
33 | --- charmhelpers/cli/commands.py 2013-06-19 23:00:23 +0000 | |||
34 | +++ charmhelpers/cli/commands.py 2013-09-02 08:01:21 +0000 | |||
35 | @@ -1,2 +1,2 @@ | |||
38 | 1 | from . import CommandLine | 1 | # from . import CommandLine |
39 | 2 | import host | 2 | # import host |
40 | 3 | 3 | ||
41 | === modified file 'charmhelpers/cli/host.py' | |||
42 | --- charmhelpers/cli/host.py 2013-06-20 15:03:24 +0000 | |||
43 | +++ charmhelpers/cli/host.py 2013-09-02 08:01:21 +0000 | |||
44 | @@ -4,9 +4,10 @@ | |||
45 | 4 | 4 | ||
46 | 5 | @cmdline.subcommand() | 5 | @cmdline.subcommand() |
47 | 6 | def mounts(): | 6 | def mounts(): |
49 | 7 | "List mounts" | 7 | """List mounts.""" |
50 | 8 | return host.mounts() | 8 | return host.mounts() |
51 | 9 | 9 | ||
52 | 10 | |||
53 | 10 | @cmdline.subcommand_builder('service', description="Control system services") | 11 | @cmdline.subcommand_builder('service', description="Control system services") |
54 | 11 | def service(subparser): | 12 | def service(subparser): |
55 | 12 | subparser.add_argument("action", help="The action to perform (start, stop, etc...)") | 13 | subparser.add_argument("action", help="The action to perform (start, stop, etc...)") |
56 | 13 | 14 | ||
57 | === modified file 'charmhelpers/core/hookenv.py' | |||
58 | --- charmhelpers/core/hookenv.py 2013-07-18 16:13:49 +0000 | |||
59 | +++ charmhelpers/core/hookenv.py 2013-09-02 08:01:21 +0000 | |||
60 | @@ -1,14 +1,15 @@ | |||
61 | 1 | "Interactions with the Juju environment" | ||
62 | 2 | # Copyright 2013 Canonical Ltd. | 1 | # Copyright 2013 Canonical Ltd. |
63 | 3 | # | 2 | # |
64 | 4 | # Authors: | 3 | # Authors: |
65 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 4 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
66 | 6 | 5 | ||
67 | 6 | """Interactions with the Juju environment.""" | ||
68 | 7 | |||
69 | 8 | import json | ||
70 | 7 | import os | 9 | import os |
71 | 8 | import json | ||
72 | 9 | import yaml | ||
73 | 10 | import subprocess | 10 | import subprocess |
74 | 11 | import UserDict | 11 | import UserDict |
75 | 12 | import yaml | ||
76 | 12 | 13 | ||
77 | 13 | CRITICAL = "CRITICAL" | 14 | CRITICAL = "CRITICAL" |
78 | 14 | ERROR = "ERROR" | 15 | ERROR = "ERROR" |
79 | @@ -21,7 +22,8 @@ | |||
80 | 21 | 22 | ||
81 | 22 | 23 | ||
82 | 23 | def cached(func): | 24 | def cached(func): |
84 | 24 | ''' Cache return values for multiple executions of func + args | 25 | """ |
85 | 26 | Decorator that caches return values for multiple executions of func + args. | ||
86 | 25 | 27 | ||
87 | 26 | For example: | 28 | For example: |
88 | 27 | 29 | ||
89 | @@ -32,7 +34,7 @@ | |||
90 | 32 | unit_get('test') | 34 | unit_get('test') |
91 | 33 | 35 | ||
92 | 34 | will cache the result of unit_get + 'test' for future calls. | 36 | will cache the result of unit_get + 'test' for future calls. |
94 | 35 | ''' | 37 | """ |
95 | 36 | def wrapper(*args, **kwargs): | 38 | def wrapper(*args, **kwargs): |
96 | 37 | global cache | 39 | global cache |
97 | 38 | key = str((func, args, kwargs)) | 40 | key = str((func, args, kwargs)) |
98 | @@ -46,8 +48,8 @@ | |||
99 | 46 | 48 | ||
100 | 47 | 49 | ||
101 | 48 | def flush(key): | 50 | def flush(key): |
104 | 49 | ''' Flushes any entries from function cache where the | 51 | """Flush any entries from function cache where the |
105 | 50 | key is found in the function+args ''' | 52 | key is found in the function+args.""" |
106 | 51 | flush_list = [] | 53 | flush_list = [] |
107 | 52 | for item in cache: | 54 | for item in cache: |
108 | 53 | if key in item: | 55 | if key in item: |
109 | @@ -57,7 +59,7 @@ | |||
110 | 57 | 59 | ||
111 | 58 | 60 | ||
112 | 59 | def log(message, level=None): | 61 | def log(message, level=None): |
114 | 60 | "Write a message to the juju log" | 62 | """Write a message to the juju log.""" |
115 | 61 | command = ['juju-log'] | 63 | command = ['juju-log'] |
116 | 62 | if level: | 64 | if level: |
117 | 63 | command += ['-l', level] | 65 | command += ['-l', level] |
118 | @@ -66,7 +68,7 @@ | |||
119 | 66 | 68 | ||
120 | 67 | 69 | ||
121 | 68 | class Serializable(UserDict.IterableUserDict): | 70 | class Serializable(UserDict.IterableUserDict): |
123 | 69 | "Wrapper, an object that can be serialized to yaml or json" | 71 | """Wrapper, an object that can be serialized to yaml or json.""" |
124 | 70 | 72 | ||
125 | 71 | def __init__(self, obj): | 73 | def __init__(self, obj): |
126 | 72 | # wrap the object | 74 | # wrap the object |
127 | @@ -96,16 +98,16 @@ | |||
128 | 96 | self.data = state | 98 | self.data = state |
129 | 97 | 99 | ||
130 | 98 | def json(self): | 100 | def json(self): |
132 | 99 | "Serialize the object to json" | 101 | """Serialize the object to json.""" |
133 | 100 | return json.dumps(self.data) | 102 | return json.dumps(self.data) |
134 | 101 | 103 | ||
135 | 102 | def yaml(self): | 104 | def yaml(self): |
137 | 103 | "Serialize the object to yaml" | 105 | """Serialize the object to yaml.""" |
138 | 104 | return yaml.dump(self.data) | 106 | return yaml.dump(self.data) |
139 | 105 | 107 | ||
140 | 106 | 108 | ||
141 | 107 | def execution_environment(): | 109 | def execution_environment(): |
143 | 108 | """A convenient bundling of the current execution context""" | 110 | """A convenient bundling of the current execution context.""" |
144 | 109 | context = {} | 111 | context = {} |
145 | 110 | context['conf'] = config() | 112 | context['conf'] = config() |
146 | 111 | if relation_id(): | 113 | if relation_id(): |
147 | @@ -119,38 +121,38 @@ | |||
148 | 119 | 121 | ||
149 | 120 | 122 | ||
150 | 121 | def in_relation_hook(): | 123 | def in_relation_hook(): |
152 | 122 | "Determine whether we're running in a relation hook" | 124 | """Determine whether we're running in a relation hook.""" |
153 | 123 | return 'JUJU_RELATION' in os.environ | 125 | return 'JUJU_RELATION' in os.environ |
154 | 124 | 126 | ||
155 | 125 | 127 | ||
156 | 126 | def relation_type(): | 128 | def relation_type(): |
158 | 127 | "The scope for the current relation hook" | 129 | """The scope for the current relation hook.""" |
159 | 128 | return os.environ.get('JUJU_RELATION', None) | 130 | return os.environ.get('JUJU_RELATION', None) |
160 | 129 | 131 | ||
161 | 130 | 132 | ||
162 | 131 | def relation_id(): | 133 | def relation_id(): |
164 | 132 | "The relation ID for the current relation hook" | 134 | """The relation ID for the current relation hook.""" |
165 | 133 | return os.environ.get('JUJU_RELATION_ID', None) | 135 | return os.environ.get('JUJU_RELATION_ID', None) |
166 | 134 | 136 | ||
167 | 135 | 137 | ||
168 | 136 | def local_unit(): | 138 | def local_unit(): |
170 | 137 | "Local unit ID" | 139 | """Local unit ID.""" |
171 | 138 | return os.environ['JUJU_UNIT_NAME'] | 140 | return os.environ['JUJU_UNIT_NAME'] |
172 | 139 | 141 | ||
173 | 140 | 142 | ||
174 | 141 | def remote_unit(): | 143 | def remote_unit(): |
176 | 142 | "The remote unit for the current relation hook" | 144 | """The remote unit for the current relation hook.""" |
177 | 143 | return os.environ['JUJU_REMOTE_UNIT'] | 145 | return os.environ['JUJU_REMOTE_UNIT'] |
178 | 144 | 146 | ||
179 | 145 | 147 | ||
180 | 146 | def service_name(): | 148 | def service_name(): |
182 | 147 | "The name service group this unit belongs to" | 149 | """The name service group this unit belongs to.""" |
183 | 148 | return local_unit().split('/')[0] | 150 | return local_unit().split('/')[0] |
184 | 149 | 151 | ||
185 | 150 | 152 | ||
186 | 151 | @cached | 153 | @cached |
187 | 152 | def config(scope=None): | 154 | def config(scope=None): |
189 | 153 | "Juju charm configuration" | 155 | """Juju charm configuration.""" |
190 | 154 | config_cmd_line = ['config-get'] | 156 | config_cmd_line = ['config-get'] |
191 | 155 | if scope is not None: | 157 | if scope is not None: |
192 | 156 | config_cmd_line.append(scope) | 158 | config_cmd_line.append(scope) |
193 | @@ -192,7 +194,7 @@ | |||
194 | 192 | 194 | ||
195 | 193 | @cached | 195 | @cached |
196 | 194 | def relation_ids(reltype=None): | 196 | def relation_ids(reltype=None): |
198 | 195 | "A list of relation_ids" | 197 | """A list of relation_ids.""" |
199 | 196 | reltype = reltype or relation_type() | 198 | reltype = reltype or relation_type() |
200 | 197 | relid_cmd_line = ['relation-ids', '--format=json'] | 199 | relid_cmd_line = ['relation-ids', '--format=json'] |
201 | 198 | if reltype is not None: | 200 | if reltype is not None: |
202 | @@ -203,7 +205,7 @@ | |||
203 | 203 | 205 | ||
204 | 204 | @cached | 206 | @cached |
205 | 205 | def related_units(relid=None): | 207 | def related_units(relid=None): |
207 | 206 | "A list of related units" | 208 | """A list of related units.""" |
208 | 207 | relid = relid or relation_id() | 209 | relid = relid or relation_id() |
209 | 208 | units_cmd_line = ['relation-list', '--format=json'] | 210 | units_cmd_line = ['relation-list', '--format=json'] |
210 | 209 | if relid is not None: | 211 | if relid is not None: |
211 | @@ -213,7 +215,7 @@ | |||
212 | 213 | 215 | ||
213 | 214 | @cached | 216 | @cached |
214 | 215 | def relation_for_unit(unit=None, rid=None): | 217 | def relation_for_unit(unit=None, rid=None): |
216 | 216 | "Get the json represenation of a unit's relation" | 218 | """Get the json represenation of a unit's relation.""" |
217 | 217 | unit = unit or remote_unit() | 219 | unit = unit or remote_unit() |
218 | 218 | relation = relation_get(unit=unit, rid=rid) | 220 | relation = relation_get(unit=unit, rid=rid) |
219 | 219 | for key in relation: | 221 | for key in relation: |
220 | @@ -225,7 +227,7 @@ | |||
221 | 225 | 227 | ||
222 | 226 | @cached | 228 | @cached |
223 | 227 | def relations_for_id(relid=None): | 229 | def relations_for_id(relid=None): |
225 | 228 | "Get relations of a specific relation ID" | 230 | """Get relations of a specific relation ID.""" |
226 | 229 | relation_data = [] | 231 | relation_data = [] |
227 | 230 | relid = relid or relation_ids() | 232 | relid = relid or relation_ids() |
228 | 231 | for unit in related_units(relid): | 233 | for unit in related_units(relid): |
229 | @@ -237,7 +239,7 @@ | |||
230 | 237 | 239 | ||
231 | 238 | @cached | 240 | @cached |
232 | 239 | def relations_of_type(reltype=None): | 241 | def relations_of_type(reltype=None): |
234 | 240 | "Get relations of a specific type" | 242 | """Get relations of a specific type.""" |
235 | 241 | relation_data = [] | 243 | relation_data = [] |
236 | 242 | reltype = reltype or relation_type() | 244 | reltype = reltype or relation_type() |
237 | 243 | for relid in relation_ids(reltype): | 245 | for relid in relation_ids(reltype): |
238 | @@ -249,7 +251,7 @@ | |||
239 | 249 | 251 | ||
240 | 250 | @cached | 252 | @cached |
241 | 251 | def relation_types(): | 253 | def relation_types(): |
243 | 252 | "Get a list of relation types supported by this charm" | 254 | """Get a list of relation types supported by this charm.""" |
244 | 253 | charmdir = os.environ.get('CHARM_DIR', '') | 255 | charmdir = os.environ.get('CHARM_DIR', '') |
245 | 254 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | 256 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
246 | 255 | md = yaml.safe_load(mdf) | 257 | md = yaml.safe_load(mdf) |
247 | @@ -278,14 +280,14 @@ | |||
248 | 278 | 280 | ||
249 | 279 | 281 | ||
250 | 280 | def open_port(port, protocol="TCP"): | 282 | def open_port(port, protocol="TCP"): |
252 | 281 | "Open a service network port" | 283 | """Open a service network port.""" |
253 | 282 | _args = ['open-port'] | 284 | _args = ['open-port'] |
254 | 283 | _args.append('{}/{}'.format(port, protocol)) | 285 | _args.append('{}/{}'.format(port, protocol)) |
255 | 284 | subprocess.check_call(_args) | 286 | subprocess.check_call(_args) |
256 | 285 | 287 | ||
257 | 286 | 288 | ||
258 | 287 | def close_port(port, protocol="TCP"): | 289 | def close_port(port, protocol="TCP"): |
260 | 288 | "Close a service network port" | 290 | """Close a service network port.""" |
261 | 289 | _args = ['close-port'] | 291 | _args = ['close-port'] |
262 | 290 | _args.append('{}/{}'.format(port, protocol)) | 292 | _args.append('{}/{}'.format(port, protocol)) |
263 | 291 | subprocess.check_call(_args) | 293 | subprocess.check_call(_args) |
264 | @@ -327,11 +329,10 @@ | |||
265 | 327 | def wrapper(decorated): | 329 | def wrapper(decorated): |
266 | 328 | for hook_name in hook_names: | 330 | for hook_name in hook_names: |
267 | 329 | self.register(hook_name, decorated) | 331 | self.register(hook_name, decorated) |
273 | 330 | else: | 332 | self.register(decorated.__name__, decorated) |
274 | 331 | self.register(decorated.__name__, decorated) | 333 | if '_' in decorated.__name__: |
275 | 332 | if '_' in decorated.__name__: | 334 | self.register( |
276 | 333 | self.register( | 335 | decorated.__name__.replace('_', '-'), decorated) |
272 | 334 | decorated.__name__.replace('_', '-'), decorated) | ||
277 | 335 | return decorated | 336 | return decorated |
278 | 336 | return wrapper | 337 | return wrapper |
279 | 337 | 338 | ||
280 | 338 | 339 | ||
281 | === modified file 'charmhelpers/core/host.py' | |||
282 | --- charmhelpers/core/host.py 2013-08-23 16:42:43 +0000 | |||
283 | +++ charmhelpers/core/host.py 2013-09-02 08:01:21 +0000 | |||
284 | @@ -1,19 +1,19 @@ | |||
285 | 1 | """Tools for working with the host system""" | ||
286 | 2 | # Copyright 2012 Canonical Ltd. | 1 | # Copyright 2012 Canonical Ltd. |
287 | 3 | # | 2 | # |
288 | 4 | # Authors: | 3 | # Authors: |
289 | 5 | # Nick Moffitt <nick.moffitt@canonical.com> | 4 | # Nick Moffitt <nick.moffitt@canonical.com> |
290 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 5 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
291 | 7 | 6 | ||
292 | 7 | """Tools for working with the host system.""" | ||
293 | 8 | |||
294 | 9 | from collections import OrderedDict | ||
295 | 10 | import grp | ||
296 | 11 | import hashlib | ||
297 | 8 | import os | 12 | import os |
298 | 9 | import pwd | 13 | import pwd |
299 | 10 | import grp | ||
300 | 11 | import random | 14 | import random |
301 | 12 | import string | 15 | import string |
302 | 13 | import subprocess | 16 | import subprocess |
303 | 14 | import hashlib | ||
304 | 15 | |||
305 | 16 | from collections import OrderedDict | ||
306 | 17 | 17 | ||
307 | 18 | from hookenv import log | 18 | from hookenv import log |
308 | 19 | 19 | ||
309 | @@ -55,7 +55,7 @@ | |||
310 | 55 | 55 | ||
311 | 56 | 56 | ||
312 | 57 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | 57 | def adduser(username, password=None, shell='/bin/bash', system_user=False): |
314 | 58 | """Add a user""" | 58 | """Add a user.""" |
315 | 59 | try: | 59 | try: |
316 | 60 | user_info = pwd.getpwnam(username) | 60 | user_info = pwd.getpwnam(username) |
317 | 61 | log('user {0} already exists!'.format(username)) | 61 | log('user {0} already exists!'.format(username)) |
318 | @@ -77,7 +77,7 @@ | |||
319 | 77 | 77 | ||
320 | 78 | 78 | ||
321 | 79 | def add_user_to_group(username, group): | 79 | def add_user_to_group(username, group): |
323 | 80 | """Add a user to a group""" | 80 | """Add a user to a group.""" |
324 | 81 | cmd = [ | 81 | cmd = [ |
325 | 82 | 'gpasswd', '-a', | 82 | 'gpasswd', '-a', |
326 | 83 | username, | 83 | username, |
327 | @@ -88,7 +88,7 @@ | |||
328 | 88 | 88 | ||
329 | 89 | 89 | ||
330 | 90 | def rsync(from_path, to_path, flags='-r', options=None): | 90 | def rsync(from_path, to_path, flags='-r', options=None): |
332 | 91 | """Replicate the contents of a path""" | 91 | """Replicate the contents of a path.""" |
333 | 92 | options = options or ['--delete', '--executability'] | 92 | options = options or ['--delete', '--executability'] |
334 | 93 | cmd = ['/usr/bin/rsync', flags] | 93 | cmd = ['/usr/bin/rsync', flags] |
335 | 94 | cmd.extend(options) | 94 | cmd.extend(options) |
336 | @@ -99,7 +99,7 @@ | |||
337 | 99 | 99 | ||
338 | 100 | 100 | ||
339 | 101 | def symlink(source, destination): | 101 | def symlink(source, destination): |
341 | 102 | """Create a symbolic link""" | 102 | """Create a symbolic link.""" |
342 | 103 | log("Symlinking {} as {}".format(source, destination)) | 103 | log("Symlinking {} as {}".format(source, destination)) |
343 | 104 | cmd = [ | 104 | cmd = [ |
344 | 105 | 'ln', | 105 | 'ln', |
345 | @@ -111,7 +111,7 @@ | |||
346 | 111 | 111 | ||
347 | 112 | 112 | ||
348 | 113 | def mkdir(path, owner='root', group='root', perms=0555, force=False): | 113 | def mkdir(path, owner='root', group='root', perms=0555, force=False): |
350 | 114 | """Create a directory""" | 114 | """Create a directory.""" |
351 | 115 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | 115 | log("Making dir {} {}:{} {:o}".format(path, owner, group, |
352 | 116 | perms)) | 116 | perms)) |
353 | 117 | uid = pwd.getpwnam(owner).pw_uid | 117 | uid = pwd.getpwnam(owner).pw_uid |
354 | @@ -127,7 +127,7 @@ | |||
355 | 127 | 127 | ||
356 | 128 | 128 | ||
357 | 129 | def write_file(path, content, owner='root', group='root', perms=0444): | 129 | def write_file(path, content, owner='root', group='root', perms=0444): |
359 | 130 | """Create or overwrite a file with the contents of a string""" | 130 | """Create or overwrite a file with the contents of a string.""" |
360 | 131 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | 131 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
361 | 132 | uid = pwd.getpwnam(owner).pw_uid | 132 | uid = pwd.getpwnam(owner).pw_uid |
362 | 133 | gid = grp.getgrnam(group).gr_gid | 133 | gid = grp.getgrnam(group).gr_gid |
363 | @@ -138,7 +138,7 @@ | |||
364 | 138 | 138 | ||
365 | 139 | 139 | ||
366 | 140 | def mount(device, mountpoint, options=None, persist=False): | 140 | def mount(device, mountpoint, options=None, persist=False): |
368 | 141 | '''Mount a filesystem''' | 141 | """Mount a filesystem.""" |
369 | 142 | cmd_args = ['mount'] | 142 | cmd_args = ['mount'] |
370 | 143 | if options is not None: | 143 | if options is not None: |
371 | 144 | cmd_args.extend(['-o', options]) | 144 | cmd_args.extend(['-o', options]) |
372 | @@ -155,7 +155,7 @@ | |||
373 | 155 | 155 | ||
374 | 156 | 156 | ||
375 | 157 | def umount(mountpoint, persist=False): | 157 | def umount(mountpoint, persist=False): |
377 | 158 | '''Unmount a filesystem''' | 158 | """Unmount a filesystem.""" |
378 | 159 | cmd_args = ['umount', mountpoint] | 159 | cmd_args = ['umount', mountpoint] |
379 | 160 | try: | 160 | try: |
380 | 161 | subprocess.check_output(cmd_args) | 161 | subprocess.check_output(cmd_args) |
381 | @@ -169,7 +169,7 @@ | |||
382 | 169 | 169 | ||
383 | 170 | 170 | ||
384 | 171 | def mounts(): | 171 | def mounts(): |
386 | 172 | '''List of all mounted volumes as [[mountpoint,device],[...]]''' | 172 | """List of all mounted volumes as [[mountpoint,device],[...]].""" |
387 | 173 | with open('/proc/mounts') as f: | 173 | with open('/proc/mounts') as f: |
388 | 174 | # [['/mount/point','/dev/path'],[...]] | 174 | # [['/mount/point','/dev/path'],[...]] |
389 | 175 | system_mounts = [m[1::-1] for m in [l.strip().split() | 175 | system_mounts = [m[1::-1] for m in [l.strip().split() |
390 | @@ -178,7 +178,7 @@ | |||
391 | 178 | 178 | ||
392 | 179 | 179 | ||
393 | 180 | def file_hash(path): | 180 | def file_hash(path): |
395 | 181 | ''' Generate a md5 hash of the contents of 'path' or None if not found ''' | 181 | """Generate a md5 hash of the contents of 'path' or None if not found.""" |
396 | 182 | if os.path.exists(path): | 182 | if os.path.exists(path): |
397 | 183 | h = hashlib.md5() | 183 | h = hashlib.md5() |
398 | 184 | with open(path, 'r') as source: | 184 | with open(path, 'r') as source: |
399 | @@ -189,7 +189,7 @@ | |||
400 | 189 | 189 | ||
401 | 190 | 190 | ||
402 | 191 | def restart_on_change(restart_map): | 191 | def restart_on_change(restart_map): |
404 | 192 | ''' Restart services based on configuration files changing | 192 | """Restart services based on configuration files changing. |
405 | 193 | 193 | ||
406 | 194 | This function is used a decorator, for example | 194 | This function is used a decorator, for example |
407 | 195 | 195 | ||
408 | @@ -202,7 +202,7 @@ | |||
409 | 202 | In this example, the cinder-api and cinder-volume services | 202 | In this example, the cinder-api and cinder-volume services |
410 | 203 | would be restarted if /etc/ceph/ceph.conf is changed by the | 203 | would be restarted if /etc/ceph/ceph.conf is changed by the |
411 | 204 | ceph_client_changed function. | 204 | ceph_client_changed function. |
413 | 205 | ''' | 205 | """ |
414 | 206 | def wrap(f): | 206 | def wrap(f): |
415 | 207 | def wrapped_f(*args): | 207 | def wrapped_f(*args): |
416 | 208 | checksums = {} | 208 | checksums = {} |
417 | @@ -220,7 +220,7 @@ | |||
418 | 220 | 220 | ||
419 | 221 | 221 | ||
420 | 222 | def lsb_release(): | 222 | def lsb_release(): |
422 | 223 | '''Return /etc/lsb-release in a dict''' | 223 | """Return /etc/lsb-release in a dict.""" |
423 | 224 | d = {} | 224 | d = {} |
424 | 225 | with open('/etc/lsb-release', 'r') as lsb: | 225 | with open('/etc/lsb-release', 'r') as lsb: |
425 | 226 | for l in lsb: | 226 | for l in lsb: |
426 | @@ -230,7 +230,7 @@ | |||
427 | 230 | 230 | ||
428 | 231 | 231 | ||
429 | 232 | def pwgen(length=None): | 232 | def pwgen(length=None): |
431 | 233 | '''Generate a random pasword.''' | 233 | """Generate a random password.""" |
432 | 234 | if length is None: | 234 | if length is None: |
433 | 235 | length = random.choice(range(35, 45)) | 235 | length = random.choice(range(35, 45)) |
434 | 236 | alphanumeric_chars = [ | 236 | alphanumeric_chars = [ |
435 | 237 | 237 | ||
436 | === modified file 'charmhelpers/fetch/__init__.py' | |||
437 | --- charmhelpers/fetch/__init__.py 2013-08-21 11:45:37 +0000 | |||
438 | +++ charmhelpers/fetch/__init__.py 2013-09-02 08:01:21 +0000 | |||
439 | @@ -1,18 +1,19 @@ | |||
440 | 1 | import importlib | 1 | import importlib |
445 | 2 | from yaml import safe_load | 2 | import subprocess |
442 | 3 | from charmhelpers.core.host import ( | ||
443 | 4 | lsb_release | ||
444 | 5 | ) | ||
446 | 6 | from urlparse import ( | 3 | from urlparse import ( |
447 | 7 | urlparse, | 4 | urlparse, |
448 | 8 | urlunparse, | 5 | urlunparse, |
449 | 9 | ) | 6 | ) |
451 | 10 | import subprocess | 7 | |
452 | 8 | import apt_pkg | ||
453 | 9 | from yaml import safe_load | ||
454 | 10 | |||
455 | 11 | from charmhelpers.core.hookenv import ( | 11 | from charmhelpers.core.hookenv import ( |
456 | 12 | config, | 12 | config, |
457 | 13 | log, | 13 | log, |
458 | 14 | ) | 14 | ) |
460 | 15 | import apt_pkg | 15 | from charmhelpers.core.host import lsb_release |
461 | 16 | |||
462 | 16 | 17 | ||
463 | 17 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | 18 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
464 | 18 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | 19 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
465 | @@ -23,7 +24,7 @@ | |||
466 | 23 | 24 | ||
467 | 24 | 25 | ||
468 | 25 | def filter_installed_packages(packages): | 26 | def filter_installed_packages(packages): |
470 | 26 | """Returns a list of packages that require installation""" | 27 | """Return a list of packages that require installation.""" |
471 | 27 | apt_pkg.init() | 28 | apt_pkg.init() |
472 | 28 | cache = apt_pkg.Cache() | 29 | cache = apt_pkg.Cache() |
473 | 29 | _pkgs = [] | 30 | _pkgs = [] |
474 | @@ -39,7 +40,7 @@ | |||
475 | 39 | 40 | ||
476 | 40 | 41 | ||
477 | 41 | def apt_install(packages, options=None, fatal=False): | 42 | def apt_install(packages, options=None, fatal=False): |
479 | 42 | """Install one or more packages""" | 43 | """Install one or more packages.""" |
480 | 43 | options = options or [] | 44 | options = options or [] |
481 | 44 | cmd = ['apt-get', '-y'] | 45 | cmd = ['apt-get', '-y'] |
482 | 45 | cmd.extend(options) | 46 | cmd.extend(options) |
483 | @@ -57,7 +58,7 @@ | |||
484 | 57 | 58 | ||
485 | 58 | 59 | ||
486 | 59 | def apt_update(fatal=False): | 60 | def apt_update(fatal=False): |
488 | 60 | """Update local apt cache""" | 61 | """Update local apt cache.""" |
489 | 61 | cmd = ['apt-get', 'update'] | 62 | cmd = ['apt-get', 'update'] |
490 | 62 | if fatal: | 63 | if fatal: |
491 | 63 | subprocess.check_call(cmd) | 64 | subprocess.check_call(cmd) |
492 | @@ -66,7 +67,7 @@ | |||
493 | 66 | 67 | ||
494 | 67 | 68 | ||
495 | 68 | def apt_purge(packages, fatal=False): | 69 | def apt_purge(packages, fatal=False): |
497 | 69 | """Purge one or more packages""" | 70 | """Purge one or more packages.""" |
498 | 70 | cmd = ['apt-get', '-y', 'purge'] | 71 | cmd = ['apt-get', '-y', 'purge'] |
499 | 71 | if isinstance(packages, basestring): | 72 | if isinstance(packages, basestring): |
500 | 72 | cmd.append(packages) | 73 | cmd.append(packages) |
501 | @@ -105,7 +106,7 @@ | |||
502 | 105 | sources_var='install_sources', | 106 | sources_var='install_sources', |
503 | 106 | keys_var='install_keys'): | 107 | keys_var='install_keys'): |
504 | 107 | """ | 108 | """ |
506 | 108 | Configure multiple sources from charm configuration | 109 | Configure multiple sources from charm configuration. |
507 | 109 | 110 | ||
508 | 110 | Example config: | 111 | Example config: |
509 | 111 | install_sources: | 112 | install_sources: |
510 | @@ -144,13 +145,14 @@ | |||
511 | 144 | 145 | ||
512 | 145 | def install_remote(source): | 146 | def install_remote(source): |
513 | 146 | """ | 147 | """ |
515 | 147 | Install a file tree from a remote source | 148 | Install a file tree from a remote source. |
516 | 148 | 149 | ||
517 | 149 | The specified source should be a url of the form: | 150 | The specified source should be a url of the form: |
518 | 150 | scheme://[host]/path[#[option=value][&...]] | 151 | scheme://[host]/path[#[option=value][&...]] |
519 | 151 | 152 | ||
522 | 152 | Schemes supported are based on this modules submodules | 153 | Schemes supported are based on this modules submodules. |
523 | 153 | Options supported are submodule-specific""" | 154 | Options supported are submodule-specific. |
524 | 155 | """ | ||
525 | 154 | # We ONLY check for True here because can_handle may return a string | 156 | # We ONLY check for True here because can_handle may return a string |
526 | 155 | # explaining why it can't handle a given source. | 157 | # explaining why it can't handle a given source. |
527 | 156 | handlers = [h for h in plugins() if h.can_handle(source) is True] | 158 | handlers = [h for h in plugins() if h.can_handle(source) is True] |
528 | @@ -172,7 +174,7 @@ | |||
529 | 172 | 174 | ||
530 | 173 | 175 | ||
531 | 174 | class BaseFetchHandler(object): | 176 | class BaseFetchHandler(object): |
533 | 175 | """Base class for FetchHandler implementations in fetch plugins""" | 177 | """Base class for FetchHandler implementations in fetch plugins.""" |
534 | 176 | def can_handle(self, source): | 178 | def can_handle(self, source): |
535 | 177 | """Returns True if the source can be handled. Otherwise returns | 179 | """Returns True if the source can be handled. Otherwise returns |
536 | 178 | a string explaining why it cannot""" | 180 | a string explaining why it cannot""" |
537 | @@ -187,7 +189,7 @@ | |||
538 | 187 | return urlparse(url) | 189 | return urlparse(url) |
539 | 188 | 190 | ||
540 | 189 | def base_url(self, url): | 191 | def base_url(self, url): |
542 | 190 | """Return url without querystring or fragment""" | 192 | """Return url without querystring or fragment.""" |
543 | 191 | parts = list(self.parse_url(url)) | 193 | parts = list(self.parse_url(url)) |
544 | 192 | parts[4:] = ['' for i in parts[4:]] | 194 | parts[4:] = ['' for i in parts[4:]] |
545 | 193 | return urlunparse(parts) | 195 | return urlunparse(parts) |
546 | 194 | 196 | ||
547 | === modified file 'charmhelpers/fetch/archiveurl.py' | |||
548 | --- charmhelpers/fetch/archiveurl.py 2013-07-10 18:57:39 +0000 | |||
549 | +++ charmhelpers/fetch/archiveurl.py 2013-09-02 08:01:21 +0000 | |||
550 | @@ -1,5 +1,7 @@ | |||
551 | 1 | import os | 1 | import os |
552 | 2 | import urllib2 | 2 | import urllib2 |
553 | 3 | |||
554 | 4 | from charmhelpers.core.host import mkdir | ||
555 | 3 | from charmhelpers.fetch import ( | 5 | from charmhelpers.fetch import ( |
556 | 4 | BaseFetchHandler, | 6 | BaseFetchHandler, |
557 | 5 | UnhandledSource | 7 | UnhandledSource |
558 | @@ -8,11 +10,11 @@ | |||
559 | 8 | get_archive_handler, | 10 | get_archive_handler, |
560 | 9 | extract, | 11 | extract, |
561 | 10 | ) | 12 | ) |
562 | 11 | from charmhelpers.core.host import mkdir | ||
563 | 12 | 13 | ||
564 | 13 | 14 | ||
565 | 14 | class ArchiveUrlFetchHandler(BaseFetchHandler): | 15 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
567 | 15 | """Handler for archives via generic URLs""" | 16 | """Handler for archives via generic URLs.""" |
568 | 17 | |||
569 | 16 | def can_handle(self, source): | 18 | def can_handle(self, source): |
570 | 17 | url_parts = self.parse_url(source) | 19 | url_parts = self.parse_url(source) |
571 | 18 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): | 20 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): |
572 | @@ -22,8 +24,7 @@ | |||
573 | 22 | return False | 24 | return False |
574 | 23 | 25 | ||
575 | 24 | def download(self, source, dest): | 26 | def download(self, source, dest): |
578 | 25 | # propogate all exceptions | 27 | # Propagate all exceptions: URLError, OSError, etc. |
577 | 26 | # URLError, OSError, etc | ||
579 | 27 | response = urllib2.urlopen(source) | 28 | response = urllib2.urlopen(source) |
580 | 28 | try: | 29 | try: |
581 | 29 | with open(dest, 'w') as dest_file: | 30 | with open(dest, 'w') as dest_file: |
582 | 30 | 31 | ||
583 | === modified file 'charmhelpers/fetch/bzrurl.py' | |||
584 | --- charmhelpers/fetch/bzrurl.py 2013-08-22 10:19:51 +0000 | |||
585 | +++ charmhelpers/fetch/bzrurl.py 2013-09-02 08:01:21 +0000 | |||
586 | @@ -1,10 +1,10 @@ | |||
587 | 1 | import os | 1 | import os |
588 | 2 | |||
589 | 3 | from charmhelpers.core.host import mkdir | ||
590 | 2 | from charmhelpers.fetch import ( | 4 | from charmhelpers.fetch import ( |
591 | 3 | BaseFetchHandler, | 5 | BaseFetchHandler, |
592 | 4 | UnhandledSource | 6 | UnhandledSource |
593 | 5 | ) | 7 | ) |
594 | 6 | from charmhelpers.core.host import mkdir | ||
595 | 7 | |||
596 | 8 | try: | 8 | try: |
597 | 9 | from bzrlib.branch import Branch | 9 | from bzrlib.branch import Branch |
598 | 10 | except ImportError: | 10 | except ImportError: |
599 | @@ -12,8 +12,10 @@ | |||
600 | 12 | apt_install("python-bzrlib") | 12 | apt_install("python-bzrlib") |
601 | 13 | from bzrlib.branch import Branch | 13 | from bzrlib.branch import Branch |
602 | 14 | 14 | ||
603 | 15 | |||
604 | 15 | class BzrUrlFetchHandler(BaseFetchHandler): | 16 | class BzrUrlFetchHandler(BaseFetchHandler): |
606 | 16 | """Handler for bazaar branches via generic and lp URLs""" | 17 | """Handler for bazaar branches via generic and lp URLs.""" |
607 | 18 | |||
608 | 17 | def can_handle(self, source): | 19 | def can_handle(self, source): |
609 | 18 | url_parts = self.parse_url(source) | 20 | url_parts = self.parse_url(source) |
610 | 19 | if url_parts.scheme not in ('bzr+ssh', 'lp'): | 21 | if url_parts.scheme not in ('bzr+ssh', 'lp'): |
611 | @@ -23,7 +25,7 @@ | |||
612 | 23 | 25 | ||
613 | 24 | def branch(self, source, dest): | 26 | def branch(self, source, dest): |
614 | 25 | url_parts = self.parse_url(source) | 27 | url_parts = self.parse_url(source) |
616 | 26 | # If we use lp:branchname scheme we need to load plugins | 28 | # If we use lp:branchname scheme we need to load plugins. |
617 | 27 | if not self.can_handle(source): | 29 | if not self.can_handle(source): |
618 | 28 | raise UnhandledSource("Cannot handle {}".format(source)) | 30 | raise UnhandledSource("Cannot handle {}".format(source)) |
619 | 29 | if url_parts.scheme == "lp": | 31 | if url_parts.scheme == "lp": |
620 | @@ -37,8 +39,9 @@ | |||
621 | 37 | 39 | ||
622 | 38 | def install(self, source): | 40 | def install(self, source): |
623 | 39 | url_parts = self.parse_url(source) | 41 | url_parts = self.parse_url(source) |
626 | 40 | branch_name = url_parts.path.strip("/").split("/")[-1] | 42 | branch_name = url_parts.path.strip('/').split('/')[-1] |
627 | 41 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) | 43 | dest_dir = os.path.join( |
628 | 44 | os.environ.get('CHARM_DIR'), 'fetched', branch_name) | ||
629 | 42 | if not os.path.exists(dest_dir): | 45 | if not os.path.exists(dest_dir): |
630 | 43 | mkdir(dest_dir, perms=0755) | 46 | mkdir(dest_dir, perms=0755) |
631 | 44 | try: | 47 | try: |
632 | @@ -46,4 +49,3 @@ | |||
633 | 46 | except OSError as e: | 49 | except OSError as e: |
634 | 47 | raise UnhandledSource(e.strerror) | 50 | raise UnhandledSource(e.strerror) |
635 | 48 | return dest_dir | 51 | return dest_dir |
636 | 49 | |||
637 | 50 | 52 | ||
638 | === modified file 'charmhelpers/payload/__init__.py' | |||
639 | --- charmhelpers/payload/__init__.py 2013-05-18 23:25:48 +0000 | |||
640 | +++ charmhelpers/payload/__init__.py 2013-09-02 08:01:21 +0000 | |||
641 | @@ -1,1 +1,3 @@ | |||
643 | 1 | "Tools for working with files injected into a charm just before deployment." | 1 | """ |
644 | 2 | Tools for working with files injected into a charm just before deployment. | ||
645 | 3 | """ | ||
646 | 2 | 4 | ||
647 | === modified file 'charmhelpers/payload/archive.py' | |||
648 | --- charmhelpers/payload/archive.py 2013-07-03 23:04:00 +0000 | |||
649 | +++ charmhelpers/payload/archive.py 2013-09-02 08:01:21 +0000 | |||
650 | @@ -1,6 +1,7 @@ | |||
651 | 1 | import os | 1 | import os |
652 | 2 | import tarfile | 2 | import tarfile |
653 | 3 | import zipfile | 3 | import zipfile |
654 | 4 | |||
655 | 4 | from charmhelpers.core import ( | 5 | from charmhelpers.core import ( |
656 | 5 | host, | 6 | host, |
657 | 6 | hookenv, | 7 | hookenv, |
658 | @@ -46,12 +47,12 @@ | |||
659 | 46 | 47 | ||
660 | 47 | 48 | ||
661 | 48 | def extract_tarfile(archive_name, destpath): | 49 | def extract_tarfile(archive_name, destpath): |
663 | 49 | "Unpack a tar archive, optionally compressed" | 50 | """Unpack a tar archive, optionally compressed.""" |
664 | 50 | archive = tarfile.open(archive_name) | 51 | archive = tarfile.open(archive_name) |
665 | 51 | archive.extractall(destpath) | 52 | archive.extractall(destpath) |
666 | 52 | 53 | ||
667 | 53 | 54 | ||
668 | 54 | def extract_zipfile(archive_name, destpath): | 55 | def extract_zipfile(archive_name, destpath): |
670 | 55 | "Unpack a zip file" | 56 | """Unpack a zip file.""" |
671 | 56 | archive = zipfile.ZipFile(archive_name) | 57 | archive = zipfile.ZipFile(archive_name) |
672 | 57 | archive.extractall(destpath) | 58 | archive.extractall(destpath) |
673 | 58 | 59 | ||
674 | === modified file 'charmhelpers/payload/execd.py' | |||
675 | --- charmhelpers/payload/execd.py 2013-06-07 12:34:25 +0000 | |||
676 | +++ charmhelpers/payload/execd.py 2013-09-02 08:01:21 +0000 | |||
677 | @@ -1,8 +1,9 @@ | |||
678 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python |
679 | 2 | 2 | ||
680 | 3 | import os | 3 | import os |
681 | 4 | import subprocess | ||
682 | 4 | import sys | 5 | import sys |
684 | 5 | import subprocess | 6 | |
685 | 6 | from charmhelpers.core import hookenv | 7 | from charmhelpers.core import hookenv |
686 | 7 | 8 | ||
687 | 8 | 9 | ||
688 | 9 | 10 | ||
689 | === modified file 'tests/contrib/hahelpers/test_ceph_utils.py' | |||
690 | --- tests/contrib/hahelpers/test_ceph_utils.py 2013-08-13 01:12:03 +0000 | |||
691 | +++ tests/contrib/hahelpers/test_ceph_utils.py 2013-09-02 08:01:21 +0000 | |||
692 | @@ -126,6 +126,6 @@ | |||
693 | 126 | fstype = 'xfs' | 126 | fstype = 'xfs' |
694 | 127 | ceph_utils.make_filesystem(device, fstype) | 127 | ceph_utils.make_filesystem(device, fstype) |
695 | 128 | self.check_call.assert_called_with(['mkfs', '-t', fstype, device]) | 128 | self.check_call.assert_called_with(['mkfs', '-t', fstype, device]) |
699 | 129 | self.log.assert_called_with('ceph: Formatting block device %s as ' | 129 | self.log.assert_called_with( |
700 | 130 | 'filesystem %s.' % (device, fstype), level='INFO') | 130 | 'ceph: Formatting block device %s as filesystem %s.' % ( |
701 | 131 | 131 | device, fstype), level='INFO') | |
702 | 132 | 132 | ||
703 | === modified file 'tests/core/test_hookenv.py' | |||
704 | --- tests/core/test_hookenv.py 2013-08-07 17:05:39 +0000 | |||
705 | +++ tests/core/test_hookenv.py 2013-09-02 08:01:21 +0000 | |||
706 | @@ -1,9 +1,7 @@ | |||
707 | 1 | import cPickle as pickle | ||
708 | 1 | import json | 2 | import json |
712 | 2 | 3 | from mock import call, MagicMock, mock_open, patch | |
710 | 3 | import cPickle as pickle | ||
711 | 4 | from mock import patch, call, mock_open | ||
713 | 5 | from StringIO import StringIO | 4 | from StringIO import StringIO |
714 | 6 | from mock import MagicMock | ||
715 | 7 | from testtools import TestCase | 5 | from testtools import TestCase |
716 | 8 | import yaml | 6 | import yaml |
717 | 9 | 7 | ||
718 | @@ -25,6 +23,7 @@ | |||
719 | 25 | 23 | ||
720 | 26 | 24 | ||
721 | 27 | class SerializableTest(TestCase): | 25 | class SerializableTest(TestCase): |
722 | 26 | |||
723 | 28 | def test_serializes_object_to_json(self): | 27 | def test_serializes_object_to_json(self): |
724 | 29 | foo = { | 28 | foo = { |
725 | 30 | 'bar': 'baz', | 29 | 'bar': 'baz', |
726 | @@ -115,6 +114,7 @@ | |||
727 | 115 | 114 | ||
728 | 116 | 115 | ||
729 | 117 | class HelpersTest(TestCase): | 116 | class HelpersTest(TestCase): |
730 | 117 | |||
731 | 118 | def setUp(self): | 118 | def setUp(self): |
732 | 119 | super(HelpersTest, self).setUp() | 119 | super(HelpersTest, self).setUp() |
733 | 120 | # Reset hookenv cache for each test | 120 | # Reset hookenv cache for each test |
734 | 121 | 121 | ||
735 | === modified file 'tests/core/test_host.py' | |||
736 | --- tests/core/test_host.py 2013-08-23 16:42:43 +0000 | |||
737 | +++ tests/core/test_host.py 2013-09-02 08:01:21 +0000 | |||
738 | @@ -1,7 +1,7 @@ | |||
739 | 1 | from collections import OrderedDict | 1 | from collections import OrderedDict |
740 | 2 | from contextlib import contextmanager | 2 | from contextlib import contextmanager |
741 | 3 | import io | ||
742 | 3 | import subprocess | 4 | import subprocess |
743 | 4 | import io | ||
744 | 5 | 5 | ||
745 | 6 | from mock import patch, call, MagicMock | 6 | from mock import patch, call, MagicMock |
746 | 7 | from testtools import TestCase | 7 | from testtools import TestCase |
747 | @@ -18,19 +18,20 @@ | |||
748 | 18 | """rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0 | 18 | """rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0 |
749 | 19 | """).strip().split('\n') | 19 | """).strip().split('\n') |
750 | 20 | 20 | ||
752 | 21 | LSB_RELEASE = u'''DISTRIB_ID=Ubuntu | 21 | LSB_RELEASE = u"""DISTRIB_ID=Ubuntu |
753 | 22 | DISTRIB_RELEASE=13.10 | 22 | DISTRIB_RELEASE=13.10 |
754 | 23 | DISTRIB_CODENAME=saucy | 23 | DISTRIB_CODENAME=saucy |
755 | 24 | DISTRIB_DESCRIPTION="Ubuntu Saucy Salamander (development branch)" | 24 | DISTRIB_DESCRIPTION="Ubuntu Saucy Salamander (development branch)" |
757 | 25 | ''' | 25 | """ |
758 | 26 | 26 | ||
759 | 27 | 27 | ||
760 | 28 | @contextmanager | 28 | @contextmanager |
761 | 29 | def patch_open(): | 29 | def patch_open(): |
763 | 30 | '''Patch open() to allow mocking both open() itself and the file that is | 30 | """Patch open() to allow mocking both open() itself and the file that is |
764 | 31 | yielded. | 31 | yielded. |
765 | 32 | 32 | ||
767 | 33 | Yields the mock for "open" and "file", respectively.''' | 33 | Yields the mock for "open" and "file", respectively. |
768 | 34 | """ | ||
769 | 34 | mock_open = MagicMock(spec=open) | 35 | mock_open = MagicMock(spec=open) |
770 | 35 | mock_file = MagicMock(spec=file) | 36 | mock_file = MagicMock(spec=file) |
771 | 36 | 37 | ||
772 | @@ -45,7 +46,7 @@ | |||
773 | 45 | 46 | ||
774 | 46 | @contextmanager | 47 | @contextmanager |
775 | 47 | def mock_open(filename, contents=None): | 48 | def mock_open(filename, contents=None): |
777 | 48 | ''' Slightly simpler mock of open to return contents for filename ''' | 49 | """Slightly simpler mock of open to return contents for filename """ |
778 | 49 | def mock_file(*args): | 50 | def mock_file(*args): |
779 | 50 | if args[0] == filename: | 51 | if args[0] == filename: |
780 | 51 | return io.StringIO(contents) | 52 | return io.StringIO(contents) |
781 | 52 | 53 | ||
782 | === modified file 'tests/fetch/test_archiveurl.py' | |||
783 | --- tests/fetch/test_archiveurl.py 2013-07-10 18:57:39 +0000 | |||
784 | +++ tests/fetch/test_archiveurl.py 2013-09-02 08:01:21 +0000 | |||
785 | @@ -1,16 +1,18 @@ | |||
786 | 1 | import os | 1 | import os |
788 | 2 | from testtools import TestCase | 2 | import urllib2 |
789 | 3 | from urlparse import urlparse | 3 | from urlparse import urlparse |
790 | 4 | |||
791 | 4 | from mock import ( | 5 | from mock import ( |
792 | 5 | MagicMock, | 6 | MagicMock, |
793 | 6 | patch, | 7 | patch, |
794 | 7 | mock_open, | 8 | mock_open, |
795 | 8 | ) | 9 | ) |
796 | 10 | from testtools import TestCase | ||
797 | 11 | |||
798 | 9 | from charmhelpers.fetch import ( | 12 | from charmhelpers.fetch import ( |
799 | 10 | archiveurl, | 13 | archiveurl, |
800 | 11 | UnhandledSource, | 14 | UnhandledSource, |
801 | 12 | ) | 15 | ) |
802 | 13 | import urllib2 | ||
803 | 14 | 16 | ||
804 | 15 | 17 | ||
805 | 16 | class ArchiveUrlFetchHandlerTest(TestCase): | 18 | class ArchiveUrlFetchHandlerTest(TestCase): |
806 | 17 | 19 | ||
807 | === modified file 'tests/fetch/test_bzrurl.py' | |||
808 | --- tests/fetch/test_bzrurl.py 2013-08-21 11:45:37 +0000 | |||
809 | +++ tests/fetch/test_bzrurl.py 2013-09-02 08:01:21 +0000 | |||
810 | @@ -1,10 +1,12 @@ | |||
811 | 1 | import os | 1 | import os |
812 | 2 | from testtools import TestCase | ||
813 | 3 | from urlparse import urlparse | 2 | from urlparse import urlparse |
814 | 3 | |||
815 | 4 | from mock import ( | 4 | from mock import ( |
816 | 5 | MagicMock, | 5 | MagicMock, |
817 | 6 | patch, | 6 | patch, |
818 | 7 | ) | 7 | ) |
819 | 8 | from testtools import TestCase | ||
820 | 9 | |||
821 | 8 | from charmhelpers.fetch import ( | 10 | from charmhelpers.fetch import ( |
822 | 9 | bzrurl, | 11 | bzrurl, |
823 | 10 | UnhandledSource, | 12 | UnhandledSource, |
824 | @@ -41,7 +43,6 @@ | |||
825 | 41 | ) | 43 | ) |
826 | 42 | self.fh = bzrurl.BzrUrlFetchHandler() | 44 | self.fh = bzrurl.BzrUrlFetchHandler() |
827 | 43 | 45 | ||
828 | 44 | |||
829 | 45 | def test_handles_bzr_urls(self): | 46 | def test_handles_bzr_urls(self): |
830 | 46 | for url in self.valid_urls: | 47 | for url in self.valid_urls: |
831 | 47 | result = self.fh.can_handle(url) | 48 | result = self.fh.can_handle(url) |
832 | @@ -50,7 +51,6 @@ | |||
833 | 50 | result = self.fh.can_handle(url) | 51 | result = self.fh.can_handle(url) |
834 | 51 | self.assertNotEqual(result, True, url) | 52 | self.assertNotEqual(result, True, url) |
835 | 52 | 53 | ||
836 | 53 | |||
837 | 54 | @patch('bzrlib.branch.Branch.open') | 54 | @patch('bzrlib.branch.Branch.open') |
838 | 55 | def test_branch(self, _open): | 55 | def test_branch(self, _open): |
839 | 56 | dest_path = "/destination/path" | 56 | dest_path = "/destination/path" |
840 | @@ -65,7 +65,6 @@ | |||
841 | 65 | with patch.dict('os.environ', {'CHARM_DIR': 'foo'}): | 65 | with patch.dict('os.environ', {'CHARM_DIR': 'foo'}): |
842 | 66 | self.assertRaises(UnhandledSource, self.fh.branch, url, dest_path) | 66 | self.assertRaises(UnhandledSource, self.fh.branch, url, dest_path) |
843 | 67 | 67 | ||
844 | 68 | |||
845 | 69 | @patch('charmhelpers.fetch.bzrurl.mkdir') | 68 | @patch('charmhelpers.fetch.bzrurl.mkdir') |
846 | 70 | def test_installs(self, _mkdir): | 69 | def test_installs(self, _mkdir): |
847 | 71 | self.fh.branch = MagicMock() | 70 | self.fh.branch = MagicMock() |
848 | @@ -77,4 +76,3 @@ | |||
849 | 77 | where = self.fh.install(url) | 76 | where = self.fh.install(url) |
850 | 78 | self.assertEqual(where, dest) | 77 | self.assertEqual(where, dest) |
851 | 79 | _mkdir.assert_called_with(where, perms=0755) | 78 | _mkdir.assert_called_with(where, perms=0755) |
852 | 80 | |||
853 | 81 | 79 | ||
854 | === modified file 'tests/fetch/test_fetch.py' | |||
855 | --- tests/fetch/test_fetch.py 2013-08-21 15:45:53 +0000 | |||
856 | +++ tests/fetch/test_fetch.py 2013-09-02 08:01:21 +0000 | |||
857 | @@ -1,13 +1,16 @@ | |||
858 | 1 | from contextlib import contextmanager | 1 | from contextlib import contextmanager |
860 | 2 | from testtools import TestCase | 2 | from urlparse import urlparse |
861 | 3 | |||
862 | 3 | from mock import ( | 4 | from mock import ( |
863 | 4 | patch, | 5 | patch, |
864 | 5 | MagicMock, | 6 | MagicMock, |
865 | 6 | call, | 7 | call, |
866 | 7 | ) | 8 | ) |
868 | 8 | from urlparse import urlparse | 9 | from testtools import TestCase |
869 | 10 | import yaml | ||
870 | 11 | |||
871 | 9 | from charmhelpers import fetch | 12 | from charmhelpers import fetch |
873 | 10 | import yaml | 13 | |
874 | 11 | 14 | ||
875 | 12 | FAKE_APT_CACHE = { | 15 | FAKE_APT_CACHE = { |
876 | 13 | # an installed package | 16 | # an installed package |
877 | @@ -38,10 +41,10 @@ | |||
878 | 38 | 41 | ||
879 | 39 | @contextmanager | 42 | @contextmanager |
880 | 40 | def patch_open(): | 43 | def patch_open(): |
882 | 41 | '''Patch open() to allow mocking both open() itself and the file that is | 44 | """Patch open() to allow mocking both open() itself and the file that is |
883 | 42 | yielded. | 45 | yielded. |
884 | 43 | 46 | ||
886 | 44 | Yields the mock for "open" and "file", respectively.''' | 47 | Yields the mock for "open" and "file", respectively.""" |
887 | 45 | mock_open = MagicMock(spec=open) | 48 | mock_open = MagicMock(spec=open) |
888 | 46 | mock_file = MagicMock(spec=file) | 49 | mock_file = MagicMock(spec=file) |
889 | 47 | 50 | ||
890 | @@ -96,9 +99,9 @@ | |||
891 | 96 | @patch.object(fetch, 'apt_install') | 99 | @patch.object(fetch, 'apt_install') |
892 | 97 | def test_add_source_cloud(self, apt_install, filter_pkg): | 100 | def test_add_source_cloud(self, apt_install, filter_pkg): |
893 | 98 | source = "cloud:havana-updates" | 101 | source = "cloud:havana-updates" |
895 | 99 | result = '''# Ubuntu Cloud Archive | 102 | result = """# Ubuntu Cloud Archive |
896 | 100 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu havana-updates main | 103 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu havana-updates main |
898 | 101 | ''' | 104 | """ |
899 | 102 | with patch_open() as (mock_open, mock_file): | 105 | with patch_open() as (mock_open, mock_file): |
900 | 103 | fetch.add_source(source=source) | 106 | fetch.add_source(source=source) |
901 | 104 | mock_file.write.assert_called_with(result) | 107 | mock_file.write.assert_called_with(result) |
902 | @@ -352,27 +355,24 @@ | |||
903 | 352 | check_call.assert_called_with(['apt-get', '-y', '--foo', '--bar', | 355 | check_call.assert_called_with(['apt-get', '-y', '--foo', '--bar', |
904 | 353 | 'install', 'foo', 'bar']) | 356 | 'install', 'foo', 'bar']) |
905 | 354 | 357 | ||
906 | 355 | |||
907 | 356 | @patch('subprocess.check_call') | 358 | @patch('subprocess.check_call') |
908 | 357 | @patch.object(fetch, 'log') | 359 | @patch.object(fetch, 'log') |
909 | 358 | def test_purges_apt_packages_as_string_fatal(self, log, mock_call): | 360 | def test_purges_apt_packages_as_string_fatal(self, log, mock_call): |
910 | 359 | packages = 'irrelevant names' | 361 | packages = 'irrelevant names' |
911 | 360 | mock_call.side_effect = OSError('fail') | 362 | mock_call.side_effect = OSError('fail') |
912 | 361 | 363 | ||
914 | 362 | mock_call.assertRaises(OSError, fetch.apt_purge, packages, fatal=True ) | 364 | mock_call.assertRaises(OSError, fetch.apt_purge, packages, fatal=True) |
915 | 363 | log.assert_called() | 365 | log.assert_called() |
916 | 364 | 366 | ||
917 | 365 | |||
918 | 366 | @patch('subprocess.check_call') | 367 | @patch('subprocess.check_call') |
919 | 367 | @patch.object(fetch, 'log') | 368 | @patch.object(fetch, 'log') |
920 | 368 | def test_purges_apt_packages_fatal(self, log, mock_call): | 369 | def test_purges_apt_packages_fatal(self, log, mock_call): |
921 | 369 | packages = ['irrelevant', 'names'] | 370 | packages = ['irrelevant', 'names'] |
922 | 370 | mock_call.side_effect = OSError('fail') | 371 | mock_call.side_effect = OSError('fail') |
923 | 371 | 372 | ||
925 | 372 | mock_call.assertRaises(OSError, fetch.apt_purge, packages, fatal=True ) | 373 | mock_call.assertRaises(OSError, fetch.apt_purge, packages, fatal=True) |
926 | 373 | log.assert_called() | 374 | log.assert_called() |
927 | 374 | 375 | ||
928 | 375 | |||
929 | 376 | @patch('subprocess.call') | 376 | @patch('subprocess.call') |
930 | 377 | @patch.object(fetch, 'log') | 377 | @patch.object(fetch, 'log') |
931 | 378 | def test_purges_apt_packages_as_string_nofatal(self, log, mock_call): | 378 | def test_purges_apt_packages_as_string_nofatal(self, log, mock_call): |
932 | @@ -383,7 +383,6 @@ | |||
933 | 383 | log.assert_called() | 383 | log.assert_called() |
934 | 384 | mock_call.assert_called_with(['apt-get', '-y', 'purge', 'foo bar']) | 384 | mock_call.assert_called_with(['apt-get', '-y', 'purge', 'foo bar']) |
935 | 385 | 385 | ||
936 | 386 | |||
937 | 387 | @patch('subprocess.call') | 386 | @patch('subprocess.call') |
938 | 388 | @patch.object(fetch, 'log') | 387 | @patch.object(fetch, 'log') |
939 | 389 | def test_purges_apt_packages_nofatal(self, log, mock_call): | 388 | def test_purges_apt_packages_nofatal(self, log, mock_call): |
940 | @@ -395,7 +394,6 @@ | |||
941 | 395 | mock_call.assert_called_with(['apt-get', '-y', 'purge', 'foo', | 394 | mock_call.assert_called_with(['apt-get', '-y', 'purge', 'foo', |
942 | 396 | 'bar']) | 395 | 'bar']) |
943 | 397 | 396 | ||
944 | 398 | |||
945 | 399 | @patch('subprocess.check_call') | 397 | @patch('subprocess.check_call') |
946 | 400 | def test_apt_update_fatal(self, check_call): | 398 | def test_apt_update_fatal(self, check_call): |
947 | 401 | fetch.apt_update(fatal=True) | 399 | fetch.apt_update(fatal=True) |
948 | 402 | 400 | ||
949 | === modified file 'tests/payload/test_archive.py' | |||
950 | --- tests/payload/test_archive.py 2013-07-03 23:04:00 +0000 | |||
951 | +++ tests/payload/test_archive.py 2013-09-02 08:01:21 +0000 | |||
952 | @@ -1,13 +1,15 @@ | |||
953 | 1 | import os | 1 | import os |
955 | 2 | from testtools import TestCase | 2 | from shutil import rmtree |
956 | 3 | import subprocess | ||
957 | 4 | from tempfile import mkdtemp | ||
958 | 5 | |||
959 | 3 | from mock import ( | 6 | from mock import ( |
960 | 4 | patch, | 7 | patch, |
961 | 5 | MagicMock, | 8 | MagicMock, |
962 | 6 | ) | 9 | ) |
963 | 10 | from testtools import TestCase | ||
964 | 11 | |||
965 | 7 | from charmhelpers.payload import archive | 12 | from charmhelpers.payload import archive |
966 | 8 | from tempfile import mkdtemp | ||
967 | 9 | from shutil import rmtree | ||
968 | 10 | import subprocess | ||
969 | 11 | 13 | ||
970 | 12 | 14 | ||
971 | 13 | class ArchiveTestCase(TestCase): | 15 | class ArchiveTestCase(TestCase): |
972 | 14 | 16 | ||
973 | === modified file 'tests/payload/test_execd.py' | |||
974 | --- tests/payload/test_execd.py 2013-07-11 08:31:49 +0000 | |||
975 | +++ tests/payload/test_execd.py 2013-09-02 08:01:21 +0000 | |||
976 | @@ -1,11 +1,11 @@ | |||
977 | 1 | from testtools import TestCase | ||
978 | 2 | from mock import patch | ||
979 | 3 | import os | 1 | import os |
980 | 4 | import shutil | 2 | import shutil |
981 | 5 | import stat | 3 | import stat |
982 | 6 | |||
983 | 7 | from tempfile import mkdtemp | 4 | from tempfile import mkdtemp |
984 | 8 | 5 | ||
985 | 6 | from mock import patch | ||
986 | 7 | from testtools import TestCase | ||
987 | 8 | |||
988 | 9 | from charmhelpers.payload import execd | 9 | from charmhelpers.payload import execd |
989 | 10 | 10 | ||
990 | 11 | 11 | ||
991 | @@ -58,7 +58,9 @@ | |||
992 | 58 | expected_file = os.path.join(self.test_charm_dir, execd_dir, | 58 | expected_file = os.path.join(self.test_charm_dir, execd_dir, |
993 | 59 | module_dir, 'charm-pre-install-success') | 59 | module_dir, 'charm-pre-install-success') |
994 | 60 | files = os.listdir(os.path.dirname(expected_file)) | 60 | files = os.listdir(os.path.dirname(expected_file)) |
996 | 61 | self.assertTrue(os.path.exists(expected_file), "files were: %s. charmdir is: %s" % (files, self.test_charm_dir)) | 61 | self.assertTrue( |
997 | 62 | os.path.exists(expected_file), | ||
998 | 63 | 'files were: %s. charmdir is: %s' % (files, self.test_charm_dir)) | ||
999 | 62 | 64 | ||
1000 | 63 | def test_execd_preinstall(self): | 65 | def test_execd_preinstall(self): |
1001 | 64 | """All charm-pre-install hooks are executed.""" | 66 | """All charm-pre-install hooks are executed.""" |
1002 | 65 | 67 | ||
1003 | === modified file 'tests/tools/test_charm_helper_sync.py' | |||
1004 | --- tests/tools/test_charm_helper_sync.py 2013-07-12 01:28:42 +0000 | |||
1005 | +++ tests/tools/test_charm_helper_sync.py 2013-09-02 08:01:21 +0000 | |||
1006 | @@ -19,7 +19,7 @@ | |||
1007 | 19 | 19 | ||
1008 | 20 | class HelperSyncTests(unittest.TestCase): | 20 | class HelperSyncTests(unittest.TestCase): |
1009 | 21 | def test_clone_helpers(self): | 21 | def test_clone_helpers(self): |
1011 | 22 | '''It properly branches the correct helpers branch''' | 22 | """It properly branches the correct helpers branch.""" |
1012 | 23 | with patch('subprocess.check_call') as check_call: | 23 | with patch('subprocess.check_call') as check_call: |
1013 | 24 | sync.clone_helpers(work_dir='/tmp/foo', branch='lp:charm-helpers') | 24 | sync.clone_helpers(work_dir='/tmp/foo', branch='lp:charm-helpers') |
1014 | 25 | check_call.assert_called_with(['bzr', 'branch', | 25 | check_call.assert_called_with(['bzr', 'branch', |
1015 | @@ -27,19 +27,19 @@ | |||
1016 | 27 | '/tmp/foo/charm-helpers']) | 27 | '/tmp/foo/charm-helpers']) |
1017 | 28 | 28 | ||
1018 | 29 | def test_module_path(self): | 29 | def test_module_path(self): |
1020 | 30 | '''It converts a python module path to a filesystem path''' | 30 | """It converts a python module path to a filesystem path.""" |
1021 | 31 | self.assertEquals(sync._module_path('some.test.module'), | 31 | self.assertEquals(sync._module_path('some.test.module'), |
1022 | 32 | 'some/test/module') | 32 | 'some/test/module') |
1023 | 33 | 33 | ||
1024 | 34 | def test_src_path(self): | 34 | def test_src_path(self): |
1026 | 35 | '''It renders the correct path to module within charm-helpers tree''' | 35 | """It renders the correct path to module within charm-helpers tree.""" |
1027 | 36 | path = sync._src_path(src='/tmp/charm-helpers', | 36 | path = sync._src_path(src='/tmp/charm-helpers', |
1028 | 37 | module='contrib.openstack') | 37 | module='contrib.openstack') |
1029 | 38 | self.assertEquals('/tmp/charm-helpers/charmhelpers/contrib/openstack', | 38 | self.assertEquals('/tmp/charm-helpers/charmhelpers/contrib/openstack', |
1030 | 39 | path) | 39 | path) |
1031 | 40 | 40 | ||
1032 | 41 | def test_dest_path(self): | 41 | def test_dest_path(self): |
1034 | 42 | '''It correctly finds the correct install path within a charm''' | 42 | """It correctly finds the correct install path within a charm.""" |
1035 | 43 | path = sync._dest_path(dest='/tmp/mycharm/hooks/charmhelpers', | 43 | path = sync._dest_path(dest='/tmp/mycharm/hooks/charmhelpers', |
1036 | 44 | module='contrib.openstack') | 44 | module='contrib.openstack') |
1037 | 45 | self.assertEquals('/tmp/mycharm/hooks/charmhelpers/contrib/openstack', | 45 | self.assertEquals('/tmp/mycharm/hooks/charmhelpers/contrib/openstack', |
1038 | @@ -49,7 +49,7 @@ | |||
1039 | 49 | @patch('os.path.exists') | 49 | @patch('os.path.exists') |
1040 | 50 | @patch('os.walk') | 50 | @patch('os.walk') |
1041 | 51 | def test_ensure_init(self, walk, exists, _open): | 51 | def test_ensure_init(self, walk, exists, _open): |
1043 | 52 | '''It ensures all subdirectories of a parent are python importable''' | 52 | """It ensures all subdirectories of a parent are python importable.""" |
1044 | 53 | # os walk | 53 | # os walk |
1045 | 54 | # os.path.join | 54 | # os.path.join |
1046 | 55 | # os.path.exists | 55 | # os.path.exists |
1047 | @@ -73,7 +73,7 @@ | |||
1048 | 73 | @patch('os.makedirs') | 73 | @patch('os.makedirs') |
1049 | 74 | @patch('os.path.exists') | 74 | @patch('os.path.exists') |
1050 | 75 | def test_sync_pyfile(self, exists, mkdirs, copy, isfile, ensure_init): | 75 | def test_sync_pyfile(self, exists, mkdirs, copy, isfile, ensure_init): |
1052 | 76 | '''It correctly syncs a py src file from src to dest''' | 76 | """It correctly syncs a py src file from src to dest.""" |
1053 | 77 | exists.return_value = False | 77 | exists.return_value = False |
1054 | 78 | isfile.return_value = True | 78 | isfile.return_value = True |
1055 | 79 | sync.sync_pyfile('/tmp/charm-helpers/core/host', | 79 | sync.sync_pyfile('/tmp/charm-helpers/core/host', |
1056 | @@ -88,7 +88,7 @@ | |||
1057 | 88 | ensure_init.assert_called_with('hooks/charmhelpers/core') | 88 | ensure_init.assert_called_with('hooks/charmhelpers/core') |
1058 | 89 | 89 | ||
1059 | 90 | def _test_filter_dir(self, opts, isfile, isdir): | 90 | def _test_filter_dir(self, opts, isfile, isdir): |
1061 | 91 | '''It filters non-python files and non-module dirs from source''' | 91 | """It filters non-python files and non-module dirs from source.""" |
1062 | 92 | files = { | 92 | files = { |
1063 | 93 | 'bad_file.bin': 'f', | 93 | 'bad_file.bin': 'f', |
1064 | 94 | 'some_dir': 'd', | 94 | 'some_dir': 'd', |
1065 | @@ -119,7 +119,7 @@ | |||
1066 | 119 | @patch('os.path.isdir') | 119 | @patch('os.path.isdir') |
1067 | 120 | @patch('os.path.isfile') | 120 | @patch('os.path.isfile') |
1068 | 121 | def test_filter_dir_no_opts(self, isfile, isdir): | 121 | def test_filter_dir_no_opts(self, isfile, isdir): |
1070 | 122 | '''It filters out all non-py files by default''' | 122 | """It filters out all non-py files by default.""" |
1071 | 123 | result = self._test_filter_dir(opts=None, isfile=isfile, isdir=isdir) | 123 | result = self._test_filter_dir(opts=None, isfile=isfile, isdir=isdir) |
1072 | 124 | ex = ['bad_file.bin', 'bad_file.img', 'some_dir'] | 124 | ex = ['bad_file.bin', 'bad_file.img', 'some_dir'] |
1073 | 125 | self.assertEquals(ex, result) | 125 | self.assertEquals(ex, result) |
1074 | @@ -127,7 +127,7 @@ | |||
1075 | 127 | @patch('os.path.isdir') | 127 | @patch('os.path.isdir') |
1076 | 128 | @patch('os.path.isfile') | 128 | @patch('os.path.isfile') |
1077 | 129 | def test_filter_dir_with_include(self, isfile, isdir): | 129 | def test_filter_dir_with_include(self, isfile, isdir): |
1079 | 130 | '''It includes non-py files if specified as an include opt''' | 130 | """It includes non-py files if specified as an include opt.""" |
1080 | 131 | result = self._test_filter_dir(opts=['inc=*.img'], | 131 | result = self._test_filter_dir(opts=['inc=*.img'], |
1081 | 132 | isfile=isfile, isdir=isdir) | 132 | isfile=isfile, isdir=isdir) |
1082 | 133 | ex = ['bad_file.bin', 'some_dir'] | 133 | ex = ['bad_file.bin', 'some_dir'] |
1083 | @@ -136,7 +136,7 @@ | |||
1084 | 136 | @patch('os.path.isdir') | 136 | @patch('os.path.isdir') |
1085 | 137 | @patch('os.path.isfile') | 137 | @patch('os.path.isfile') |
1086 | 138 | def test_filter_dir_include_all(self, isfile, isdir): | 138 | def test_filter_dir_include_all(self, isfile, isdir): |
1088 | 139 | '''It does not filter anything if option specified to include all''' | 139 | """It does not filter anything if option specified to include all.""" |
1089 | 140 | self.assertEquals(sync.get_filter(opts=['inc=*']), None) | 140 | self.assertEquals(sync.get_filter(opts=['inc=*']), None) |
1090 | 141 | 141 | ||
1091 | 142 | @patch('tools.charm_helpers_sync.charm_helpers_sync.get_filter') | 142 | @patch('tools.charm_helpers_sync.charm_helpers_sync.get_filter') |
1092 | @@ -146,7 +146,7 @@ | |||
1093 | 146 | @patch('os.path.exists') | 146 | @patch('os.path.exists') |
1094 | 147 | def test_sync_directory(self, exists, rmtree, copytree, ensure_init, | 147 | def test_sync_directory(self, exists, rmtree, copytree, ensure_init, |
1095 | 148 | _filter): | 148 | _filter): |
1097 | 149 | '''It correctly syncs src directory to dest directory''' | 149 | """It correctly syncs src directory to dest directory.""" |
1098 | 150 | _filter.return_value = None | 150 | _filter.return_value = None |
1099 | 151 | sync.sync_directory('/tmp/charm-helpers/charmhelpers/core', | 151 | sync.sync_directory('/tmp/charm-helpers/charmhelpers/core', |
1100 | 152 | 'hooks/charmhelpers/core') | 152 | 'hooks/charmhelpers/core') |
1101 | @@ -158,7 +158,7 @@ | |||
1102 | 158 | 158 | ||
1103 | 159 | @patch('os.path.isfile') | 159 | @patch('os.path.isfile') |
1104 | 160 | def test_is_pyfile(self, isfile): | 160 | def test_is_pyfile(self, isfile): |
1106 | 161 | '''It correctly identifies incomplete path to a py src file as such''' | 161 | """It correctly identifies incomplete path to a py src file as such.""" |
1107 | 162 | sync._is_pyfile('/tmp/charm-helpers/charmhelpers/core/host') | 162 | sync._is_pyfile('/tmp/charm-helpers/charmhelpers/core/host') |
1108 | 163 | isfile.assert_called_with( | 163 | isfile.assert_called_with( |
1109 | 164 | '/tmp/charm-helpers/charmhelpers/core/host.py' | 164 | '/tmp/charm-helpers/charmhelpers/core/host.py' |
1110 | @@ -167,7 +167,7 @@ | |||
1111 | 167 | @patch('tools.charm_helpers_sync.charm_helpers_sync.sync_directory') | 167 | @patch('tools.charm_helpers_sync.charm_helpers_sync.sync_directory') |
1112 | 168 | @patch('os.path.isdir') | 168 | @patch('os.path.isdir') |
1113 | 169 | def test_syncs_directory(self, is_dir, sync_dir): | 169 | def test_syncs_directory(self, is_dir, sync_dir): |
1115 | 170 | '''It correctly syncs a module directory''' | 170 | """It correctly syncs a module directory.""" |
1116 | 171 | is_dir.return_value = True | 171 | is_dir.return_value = True |
1117 | 172 | sync.sync(src='/tmp/charm-helpers', | 172 | sync.sync(src='/tmp/charm-helpers', |
1118 | 173 | dest='hooks/charmhelpers', | 173 | dest='hooks/charmhelpers', |
1119 | @@ -181,7 +181,7 @@ | |||
1120 | 181 | @patch('tools.charm_helpers_sync.charm_helpers_sync._is_pyfile') | 181 | @patch('tools.charm_helpers_sync.charm_helpers_sync._is_pyfile') |
1121 | 182 | @patch('os.path.isdir') | 182 | @patch('os.path.isdir') |
1122 | 183 | def test_syncs_file(self, is_dir, is_pyfile, sync_pyfile): | 183 | def test_syncs_file(self, is_dir, is_pyfile, sync_pyfile): |
1124 | 184 | '''It correctly syncs a module file''' | 184 | """It correctly syncs a module file.""" |
1125 | 185 | is_dir.return_value = False | 185 | is_dir.return_value = False |
1126 | 186 | is_pyfile.return_value = True | 186 | is_pyfile.return_value = True |
1127 | 187 | sync.sync(src='/tmp/charm-helpers', | 187 | sync.sync(src='/tmp/charm-helpers', |
1128 | @@ -195,7 +195,7 @@ | |||
1129 | 195 | @patch('tools.charm_helpers_sync.charm_helpers_sync.sync') | 195 | @patch('tools.charm_helpers_sync.charm_helpers_sync.sync') |
1130 | 196 | @patch('os.path.isdir') | 196 | @patch('os.path.isdir') |
1131 | 197 | def test_sync_helpers_from_config(self, isdir, _sync): | 197 | def test_sync_helpers_from_config(self, isdir, _sync): |
1133 | 198 | '''It correctly syncs a list of included helpers''' | 198 | """It correctly syncs a list of included helpers.""" |
1134 | 199 | include = yaml.load(INCLUDE)['include'] | 199 | include = yaml.load(INCLUDE)['include'] |
1135 | 200 | isdir.return_value = True | 200 | isdir.return_value = True |
1136 | 201 | sync.sync_helpers(include=include, | 201 | sync.sync_helpers(include=include, |
1137 | @@ -219,14 +219,14 @@ | |||
1138 | 219 | self.assertEquals(ex_calls, _sync.call_args_list) | 219 | self.assertEquals(ex_calls, _sync.call_args_list) |
1139 | 220 | 220 | ||
1140 | 221 | def test_extract_option_no_globals(self): | 221 | def test_extract_option_no_globals(self): |
1142 | 222 | '''It extracts option from an included item with no global options''' | 222 | """It extracts option from an included item with no global options.""" |
1143 | 223 | inc = 'contrib.openstack.templates|inc=*.template' | 223 | inc = 'contrib.openstack.templates|inc=*.template' |
1144 | 224 | result = sync.extract_options(inc) | 224 | result = sync.extract_options(inc) |
1145 | 225 | ex = ('contrib.openstack.templates', ['inc=*.template']) | 225 | ex = ('contrib.openstack.templates', ['inc=*.template']) |
1146 | 226 | self.assertEquals(ex, result) | 226 | self.assertEquals(ex, result) |
1147 | 227 | 227 | ||
1148 | 228 | def test_extract_option_with_global_as_string(self): | 228 | def test_extract_option_with_global_as_string(self): |
1150 | 229 | '''It extracts option for include with global options as str''' | 229 | """It extracts option for include with global options as str.""" |
1151 | 230 | inc = 'contrib.openstack.templates|inc=*.template' | 230 | inc = 'contrib.openstack.templates|inc=*.template' |
1152 | 231 | result = sync.extract_options(inc, global_options='inc=foo.*') | 231 | result = sync.extract_options(inc, global_options='inc=foo.*') |
1153 | 232 | ex = ('contrib.openstack.templates', | 232 | ex = ('contrib.openstack.templates', |
1154 | @@ -234,14 +234,14 @@ | |||
1155 | 234 | self.assertEquals(ex, result) | 234 | self.assertEquals(ex, result) |
1156 | 235 | 235 | ||
1157 | 236 | def test_extract_option_with_globals(self): | 236 | def test_extract_option_with_globals(self): |
1159 | 237 | '''It extracts option from an included item with global options''' | 237 | """It extracts option from an included item with global options.""" |
1160 | 238 | inc = 'contrib.openstack.templates|inc=*.template' | 238 | inc = 'contrib.openstack.templates|inc=*.template' |
1161 | 239 | result = sync.extract_options(inc, global_options=['inc=*.cfg']) | 239 | result = sync.extract_options(inc, global_options=['inc=*.cfg']) |
1162 | 240 | ex = ('contrib.openstack.templates', ['inc=*.template', 'inc=*.cfg']) | 240 | ex = ('contrib.openstack.templates', ['inc=*.template', 'inc=*.cfg']) |
1163 | 241 | self.assertEquals(ex, result) | 241 | self.assertEquals(ex, result) |
1164 | 242 | 242 | ||
1165 | 243 | def test_extract_multiple_options_with_globals(self): | 243 | def test_extract_multiple_options_with_globals(self): |
1167 | 244 | '''It extracts multiple options from an included item''' | 244 | """It extracts multiple options from an included item.""" |
1168 | 245 | inc = 'contrib.openstack.templates|inc=*.template,inc=foo.*' | 245 | inc = 'contrib.openstack.templates|inc=*.template,inc=foo.*' |
1169 | 246 | result = sync.extract_options(inc, global_options=['inc=*.cfg']) | 246 | result = sync.extract_options(inc, global_options=['inc=*.cfg']) |
1170 | 247 | ex = ('contrib.openstack.templates', | 247 | ex = ('contrib.openstack.templates', |
1171 | 248 | 248 | ||
1172 | === modified file 'tools/charm_helpers_sync/charm_helpers_sync.py' | |||
1173 | --- tools/charm_helpers_sync/charm_helpers_sync.py 2013-07-12 01:28:42 +0000 | |||
1174 | +++ tools/charm_helpers_sync/charm_helpers_sync.py 2013-09-02 08:01:21 +0000 | |||
1175 | @@ -1,22 +1,19 @@ | |||
1176 | 1 | #!/usr/bin/python | 1 | #!/usr/bin/python |
1177 | 2 | # | 2 | # |
1178 | 3 | # Copyright 2013 Canonical Ltd. | 3 | # Copyright 2013 Canonical Ltd. |
1179 | 4 | |||
1180 | 5 | # Authors: | 4 | # Authors: |
1181 | 6 | # Adam Gandelman <adamg@ubuntu.com> | 5 | # Adam Gandelman <adamg@ubuntu.com> |
1182 | 7 | # | ||
1183 | 8 | 6 | ||
1184 | 7 | from fnmatch import fnmatch | ||
1185 | 9 | import logging | 8 | import logging |
1186 | 10 | import optparse | 9 | import optparse |
1187 | 11 | import os | 10 | import os |
1188 | 11 | import shutil | ||
1189 | 12 | import subprocess | 12 | import subprocess |
1190 | 13 | import shutil | ||
1191 | 14 | import sys | 13 | import sys |
1192 | 15 | import tempfile | 14 | import tempfile |
1193 | 16 | import yaml | 15 | import yaml |
1194 | 17 | 16 | ||
1195 | 18 | from fnmatch import fnmatch | ||
1196 | 19 | |||
1197 | 20 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' | 17 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' |
1198 | 21 | 18 | ||
1199 | 22 | 19 | ||
1200 | @@ -52,13 +49,13 @@ | |||
1201 | 52 | 49 | ||
1202 | 53 | 50 | ||
1203 | 54 | def ensure_init(path): | 51 | def ensure_init(path): |
1206 | 55 | ''' | 52 | """ |
1207 | 56 | ensure directories leading up to path are importable, omitting | 53 | Ensure directories leading up to path are importable, omitting |
1208 | 57 | parent directory, eg path='/hooks/helpers/foo'/: | 54 | parent directory, eg path='/hooks/helpers/foo'/: |
1209 | 58 | hooks/ | 55 | hooks/ |
1210 | 59 | hooks/helpers/__init__.py | 56 | hooks/helpers/__init__.py |
1211 | 60 | hooks/helpers/foo/__init__.py | 57 | hooks/helpers/foo/__init__.py |
1213 | 61 | ''' | 58 | """ |
1214 | 62 | for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])): | 59 | for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])): |
1215 | 63 | _i = os.path.join(d, '__init__.py') | 60 | _i = os.path.join(d, '__init__.py') |
1216 | 64 | if not os.path.exists(_i): | 61 | if not os.path.exists(_i): |
1217 | @@ -165,6 +162,7 @@ | |||
1218 | 165 | inc, opts = extract_options(m, global_options) | 162 | inc, opts = extract_options(m, global_options) |
1219 | 166 | sync(src, dest, '%s.%s' % (k, inc), opts) | 163 | sync(src, dest, '%s.%s' % (k, inc), opts) |
1220 | 167 | 164 | ||
1221 | 165 | |||
1222 | 168 | if __name__ == '__main__': | 166 | if __name__ == '__main__': |
1223 | 169 | parser = optparse.OptionParser() | 167 | parser = optparse.OptionParser() |
1224 | 170 | parser.add_option('-c', '--config', action='store', dest='config', | 168 | parser.add_option('-c', '--config', action='store', dest='config', |
Hi Nicola
Looking at the current master branch, this is all now fixed up.
Thanks for the MP