Merge lp:~abentley/workspace-runner/s3-artifacts into lp:workspace-runner
- s3-artifacts
- Merge into trunk
Proposed by
Aaron Bentley
Status: | Merged |
---|---|
Merged at revision: | 21 |
Proposed branch: | lp:~abentley/workspace-runner/s3-artifacts |
Merge into: | lp:workspace-runner |
Prerequisite: | lp:~abentley/workspace-runner/s3-script |
Diff against target: |
352 lines (+168/-36) 4 files modified
upload.yaml (+6/-0) workspace_runner/__init__.py (+64/-6) workspace_runner/tests/__init__.py (+95/-27) workspace_runner/upload_artifacts.py (+3/-3) |
To merge this branch: | bzr merge lp:~abentley/workspace-runner/s3-artifacts |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Curtis Hovey (community) | code | Approve | |
Review via email: mp+263384@code.launchpad.net |
Commit message
Upload artifacts to s3 from workspace runner.
Description of the change
This branch applies the upload_artifacts script.
It updates workspace_run to accept artifact_prefix and --s3-config. It uses the access key and secret key from the s3cmd config file, plus the artifact prefix, plus the 'bucket' and 'artifacts' config values to generate a config file. That file is then used to remotely run upload_artifacts.
To post a comment you must log in.
- 37. By Aaron Bentley
-
Merged trunk into s3-artifacts.
Revision history for this message
Aaron Bentley (abentley) wrote : | # |
AFAICT, the main advantage of SafeConfigParser over RawConfigParser is that SafeConfigParser supports interpolation. I didn't think interpolation was useful for this case, so I went with RawConfigParser.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'upload.yaml' | |||
2 | --- upload.yaml 1970-01-01 00:00:00 +0000 | |||
3 | +++ upload.yaml 2015-06-30 18:41:09 +0000 | |||
4 | @@ -0,0 +1,6 @@ | |||
5 | 1 | command: [ls > foo] | ||
6 | 2 | install: | ||
7 | 3 | docs: [README] | ||
8 | 4 | artifacts: | ||
9 | 5 | ls_output: [foo] | ||
10 | 6 | bucket: ws-runner-test | ||
11 | 0 | 7 | ||
12 | === modified file 'workspace_runner/__init__.py' | |||
13 | --- workspace_runner/__init__.py 2015-06-25 17:05:49 +0000 | |||
14 | +++ workspace_runner/__init__.py 2015-06-30 18:41:09 +0000 | |||
15 | @@ -1,15 +1,20 @@ | |||
16 | 1 | from argparse import ArgumentParser | 1 | from argparse import ArgumentParser |
17 | 2 | from ConfigParser import RawConfigParser | ||
18 | 2 | from contextlib import contextmanager | 3 | from contextlib import contextmanager |
19 | 3 | from itertools import chain | 4 | from itertools import chain |
20 | 5 | import json | ||
21 | 4 | import logging | 6 | import logging |
22 | 5 | from pipes import quote | 7 | from pipes import quote |
23 | 6 | import os | 8 | import os |
24 | 9 | from shutil import rmtree | ||
25 | 7 | import subprocess | 10 | import subprocess |
26 | 8 | import sys | 11 | import sys |
27 | 12 | from tempfile import mkdtemp | ||
28 | 9 | from textwrap import dedent | 13 | from textwrap import dedent |
29 | 10 | 14 | ||
32 | 11 | from yaml import safe_load | 15 | from yaml import ( |
33 | 12 | 16 | safe_load, | |
34 | 17 | ) | ||
35 | 13 | __metaclass__ = type | 18 | __metaclass__ = type |
36 | 14 | 19 | ||
37 | 15 | 20 | ||
38 | @@ -18,9 +23,13 @@ | |||
39 | 18 | parser = ArgumentParser() | 23 | parser = ArgumentParser() |
40 | 19 | parser.add_argument('config', help='Config file to use.') | 24 | parser.add_argument('config', help='Config file to use.') |
41 | 20 | parser.add_argument('host', help='Machine to run the command on.') | 25 | parser.add_argument('host', help='Machine to run the command on.') |
42 | 26 | parser.add_argument('artifact_prefix', nargs='?', | ||
43 | 27 | help='Prefix to use storing artifacts.') | ||
44 | 21 | parser.add_argument('--private-key', '-i', help='Private SSH key to use.') | 28 | parser.add_argument('--private-key', '-i', help='Private SSH key to use.') |
45 | 22 | parser.add_argument('--verbose', '-v', help='Verbose output.', | 29 | parser.add_argument('--verbose', '-v', help='Verbose output.', |
46 | 23 | action='store_true') | 30 | action='store_true') |
47 | 31 | parser.add_argument('--s3-config', | ||
48 | 32 | help='s3cmd config file for credentials.') | ||
49 | 24 | return parser.parse_args(argv) | 33 | return parser.parse_args(argv) |
50 | 25 | 34 | ||
51 | 26 | 35 | ||
52 | @@ -185,7 +194,49 @@ | |||
53 | 185 | primitives.destroy() | 194 | primitives.destroy() |
54 | 186 | 195 | ||
55 | 187 | 196 | ||
57 | 188 | def workspace_run(argv=None, primitives_factory=SSHPrimitives): | 197 | @contextmanager |
58 | 198 | def temp_dir(): | ||
59 | 199 | temp_dir = mkdtemp() | ||
60 | 200 | try: | ||
61 | 201 | yield temp_dir | ||
62 | 202 | finally: | ||
63 | 203 | rmtree(temp_dir) | ||
64 | 204 | |||
65 | 205 | |||
66 | 206 | @contextmanager | ||
67 | 207 | def temp_config(config): | ||
68 | 208 | with temp_dir() as config_dir: | ||
69 | 209 | config_filename = os.path.join(config_dir, 'upload.json') | ||
70 | 210 | with open(config_filename, 'w') as config_file: | ||
71 | 211 | json.dump(config, config_file) | ||
72 | 212 | config_file.flush() | ||
73 | 213 | yield config_filename | ||
74 | 214 | |||
75 | 215 | |||
76 | 216 | def run_from_config(primitives, config, s3_config, artifact_prefix, output): | ||
77 | 217 | """Install the files and run the command.""" | ||
78 | 218 | for target, sources in config['install'].items(): | ||
79 | 219 | primitives.install(sources, target) | ||
80 | 220 | if 'artifacts' in config: | ||
81 | 221 | import upload_artifacts | ||
82 | 222 | sources = [upload_artifacts.__file__.replace('.pyc', '.py')] | ||
83 | 223 | upload_config = { | ||
84 | 224 | 'bucket': config['bucket'], | ||
85 | 225 | 'files': config['artifacts'], | ||
86 | 226 | 'prefix': artifact_prefix, | ||
87 | 227 | } | ||
88 | 228 | upload_config.update(s3_config) | ||
89 | 229 | with temp_config(upload_config) as upload_config_filename: | ||
90 | 230 | sources.append(upload_config_filename) | ||
91 | 231 | primitives.install(sources, '.wsr') | ||
92 | 232 | primitives.run(config['command'], output) | ||
93 | 233 | if 'artifacts' in config: | ||
94 | 234 | primitives.run(['python', '.wsr/upload_artifacts.py', | ||
95 | 235 | '.wsr/upload.json', primitives.workspace], output) | ||
96 | 236 | |||
97 | 237 | |||
98 | 238 | def workspace_run(argv=None, primitives_factory=SSHPrimitives, | ||
99 | 239 | output=sys.stdout): | ||
100 | 189 | """Run an operation in a workspace.""" | 240 | """Run an operation in a workspace.""" |
101 | 190 | args = parse_args(argv) | 241 | args = parse_args(argv) |
102 | 191 | if args.verbose: | 242 | if args.verbose: |
103 | @@ -195,8 +246,15 @@ | |||
104 | 195 | logging.basicConfig(level=level) | 246 | logging.basicConfig(level=level) |
105 | 196 | with open(args.config) as config_file: | 247 | with open(args.config) as config_file: |
106 | 197 | config = safe_load(config_file) | 248 | config = safe_load(config_file) |
107 | 249 | s3_config = None | ||
108 | 250 | if args.s3_config is not None: | ||
109 | 251 | config_parser = RawConfigParser() | ||
110 | 252 | config_parser.read(args.s3_config) | ||
111 | 253 | s3_config = { | ||
112 | 254 | 'access_key': config_parser.get('default', 'access_key'), | ||
113 | 255 | 'secret_key': config_parser.get('default', 'secret_key'), | ||
114 | 256 | } | ||
115 | 198 | with workspace_context(args.host, args.private_key, | 257 | with workspace_context(args.host, args.private_key, |
116 | 199 | primitives_factory) as runner: | 258 | primitives_factory) as runner: |
120 | 200 | for target, sources in config['install'].items(): | 259 | run_from_config(runner, config, s3_config, args.artifact_prefix, |
121 | 201 | runner.install(sources, target) | 260 | output) |
119 | 202 | runner.run(config['command'], sys.stdout) | ||
122 | 203 | 261 | ||
123 | === modified file 'workspace_runner/tests/__init__.py' | |||
124 | --- workspace_runner/tests/__init__.py 2015-06-25 17:05:49 +0000 | |||
125 | +++ workspace_runner/tests/__init__.py 2015-06-30 18:41:09 +0000 | |||
126 | @@ -1,5 +1,6 @@ | |||
127 | 1 | from argparse import Namespace | 1 | from argparse import Namespace |
128 | 2 | from contextlib import contextmanager | 2 | from contextlib import contextmanager |
129 | 3 | import json | ||
130 | 3 | import os | 4 | import os |
131 | 4 | import logging | 5 | import logging |
132 | 5 | from mock import patch | 6 | from mock import patch |
133 | @@ -9,6 +10,7 @@ | |||
134 | 9 | rmtree, | 10 | rmtree, |
135 | 10 | ) | 11 | ) |
136 | 11 | from StringIO import StringIO | 12 | from StringIO import StringIO |
137 | 13 | import sys | ||
138 | 12 | from tempfile import ( | 14 | from tempfile import ( |
139 | 13 | mkdtemp, | 15 | mkdtemp, |
140 | 14 | NamedTemporaryFile, | 16 | NamedTemporaryFile, |
141 | @@ -16,7 +18,6 @@ | |||
142 | 16 | from textwrap import dedent | 18 | from textwrap import dedent |
143 | 17 | from unittest import TestCase | 19 | from unittest import TestCase |
144 | 18 | import subprocess | 20 | import subprocess |
145 | 19 | |||
146 | 20 | from yaml import safe_dump | 21 | from yaml import safe_dump |
147 | 21 | 22 | ||
148 | 22 | from workspace_runner import ( | 23 | from workspace_runner import ( |
149 | @@ -25,26 +26,21 @@ | |||
150 | 25 | retry_ssh, | 26 | retry_ssh, |
151 | 26 | SSHConnection, | 27 | SSHConnection, |
152 | 27 | SSHPrimitives, | 28 | SSHPrimitives, |
153 | 29 | temp_config, | ||
154 | 30 | temp_dir, | ||
155 | 31 | run_from_config, | ||
156 | 28 | workspace_context, | 32 | workspace_context, |
157 | 29 | workspace_run, | 33 | workspace_run, |
158 | 30 | ) | 34 | ) |
159 | 31 | 35 | ||
160 | 32 | 36 | ||
161 | 33 | @contextmanager | ||
162 | 34 | def temp_dir(): | ||
163 | 35 | temp_dir = mkdtemp() | ||
164 | 36 | try: | ||
165 | 37 | yield temp_dir | ||
166 | 38 | finally: | ||
167 | 39 | rmtree(temp_dir) | ||
168 | 40 | |||
169 | 41 | |||
170 | 42 | class TestParseArgs(TestCase): | 37 | class TestParseArgs(TestCase): |
171 | 43 | 38 | ||
172 | 44 | def test_minimal(self): | 39 | def test_minimal(self): |
173 | 45 | args = parse_args(['foo', 'bar']) | 40 | args = parse_args(['foo', 'bar']) |
176 | 46 | self.assertEqual(args, Namespace(config='foo', host='bar', | 41 | self.assertEqual(args, Namespace( |
177 | 47 | private_key=None, verbose=False)) | 42 | config='foo', host='bar', private_key=None, verbose=False, |
178 | 43 | s3_config=None, artifact_prefix=None)) | ||
179 | 48 | 44 | ||
180 | 49 | def test_private_key(self): | 45 | def test_private_key(self): |
181 | 50 | args = parse_args(['foo', 'bar', '--private-key', 'key']) | 46 | args = parse_args(['foo', 'bar', '--private-key', 'key']) |
182 | @@ -58,6 +54,10 @@ | |||
183 | 58 | args = parse_args(['foo', 'bar', '-v']) | 54 | args = parse_args(['foo', 'bar', '-v']) |
184 | 59 | self.assertEqual(args.verbose, True) | 55 | self.assertEqual(args.verbose, True) |
185 | 60 | 56 | ||
186 | 57 | def test_s3_config(self): | ||
187 | 58 | args = parse_args(['foo', 'bar', '--s3-config', 'foobar']) | ||
188 | 59 | self.assertEqual(args.s3_config, 'foobar') | ||
189 | 60 | |||
190 | 61 | 61 | ||
191 | 62 | class FakePrimitives(Primitives): | 62 | class FakePrimitives(Primitives): |
192 | 63 | 63 | ||
193 | @@ -364,26 +364,75 @@ | |||
194 | 364 | self.assertFalse(os.path.exists(primitives.workspace)) | 364 | self.assertFalse(os.path.exists(primitives.workspace)) |
195 | 365 | 365 | ||
196 | 366 | 366 | ||
197 | 367 | class TestRunFromConfig(TestCase): | ||
198 | 368 | |||
199 | 369 | def test_minimal(self): | ||
200 | 370 | config = { | ||
201 | 371 | 'command': ['run', 'this'], | ||
202 | 372 | 'install': {}, | ||
203 | 373 | } | ||
204 | 374 | with workspace_context('foo', None, FakePrimitives) as primitives: | ||
205 | 375 | run_from_config(primitives, config, None, None, StringIO()) | ||
206 | 376 | self.assertEqual(primitives.run_calls, [['run', 'this']]) | ||
207 | 377 | self.assertEqual(primitives.walk_results, | ||
208 | 378 | [(primitives.workspace, [], [])]) | ||
209 | 379 | |||
210 | 380 | def test_s3_upload(self): | ||
211 | 381 | config = { | ||
212 | 382 | 'command': ['run', 'this'], | ||
213 | 383 | 'install': {}, | ||
214 | 384 | 'artifacts': {'foo': ['bar']}, | ||
215 | 385 | 'bucket': 'bucket1', | ||
216 | 386 | } | ||
217 | 387 | s3_config = {'access_key': 'access1', 'secret_key': 'secret1'} | ||
218 | 388 | with workspace_context('foo', None, FakePrimitives) as primitives: | ||
219 | 389 | run_from_config(primitives, config, s3_config, 'prefix/midfix', | ||
220 | 390 | StringIO()) | ||
221 | 391 | upload_json_path = os.path.join(primitives.workspace, '.wsr', | ||
222 | 392 | 'upload.json') | ||
223 | 393 | with open(upload_json_path) as remote_config_file: | ||
224 | 394 | remote_config = json.load(remote_config_file) | ||
225 | 395 | self.assertEqual(remote_config, { | ||
226 | 396 | 'access_key': 'access1', | ||
227 | 397 | 'secret_key': 'secret1', | ||
228 | 398 | 'bucket': 'bucket1', | ||
229 | 399 | 'files': {'foo': ['bar']}, | ||
230 | 400 | 'prefix': 'prefix/midfix', | ||
231 | 401 | }) | ||
232 | 402 | self.assertEqual(primitives.run_calls, [ | ||
233 | 403 | ['run', 'this'], | ||
234 | 404 | ['python', '.wsr/upload_artifacts.py', '.wsr/upload.json', | ||
235 | 405 | primitives.workspace], | ||
236 | 406 | ]) | ||
237 | 407 | self.assertEqual(primitives.walk_results, [ | ||
238 | 408 | (primitives.workspace, ['.wsr'], []), | ||
239 | 409 | (primitives.workspace + '/.wsr', [], [ | ||
240 | 410 | 'upload.json', 'upload_artifacts.py']) | ||
241 | 411 | ]) | ||
242 | 412 | |||
243 | 413 | |||
244 | 367 | class TestWorkspaceRun(TestCase): | 414 | class TestWorkspaceRun(TestCase): |
245 | 368 | 415 | ||
246 | 369 | @contextmanager | 416 | @contextmanager |
247 | 370 | def config_file(self): | 417 | def config_file(self): |
254 | 371 | with NamedTemporaryFile() as config_file: | 418 | config = { |
255 | 372 | safe_dump({ | 419 | 'command': ['run', 'this'], |
256 | 373 | 'command': ['run', 'this'], | 420 | 'install': {}, |
257 | 374 | 'install': {}, | 421 | } |
258 | 375 | }, config_file) | 422 | with temp_config(config) as config_file_name: |
259 | 376 | yield config_file | 423 | yield config_file_name |
260 | 377 | 424 | ||
262 | 378 | def run_primitives(self, args): | 425 | def run_primitives(self, args, output=None): |
263 | 379 | fp_factory = FakePrimitivesFactory() | 426 | fp_factory = FakePrimitivesFactory() |
265 | 380 | workspace_run(args, fp_factory) | 427 | if output is None: |
266 | 428 | output = StringIO() | ||
267 | 429 | workspace_run(args, fp_factory, output=output) | ||
268 | 381 | return fp_factory.last_instance | 430 | return fp_factory.last_instance |
269 | 382 | 431 | ||
270 | 383 | def test_minimal(self): | 432 | def test_minimal(self): |
272 | 384 | with self.config_file() as config_file: | 433 | with self.config_file() as config_file_name: |
273 | 385 | with patch('logging.root.handlers', []): | 434 | with patch('logging.root.handlers', []): |
275 | 386 | primitives = self.run_primitives([config_file.name, 'bar']) | 435 | primitives = self.run_primitives([config_file_name, 'bar']) |
276 | 387 | self.assertEqual(logging.getLogger().getEffectiveLevel(), | 436 | self.assertEqual(logging.getLogger().getEffectiveLevel(), |
277 | 388 | logging.WARNING) | 437 | logging.WARNING) |
278 | 389 | self.assertEqual(primitives.run_calls, [['run', 'this']]) | 438 | self.assertEqual(primitives.run_calls, [['run', 'this']]) |
279 | @@ -392,9 +441,9 @@ | |||
280 | 392 | [(primitives.workspace, [], [])]) | 441 | [(primitives.workspace, [], [])]) |
281 | 393 | 442 | ||
282 | 394 | def test_private_key(self): | 443 | def test_private_key(self): |
284 | 395 | with self.config_file() as config_file: | 444 | with self.config_file() as config_file_name: |
285 | 396 | primitives = self.run_primitives( | 445 | primitives = self.run_primitives( |
287 | 397 | [config_file.name, 'bar', '-i', 'qux']) | 446 | [config_file_name, 'bar', '-i', 'qux']) |
288 | 398 | self.assertEqual(primitives.ssh_connection.private_key, 'qux') | 447 | self.assertEqual(primitives.ssh_connection.private_key, 'qux') |
289 | 399 | 448 | ||
290 | 400 | def test_install(self): | 449 | def test_install(self): |
291 | @@ -407,7 +456,7 @@ | |||
292 | 407 | 'install': {'bin-dir': [install_file.name]}, | 456 | 'install': {'bin-dir': [install_file.name]}, |
293 | 408 | }, config_file) | 457 | }, config_file) |
294 | 409 | workspace_run([config_file.name, 'bar'], | 458 | workspace_run([config_file.name, 'bar'], |
296 | 410 | fp_factory) | 459 | fp_factory, StringIO()) |
297 | 411 | primitives = fp_factory.last_instance | 460 | primitives = fp_factory.last_instance |
298 | 412 | bin_path = os.path.join(primitives.workspace, 'bin-dir') | 461 | bin_path = os.path.join(primitives.workspace, 'bin-dir') |
299 | 413 | install_base = os.path.basename(install_file.name) | 462 | install_base = os.path.basename(install_file.name) |
300 | @@ -417,8 +466,27 @@ | |||
301 | 417 | ]) | 466 | ]) |
302 | 418 | 467 | ||
303 | 419 | def test_verbose(self): | 468 | def test_verbose(self): |
305 | 420 | with self.config_file() as config_file: | 469 | with self.config_file() as config_file_name: |
306 | 421 | with patch('logging.root.handlers', []): | 470 | with patch('logging.root.handlers', []): |
308 | 422 | self.run_primitives([config_file.name, 'bar', '-v']) | 471 | self.run_primitives([config_file_name, 'bar', '-v']) |
309 | 423 | self.assertEqual(logging.getLogger().getEffectiveLevel(), | 472 | self.assertEqual(logging.getLogger().getEffectiveLevel(), |
310 | 424 | logging.INFO) | 473 | logging.INFO) |
311 | 474 | |||
312 | 475 | def test_s3config(self): | ||
313 | 476 | config = {'command': ['run', 'this'], 'install': {}} | ||
314 | 477 | credentials = {'access_key': 'foobar', 'secret_key': 'barfoo'} | ||
315 | 478 | with NamedTemporaryFile() as s3_config_file: | ||
316 | 479 | s3_config_file.write(dedent("""\ | ||
317 | 480 | [default] | ||
318 | 481 | access_key = foobar | ||
319 | 482 | secret_key = barfoo | ||
320 | 483 | """)) | ||
321 | 484 | s3_config_file.flush() | ||
322 | 485 | with self.config_file() as config_file_name: | ||
323 | 486 | with patch('workspace_runner.run_from_config', | ||
324 | 487 | autospec=True) as rfc_mock: | ||
325 | 488 | argv = [config_file_name, 'bar', | ||
326 | 489 | '--s3-config', s3_config_file.name] | ||
327 | 490 | primitives = self.run_primitives(argv, output=sys.stdout) | ||
328 | 491 | rfc_mock.assert_called_once_with( | ||
329 | 492 | primitives, config, credentials, None, sys.stdout) | ||
330 | 425 | 493 | ||
331 | === modified file 'workspace_runner/upload_artifacts.py' | |||
332 | --- workspace_runner/upload_artifacts.py 2015-06-30 18:41:09 +0000 | |||
333 | +++ workspace_runner/upload_artifacts.py 2015-06-30 18:41:09 +0000 | |||
334 | @@ -11,8 +11,8 @@ | |||
335 | 11 | 11 | ||
336 | 12 | def parse_args(argv=None): | 12 | def parse_args(argv=None): |
337 | 13 | parser = ArgumentParser() | 13 | parser = ArgumentParser() |
340 | 14 | parser.add_argument('artifacts_file') | 14 | parser.add_argument('artifacts_file', help='Configuration file.') |
341 | 15 | parser.add_argument('root') | 15 | parser.add_argument('root', help='The root directory to upload from.') |
342 | 16 | return parser.parse_args(argv) | 16 | return parser.parse_args(argv) |
343 | 17 | 17 | ||
344 | 18 | 18 | ||
345 | @@ -40,7 +40,7 @@ | |||
346 | 40 | args = parse_args(argv) | 40 | args = parse_args(argv) |
347 | 41 | # Use JSON rather than YAML because a program will emit it and no external | 41 | # Use JSON rather than YAML because a program will emit it and no external |
348 | 42 | # libs required. | 42 | # libs required. |
350 | 43 | with file(args.artifacts_file) as settings_file: | 43 | with open(args.artifacts_file) as settings_file: |
351 | 44 | settings = json.load(settings_file) | 44 | settings = json.load(settings_file) |
352 | 45 | upload_artifacts(args.root, settings) | 45 | upload_artifacts(args.root, settings) |
353 | 46 | 46 |
Thank you. I have a question/suggestion inline.