Merge lp:~abentley/workspace-runner/s3-script into lp:workspace-runner

Proposed by Aaron Bentley
Status: Merged
Merged at revision: 20
Proposed branch: lp:~abentley/workspace-runner/s3-script
Merge into: lp:workspace-runner
Diff against target: 181 lines (+172/-0)
2 files modified
workspace_runner/tests/test_upload_artifacts.py (+123/-0)
workspace_runner/upload_artifacts.py (+49/-0)
To merge this branch: bzr merge lp:~abentley/workspace-runner/s3-script
Reviewer Review Type Date Requested Status
Curtis Hovey (community) code Approve
John George (community) Approve
Review via email: mp+263383@code.launchpad.net

Commit message

Provide a script that uploads to s3.

Description of the change

This branch provides the upload_artifacts script, which will typically be run remotely, but is tested locally.

It uses a JSON file for data. (JSON so that it doesn't require external dependencies.)

It doesn't currently pay attention to MIME types, but I plan to address that in a future branch.

In the follow-on branch, the workspace runner is taught to dump this JSON, upload it and the script to the remote machine, and run the script.

To post a comment you must log in.
Revision history for this message
John George (jog) wrote :

Would you please add help= to add_arguments() in upload_artifacts.py, other than that it looks good.

review: Approve
Revision history for this message
Curtis Hovey (sinzui) wrote :

Thank you. I have a suggestion inline.

review: Approve (code)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== added file 'workspace_runner/tests/test_upload_artifacts.py'
--- workspace_runner/tests/test_upload_artifacts.py 1970-01-01 00:00:00 +0000
+++ workspace_runner/tests/test_upload_artifacts.py 2015-06-30 16:28:18 +0000
@@ -0,0 +1,123 @@
1from argparse import Namespace
2import json
3import os
4from tempfile import NamedTemporaryFile
5from unittest import TestCase
6
7from mock import patch
8
9from workspace_runner.tests import temp_dir
10from workspace_runner.upload_artifacts import (
11 main,
12 parse_args,
13 resolve_rules,
14 upload_artifacts,
15 )
16
17
class TestParseArgs(TestCase):
    """Tests for the command-line argument parser."""

    def test_minimal(self):
        # Only the two required positional arguments are supplied.
        args = parse_args(['artifacts1', 'root1'])
        expected = Namespace(artifacts_file='artifacts1', root='root1')
        self.assertEqual(expected, args)
24
25
class TestResolveRules(TestCase):
    """Tests for resolve_rules, which maps glob rules to upload pairs."""

    def test_resolve_rules(self):
        # Rules map a destination directory to a list of glob patterns.
        rules = {
            'text-files': ['*.txt', '*.rst'],
            'foo-files': ['foo.*'],
            'subdir-files': ['subdir/*'],
        }
        to_create = ['foo.txt', 'bar.gif', 'foo.gif', 'subdir/baz.txt',
                     'qux.rst']
        with temp_dir() as files_dir:
            os.mkdir(os.path.join(files_dir, 'subdir'))
            # Empty files are enough; only names are matched.
            for name in to_create:
                open(os.path.join(files_dir, name), 'w').close()
            result = resolve_rules(files_dir, rules)
            expected = {
                (os.path.join(files_dir, 'foo.txt'), 'text-files/foo.txt'),
                (os.path.join(files_dir, 'qux.rst'), 'text-files/qux.rst'),
                (os.path.join(files_dir, 'foo.txt'), 'foo-files/foo.txt'),
                (os.path.join(files_dir, 'foo.gif'), 'foo-files/foo.gif'),
                (os.path.join(files_dir, 'subdir/baz.txt'),
                 'subdir-files/baz.txt'),
            }
            self.assertEqual(expected, result)
50
51
class FakeS3Connection:
    """Test double standing in for boto's S3Connection."""

    def __init__(self, access_key, secret_key):
        # Record the credentials so tests can verify what was passed.
        self.access_key = access_key
        self.secret_key = secret_key
        # Maps bucket name -> FakeBucket, created lazily on first access.
        self.buckets = {}

    def get_bucket(self, bucket_name):
        """Return the named bucket, creating it on first access."""
        if bucket_name not in self.buckets:
            self.buckets[bucket_name] = FakeBucket()
        return self.buckets[bucket_name]
61
62
class FakeBucket:
    """Test double standing in for a boto S3 bucket."""

    def __init__(self):
        # Every key ever created via new_key, in creation order.
        self.keys = []

    def new_key(self, path):
        """Create, record, and return a new FakeKey for path."""
        key = FakeKey(path)
        self.keys.append(key)
        return key
71
72
class FakeKey:
    """Test double standing in for a boto S3 key."""

    def __init__(self, path):
        self.path = path
        # Filled in by set_contents_from_file; None until then.
        self.contents = None

    def set_contents_from_file(self, fp):
        """Remember the entire contents of the open file fp."""
        self.contents = fp.read()
81
82
class TestUploadArtifacts(TestCase):
    """End-to-end test of upload_artifacts against fake S3 objects."""

    def test_upload_artifacts(self):
        settings = {
            'access_key': 'baz',
            'secret_key': 'qux',
            'bucket': 'my-pale-bucket',
            'files': {'text-files': ['*.txt', '*.rst']},
            'prefix': 'prefix1',
        }
        connections = []

        def fake_factory(*args, **kwargs):
            # Capture every connection made so assertions can inspect it.
            connection = FakeS3Connection(*args, **kwargs)
            connections.append(connection)
            return connection

        with temp_dir() as files_dir:
            with open(os.path.join(files_dir, 'foo.txt'), 'w') as foo_file:
                foo_file.write('foo file')
            upload_artifacts(files_dir, settings, s3_factory=fake_factory)
            self.assertEqual(len(connections), 1)
            connection = connections[0]
            self.assertEqual(connection.access_key, 'baz')
            self.assertEqual(connection.secret_key, 'qux')
            bucket = connection.buckets['my-pale-bucket']
            key = bucket.keys[0]
            self.assertEqual(key.path, 'prefix1/text-files/foo.txt')
            self.assertEqual(key.contents, 'foo file')
112
113
class TestMain(TestCase):
    """Tests for the main entry point."""

    def test_minimal(self):
        # mode='w' is required: json.dump writes str, and the default
        # NamedTemporaryFile mode is 'w+b' (binary), which raises
        # TypeError on Python 3.
        with NamedTemporaryFile(mode='w') as settings_file:
            json.dump({'foo': 'bar'}, settings_file)
            settings_file.flush()
            # Patch out the real uploader; main should forward the root
            # and the parsed JSON settings.
            with patch('workspace_runner.upload_artifacts.upload_artifacts',
                       autospec=True) as ua_mock:
                main([settings_file.name, 'root1'])
            ua_mock.assert_called_once_with('root1', {'foo': 'bar'})
0124
=== added file 'workspace_runner/upload_artifacts.py'
--- workspace_runner/upload_artifacts.py 1970-01-01 00:00:00 +0000
+++ workspace_runner/upload_artifacts.py 2015-06-30 16:28:18 +0000
@@ -0,0 +1,49 @@
1#!/usr/bin/env python
2# NOTE: This script is typically run on the REMOTE machine.
3from argparse import ArgumentParser
4from glob import glob
5import json
6import os
7import sys
8
9from boto.s3.connection import S3Connection
10
11
def parse_args(argv=None):
    """Parse command-line arguments.

    :param argv: argument list to parse; None means sys.argv[1:].
    :return: a Namespace with artifacts_file and root attributes.
    """
    parser = ArgumentParser(
        description='Upload files matching artifact rules to S3.')
    # help= strings added per review request (jog).
    parser.add_argument(
        'artifacts_file',
        help='JSON file containing S3 settings and artifact file rules.')
    parser.add_argument(
        'root', help='Directory to search for artifact files.')
    return parser.parse_args(argv)
17
18
def resolve_rules(root, rules):
    """Resolve glob rules into (source path, destination URL) pairs.

    :param root: directory whose files the glob patterns match against.
    :param rules: mapping of destination directory name to a list of glob
        patterns relative to root.
    :return: a set of (local filename, destination url) tuples.
    """
    return {
        (filename, '{}/{}'.format(destination, os.path.basename(filename)))
        for destination, patterns in rules.items()
        for pattern in patterns
        for filename in glob(os.path.join(root, pattern))
    }
28
29
def upload_artifacts(root, settings, s3_factory=S3Connection):
    """Upload files under root to an S3 bucket according to settings.

    :param root: directory containing the files to upload.
    :param settings: dict providing access_key, secret_key, bucket, prefix
        and files (a rules mapping as accepted by resolve_rules).
    :param s3_factory: callable building the S3 connection; overridable so
        tests can substitute a fake.
    """
    connection = s3_factory(settings['access_key'], settings['secret_key'])
    bucket = connection.get_bucket(settings['bucket'])
    prefix = settings['prefix']
    for source_name, dest in resolve_rules(root, settings['files']):
        key = bucket.new_key('{}/{}'.format(prefix, dest))
        with open(source_name) as source:
            key.set_contents_from_file(source)
37
38
def main(argv=None):
    """Entry point: load JSON settings and upload artifacts.

    :param argv: argument list; None means sys.argv[1:].
    """
    args = parse_args(argv)
    # Use JSON rather than YAML because a program will emit it and no external
    # libs required.
    # open() rather than file(): the file() builtin is Python 2 only and was
    # removed in Python 3.
    with open(args.artifacts_file) as settings_file:
        settings = json.load(settings_file)
    upload_artifacts(args.root, settings)
46
47
# Allow use both as an importable module and as a stand-alone script.
if __name__ == '__main__':
    sys.exit(main())

Subscribers

People subscribed via source and target branches