Merge lp:~abentley/workspace-runner/s3-script into lp:workspace-runner

Proposed by Aaron Bentley on 2015-06-30
Status: Merged
Merged at revision: 20
Proposed branch: lp:~abentley/workspace-runner/s3-script
Merge into: lp:workspace-runner
Diff against target: 181 lines (+172/-0)
2 files modified
workspace_runner/tests/test_upload_artifacts.py (+123/-0)
workspace_runner/upload_artifacts.py (+49/-0)
To merge this branch: bzr merge lp:~abentley/workspace-runner/s3-script
Reviewer Review Type Date Requested Status
Curtis Hovey (community) code Approve on 2015-06-30
John George 2015-06-30 Approve on 2015-06-30
Review via email: mp+263383@code.launchpad.net

Commit message

Provide a script that uploads to s3.

Description of the change

This branch provides the upload_artifacts script, which will typically be run remotely, but is tested locally.

It uses a JSON file for data. (JSON so that it doesn't require external dependencies.)

It doesn't currently pay attention to MIME types, but I plan to address that in a future branch.

In the follow-on branch, the workspace runner is taught to dump this JSON, upload it and the script to the remote machine, and run the script.

To post a comment you must log in.
John George (jog) wrote :

Would you please add help= to add_arguments() in upload_artifacts.py, other than that it looks good.

review: Approve
Curtis Hovey (sinzui) wrote :

Thank you. I have a suggestion inline

review: Approve (code)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== added file 'workspace_runner/tests/test_upload_artifacts.py'
2--- workspace_runner/tests/test_upload_artifacts.py 1970-01-01 00:00:00 +0000
3+++ workspace_runner/tests/test_upload_artifacts.py 2015-06-30 16:28:18 +0000
4@@ -0,0 +1,123 @@
5+from argparse import Namespace
6+import json
7+import os
8+from tempfile import NamedTemporaryFile
9+from unittest import TestCase
10+
11+from mock import patch
12+
13+from workspace_runner.tests import temp_dir
14+from workspace_runner.upload_artifacts import (
15+ main,
16+ parse_args,
17+ resolve_rules,
18+ upload_artifacts,
19+ )
20+
21+
class TestParseArgs(TestCase):
    """Tests for command-line argument parsing."""

    def test_minimal(self):
        namespace = parse_args(['artifacts1', 'root1'])
        expected = Namespace(artifacts_file='artifacts1', root='root1')
        self.assertEqual(namespace, expected)
28+
29+
class TestResolveRules(TestCase):
    """Tests for resolve_rules()."""

    def test_resolve_rules(self):
        rules = {
            'text-files': ['*.txt', '*.rst'],
            'foo-files': ['foo.*'],
            'subdir-files': ['subdir/*'],
        }
        names = ['foo.txt', 'bar.gif', 'foo.gif', 'subdir/baz.txt',
                 'qux.rst']
        with temp_dir() as files_dir:
            os.mkdir(os.path.join(files_dir, 'subdir'))
            for name in names:
                open(os.path.join(files_dir, name), 'w').close()
            actual = resolve_rules(files_dir, rules)
            # Every (source path, destination url) pair the rules select.
            self.assertEqual(actual, {
                (os.path.join(files_dir, 'foo.txt'), 'text-files/foo.txt'),
                (os.path.join(files_dir, 'qux.rst'), 'text-files/qux.rst'),
                (os.path.join(files_dir, 'foo.txt'), 'foo-files/foo.txt'),
                (os.path.join(files_dir, 'foo.gif'), 'foo-files/foo.gif'),
                (os.path.join(files_dir, 'subdir/baz.txt'),
                 'subdir-files/baz.txt'),
            })
54+
55+
class FakeS3Connection:
    """Stand-in for boto's S3Connection that records its credentials."""

    def __init__(self, access_key, secret_key):
        self.access_key = access_key
        self.secret_key = secret_key
        # Maps bucket name -> FakeBucket, created lazily by get_bucket().
        self.buckets = {}

    def get_bucket(self, bucket_name):
        """Return the named bucket, creating a FakeBucket on first use."""
        if bucket_name not in self.buckets:
            self.buckets[bucket_name] = FakeBucket()
        return self.buckets[bucket_name]
65+
66+
class FakeBucket:
    """Stand-in for a boto S3 bucket that records the keys it creates."""

    def __init__(self):
        # Every key created via new_key(), in creation order.
        self.keys = []

    def new_key(self, path):
        """Create, record, and return a FakeKey for the given path."""
        key = FakeKey(path)
        self.keys.append(key)
        return key
75+
76+
class FakeKey:
    """Stand-in for a boto S3 key that captures uploaded contents."""

    def __init__(self, path):
        # Remains None until set_contents_from_file() is called.
        self.contents = None
        self.path = path

    def set_contents_from_file(self, fp):
        """Read everything from fp and store it as this key's contents."""
        self.contents = fp.read()
85+
86+
class TestUploadArtifacts(TestCase):
    """Tests for upload_artifacts()."""

    def test_upload_artifacts(self):
        settings = {
            'access_key': 'baz',
            'secret_key': 'qux',
            'bucket': 'my-pale-bucket',
            'files': {'text-files': ['*.txt', '*.rst']},
            'prefix': 'prefix1',
        }
        connections = []

        def fake_factory(*args, **kwargs):
            # Record each connection so assertions can inspect it later.
            connection = FakeS3Connection(*args, **kwargs)
            connections.append(connection)
            return connection

        with temp_dir() as files_dir:
            with open(os.path.join(files_dir, 'foo.txt'), 'w') as foo_file:
                foo_file.write('foo file')
            upload_artifacts(files_dir, settings, s3_factory=fake_factory)
            # One connection, built from the credentials in settings.
            self.assertEqual(len(connections), 1)
            connection = connections[0]
            self.assertEqual(connection.access_key, 'baz')
            self.assertEqual(connection.secret_key, 'qux')
            bucket = connection.buckets['my-pale-bucket']
            key = bucket.keys[0]
            # Destination is prefix/rule-name/basename.
            self.assertEqual(key.path, 'prefix1/text-files/foo.txt')
            self.assertEqual(key.contents, 'foo file')
116+
117+
class TestMain(TestCase):
    """Tests for the main() entry point."""

    def test_minimal(self):
        # Open the temp file in text mode: NamedTemporaryFile defaults to
        # 'w+b', and json.dump writes str, which a binary file rejects on
        # Python 3.
        with NamedTemporaryFile(mode='w') as settings_file:
            json.dump({'foo': 'bar'}, settings_file)
            settings_file.flush()
            with patch('workspace_runner.upload_artifacts.upload_artifacts',
                       autospec=True) as ua_mock:
                main([settings_file.name, 'root1'])
            # main() passes the root and the decoded settings through.
            ua_mock.assert_called_once_with('root1', {'foo': 'bar'})
128
129=== added file 'workspace_runner/upload_artifacts.py'
130--- workspace_runner/upload_artifacts.py 1970-01-01 00:00:00 +0000
131+++ workspace_runner/upload_artifacts.py 2015-06-30 16:28:18 +0000
132@@ -0,0 +1,49 @@
133+#!/usr/bin/env python
134+# NOTE: This script is typically run on the REMOTE machine.
135+from argparse import ArgumentParser
136+from glob import glob
137+import json
138+import os
139+import sys
140+
141+from boto.s3.connection import S3Connection
142+
143+
def parse_args(argv=None):
    """Parse command-line arguments for the artifact uploader.

    :param argv: Argument list to parse; defaults to sys.argv[1:].
    :return: an argparse.Namespace with artifacts_file and root.
    """
    parser = ArgumentParser(
        description='Upload workspace artifacts to S3.')
    parser.add_argument(
        'artifacts_file',
        help='JSON file of upload settings (credentials, bucket, prefix,'
             ' file rules).')
    parser.add_argument(
        'root', help='Root directory that file globs are resolved against.')
    return parser.parse_args(argv)
149+
150+
def resolve_rules(root, rules):
    """Return the set of (source path, destination url) pairs to upload.

    :param root: Directory that glob patterns are resolved against.
    :param rules: Mapping of destination directory name to a list of glob
        patterns selecting the files to upload there.
    """
    return {
        (source, '/'.join([destination, os.path.basename(source)]))
        for destination, patterns in rules.items()
        for pattern in patterns
        for source in glob(os.path.join(root, pattern))
        }
160+
161+
def upload_artifacts(root, settings, s3_factory=S3Connection):
    """Upload the files selected by settings['files'] to an S3 bucket.

    :param root: Directory containing the files to upload.
    :param settings: Dict providing access_key, secret_key, bucket,
        prefix, and files (the rules understood by resolve_rules).
    :param s3_factory: Callable producing an S3 connection from
        (access_key, secret_key); overridable for testing.
    """
    connection = s3_factory(settings['access_key'], settings['secret_key'])
    bucket = connection.get_bucket(settings['bucket'])
    prefix = settings['prefix']
    for source, destination in resolve_rules(root, settings['files']):
        key = bucket.new_key('/'.join((prefix, destination)))
        with open(source) as source_file:
            key.set_contents_from_file(source_file)
169+
170+
def main(argv=None):
    """Load upload settings from the artifacts file and upload artifacts.

    :param argv: Argument list to parse; defaults to sys.argv[1:].
    """
    args = parse_args(argv)
    # Use JSON rather than YAML because a program will emit it and no
    # external libs are required.
    # open(), not the Python-2-only file() builtin, so the script also
    # runs on Python 3.
    with open(args.artifacts_file) as settings_file:
        settings = json.load(settings_file)
    upload_artifacts(args.root, settings)


if __name__ == '__main__':
    sys.exit(main())

Subscribers

People subscribed via source and target branches