Merge lp:~abentley/juju-core/update-streams into lp:~juju-qa/juju-core/cd-release-juju

Proposed by Aaron Bentley
Status: Merged
Merged at revision: 274
Proposed branch: lp:~abentley/juju-core/update-streams
Merge into: lp:~juju-qa/juju-core/cd-release-juju
Diff against target: 836 lines (+581/-131)
9 files modified
download_agents.py (+3/-1)
release-juju.bash (+5/-0)
release.config (+10/-2)
sign_branch.py (+6/-60)
tests/test_sign_branch.py (+3/-68)
tests/test_update_streams.py (+277/-0)
tests/test_utility.py (+81/-0)
update_streams.py (+138/-0)
utility.py (+58/-0)
To merge this branch: bzr merge lp:~abentley/juju-core/update-streams
Reviewer: Curtis Hovey (community)
Review type: code
Status: Approve
Review via email: mp+286931@code.launchpad.net

Commit message

Add update-streams operation.

Description of the change

This branch provides an update-streams operation.

It downloads the metadata and agents to a temp directory, verifies the agents against the sha256 hashes in the metadata, moves the agents into place, and then moves the metadata into place.

If no agents need to be downloaded, e.g. when moving proposed -> released, the agent version number can be omitted.
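
For illustration, here is a minimal sketch of the flow that main() drives, using parse_args, Updater, and temp_dir from this branch. The config path, S3 root, and destination below are placeholders rather than production values, and the check_log deduplication guard is omitted:

    from update_streams import Updater, parse_args
    from utility import temp_dir

    # Placeholder arguments; release-juju.bash supplies the real values
    # from release.config.
    args = parse_args([
        'juju-qa.s3cfg',             # s3cmd config file
        's3://example-bucket/root',  # S3 root to download from
        '/srv/juju-dist/tools',      # destination tree
        '2.0-alpha2',                # agent version; omit when none needed
        '--timestamp', 'Wed, 17 Feb 2016 20:44:59 +0000',
        '--poke', '0',
    ])
    with temp_dir() as download_dir:
        # Sync the stream metadata, download and sha256-verify each agent,
        # then move agents and finally metadata into place.
        Updater.from_args(args, download_dir).s3_download()

release-juju.bash wires this up as the "update-streams" case, passing the values from release.config.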

The log-related functionality has been moved from sign_branch to utility.

Some lint was fixed in download_agents, although I don't expect we'll ever run it again.

279. By Aaron Bentley

Fix lint.

Curtis Hovey (sinzui) wrote:

Thank you. This branch is good to merge.

The name of "check_log" doesn't seem correct anymore, but I cannot think of a better name. We are actually asking can_do_operation or try_operation.

We need to think about how to handle retractions in the future. With this script in production, the retracted metadata will be copied and Juju will see agents disappear, but the agents will still be on disk. We probably want another operation, or an extension to this one, to remove orphaned agents.
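
A minimal sketch of what such a cleanup could look like; find_orphan_agents is hypothetical, and nothing in this branch implements it:

    import os

    def find_orphan_agents(dest, listed_paths):
        """Return agent files under dest that the metadata no longer lists.

        listed_paths would hold every 'agent/...' path still referenced by
        the published index2.json (e.g. from sstream-query with no version
        filter).
        """
        orphans = []
        for dirpath, _, filenames in os.walk(os.path.join(dest, 'agent')):
            for name in filenames:
                full_path = os.path.join(dirpath, name)
                if os.path.relpath(full_path, dest) not in listed_paths:
                    orphans.append(full_path)
        return orphans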

review: Approve (code)

Preview Diff

=== modified file 'download_agents.py'
--- download_agents.py 2016-02-18 17:37:30 +0000
+++ download_agents.py 2016-02-23 17:54:06 +0000
@@ -1,16 +1,18 @@
 #!/usr/bin/env python
 from argparse import ArgumentParser
+import errno
 from hashlib import sha256
 import os
 from shutil import rmtree
 import subprocess
 from tempfile import mkdtemp

-from sign_branch import (
+from utility import (
     acquire_log_dir,
     check_log,
     )

+
 def main():
     cd_release_juju = os.path.dirname(__file__)
     downloads = os.path.join(cd_release_juju, 'downloads-new-paths')

=== modified file 'release-juju.bash'
--- release-juju.bash 2016-02-18 17:53:20 +0000
+++ release-juju.bash 2016-02-23 17:54:06 +0000
@@ -16,6 +16,11 @@
         $scripts/download_agents.py $TOOLS_BASE/juju-release-tools \
             $STREAMS_OFFICIAL_DEST
         ;;
+    "update-streams" )
+        $scripts/update_streams.py $TOOLS_BASE/cloud-city/juju-qa.s3cfg \
+            $S3_ROOT $STREAMS_OFFICIAL_DEST/juju/tools $STREAMS_VERSION \
+            --timestamp "$STREAMS_TIMESTAMP" --poke $POKE
+        ;;
     "none" )
         exit 0
         ;;

=== modified file 'release.config'
--- release.config 2016-02-20 14:50:21 +0000
+++ release.config 2016-02-23 17:54:06 +0000
@@ -1,5 +1,5 @@
-# One of: "release", "sign-branch", "download-agents", "none"
-OPERATION="release"
+# One of: "release", "sign-branch", "download-agents", "update-streams", "none"
+OPERATION="none"
 POKE='0'
 SIGNING_KEY="6A157DB3"

@@ -14,3 +14,11 @@
 UNSIGNED_BRANCH="lp:~juju-qa/+junk/cpc-unsigned"
 REVISION_ID="curtis@hovey.name-20160220035019-mwt82mwv0vvivnpl"
 SIGNED_BRANCH="lp:~canonical-juju-qa/+junk/cpc-signed"
+
+# streams update configuration
+# Preferably the value from index2.json, though this is not currently
+# enforced.
+STREAMS_TIMESTAMP="Wed, 17 Feb 2016 20:44:59 +0000"
+# Use "" when no agents need be downloaded, e.g. proposed -> released
+STREAMS_VERSION="2.0-alpha2"
+S3_ROOT="s3://temp-streams/new-scc"

=== modified file 'sign_branch.py'
--- sign_branch.py 2016-02-18 17:37:30 +0000
+++ sign_branch.py 2016-02-23 17:54:06 +0000
@@ -1,15 +1,15 @@
 #!/usr/bin/env python
 from argparse import ArgumentParser
-import datetime
-import errno
-import logging
 import os
-import shutil
 import subprocess
 import sys

 from sign_metadata import sign_metadata
-from utility import temp_dir
+from utility import (
+    acquire_log_dir,
+    check_log,
+    temp_dir,
+    )


 SIGN_BRANCH_LOG = 'sign-branch.log'

@@ -104,66 +104,12 @@
     return parser.parse_args(argv)


-def get_log_subdir(root_dir):
-    return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools')
-
-
-def acquire_log_dir():
-    """Return the path of the log dir, creating if need be."""
-    tools_base = os.environ.get('TOOLS_BASE')
-    if tools_base is None:
-        tools_base = os.getcwd()
-    log_dir = get_log_subdir(tools_base)
-    if not os.path.exists(log_dir):
-        os.makedirs(log_dir)
-    return log_dir
-
-
-def check_log(log_dir, parameters, log_basename=SIGN_BRANCH_LOG):
-    """Check for a previous entry with the same parameters in the log.
-
-    If one exists, return False. Otherwise, log the parameters.
-
-    This is deliberately done before attempting the operation, to avoid
-    endless retries if the operation fails.
-    """
-    log_filename = os.path.join(log_dir, log_basename)
-    log_entry = ' '.join(parameters + [''])
-    try:
-        log_branch = open(log_filename)
-    except IOError as e:
-        if e.errno != errno.ENOENT:
-            raise
-    else:
-        with log_branch:
-            for line in log_branch:
-                if line.startswith(log_entry):
-                    return False
-    with open(log_filename, 'a') as log_branch:
-        now = datetime.datetime.utcnow().replace(microsecond=0)
-        strdate = now.isoformat(' ')
-        log_branch.write('{}{}\n'.format(log_entry, strdate))
-    official_dest = os.environ.get('STREAMS_OFFICIAL_DEST')
-    if official_dest is None:
-        logging.warning('STREAMS_OFFICIAL_DEST is not defined.')
-    else:
-        parent = get_log_subdir(official_dest)
-        log_dest = os.path.join(parent, SIGN_BRANCH_LOG)
-        try:
-            os.makedirs(parent)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-        shutil.copy2(log_filename, log_dest)
-    return True
-
-
 def main():
     args = parse_args()
     if args.check_log is not None:
         parameters = [args.revision_id, args.unsigned, args.signed,
                       args.signing_key, str(args.check_log)]
-        if not check_log(acquire_log_dir(), parameters):
+        if not check_log(acquire_log_dir(), parameters, SIGN_BRANCH_LOG):
             sys.exit(0)
     with temp_dir() as temp_branch:
         sb = SignBranch(args.unsigned, args.revision_id, args.signed,

=== modified file 'tests/test_sign_branch.py'
--- tests/test_sign_branch.py 2016-02-05 15:23:07 +0000
+++ tests/test_sign_branch.py 2016-02-23 17:54:06 +0000
@@ -10,20 +10,18 @@
     )

 from sign_branch import (
-    acquire_log_dir,
-    check_log,
-    get_log_subdir,
     parse_args,
     RunBzr,
     SignBranch,
-    SIGN_BRANCH_LOG,
     )
 from tests.test_sign_metadata import (
     fake_gpg,
     gpg_header,
     gpg_footer,
     )
-from utility import temp_dir
+from utility import (
+    temp_dir,
+    )


 class TestBzr(TestCase):

@@ -39,69 +37,6 @@
                          stderr=None, stdout=None)


-class TestAcquireLogDir(TestCase):
-
-    def test_cwd(self):
-        with temp_dir() as new_cwd:
-            old_cwd = os.getcwd()
-            os.chdir(new_cwd)
-            try:
-                log_dir = acquire_log_dir()
-                self.assertTrue(os.path.isdir(log_dir))
-            finally:
-                os.chdir(old_cwd)
-        expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools')
-        self.assertEqual(expected, log_dir)
-
-    def test_tools_base(self):
-        with temp_dir() as tools_base:
-            os.chdir(tools_base)
-            with patch.dict(os.environ, {'TOOLS_BASE': tools_base}):
-                log_dir = acquire_log_dir()
-            self.assertTrue(os.path.isdir(log_dir))
-        expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools')
-        self.assertEqual(expected, log_dir)
-
-
-class TestCheckLog(TestCase):
-
-    def test_write_log(self):
-        with temp_dir() as log_dir:
-            with patch('logging.warning'):
-                check_log(log_dir, ['a', 'b', 'c', 'd'])
-                check_log(log_dir, ['z', 'y', 'x', 'w'])
-            with open(os.path.join(log_dir, 'sign-branch.log')) as log_file:
-                log_lines = log_file.readlines()
-        self.assertEqual(2, len(log_lines))
-        self.assertRegexpMatches(log_lines[0], '^a b c d ')
-        self.assertRegexpMatches(log_lines[1], '^z y x w ')
-
-    def test_false_for_repeat(self):
-        with temp_dir() as log_dir:
-            with patch('logging.warning'):
-                self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd']))
-                self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd']))
-                self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd']))
-                self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd']))
-
-    def test_copies_to_official_dest(self):
-        with temp_dir() as root:
-            tools = os.path.join(root, 'tools')
-            os.mkdir(tools)
-            dest = os.path.join(root, 'dest')
-            with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}):
-                check_log(tools, ['a', 'b', 'c', 'd'])
-                check_log(tools, ['z', 'b', 'c', 'd'])
-            tools_filename = os.path.join(tools, SIGN_BRANCH_LOG)
-            with open(tools_filename) as tools_file:
-                tools_content = tools_file.read()
-            dest_filename = os.path.join(get_log_subdir(dest),
-                                         SIGN_BRANCH_LOG)
-            with open(dest_filename) as dest_file:
-                dest_content = dest_file.read()
-            self.assertEqual(tools_content, dest_content)
-
-
 class TestParseArgs(TestCase):

     def test_minimum(self):

=== added file 'tests/test_update_streams.py'
--- tests/test_update_streams.py 1970-01-01 00:00:00 +0000
+++ tests/test_update_streams.py 2016-02-23 17:54:06 +0000
@@ -0,0 +1,277 @@
+from argparse import Namespace
+import os
+from textwrap import dedent
+from unittest import TestCase
+
+from mock import (
+    call,
+    patch,
+    )
+
+from update_streams import (
+    ensure_dirs,
+    parse_args,
+    Updater,
+    )
+from utility import temp_dir
+
+
+__metaclass__ = type
+
+
+class TestParseArgs(TestCase):
+
+    def test_default(self):
+        parsed = parse_args(['foo', 'bar', 'baz'])
+        expected = Namespace(
+            config_file='foo', s3_root='bar', dest='baz', version=None,
+            poke='0', timestamp=None)
+        self.assertEqual(expected, parsed)
+
+    def test_version(self):
+        parsed = parse_args(['foo', 'bar', 'baz', 'qux'])
+        self.assertEqual('qux', parsed.version)
+
+    def test_poke(self):
+        parsed = parse_args(['foo', 'bar', 'baz', '--poke', '123'])
+        self.assertEqual('123', parsed.poke)
+
+    def test_timestamp(self):
+        parsed = parse_args(['foo', 'bar', 'baz', '--timestamp',
+                             'Wed, 17 Feb 2016 20:44:59 +0000'])
+        self.assertEqual('Wed, 17 Feb 2016 20:44:59 +0000', parsed.timestamp)
+
+
+def mock_get(command, args):
+    url, dest = args
+    ensure_dirs(os.path.dirname(dest))
+    with open(dest, 'w') as f:
+        f.write(os.path.basename(dest))
+
+
+def path_content(path):
+    with open(os.path.join(*path)) as f:
+        return f.read()
+
+
+class TestUpdater(TestCase):
+
+    def test_from_args(self):
+        updater = Updater.from_args(Namespace(
+            config_file='foo', s3_root='bar', dest='baz', version='qux',
+            poke='123'), 'temp_dir1')
+        self.assertEqual(updater.config_file, 'foo')
+        self.assertEqual(updater.s3_root, 'bar')
+        self.assertEqual(updater.dest, 'baz')
+        self.assertEqual(updater.version, 'qux')
+        self.assertEqual(updater.temp_dir, 'temp_dir1')
+
+    def make_updater(self, download_dir='temp_dir1', dest_dir='dest1'):
+        return Updater('config1', 's3_root1', download_dir, dest_dir,
+                       'version1')
+
+    def test_s3cmd(self):
+        updater = self.make_updater()
+        with patch('subprocess.check_call') as cc_mock:
+            updater.s3cmd('sync', ['foo', 'bar'])
+        cc_mock.assert_called_once_with([
+            's3cmd', 'sync', '--config', 'config1', 'foo', 'bar'])
+
+    def test_get_path_hashes(self):
+        updater = self.make_updater()
+        with patch('subprocess.check_output', autospec=True) as cc_mock:
+            result = updater.get_path_hashes()
+        cc_mock.assert_called_once_with([
+            'sstream-query', 'temp_dir1/streams/v1/index2.json',
+            'version=version1', '--output-format=%(path)s %(sha256)s'])
+        self.assertIs(cc_mock.return_value, result)
+
+    def test_get_path_hashes_none_version(self):
+        updater = self.make_updater()
+        updater.version = None
+        with patch('subprocess.check_output', autospec=True) as cc_mock:
+            result = updater.get_path_hashes()
+        self.assertEqual(0, cc_mock.call_count)
+        self.assertEqual('', result)
+
+    def test_iter_path_hash(self):
+        updater = self.make_updater()
+        output = dedent("""\
+            foo asdf
+            bar sdf
+            baz fasd
+            bar sdf
+            """)
+        with patch.object(updater, 'get_path_hashes',
+                          return_value=output, autospec=True) as gph_mock:
+            result = updater.get_path_hash_dict()
+        gph_mock.assert_called_once_with()
+        self.assertEqual({
+            'bar': 'sdf',
+            'baz': 'fasd',
+            'foo': 'asdf',
+            }, result)
+
+    def test_iter_path_conflicting_hash(self):
+        updater = self.make_updater()
+        output = dedent("""\
+            foo asdf
+            bar sdf
+            baz fasd
+            bar sde
+            """)
+        with patch.object(updater, 'get_path_hashes',
+                          return_value=output, autospec=True) as gph_mock:
+            with self.assertRaisesRegexp(ValueError,
+                                         'Conflicting hashes for "bar"'):
+                updater.get_path_hash_dict()
+        gph_mock.assert_called_once_with()
+
+    def test_download_and_verify(self):
+        with temp_dir() as download_dir:
+            updater = self.make_updater(download_dir=download_dir)
+            with patch.object(updater, 's3cmd', side_effect=mock_get,
+                              autospec=True) as s3cmd_mock:
+                updater.download_and_verify(
+                    'foo', '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f'
+                    '98a5e886266e7ae')
+            s3cmd_mock.assert_called_once_with(
+                'get', ['s3_root1/foo', os.path.join(download_dir, 'foo')])
+
+    def test_download_and_verify_fail(self):
+        with temp_dir() as download_dir:
+            updater = self.make_updater(download_dir=download_dir)
+            with patch.object(updater, 's3cmd', side_effect=mock_get,
+                              autospec=True):
+                with self.assertRaisesRegexp(Exception, 'Hashes differ.'):
+                    updater.download_and_verify(
+                        'foo', 'fc26b46b68ffc68ff99b453c1d30413413422d706483b'
+                        'fa0f98a5e886266e7ae')
+
+    def test_move_into_place(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            download_streams = os.path.join(download_dir, 'streams', 'v1')
+            os.makedirs(download_streams)
+            index_path = os.path.join(download_streams, 'index2.mason')
+            with open(index_path, 'w') as f:
+                f.write('Masonry is fun!')
+            download_agents = os.path.join(download_dir, 'agent/2.5')
+            os.makedirs(download_agents)
+            agent_basename = '1.25-series-arch.tbd'
+            agent_path = os.path.join(download_agents, agent_basename)
+            with open(agent_path, 'w') as f:
+                f.write('Agency is essential!')
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            updater.move_into_place('agent')
+            dest_agent = os.path.join(dest_dir, 'agent', '2.5',
+                                      agent_basename)
+            with open(dest_agent) as f:
+                self.assertEqual('Agency is essential!', f.read())
+            dest_index = os.path.join(dest_dir, 'streams', 'v1',
+                                      'index2.mason')
+            self.assertFalse(os.path.exists(dest_index))
+            updater.move_into_place('streams/v1')
+            with open(dest_index) as f:
+                self.assertEqual('Masonry is fun!', f.read())
+
+    def test_move_into_place_replace_existing_file(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            download_streams = os.path.join(download_dir, 'streams', 'v1')
+            os.makedirs(download_streams)
+            index_path = os.path.join(download_streams, 'index2.mason')
+            with open(index_path, 'w') as f:
+                f.write('Masonry is fun!')
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            dest_index = os.path.join(dest_dir, 'streams', 'v1',
+                                      'index2.mason')
+            os.makedirs(os.path.dirname(dest_index))
+            with open(dest_index, 'w') as f:
+                f.write('old contents')
+            updater.move_into_place('streams/v1')
+            with open(dest_index) as f:
+                self.assertEqual('Masonry is fun!', f.read())
+
+    def test_s3_download(self):
+        path_hashes = dedent("""\
+            agent/foo/bar asdf
+            agent/foo/bar2 asdf
+            agent/baz/qux sdf
+            """)
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            os.mkdir(download_dir)
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+
+            def s3cmd_effect(command, args):
+                parent = os.path.join(updater.temp_streams, 'v1')
+                ensure_dirs(parent)
+                with open(os.path.join(parent, 'index.mason'), 'w') as f:
+                    f.write('index!')
+
+            def dv_effect(path, agent_hash):
+                mock_get('get', [None, os.path.join(download_dir, path)])
+
+            s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect)
+            dl = patch.object(updater, 'download_and_verify',
+                              side_effect=dv_effect, autospec=True)
+            gph = patch.object(updater, 'get_path_hashes',
+                               return_value=path_hashes)
+            with s3cmd as s3cmd_mock, dl as dv_mock, gph as gph_mock:
+                updater.s3_download()
+            s3cmd_mock.assert_called_once_with(
+                'sync', ['s3_root1/streams/', updater.temp_streams])
+            gph_mock.assert_called_once_with()
+            self.assertEqual([
+                call('agent/baz/qux', 'sdf'),
+                call('agent/foo/bar', 'asdf'),
+                call('agent/foo/bar2', 'asdf'),
+                ], dv_mock.mock_calls)
+            self.assertEqual(
+                'qux', path_content([updater.dest, 'agent', 'baz', 'qux']))
+            self.assertEqual(
+                'bar', path_content([updater.dest, 'agent', 'foo', 'bar']))
+            self.assertEqual(
+                'bar2', path_content([updater.dest, 'agent', 'foo', 'bar2']))
+            self.assertEqual(
+                'index!',
+                path_content([updater.dest, 'streams', 'v1', 'index.mason']))
+
+    def test_s3_download_version_none(self):
+        with temp_dir() as root:
+            download_dir = os.path.join(root, 'download')
+            os.mkdir(download_dir)
+            dest_dir = os.path.join(root, 'dest')
+            updater = self.make_updater(download_dir=download_dir,
+                                        dest_dir=dest_dir)
+            updater.version = None
+
+            def s3cmd_effect(command, args):
+                parent = os.path.join(updater.temp_streams, 'v1')
+                ensure_dirs(parent)
+                with open(os.path.join(parent, 'index.mason'), 'w') as f:
+                    f.write('index!')
+
+            def dv_effect(path, agent_hash):
+                mock_get('get', [None, os.path.join(download_dir, path)])
+
+            s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect)
+            dl = patch.object(updater, 'download_and_verify',
+                              side_effect=dv_effect, autospec=True)
+            with s3cmd as s3cmd_mock, dl as dv_mock:
+                updater.s3_download()
+            s3cmd_mock.assert_called_once_with(
+                'sync', ['s3_root1/streams/', updater.temp_streams])
+            self.assertEqual([], dv_mock.mock_calls)
+            self.assertIs(False,
+                          os.path.exists(os.path.join(updater.dest, 'agent')))
+            self.assertEqual(
+                'index!',
+                path_content([updater.dest, 'streams', 'v1', 'index.mason']))
=== added file 'tests/test_utility.py'
--- tests/test_utility.py 1970-01-01 00:00:00 +0000
+++ tests/test_utility.py 2016-02-23 17:54:06 +0000
@@ -0,0 +1,81 @@
+import os
+from unittest import TestCase
+
+from mock import patch
+
+from utility import (
+    acquire_log_dir,
+    check_log,
+    get_log_subdir,
+    temp_dir,
+    )
+
+
+EXAMPLE_LOG = 'example.log'
+
+
+class TestAcquireLogDir(TestCase):
+
+    def test_cwd(self):
+        with temp_dir() as new_cwd:
+            old_cwd = os.getcwd()
+            os.chdir(new_cwd)
+            try:
+                log_dir = acquire_log_dir()
+                self.assertTrue(os.path.isdir(log_dir))
+            finally:
+                os.chdir(old_cwd)
+        expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools')
+        self.assertEqual(expected, log_dir)
+
+    def test_tools_base(self):
+        with temp_dir() as tools_base:
+            os.chdir(tools_base)
+            with patch.dict(os.environ, {'TOOLS_BASE': tools_base}):
+                log_dir = acquire_log_dir()
+            self.assertTrue(os.path.isdir(log_dir))
+        expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools')
+        self.assertEqual(expected, log_dir)
+
+
+class TestCheckLog(TestCase):
+
+    def test_write_log(self):
+        with temp_dir() as log_dir:
+            with patch('logging.warning'):
+                check_log(log_dir, ['a', 'b', 'c', 'd'], EXAMPLE_LOG)
+                check_log(log_dir, ['z', 'y', 'x', 'w'], EXAMPLE_LOG)
+            with open(os.path.join(log_dir, EXAMPLE_LOG)) as log_file:
+                log_lines = log_file.readlines()
+        self.assertEqual(2, len(log_lines))
+        self.assertRegexpMatches(log_lines[0], '^a b c d ')
+        self.assertRegexpMatches(log_lines[1], '^z y x w ')
+
+    def test_false_for_repeat(self):
+        with temp_dir() as log_dir:
+            with patch('logging.warning'):
+                self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd'],
+                                              EXAMPLE_LOG))
+                self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd'],
+                                              EXAMPLE_LOG))
+                self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd'],
+                                               EXAMPLE_LOG))
+                self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd'],
+                                               EXAMPLE_LOG))
+
+    def test_copies_to_official_dest(self):
+        with temp_dir() as root:
+            tools = os.path.join(root, 'tools')
+            os.mkdir(tools)
+            dest = os.path.join(root, 'dest')
+            with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}):
+                check_log(tools, ['a', 'b', 'c', 'd'], EXAMPLE_LOG)
+                check_log(tools, ['z', 'b', 'c', 'd'], EXAMPLE_LOG)
+            tools_filename = os.path.join(tools, EXAMPLE_LOG)
+            with open(tools_filename) as tools_file:
+                tools_content = tools_file.read()
+            dest_filename = os.path.join(get_log_subdir(dest),
+                                         EXAMPLE_LOG)
+            with open(dest_filename) as dest_file:
+                dest_content = dest_file.read()
+            self.assertEqual(tools_content, dest_content)
=== added file 'update_streams.py'
--- update_streams.py 1970-01-01 00:00:00 +0000
+++ update_streams.py 2016-02-23 17:54:06 +0000
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+from argparse import ArgumentParser
+import errno
+from hashlib import sha256
+import os
+from shutil import (
+    move,
+    )
+import subprocess
+
+from utility import (
+    acquire_log_dir,
+    check_log,
+    temp_dir,
+    )
+
+
+__metaclass__ = type
+
+
+UPDATE_STREAMS_LOG = 'update-streams.log'
+
+
+def parse_args(argv=None):
+    parser = ArgumentParser()
+    parser.add_argument('config_file')
+    parser.add_argument('s3_root')
+    parser.add_argument('dest')
+    parser.add_argument('version', nargs='?')
+    parser.add_argument('--timestamp')
+    parser.add_argument('--poke', default='0')
+    return parser.parse_args(argv)
+
+
+def ensure_dirs(path):
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+
+
+class Updater:
+
+    def __init__(self, config_file, s3_root, temp_dir, dest, version):
+        self.config_file = config_file
+        self.s3_root = s3_root
+        self.temp_dir = temp_dir
+        self.dest = dest
+        self.version = version
+
+    @classmethod
+    def from_args(cls, args, temp_dir):
+        return cls(args.config_file, args.s3_root, temp_dir, args.dest,
+                   args.version)
+
+    def s3cmd(self, action, args):
+        full_args = ['s3cmd', action, '--config', self.config_file] + args
+        subprocess.check_call(full_args)
+
+    def get_path_hashes(self):
+        if self.version is None:
+            return ''
+        ver_filter = 'version={}'.format(self.version)
+        out_format = '--output-format=%(path)s %(sha256)s'
+        index2 = os.path.join(self.temp_streams, 'v1', 'index2.json')
+        return subprocess.check_output(
+            ['sstream-query', index2, ver_filter, out_format])
+
+    def get_path_hash_dict(self):
+        output = self.get_path_hashes()
+        path_hashes = {}
+        for line in output.splitlines():
+            path, path_hash = line.rsplit(' ', 1)
+            path_hashes.setdefault(path, path_hash)
+            if path_hashes[path] != path_hash:
+                raise ValueError('Conflicting hashes for "{}"'.format(path))
+        return path_hashes
+
+    def s3_url(self, path):
+        return '{}/{}'.format(self.s3_root, path)
+
+    @property
+    def temp_streams(self):
+        return os.path.join(self.temp_dir, 'streams')
+
+    def download_and_verify(self, path, agent_hash):
+        temp_path = os.path.join(self.temp_dir, path)
+        self.s3cmd('get', [self.s3_url(path), temp_path])
+        digest = calculate_sha256(temp_path)
+        if digest != agent_hash:
+            raise Exception(
+                'Hashes differ. Expected: {}'
+                ' Actual: {}'.format(agent_hash, digest))
+
+    def move_into_place(self, parent):
+        dest_path = os.path.join(self.dest, parent)
+        temp_path = os.path.join(self.temp_dir, parent)
+        ensure_dirs(dest_path)
+        for subfile in os.listdir(temp_path):
+            move(os.path.join(temp_path, subfile),
+                 os.path.join(dest_path, subfile))
+
+    def s3_download(self):
+        os.mkdir(self.temp_streams)
+        self.s3cmd('sync', [self.s3_url('streams/'), self.temp_streams])
+        path_hashes = self.get_path_hash_dict()
+        for path, agent_hash in sorted(path_hashes.items()):
+            self.download_and_verify(path, agent_hash)
+        if path_hashes != {}:
+            self.move_into_place('agent')
+        self.move_into_place('streams/v1')
+
+
+def calculate_sha256(path):
+    hasher = sha256()
+    with open(path) as f:
+        while True:
+            result = f.read(1000000)
+            hasher.update(result)
+            if result == '':
+                break
+    return hasher.hexdigest()
+
+
+def main():
+    args = parse_args()
+    if args.timestamp is not None:
+        parameters = [args.timestamp, args.poke]
+        if not check_log(acquire_log_dir(), parameters, UPDATE_STREAMS_LOG):
+            return
+    with temp_dir() as download_dir:
+        updater = Updater.from_args(args, download_dir)
+        updater.s3_download()
+
+
+if __name__ == '__main__':
+    main()
=== modified file 'utility.py'
--- utility.py 2016-02-02 17:48:35 +0000
+++ utility.py 2016-02-23 17:54:06 +0000
@@ -1,4 +1,8 @@
 from contextlib import contextmanager
+import datetime
+import errno
+import logging
+import os
 import shutil
 from tempfile import mkdtemp

@@ -10,3 +14,57 @@
         yield dirname
     finally:
         shutil.rmtree(dirname)
+
+
+def acquire_log_dir():
+    """Return the path of the log dir, creating if need be."""
+    tools_base = os.environ.get('TOOLS_BASE')
+    if tools_base is None:
+        tools_base = os.getcwd()
+    log_dir = get_log_subdir(tools_base)
+    if not os.path.exists(log_dir):
+        os.makedirs(log_dir)
+    return log_dir
+
+
+def get_log_subdir(root_dir):
+    return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools')
+
+
+def check_log(log_dir, parameters, log_basename):
+    """Check for a previous entry with the same parameters in the log.
+
+    If one exists, return False. Otherwise, log the parameters.
+
+    This should be done before attempting the operation, to avoid endless
+    retries if the operation fails.
+    """
+    log_filename = os.path.join(log_dir, log_basename)
+    log_entry = ' '.join(parameters + [''])
+    try:
+        log_branch = open(log_filename)
+    except IOError as e:
+        if e.errno != errno.ENOENT:
+            raise
+    else:
+        with log_branch:
+            for line in log_branch:
+                if line.startswith(log_entry):
+                    return False
+    with open(log_filename, 'a') as log_branch:
+        now = datetime.datetime.utcnow().replace(microsecond=0)
+        strdate = now.isoformat(' ')
+        log_branch.write('{}{}\n'.format(log_entry, strdate))
+    official_dest = os.environ.get('STREAMS_OFFICIAL_DEST')
+    if official_dest is None:
+        logging.warning('STREAMS_OFFICIAL_DEST is not defined.')
+    else:
+        parent = get_log_subdir(official_dest)
+        log_dest = os.path.join(parent, log_basename)
+        try:
+            os.makedirs(parent)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        shutil.copy2(log_filename, log_dest)
+    return True
