Merge lp:~abentley/juju-core/update-streams into lp:~juju-qa/juju-core/cd-release-juju
Proposed by Aaron Bentley
Status: Merged
Merged at revision: 274
Proposed branch: lp:~abentley/juju-core/update-streams
Merge into: lp:~juju-qa/juju-core/cd-release-juju
Diff against target: 836 lines (+581/-131), 9 files modified:
- download_agents.py (+3/-1)
- release-juju.bash (+5/-0)
- release.config (+10/-2)
- sign_branch.py (+6/-60)
- tests/test_sign_branch.py (+3/-68)
- tests/test_update_streams.py (+277/-0)
- tests/test_utility.py (+81/-0)
- update_streams.py (+138/-0)
- utility.py (+58/-0)
To merge this branch: bzr merge lp:~abentley/juju-core/update-streams
Related bugs: none

Reviewer | Review Type | Date Requested | Status
---|---|---|---
Curtis Hovey (community) | code | | Approve

Review via email: mp+286931@code.launchpad.net
Commit message
Add update-streams operation.
Description of the change
This branch provides an update-streams operation.
It downloads metadata and agents to a temp directory, verifies the agents against the sha256 hashes in the metadata, moves the agents into place, then moves the metadata into place.
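Concretely, in terms of the Updater methods added below, the sequence is as in this condensed sketch of s3_download (not a drop-in; temp-dir creation is omitted):

    # Condensed from Updater.s3_download() in the diff below.
    def s3_download(self):
        # Sync the stream metadata into the temp directory.
        self.s3cmd('sync', [self.s3_url('streams/'), self.temp_streams])
        # Map each agent path to its sha256, read from index2.json.
        path_hashes = self.get_path_hash_dict()
        # Fetch every agent and compare its digest to the metadata's.
        for path, agent_hash in sorted(path_hashes.items()):
            self.download_and_verify(path, agent_hash)
        # Agents land first, metadata last, so published metadata never
        # references an agent that is not yet in place.
        if path_hashes:
            self.move_into_place('agent')
        self.move_into_place('streams/v1')

Moving the metadata last means consumers of the published streams never see a hash for an agent that has not landed yet.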
If no agents need to be downloaded, e.g. when moving proposed -> released, the agent version number can be omitted.
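For illustration, a release.config stanza for a proposed -> released publish, where STREAMS_VERSION is left empty, could look like this (values are the samples from release.config below; the timestamp should come from the stream's index2.json):

    OPERATION="update-streams"
    POKE='0'
    # Preferably the value from index2.json, though this is not enforced.
    STREAMS_TIMESTAMP="Wed, 17 Feb 2016 20:44:59 +0000"
    # Empty: no agents need be downloaded for proposed -> released.
    STREAMS_VERSION=""
    S3_ROOT="s3://temp-streams/new-scc"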
The log-related functionality has been moved from sign_branch.py to utility.py.
Some lint was fixed in download_agents.py, although I don't expect we'll ever run it again.
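For reference, the operation can also be exercised by hand. A sketch of the call, mirroring what release-juju.bash runs with the sample values from release.config (the destination path is illustrative):

    ./update_streams.py $TOOLS_BASE/cloud-city/juju-qa.s3cfg \
        s3://temp-streams/new-scc $STREAMS_OFFICIAL_DEST/juju/tools \
        2.0-alpha2 --timestamp "Wed, 17 Feb 2016 20:44:59 +0000" --poke 0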
- 279. By Aaron Bentley: Fix lint.
Preview Diff
1 | === modified file 'download_agents.py' |
2 | --- download_agents.py 2016-02-18 17:37:30 +0000 |
3 | +++ download_agents.py 2016-02-23 17:54:06 +0000 |
4 | @@ -1,16 +1,18 @@ |
5 | #!/usr/bin/env python |
6 | from argparse import ArgumentParser |
7 | +import errno |
8 | from hashlib import sha256 |
9 | import os |
10 | from shutil import rmtree |
11 | import subprocess |
12 | from tempfile import mkdtemp |
13 | |
14 | -from sign_branch import ( |
15 | +from utility import ( |
16 | acquire_log_dir, |
17 | check_log, |
18 | ) |
19 | |
20 | + |
21 | def main(): |
22 | cd_release_juju = os.path.dirname(__file__) |
23 | downloads = os.path.join(cd_release_juju, 'downloads-new-paths') |
24 | |
25 | === modified file 'release-juju.bash' |
26 | --- release-juju.bash 2016-02-18 17:53:20 +0000 |
27 | +++ release-juju.bash 2016-02-23 17:54:06 +0000 |
28 | @@ -16,6 +16,11 @@ |
29 | $scripts/download_agents.py $TOOLS_BASE/juju-release-tools \ |
30 | $STREAMS_OFFICIAL_DEST |
31 | ;; |
32 | + "update-streams" ) |
33 | + $scripts/update_streams.py $TOOLS_BASE/cloud-city/juju-qa.s3cfg \ |
34 | + $S3_ROOT $STREAMS_OFFICIAL_DEST/juju/tools $STREAMS_VERSION \ |
35 | + --timestamp "$STREAMS_TIMESTAMP" --poke $POKE |
36 | + ;; |
37 | "none" ) |
38 | exit 0 |
39 | ;; |
40 | |
41 | === modified file 'release.config' |
42 | --- release.config 2016-02-20 14:50:21 +0000 |
43 | +++ release.config 2016-02-23 17:54:06 +0000 |
44 | @@ -1,5 +1,5 @@ |
45 | -# One of: "release", "sign-branch", "download-agents", "none" |
46 | -OPERATION="release" |
47 | +# One of: "release", "sign-branch", "download-agents", "update-streams", "none" |
48 | +OPERATION="none" |
49 | POKE='0' |
50 | SIGNING_KEY="6A157DB3" |
51 | |
52 | @@ -14,3 +14,11 @@ |
53 | UNSIGNED_BRANCH="lp:~juju-qa/+junk/cpc-unsigned" |
54 | REVISION_ID="curtis@hovey.name-20160220035019-mwt82mwv0vvivnpl" |
55 | SIGNED_BRANCH="lp:~canonical-juju-qa/+junk/cpc-signed" |
56 | + |
57 | +# streams update configuration |
58 | +# Preferably the value from index2.json, though this is not currently |
59 | +# enforced. |
60 | +STREAMS_TIMESTAMP="Wed, 17 Feb 2016 20:44:59 +0000" |
61 | +# Use "" when no agents need be downloaded, e.g. proposed -> released |
62 | +STREAMS_VERSION="2.0-alpha2" |
63 | +S3_ROOT="s3://temp-streams/new-scc" |
64 | |
65 | === modified file 'sign_branch.py' |
66 | --- sign_branch.py 2016-02-18 17:37:30 +0000 |
67 | +++ sign_branch.py 2016-02-23 17:54:06 +0000 |
68 | @@ -1,15 +1,15 @@ |
69 | #!/usr/bin/env python |
70 | from argparse import ArgumentParser |
71 | -import datetime |
72 | -import errno |
73 | -import logging |
74 | import os |
75 | -import shutil |
76 | import subprocess |
77 | import sys |
78 | |
79 | from sign_metadata import sign_metadata |
80 | -from utility import temp_dir |
81 | +from utility import ( |
82 | + acquire_log_dir, |
83 | + check_log, |
84 | + temp_dir, |
85 | + ) |
86 | |
87 | |
88 | SIGN_BRANCH_LOG = 'sign-branch.log' |
89 | @@ -104,66 +104,12 @@ |
90 | return parser.parse_args(argv) |
91 | |
92 | |
93 | -def get_log_subdir(root_dir): |
94 | - return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools') |
95 | - |
96 | - |
97 | -def acquire_log_dir(): |
98 | - """Return the path of the log dir, creating if need be.""" |
99 | - tools_base = os.environ.get('TOOLS_BASE') |
100 | - if tools_base is None: |
101 | - tools_base = os.getcwd() |
102 | - log_dir = get_log_subdir(tools_base) |
103 | - if not os.path.exists(log_dir): |
104 | - os.makedirs(log_dir) |
105 | - return log_dir |
106 | - |
107 | - |
108 | -def check_log(log_dir, parameters, log_basename=SIGN_BRANCH_LOG): |
109 | - """Check for a previous entry with the same parameters in the log. |
110 | - |
111 | - If one exists, return False. Otherwise, log the parameters. |
112 | - |
113 | - This is deliberately done before attempting the operation, to avoid |
114 | - endless retries if the operation fails. |
115 | - """ |
116 | - log_filename = os.path.join(log_dir, log_basename) |
117 | - log_entry = ' '.join(parameters + ['']) |
118 | - try: |
119 | - log_branch = open(log_filename) |
120 | - except IOError as e: |
121 | - if e.errno != errno.ENOENT: |
122 | - raise |
123 | - else: |
124 | - with log_branch: |
125 | - for line in log_branch: |
126 | - if line.startswith(log_entry): |
127 | - return False |
128 | - with open(log_filename, 'a') as log_branch: |
129 | - now = datetime.datetime.utcnow().replace(microsecond=0) |
130 | - strdate = now.isoformat(' ') |
131 | - log_branch.write('{}{}\n'.format(log_entry, strdate)) |
132 | - official_dest = os.environ.get('STREAMS_OFFICIAL_DEST') |
133 | - if official_dest is None: |
134 | - logging.warning('STREAMS_OFFICIAL_DEST is not defined.') |
135 | - else: |
136 | - parent = get_log_subdir(official_dest) |
137 | - log_dest = os.path.join(parent, SIGN_BRANCH_LOG) |
138 | - try: |
139 | - os.makedirs(parent) |
140 | - except OSError as e: |
141 | - if e.errno != errno.EEXIST: |
142 | - raise |
143 | - shutil.copy2(log_filename, log_dest) |
144 | - return True |
145 | - |
146 | - |
147 | def main(): |
148 | args = parse_args() |
149 | if args.check_log is not None: |
150 | parameters = [args.revision_id, args.unsigned, args.signed, |
151 | args.signing_key, str(args.check_log)] |
152 | - if not check_log(acquire_log_dir(), parameters): |
153 | + if not check_log(acquire_log_dir(), parameters, SIGN_BRANCH_LOG): |
154 | sys.exit(0) |
155 | with temp_dir() as temp_branch: |
156 | sb = SignBranch(args.unsigned, args.revision_id, args.signed, |
157 | |
158 | === modified file 'tests/test_sign_branch.py' |
159 | --- tests/test_sign_branch.py 2016-02-05 15:23:07 +0000 |
160 | +++ tests/test_sign_branch.py 2016-02-23 17:54:06 +0000 |
161 | @@ -10,20 +10,18 @@ |
162 | ) |
163 | |
164 | from sign_branch import ( |
165 | - acquire_log_dir, |
166 | - check_log, |
167 | - get_log_subdir, |
168 | parse_args, |
169 | RunBzr, |
170 | SignBranch, |
171 | - SIGN_BRANCH_LOG, |
172 | ) |
173 | from tests.test_sign_metadata import ( |
174 | fake_gpg, |
175 | gpg_header, |
176 | gpg_footer, |
177 | ) |
178 | -from utility import temp_dir |
179 | +from utility import ( |
180 | + temp_dir, |
181 | + ) |
182 | |
183 | |
184 | class TestBzr(TestCase): |
185 | @@ -39,69 +37,6 @@ |
186 | stderr=None, stdout=None) |
187 | |
188 | |
189 | -class TestAcquireLogDir(TestCase): |
190 | - |
191 | - def test_cwd(self): |
192 | - with temp_dir() as new_cwd: |
193 | - old_cwd = os.getcwd() |
194 | - os.chdir(new_cwd) |
195 | - try: |
196 | - log_dir = acquire_log_dir() |
197 | - self.assertTrue(os.path.isdir(log_dir)) |
198 | - finally: |
199 | - os.chdir(old_cwd) |
200 | - expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools') |
201 | - self.assertEqual(expected, log_dir) |
202 | - |
203 | - def test_tools_base(self): |
204 | - with temp_dir() as tools_base: |
205 | - os.chdir(tools_base) |
206 | - with patch.dict(os.environ, {'TOOLS_BASE': tools_base}): |
207 | - log_dir = acquire_log_dir() |
208 | - self.assertTrue(os.path.isdir(log_dir)) |
209 | - expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools') |
210 | - self.assertEqual(expected, log_dir) |
211 | - |
212 | - |
213 | -class TestCheckLog(TestCase): |
214 | - |
215 | - def test_write_log(self): |
216 | - with temp_dir() as log_dir: |
217 | - with patch('logging.warning'): |
218 | - check_log(log_dir, ['a', 'b', 'c', 'd']) |
219 | - check_log(log_dir, ['z', 'y', 'x', 'w']) |
220 | - with open(os.path.join(log_dir, 'sign-branch.log')) as log_file: |
221 | - log_lines = log_file.readlines() |
222 | - self.assertEqual(2, len(log_lines)) |
223 | - self.assertRegexpMatches(log_lines[0], '^a b c d ') |
224 | - self.assertRegexpMatches(log_lines[1], '^z y x w ') |
225 | - |
226 | - def test_false_for_repeat(self): |
227 | - with temp_dir() as log_dir: |
228 | - with patch('logging.warning'): |
229 | - self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd'])) |
230 | - self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd'])) |
231 | - self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd'])) |
232 | - self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd'])) |
233 | - |
234 | - def test_copies_to_official_dest(self): |
235 | - with temp_dir() as root: |
236 | - tools = os.path.join(root, 'tools') |
237 | - os.mkdir(tools) |
238 | - dest = os.path.join(root, 'dest') |
239 | - with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}): |
240 | - check_log(tools, ['a', 'b', 'c', 'd']) |
241 | - check_log(tools, ['z', 'b', 'c', 'd']) |
242 | - tools_filename = os.path.join(tools, SIGN_BRANCH_LOG) |
243 | - with open(tools_filename) as tools_file: |
244 | - tools_content = tools_file.read() |
245 | - dest_filename = os.path.join(get_log_subdir(dest), |
246 | - SIGN_BRANCH_LOG) |
247 | - with open(dest_filename) as dest_file: |
248 | - dest_content = dest_file.read() |
249 | - self.assertEqual(tools_content, dest_content) |
250 | - |
251 | - |
252 | class TestParseArgs(TestCase): |
253 | |
254 | def test_minimum(self): |
255 | |
256 | === added file 'tests/test_update_streams.py' |
257 | --- tests/test_update_streams.py 1970-01-01 00:00:00 +0000 |
258 | +++ tests/test_update_streams.py 2016-02-23 17:54:06 +0000 |
259 | @@ -0,0 +1,277 @@ |
260 | +from argparse import Namespace |
261 | +import os |
262 | +from textwrap import dedent |
263 | +from unittest import TestCase |
264 | + |
265 | +from mock import ( |
266 | + call, |
267 | + patch, |
268 | + ) |
269 | + |
270 | +from update_streams import ( |
271 | + ensure_dirs, |
272 | + parse_args, |
273 | + Updater, |
274 | + ) |
275 | +from utility import temp_dir |
276 | + |
277 | + |
278 | +__metaclass__ = type |
279 | + |
280 | + |
281 | +class TestParseArgs(TestCase): |
282 | + |
283 | + def test_default(self): |
284 | + parsed = parse_args(['foo', 'bar', 'baz']) |
285 | + expected = Namespace( |
286 | + config_file='foo', s3_root='bar', dest='baz', version=None, |
287 | + poke='0', timestamp=None) |
288 | + self.assertEqual(expected, parsed) |
289 | + |
290 | + def test_version(self): |
291 | + parsed = parse_args(['foo', 'bar', 'baz', 'qux']) |
292 | + self.assertEqual('qux', parsed.version) |
293 | + |
294 | + def test_poke(self): |
295 | + parsed = parse_args(['foo', 'bar', 'baz', '--poke', '123']) |
296 | + self.assertEqual('123', parsed.poke) |
297 | + |
298 | + def test_timestamp(self): |
299 | + parsed = parse_args(['foo', 'bar', 'baz', '--timestamp', |
300 | + 'Wed, 17 Feb 2016 20:44:59 +0000']) |
301 | + self.assertEqual('Wed, 17 Feb 2016 20:44:59 +0000', parsed.timestamp) |
302 | + |
303 | + |
304 | +def mock_get(command, args): |
305 | + url, dest = args |
306 | + ensure_dirs(os.path.dirname(dest)) |
307 | + with open(dest, 'w') as f: |
308 | + f.write(os.path.basename(dest)) |
309 | + |
310 | + |
311 | +def path_content(path): |
312 | + with open(os.path.join(*path)) as f: |
313 | + return f.read() |
314 | + |
315 | + |
316 | +class TestUpdater(TestCase): |
317 | + |
318 | + def test_from_args(self): |
319 | + updater = Updater.from_args(Namespace( |
320 | + config_file='foo', s3_root='bar', dest='baz', version='qux', |
321 | + poke='123'), 'temp_dir1') |
322 | + self.assertEqual(updater.config_file, 'foo') |
323 | + self.assertEqual(updater.s3_root, 'bar') |
324 | + self.assertEqual(updater.dest, 'baz') |
325 | + self.assertEqual(updater.version, 'qux') |
326 | + self.assertEqual(updater.temp_dir, 'temp_dir1') |
327 | + |
328 | + def make_updater(self, download_dir='temp_dir1', dest_dir='dest1'): |
329 | + return Updater('config1', 's3_root1', download_dir, dest_dir, |
330 | + 'version1') |
331 | + |
332 | + def test_s3cmd(self): |
333 | + updater = self.make_updater() |
334 | + with patch('subprocess.check_call') as cc_mock: |
335 | + updater.s3cmd('sync', ['foo', 'bar']) |
336 | + cc_mock.assert_called_once_with([ |
337 | + 's3cmd', 'sync', '--config', 'config1', 'foo', 'bar']) |
338 | + |
339 | + def test_get_path_hashes(self): |
340 | + updater = self.make_updater() |
341 | + with patch('subprocess.check_output', autospec=True) as cc_mock: |
342 | + result = updater.get_path_hashes() |
343 | + cc_mock.assert_called_once_with([ |
344 | + 'sstream-query', 'temp_dir1/streams/v1/index2.json', |
345 | + 'version=version1', '--output-format=%(path)s %(sha256)s']) |
346 | + self.assertIs(cc_mock.return_value, result) |
347 | + |
348 | + def test_get_path_hashes_none_version(self): |
349 | + updater = self.make_updater() |
350 | + updater.version = None |
351 | + with patch('subprocess.check_output', autospec=True) as cc_mock: |
352 | + result = updater.get_path_hashes() |
353 | + self.assertEqual(0, cc_mock.call_count) |
354 | + self.assertEqual('', result) |
355 | + |
356 | + def test_iter_path_hash(self): |
357 | + updater = self.make_updater() |
358 | + output = dedent("""\ |
359 | + foo asdf |
360 | + bar sdf |
361 | + baz fasd |
362 | + bar sdf |
363 | + """) |
364 | + with patch.object(updater, 'get_path_hashes', |
365 | + return_value=output, autospec=True) as gph_mock: |
366 | + result = updater.get_path_hash_dict() |
367 | + gph_mock.assert_called_once_with() |
368 | + self.assertEqual({ |
369 | + 'bar': 'sdf', |
370 | + 'baz': 'fasd', |
371 | + 'foo': 'asdf', |
372 | + }, result) |
373 | + |
374 | + def test_iter_path_conflicting_hash(self): |
375 | + updater = self.make_updater() |
376 | + output = dedent("""\ |
377 | + foo asdf |
378 | + bar sdf |
379 | + baz fasd |
380 | + bar sde |
381 | + """) |
382 | + with patch.object(updater, 'get_path_hashes', |
383 | + return_value=output, autospec=True) as gph_mock: |
384 | + with self.assertRaisesRegexp(ValueError, |
385 | + 'Conflicting hashes for "bar"'): |
386 | + updater.get_path_hash_dict() |
387 | + gph_mock.assert_called_once_with() |
388 | + |
389 | + def test_download_and_verify(self): |
390 | + with temp_dir() as download_dir: |
391 | + updater = self.make_updater(download_dir=download_dir) |
392 | + with patch.object(updater, 's3cmd', side_effect=mock_get, |
393 | + autospec=True) as s3cmd_mock: |
394 | + updater.download_and_verify( |
395 | + 'foo', '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f' |
396 | + '98a5e886266e7ae') |
397 | + s3cmd_mock.assert_called_once_with( |
398 | + 'get', ['s3_root1/foo', os.path.join(download_dir, 'foo')]) |
399 | + |
400 | + def test_download_and_verify_fail(self): |
401 | + with temp_dir() as download_dir: |
402 | + updater = self.make_updater(download_dir=download_dir) |
403 | + with patch.object(updater, 's3cmd', side_effect=mock_get, |
404 | + autospec=True): |
405 | + with self.assertRaisesRegexp(Exception, 'Hashes differ.'): |
406 | + updater.download_and_verify( |
407 | + 'foo', 'fc26b46b68ffc68ff99b453c1d30413413422d706483b' |
408 | + 'fa0f98a5e886266e7ae') |
409 | + |
410 | + def test_move_into_place(self): |
411 | + with temp_dir() as root: |
412 | + download_dir = os.path.join(root, 'download') |
413 | + download_streams = os.path.join(download_dir, 'streams', 'v1') |
414 | + os.makedirs(download_streams) |
415 | + index_path = os.path.join(download_streams, 'index2.mason') |
416 | + with open(index_path, 'w') as f: |
417 | + f.write('Masonry is fun!') |
418 | + download_agents = os.path.join(download_dir, 'agent/2.5') |
419 | + os.makedirs(download_agents) |
420 | + agent_basename = '1.25-series-arch.tbd' |
421 | + agent_path = os.path.join(download_agents, agent_basename) |
422 | + with open(agent_path, 'w') as f: |
423 | + f.write('Agency is essential!') |
424 | + dest_dir = os.path.join(root, 'dest') |
425 | + updater = self.make_updater(download_dir=download_dir, |
426 | + dest_dir=dest_dir) |
427 | + updater.move_into_place('agent') |
428 | + dest_agent = os.path.join(dest_dir, 'agent', '2.5', |
429 | + agent_basename) |
430 | + with open(dest_agent) as f: |
431 | + self.assertEqual('Agency is essential!', f.read()) |
432 | + dest_index = os.path.join(dest_dir, 'streams', 'v1', |
433 | + 'index2.mason') |
434 | + self.assertFalse(os.path.exists(dest_index)) |
435 | + updater.move_into_place('streams/v1') |
436 | + with open(dest_index) as f: |
437 | + self.assertEqual('Masonry is fun!', f.read()) |
438 | + |
439 | + def test_move_into_place_replace_existing_file(self): |
440 | + with temp_dir() as root: |
441 | + download_dir = os.path.join(root, 'download') |
442 | + download_streams = os.path.join(download_dir, 'streams', 'v1') |
443 | + os.makedirs(download_streams) |
444 | + index_path = os.path.join(download_streams, 'index2.mason') |
445 | + with open(index_path, 'w') as f: |
446 | + f.write('Masonry is fun!') |
447 | + dest_dir = os.path.join(root, 'dest') |
448 | + updater = self.make_updater(download_dir=download_dir, |
449 | + dest_dir=dest_dir) |
450 | + dest_index = os.path.join(dest_dir, 'streams', 'v1', |
451 | + 'index2.mason') |
452 | + os.makedirs(os.path.dirname(dest_index)) |
453 | + with open(dest_index, 'w') as f: |
454 | + f.write('old contents') |
455 | + updater.move_into_place('streams/v1') |
456 | + with open(dest_index) as f: |
457 | + self.assertEqual('Masonry is fun!', f.read()) |
458 | + |
459 | + def test_s3_download(self): |
460 | + path_hashes = dedent("""\ |
461 | + agent/foo/bar asdf |
462 | + agent/foo/bar2 asdf |
463 | + agent/baz/qux sdf |
464 | + """) |
465 | + with temp_dir() as root: |
466 | + download_dir = os.path.join(root, 'download') |
467 | + os.mkdir(download_dir) |
468 | + dest_dir = os.path.join(root, 'dest') |
469 | + updater = self.make_updater(download_dir=download_dir, |
470 | + dest_dir=dest_dir) |
471 | + |
472 | + def s3cmd_effect(command, args): |
473 | + parent = os.path.join(updater.temp_streams, 'v1') |
474 | + ensure_dirs(parent) |
475 | + with open(os.path.join(parent, 'index.mason'), 'w') as f: |
476 | + f.write('index!') |
477 | + |
478 | + def dv_effect(path, agent_hash): |
479 | + mock_get('get', [None, os.path.join(download_dir, path)]) |
480 | + |
481 | + s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect) |
482 | + dl = patch.object(updater, 'download_and_verify', |
483 | + side_effect=dv_effect, autospec=True) |
484 | + gph = patch.object(updater, 'get_path_hashes', |
485 | + return_value=path_hashes) |
486 | + with s3cmd as s3cmd_mock, dl as dv_mock, gph as gph_mock: |
487 | + updater.s3_download() |
488 | + s3cmd_mock.assert_called_once_with( |
489 | + 'sync', ['s3_root1/streams/', updater.temp_streams]) |
490 | + gph_mock.assert_called_once_with() |
491 | + self.assertEqual([ |
492 | + call('agent/baz/qux', 'sdf'), |
493 | + call('agent/foo/bar', 'asdf'), |
494 | + call('agent/foo/bar2', 'asdf'), |
495 | + ], dv_mock.mock_calls) |
496 | + self.assertEqual( |
497 | + 'qux', path_content([updater.dest, 'agent', 'baz', 'qux'])) |
498 | + self.assertEqual( |
499 | + 'bar', path_content([updater.dest, 'agent', 'foo', 'bar'])) |
500 | + self.assertEqual( |
501 | + 'bar2', path_content([updater.dest, 'agent', 'foo', 'bar2'])) |
502 | + self.assertEqual( |
503 | + 'index!', |
504 | + path_content([updater.dest, 'streams', 'v1', 'index.mason'])) |
505 | + |
506 | + def test_s3_download_version_none(self): |
507 | + with temp_dir() as root: |
508 | + download_dir = os.path.join(root, 'download') |
509 | + os.mkdir(download_dir) |
510 | + dest_dir = os.path.join(root, 'dest') |
511 | + updater = self.make_updater(download_dir=download_dir, |
512 | + dest_dir=dest_dir) |
513 | + updater.version = None |
514 | + |
515 | + def s3cmd_effect(command, args): |
516 | + parent = os.path.join(updater.temp_streams, 'v1') |
517 | + ensure_dirs(parent) |
518 | + with open(os.path.join(parent, 'index.mason'), 'w') as f: |
519 | + f.write('index!') |
520 | + |
521 | + def dv_effect(path, agent_hash): |
522 | + mock_get('get', [None, os.path.join(download_dir, path)]) |
523 | + |
524 | + s3cmd = patch.object(updater, 's3cmd', side_effect=s3cmd_effect) |
525 | + dl = patch.object(updater, 'download_and_verify', |
526 | + side_effect=dv_effect, autospec=True) |
527 | + with s3cmd as s3cmd_mock, dl as dv_mock: |
528 | + updater.s3_download() |
529 | + s3cmd_mock.assert_called_once_with( |
530 | + 'sync', ['s3_root1/streams/', updater.temp_streams]) |
531 | + self.assertEqual([], dv_mock.mock_calls) |
532 | + self.assertIs(False, |
533 | + os.path.exists(os.path.join(updater.dest, 'agent'))) |
534 | + self.assertEqual( |
535 | + 'index!', |
536 | + path_content([updater.dest, 'streams', 'v1', 'index.mason'])) |
537 | |
538 | === added file 'tests/test_utility.py' |
539 | --- tests/test_utility.py 1970-01-01 00:00:00 +0000 |
540 | +++ tests/test_utility.py 2016-02-23 17:54:06 +0000 |
541 | @@ -0,0 +1,81 @@ |
542 | +import os |
543 | +from unittest import TestCase |
544 | + |
545 | +from mock import patch |
546 | + |
547 | +from utility import ( |
548 | + acquire_log_dir, |
549 | + check_log, |
550 | + get_log_subdir, |
551 | + temp_dir, |
552 | + ) |
553 | + |
554 | + |
555 | +EXAMPLE_LOG = 'example.log' |
556 | + |
557 | + |
558 | +class TestAcquireLogDir(TestCase): |
559 | + |
560 | + def test_cwd(self): |
561 | + with temp_dir() as new_cwd: |
562 | + old_cwd = os.getcwd() |
563 | + os.chdir(new_cwd) |
564 | + try: |
565 | + log_dir = acquire_log_dir() |
566 | + self.assertTrue(os.path.isdir(log_dir)) |
567 | + finally: |
568 | + os.chdir(old_cwd) |
569 | + expected = os.path.join(new_cwd, 'new-tools', 'juju-dist', 'tools') |
570 | + self.assertEqual(expected, log_dir) |
571 | + |
572 | + def test_tools_base(self): |
573 | + with temp_dir() as tools_base: |
574 | + os.chdir(tools_base) |
575 | + with patch.dict(os.environ, {'TOOLS_BASE': tools_base}): |
576 | + log_dir = acquire_log_dir() |
577 | + self.assertTrue(os.path.isdir(log_dir)) |
578 | + expected = os.path.join(tools_base, 'new-tools', 'juju-dist', 'tools') |
579 | + self.assertEqual(expected, log_dir) |
580 | + |
581 | + |
582 | +class TestCheckLog(TestCase): |
583 | + |
584 | + def test_write_log(self): |
585 | + with temp_dir() as log_dir: |
586 | + with patch('logging.warning'): |
587 | + check_log(log_dir, ['a', 'b', 'c', 'd'], EXAMPLE_LOG) |
588 | + check_log(log_dir, ['z', 'y', 'x', 'w'], EXAMPLE_LOG) |
589 | + with open(os.path.join(log_dir, EXAMPLE_LOG)) as log_file: |
590 | + log_lines = log_file.readlines() |
591 | + self.assertEqual(2, len(log_lines)) |
592 | + self.assertRegexpMatches(log_lines[0], '^a b c d ') |
593 | + self.assertRegexpMatches(log_lines[1], '^z y x w ') |
594 | + |
595 | + def test_false_for_repeat(self): |
596 | + with temp_dir() as log_dir: |
597 | + with patch('logging.warning'): |
598 | + self.assertIs(True, check_log(log_dir, ['a', 'b', 'c', 'd'], |
599 | + EXAMPLE_LOG)) |
600 | + self.assertIs(True, check_log(log_dir, ['z', 'b', 'c', 'd'], |
601 | + EXAMPLE_LOG)) |
602 | + self.assertIs(False, check_log(log_dir, ['a', 'b', 'c', 'd'], |
603 | + EXAMPLE_LOG)) |
604 | + self.assertIs(False, check_log(log_dir, ['z', 'b', 'c', 'd'], |
605 | + EXAMPLE_LOG)) |
606 | + |
607 | + def test_copies_to_official_dest(self): |
608 | + with temp_dir() as root: |
609 | + tools = os.path.join(root, 'tools') |
610 | + os.mkdir(tools) |
611 | + dest = os.path.join(root, 'dest') |
612 | + with patch.dict(os.environ, {'STREAMS_OFFICIAL_DEST': dest}): |
613 | + check_log(tools, ['a', 'b', 'c', 'd'], EXAMPLE_LOG) |
614 | + check_log(tools, ['z', 'b', 'c', 'd'], EXAMPLE_LOG) |
615 | + tools_filename = os.path.join(tools, EXAMPLE_LOG) |
616 | + with open(tools_filename) as tools_file: |
617 | + tools_content = tools_file.read() |
618 | + dest_filename = os.path.join(get_log_subdir(dest), |
619 | + EXAMPLE_LOG) |
620 | + with open(dest_filename) as dest_file: |
621 | + dest_content = dest_file.read() |
622 | + self.assertEqual(tools_content, dest_content) |
623 | |
624 | === added file 'update_streams.py' |
625 | --- update_streams.py 1970-01-01 00:00:00 +0000 |
626 | +++ update_streams.py 2016-02-23 17:54:06 +0000 |
627 | @@ -0,0 +1,138 @@ |
628 | +#!/usr/bin/env python |
629 | +from argparse import ArgumentParser |
630 | +import errno |
631 | +from hashlib import sha256 |
632 | +import os |
633 | +from shutil import ( |
634 | + move, |
635 | + ) |
636 | +import subprocess |
637 | + |
638 | +from utility import ( |
639 | + acquire_log_dir, |
640 | + check_log, |
641 | + temp_dir, |
642 | + ) |
643 | + |
644 | + |
645 | +__metaclass__ = type |
646 | + |
647 | + |
648 | +UPDATE_STREAMS_LOG = 'update-streams.log' |
649 | + |
650 | + |
651 | +def parse_args(argv=None): |
652 | + parser = ArgumentParser() |
653 | + parser.add_argument('config_file') |
654 | + parser.add_argument('s3_root') |
655 | + parser.add_argument('dest') |
656 | + parser.add_argument('version', nargs='?') |
657 | + parser.add_argument('--timestamp') |
658 | + parser.add_argument('--poke', default='0') |
659 | + return parser.parse_args(argv) |
660 | + |
661 | + |
662 | +def ensure_dirs(path): |
663 | + try: |
664 | + os.makedirs(path) |
665 | + except OSError as e: |
666 | + if e.errno != errno.EEXIST: |
667 | + raise |
668 | + |
669 | + |
670 | +class Updater: |
671 | + |
672 | + def __init__(self, config_file, s3_root, temp_dir, dest, version): |
673 | + self.config_file = config_file |
674 | + self.s3_root = s3_root |
675 | + self.temp_dir = temp_dir |
676 | + self.dest = dest |
677 | + self.version = version |
678 | + |
679 | + @classmethod |
680 | + def from_args(cls, args, temp_dir): |
681 | + return cls(args.config_file, args.s3_root, temp_dir, args.dest, |
682 | + args.version) |
683 | + |
684 | + def s3cmd(self, action, args): |
685 | + full_args = ['s3cmd', action, '--config', self.config_file] + args |
686 | + subprocess.check_call(full_args) |
687 | + |
688 | + def get_path_hashes(self): |
689 | + if self.version is None: |
690 | + return '' |
691 | + ver_filter = 'version={}'.format(self.version) |
692 | + out_format = '--output-format=%(path)s %(sha256)s' |
693 | + index2 = os.path.join(self.temp_streams, 'v1', 'index2.json') |
694 | + return subprocess.check_output( |
695 | + ['sstream-query', index2, ver_filter, out_format]) |
696 | + |
697 | + def get_path_hash_dict(self): |
698 | + output = self.get_path_hashes() |
699 | + path_hashes = {} |
700 | + for line in output.splitlines(): |
701 | + path, path_hash = line.rsplit(' ', 1) |
702 | + path_hashes.setdefault(path, path_hash) |
703 | + if path_hashes[path] != path_hash: |
704 | + raise ValueError('Conflicting hashes for "{}"'.format(path)) |
705 | + return path_hashes |
706 | + |
707 | + def s3_url(self, path): |
708 | + return '{}/{}'.format(self.s3_root, path) |
709 | + |
710 | + @property |
711 | + def temp_streams(self): |
712 | + return os.path.join(self.temp_dir, 'streams') |
713 | + |
714 | + def download_and_verify(self, path, agent_hash): |
715 | + temp_path = os.path.join(self.temp_dir, path) |
716 | + self.s3cmd('get', [self.s3_url(path), temp_path]) |
717 | + digest = calculate_sha256(temp_path) |
718 | + if digest != agent_hash: |
719 | + raise Exception( |
720 | + 'Hashes differ. Expected: {}' |
721 | + ' Actual: {}'.format(agent_hash, digest)) |
722 | + |
723 | + def move_into_place(self, parent): |
724 | + dest_path = os.path.join(self.dest, parent) |
725 | + temp_path = os.path.join(self.temp_dir, parent) |
726 | + ensure_dirs(dest_path) |
727 | + for subfile in os.listdir(temp_path): |
728 | + move(os.path.join(temp_path, subfile), |
729 | + os.path.join(dest_path, subfile)) |
730 | + |
731 | + def s3_download(self): |
732 | + os.mkdir(self.temp_streams) |
733 | + self.s3cmd('sync', [self.s3_url('streams/'), self.temp_streams]) |
734 | + path_hashes = self.get_path_hash_dict() |
735 | + for path, agent_hash in sorted(path_hashes.items()): |
736 | + self.download_and_verify(path, agent_hash) |
737 | + if path_hashes != {}: |
738 | + self.move_into_place('agent') |
739 | + self.move_into_place('streams/v1') |
740 | + |
741 | + |
742 | +def calculate_sha256(path): |
743 | + hasher = sha256() |
744 | + with open(path) as f: |
745 | + while True: |
746 | + result = f.read(1000000) |
747 | + hasher.update(result) |
748 | + if result == '': |
749 | + break |
750 | + return hasher.hexdigest() |
751 | + |
752 | + |
753 | +def main(): |
754 | + args = parse_args() |
755 | + if args.timestamp is not None: |
756 | + parameters = [args.timestamp, args.poke] |
757 | + if not check_log(acquire_log_dir(), parameters, UPDATE_STREAMS_LOG): |
758 | + return |
759 | + with temp_dir() as download_dir: |
760 | + updater = Updater.from_args(args, download_dir) |
761 | + updater.s3_download() |
762 | + |
763 | + |
764 | +if __name__ == '__main__': |
765 | + main() |
766 | |
767 | === modified file 'utility.py' |
768 | --- utility.py 2016-02-02 17:48:35 +0000 |
769 | +++ utility.py 2016-02-23 17:54:06 +0000 |
770 | @@ -1,4 +1,8 @@ |
771 | from contextlib import contextmanager |
772 | +import datetime |
773 | +import errno |
774 | +import logging |
775 | +import os |
776 | import shutil |
777 | from tempfile import mkdtemp |
778 | |
779 | @@ -10,3 +14,57 @@ |
780 | yield dirname |
781 | finally: |
782 | shutil.rmtree(dirname) |
783 | + |
784 | + |
785 | +def acquire_log_dir(): |
786 | + """Return the path of the log dir, creating if need be.""" |
787 | + tools_base = os.environ.get('TOOLS_BASE') |
788 | + if tools_base is None: |
789 | + tools_base = os.getcwd() |
790 | + log_dir = get_log_subdir(tools_base) |
791 | + if not os.path.exists(log_dir): |
792 | + os.makedirs(log_dir) |
793 | + return log_dir |
794 | + |
795 | + |
796 | +def get_log_subdir(root_dir): |
797 | + return os.path.join(root_dir, 'new-tools', 'juju-dist', 'tools') |
798 | + |
799 | + |
800 | +def check_log(log_dir, parameters, log_basename): |
801 | + """Check for a previous entry with the same parameters in the log. |
802 | + |
803 | + If one exists, return False. Otherwise, log the parameters. |
804 | + |
805 | + This should be done before attempting the operation, to avoid endless |
806 | + retries if the operation fails. |
807 | + """ |
808 | + log_filename = os.path.join(log_dir, log_basename) |
809 | + log_entry = ' '.join(parameters + ['']) |
810 | + try: |
811 | + log_branch = open(log_filename) |
812 | + except IOError as e: |
813 | + if e.errno != errno.ENOENT: |
814 | + raise |
815 | + else: |
816 | + with log_branch: |
817 | + for line in log_branch: |
818 | + if line.startswith(log_entry): |
819 | + return False |
820 | + with open(log_filename, 'a') as log_branch: |
821 | + now = datetime.datetime.utcnow().replace(microsecond=0) |
822 | + strdate = now.isoformat(' ') |
823 | + log_branch.write('{}{}\n'.format(log_entry, strdate)) |
824 | + official_dest = os.environ.get('STREAMS_OFFICIAL_DEST') |
825 | + if official_dest is None: |
826 | + logging.warning('STREAMS_OFFICIAL_DEST is not defined.') |
827 | + else: |
828 | + parent = get_log_subdir(official_dest) |
829 | + log_dest = os.path.join(parent, log_basename) |
830 | + try: |
831 | + os.makedirs(parent) |
832 | + except OSError as e: |
833 | + if e.errno != errno.EEXIST: |
834 | + raise |
835 | + shutil.copy2(log_filename, log_dest) |
836 | + return True |
Thank you. This branch is good to merge.
The name "check_log" doesn't seem correct anymore, but I cannot think of a better one. What we are really asking is can_do_operation or try_operation.
We need to think about how to handle retractions in the future. With this script in production, the metadata will be copied and Juju will see agents disappear from the streams, but the agent files will still be on disk. We probably want another operation, or an extension to this one, to remove orphaned agents.
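As a hedged sketch of what that follow-up could look like (the function names and dry_run default are hypothetical; it assumes the <dest>/streams/v1 and <dest>/agent layout this branch uses, and the same sstream-query %(path)s output format update_streams.py already relies on):

    import os
    import subprocess


    def referenced_paths(dest):
        """Return the agent paths the current metadata still references."""
        index2 = os.path.join(dest, 'streams', 'v1', 'index2.json')
        output = subprocess.check_output(
            ['sstream-query', index2, '--output-format=%(path)s'])
        return set(output.splitlines())


    def remove_orphans(dest, dry_run=True):
        """Delete on-disk agents that no metadata entry points at."""
        referenced = referenced_paths(dest)
        agent_root = os.path.join(dest, 'agent')
        for dirpath, dirnames, filenames in os.walk(agent_root):
            for filename in filenames:
                full_path = os.path.join(dirpath, filename)
                rel_path = os.path.relpath(full_path, dest)
                if rel_path not in referenced:
                    print('orphan: {}'.format(rel_path))
                    if not dry_run:
                        os.unlink(full_path)

Running with dry_run=True first would let us review the candidate deletions before anything is removed.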