Merge ~cjwatson/snapstore-client:rename-to-snapstore-client into snapstore-client:master
- Git
- lp:~cjwatson/snapstore-client
- rename-to-snapstore-client
- Merge into master
Proposed by
Colin Watson
Status: | Merged |
---|---|
Merged at revision: | ca6902cbbd628bbd2168b1a5bdc7dceed8ecfb32 |
Proposed branch: | ~cjwatson/snapstore-client:rename-to-snapstore-client |
Merge into: | snapstore-client:master |
Diff against target: |
2166 lines (+1008/-285) 14 files modified
Makefile (+1/-1) dev/null (+0/-278) serve-snaps.py (+3/-3) snapstore (+3/-3) snapstore_client/__init__.py (+18/-0) snapstore_client/cli.py (+66/-0) snapstore_client/config.py (+94/-0) snapstore_client/logic/__init__.py (+2/-0) snapstore_client/logic/dump.py (+193/-0) snapstore_client/logic/upload.py (+200/-0) snapstore_client/tests/__init__.py (+14/-0) snapstore_client/tests/test_cli.py (+64/-0) snapstore_client/tests/test_config.py (+72/-0) snapstore_client/webservices.py (+278/-0) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Ricardo Kirkner (community) | Approve | ||
Review via email: mp+326421@code.launchpad.net |
Commit message
Rename to snapstore-client
siab-client is a pretty opaque name if you aren't familiar with all our
acronyms. snapstore-client is much clearer.
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/Makefile b/Makefile |
2 | index ce817d7..173d2db 100644 |
3 | --- a/Makefile |
4 | +++ b/Makefile |
5 | @@ -1,4 +1,4 @@ |
6 | -SERVICE_PACKAGE = siab_client |
7 | +SERVICE_PACKAGE = snapstore_client |
8 | ENV = $(CURDIR)/env |
9 | PYTHON3 = $(ENV)/bin/python3 |
10 | PIP = $(PYTHON3) -m pip |
11 | diff --git a/serve-snaps.py b/serve-snaps.py |
12 | index 7ab8f5d..09aeed6 100755 |
13 | --- a/serve-snaps.py |
14 | +++ b/serve-snaps.py |
15 | @@ -14,9 +14,9 @@ from urllib.parse import ( |
16 | ) |
17 | from urllib.request import urlopen |
18 | |
19 | -from siab_client import get_storage_root |
20 | -from siab_client.cli import configure_logging |
21 | -from siab_client.config import read_config |
22 | +from snapstore_client import get_storage_root |
23 | +from snapstore_client.cli import configure_logging |
24 | +from snapstore_client.config import read_config |
25 | |
26 | |
27 | logger = logging.getLogger(__name__) |
28 | diff --git a/siab_client/__init__.py b/siab_client/__init__.py |
29 | deleted file mode 100644 |
30 | index e69df82..0000000 |
31 | --- a/siab_client/__init__.py |
32 | +++ /dev/null |
33 | @@ -1,18 +0,0 @@ |
34 | -# -*- coding: utf-8 -*- |
35 | -# Copyright 2017 Canonical Ltd. |
36 | - |
37 | -import os |
38 | - |
39 | - |
40 | -SUPPORTED_ARCHITECTURES = ( |
41 | - 'amd64', 'arm64', 'armhf', 'i386', 'powerpc', 'ppc64el', 's390x') |
42 | - |
43 | - |
44 | -def get_storage_root(): |
45 | - return os.path.abspath( |
46 | - os.path.join( |
47 | - os.path.dirname(__file__), |
48 | - '..', |
49 | - 'snap_storage' |
50 | - ) |
51 | - ) |
52 | diff --git a/siab_client/cli.py b/siab_client/cli.py |
53 | deleted file mode 100644 |
54 | index 0ee6e66..0000000 |
55 | --- a/siab_client/cli.py |
56 | +++ /dev/null |
57 | @@ -1,66 +0,0 @@ |
58 | -# Copyright 2017 Canonical Ltd. |
59 | - |
60 | -"""Command-line interface niceties for this service.""" |
61 | - |
62 | -import logging |
63 | -import os |
64 | -import sys |
65 | - |
66 | - |
67 | -class _StdoutFilter(logging.Filter): |
68 | - |
69 | - def filter(self, record): |
70 | - return record.levelno <= logging.WARNING |
71 | - |
72 | - |
73 | -class _StderrFilter(logging.Filter): |
74 | - |
75 | - def filter(self, record): |
76 | - return record.levelno >= logging.ERROR |
77 | - |
78 | - |
79 | -def _is_dumb_terminal(): |
80 | - """Return True if on a dumb terminal.""" |
81 | - is_stdout_tty = os.isatty(sys.stdout.fileno()) |
82 | - is_term_dumb = os.environ.get('TERM', '') == 'dumb' |
83 | - return not is_stdout_tty or is_term_dumb |
84 | - |
85 | - |
86 | -class _ColouredFormatter(logging.Formatter): |
87 | - |
88 | - _reset = '\033[0m' |
89 | - _level_colours = { |
90 | - 'ERROR': '\033[0;31m', # Dark red |
91 | - } |
92 | - |
93 | - def format(self, record): |
94 | - colour = self._level_colours.get(record.levelname) |
95 | - log_message = super().format(record) |
96 | - if colour is not None: |
97 | - return colour + log_message + self._reset |
98 | - else: |
99 | - return log_message |
100 | - |
101 | - |
102 | -def configure_logging(logger_name=None, log_level=logging.INFO): |
103 | - stdout_handler = logging.StreamHandler(stream=sys.stdout) |
104 | - stdout_handler.addFilter(_StdoutFilter()) |
105 | - stderr_handler = logging.StreamHandler(stream=sys.stderr) |
106 | - stderr_handler.addFilter(_StderrFilter()) |
107 | - handlers = [stdout_handler, stderr_handler] |
108 | - if _is_dumb_terminal(): |
109 | - formatter = logging.Formatter(style='{') |
110 | - else: |
111 | - formatter = _ColouredFormatter(style='{') |
112 | - logger = logging.getLogger(logger_name) |
113 | - for handler in handlers: |
114 | - handler.setFormatter(formatter) |
115 | - logger.addHandler(handler) |
116 | - logger.setLevel(log_level) |
117 | - |
118 | - # The requests library is too noisy at INFO level. |
119 | - if log_level == logging.DEBUG: |
120 | - logging.getLogger('requests').setLevel(log_level) |
121 | - else: |
122 | - logging.getLogger('requests').setLevel( |
123 | - max(logging.WARNING, log_level)) |
124 | diff --git a/siab_client/config.py b/siab_client/config.py |
125 | deleted file mode 100644 |
126 | index eacf0b3..0000000 |
127 | --- a/siab_client/config.py |
128 | +++ /dev/null |
129 | @@ -1,94 +0,0 @@ |
130 | -# -*- coding: utf-8 -*- |
131 | -# Copyright 2017 Canonical Ltd. |
132 | - |
133 | -"""Configuration provider for this service.""" |
134 | - |
135 | -import configparser |
136 | -import os |
137 | - |
138 | - |
139 | -ROOT = os.path.abspath( |
140 | - os.path.join( |
141 | - os.path.dirname(__file__), |
142 | - '..' |
143 | - ) |
144 | -) |
145 | - |
146 | - |
147 | -DEFAULT_TEST_CONFIG = { |
148 | - 'services': { |
149 | - 'storage': 'http://localhost:8005/' |
150 | - } |
151 | -} |
152 | - |
153 | - |
154 | -def _production_read_config(): |
155 | - root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) |
156 | - config = configparser.ConfigParser(dict(ROOT=root)) |
157 | - default_path = os.path.join(root, 'service.conf') |
158 | - config.read(default_path) |
159 | - return {sec: dict(config.items(sec)) for sec in config.sections()} |
160 | - |
161 | - |
162 | -class ConfigProvider(object): |
163 | - |
164 | - """Provide a single entry point for reading config that can be altered. |
165 | - |
166 | - This class allows production code to call 'read_config' and get a copy of |
167 | - test configuration during test runs, but production config during normal |
168 | - operations. |
169 | - """ |
170 | - |
171 | - def __init__(self, default_test_config): |
172 | - self._test_override = [] |
173 | - self._default_test_config = default_test_config |
174 | - |
175 | - def __call__(self): |
176 | - if self._test_override: |
177 | - return self._test_override[-1] |
178 | - else: |
179 | - return _production_read_config() |
180 | - |
181 | - def merge_config(self, config, new_config): |
182 | - merged_config = config.copy() |
183 | - for key in new_config.keys(): |
184 | - if key in config.keys(): |
185 | - if isinstance(config[key], dict) and \ |
186 | - isinstance(new_config[key], dict): |
187 | - merged_config[key] = self.merge_config( |
188 | - config[key], new_config[key]) |
189 | - else: |
190 | - merged_config[key] = new_config[key] |
191 | - else: |
192 | - merged_config[key] = new_config[key] |
193 | - return merged_config |
194 | - |
195 | - def override_for_test(self, new_config): |
196 | - """Override configuration for a test. |
197 | - |
198 | - In a test, if you wanted to set a value, you can do this:: |
199 | - |
200 | - from snapdevicegw.config import read_config |
201 | - |
202 | - def test_something(self): |
203 | - new_config = { |
204 | - 'worker': { |
205 | - 'some_setting': 'some_value' |
206 | - } |
207 | - } |
208 | - self.addCleanup(read_config.override_for_test(new_config)) |
209 | - |
210 | - The configuration provided is merged on top of DEFAULT_TEST_CONFIG |
211 | - (see above), so there's no need to specify values that are in the |
212 | - default config. |
213 | - """ |
214 | - if self._test_override: |
215 | - source = self._test_override[-1] |
216 | - else: |
217 | - source = self._default_test_config.copy() |
218 | - config = self.merge_config(source, new_config) |
219 | - self._test_override.append(config) |
220 | - return self._test_override.pop |
221 | - |
222 | - |
223 | -read_config = ConfigProvider(DEFAULT_TEST_CONFIG) |
224 | diff --git a/siab_client/logic/__init__.py b/siab_client/logic/__init__.py |
225 | deleted file mode 100644 |
226 | index d115bb8..0000000 |
227 | --- a/siab_client/logic/__init__.py |
228 | +++ /dev/null |
229 | @@ -1,2 +0,0 @@ |
230 | -# -*- coding: utf-8 -*- |
231 | -# Copyright 2017 Canonical Ltd. |
232 | diff --git a/siab_client/logic/dump.py b/siab_client/logic/dump.py |
233 | deleted file mode 100644 |
234 | index 554ced5..0000000 |
235 | --- a/siab_client/logic/dump.py |
236 | +++ /dev/null |
237 | @@ -1,193 +0,0 @@ |
238 | -# Copyright 2017 Canonical Ltd. |
239 | - |
240 | -import base64 |
241 | -import codecs |
242 | -import csv |
243 | -import itertools |
244 | -import json |
245 | -import logging |
246 | -import os.path |
247 | - |
248 | -import yaml |
249 | - |
250 | -from siab_client import ( |
251 | - SUPPORTED_ARCHITECTURES, |
252 | - config, |
253 | -) |
254 | -from siab_client import webservices as ws |
255 | - |
256 | - |
257 | -logger = logging.getLogger(__name__) |
258 | - |
259 | - |
260 | -RISK_ORDER = ['stable', 'candidate', 'beta', 'edge'] |
261 | - |
262 | - |
263 | -def parse_channel(channel): |
264 | - segments = channel.split('/') |
265 | - if len(segments) == 1: |
266 | - return None, segments[0], None |
267 | - if len(segments) == 2: |
268 | - return segments[0], segments[1], None |
269 | - return segments |
270 | - |
271 | - |
272 | -def import_dump(args): |
273 | - logger.info('Importing from %s.', args.root) |
274 | - for snap_id in args.snap_id: |
275 | - snap_root = os.path.join(args.root, snap_id) |
276 | - _import_snap(snap_root) |
277 | - |
278 | - |
279 | -def _import_snap(dir): |
280 | - with open(os.path.join(dir, 'snap.json')) as f: |
281 | - snap_meta = json.loads(f.read()) |
282 | - logger.info(' %s -> %s', snap_meta['snap_id'], snap_meta['name']) |
283 | - |
284 | - # XXX: Series and authority hardcoded. |
285 | - error = ws.register_snap_name_and_blob( |
286 | - snap_id=snap_meta['snap_id'], |
287 | - snap_name=snap_meta['name'], |
288 | - snap_blob=snap_meta['_cpi_data'], |
289 | - private=snap_meta['private'], |
290 | - series='16', |
291 | - publisher_id=snap_meta['publisher_id'], |
292 | - authority='canonical', |
293 | - published=snap_meta['status'] == 'Published', |
294 | - stores=snap_meta['stores'], |
295 | - country_whitelist=snap_meta['country_whitelist'] or None, |
296 | - country_blacklist=snap_meta['country_blacklist'] or None, |
297 | - ) |
298 | - if error: |
299 | - return |
300 | - |
301 | - with open(os.path.join(dir, 'channelmap.json')) as f: |
302 | - channelmaps = json.loads(f.read()) |
303 | - |
304 | - # Collect metadata for the interesting revisions. We currently only |
305 | - # import those that are visible in channels now. |
306 | - rev_metas = {} |
307 | - for cm in channelmaps: |
308 | - path = os.path.join( |
309 | - dir, 'revisions', '{:d}.json'.format(cm['revision'])) |
310 | - if not os.path.exists(path): |
311 | - continue |
312 | - with open(path) as f: |
313 | - rev_meta = json.loads(f.read()) |
314 | - assert rev_meta['revision'] == cm['revision'] |
315 | - rev_metas[cm['revision']] = rev_meta |
316 | - |
317 | - # Collect the interesting revisions' SHA3-384s, translating them |
318 | - # from hex to the snappy-style URL-safe base64. |
319 | - sha3_384s = [ |
320 | - base64.urlsafe_b64encode( |
321 | - codecs.decode(rev_meta['binary_sha3_384'], 'hex')).decode('ascii') |
322 | - for rev_meta in rev_metas.values()] |
323 | - |
324 | - # Ensure we have all relevant assertions locally before exposing new |
325 | - # revisions to clients. |
326 | - _sync_snap_assertions( |
327 | - 'https://assertions.ubuntu.com/v1/', snap_meta['snap_id'], '16', |
328 | - sha3_384s) |
329 | - |
330 | - seen_archs = set() |
331 | - seen_tracks = set() |
332 | - seen_track_risk_archs = set() |
333 | - rev_created = [] |
334 | - for cm in channelmaps: |
335 | - rev_meta = rev_metas.get(cm['revision']) |
336 | - if rev_meta is None: |
337 | - continue |
338 | - if cm['revision'] not in rev_created: |
339 | - ws.create_revision( |
340 | - snap_meta['snap_id'], cm['revision'], |
341 | - rev_meta['architectures'], |
342 | - '/remotes/ubuntu/{}'.format(rev_meta['binary_path']), |
343 | - rev_meta['binary_filesize'], rev_meta['binary_sha512'], |
344 | - rev_meta['binary_sha3_384'], rev_meta['version'], |
345 | - rev_meta['confinement'], rev_meta['snap_yaml_raw']) |
346 | - rev_created.append(cm['revision']) |
347 | - track, risk, branch = parse_channel(cm['channel']) |
348 | - exploded_architectures = set(rev_meta['architectures']) |
349 | - seen_archs.update(exploded_architectures) |
350 | - seen_tracks.add(track) |
351 | - seen_track_risk_archs.update( |
352 | - (track, risk, arch) for arch in exploded_architectures) |
353 | - if 'all' in exploded_architectures: |
354 | - exploded_architectures.remove('all') |
355 | - exploded_architectures.update(SUPPORTED_ARCHITECTURES) |
356 | - ws.release_revision( |
357 | - snap_meta['snap_id'], snap_meta['name'], cm['series'], |
358 | - [track, risk, branch], exploded_architectures, cm['revision']) |
359 | - |
360 | - # Fill in tracking. |
361 | - all_track_risk_archs = set( |
362 | - itertools.product(seen_tracks, RISK_ORDER[1:], seen_archs)) |
363 | - for track, risk, arch in all_track_risk_archs - seen_track_risk_archs: |
364 | - ws.release_revision( |
365 | - snap_meta['snap_id'], snap_meta['name'], cm['series'], |
366 | - [track, risk, None], [arch], |
367 | - redirect_channel=[ |
368 | - track, RISK_ORDER[RISK_ORDER.index(risk) - 1], None], |
369 | - permanent_redirect=False) |
370 | - |
371 | - |
372 | -def import_snapsection_dump(args): |
373 | - """Import snapsection tsv dump. |
374 | - |
375 | - Assumes tsv takes form: |
376 | - snap_id\tfeatured\tscore\tsection_name |
377 | - """ |
378 | - logger.info('Importing snapsections from %s.', args.tsv_file) |
379 | - |
380 | - sections = {} |
381 | - with open(args.tsv_file, 'r') as tsvin: |
382 | - tsv = csv.reader(tsvin, delimiter='\t') |
383 | - for snap_id, featured, score, section_name in tsv: |
384 | - if section_name not in sections: |
385 | - sections[section_name] = [] |
386 | - sections[section_name].append({ |
387 | - 'snap_id': snap_id, |
388 | - 'featured': bool(featured), |
389 | - 'score': int(score), |
390 | - 'series': '16', |
391 | - }) |
392 | - payload = {'sections': []} |
393 | - for section in sections: |
394 | - payload['sections'].append({ |
395 | - 'section_name': section, |
396 | - 'snaps': sections[section] |
397 | - }) |
398 | - ws.create_snapsections(payload) |
399 | - |
400 | - |
401 | -def _parse_assertion(assertion): |
402 | - return yaml.safe_load(assertion.split(b'\n\n')[0]) |
403 | - |
404 | - |
405 | -def _sync_snap_assertions(remote, snap_id, series, sha3_384s): |
406 | - local = config.read_config()['services']['assertions'] |
407 | - |
408 | - # Collect the snap-declaration and snap-revisions. |
409 | - decl_raw = ws.get_assertion(remote, 'snap-declaration', (series, snap_id)) |
410 | - rev_raws = [ |
411 | - ws.get_assertion(remote, 'snap-revision', (sha3_384,)) |
412 | - for sha3_384 in sha3_384s |
413 | - if ws.get_assertion(local, 'snap-revision', (sha3_384,)) is None] |
414 | - |
415 | - # Collect dependencies (currently just accounts). |
416 | - accounts = set() |
417 | - accounts.add(_parse_assertion(decl_raw)['publisher-id']) |
418 | - accounts.update( |
419 | - _parse_assertion(rev_raw)['developer-id'] for rev_raw in rev_raws) |
420 | - |
421 | - # Import dependencies that don't exist. |
422 | - for account in accounts: |
423 | - if ws.get_assertion(local, 'account', (account,)) is not None: |
424 | - continue |
425 | - ws.save_assertion(ws.get_assertion(remote, 'account', (account,))) |
426 | - |
427 | - # Now save the main assertions. |
428 | - ws.save_assertion(decl_raw) |
429 | - for rev_raw in rev_raws: |
430 | - ws.save_assertion(rev_raw) |
431 | diff --git a/siab_client/logic/upload.py b/siab_client/logic/upload.py |
432 | deleted file mode 100644 |
433 | index 86264c0..0000000 |
434 | --- a/siab_client/logic/upload.py |
435 | +++ /dev/null |
436 | @@ -1,200 +0,0 @@ |
437 | -# Copyright 2017 Canonical Ltd. |
438 | - |
439 | -import hashlib |
440 | -import logging |
441 | -import os |
442 | -import random |
443 | -import shutil |
444 | -import string |
445 | -import subprocess |
446 | -import sys |
447 | -import tempfile |
448 | - |
449 | -import yaml |
450 | - |
451 | -from siab_client import ( |
452 | - SUPPORTED_ARCHITECTURES, |
453 | - get_storage_root, |
454 | -) |
455 | -from siab_client import webservices as ws |
456 | - |
457 | -if sys.version < '3.6': |
458 | - import sha3 # noqa |
459 | - |
460 | - |
461 | -logger = logging.getLogger(__name__) |
462 | - |
463 | - |
464 | -def upload_snap(args): |
465 | - snap_path = args.snap |
466 | - with tempfile.TemporaryDirectory() as workdir: |
467 | - snap_yaml = _parse_snap_yaml(workdir, snap_path) |
468 | - |
469 | - name = snap_yaml['name'] |
470 | - version = str(snap_yaml['version']) |
471 | - summary = snap_yaml['summary'] |
472 | - description = snap_yaml['description'] |
473 | - confinement = snap_yaml['confinement'] |
474 | - architectures = snap_yaml['architectures'] |
475 | - sha3_384 = _extract_sha3_384(snap_path) |
476 | - sha512 = _extract_sha512(snap_path) |
477 | - binary_filesize = os.path.getsize(snap_path) |
478 | - |
479 | - snap_yaml = _read_snap_yaml(workdir, snap_path) |
480 | - |
481 | - logger.info("Name: %s", name) |
482 | - logger.info("Version: %s", version) |
483 | - logger.info("Summary: %s", summary) |
484 | - logger.info("Description: %s", description) |
485 | - logger.info("Confinement: %s", confinement) |
486 | - logger.info("Architectures: %s", ', '.join(architectures)) |
487 | - logger.info("Sha3_384: %s", sha3_384) |
488 | - logger.info("Sha512: %s", sha512) |
489 | - logger.info("Binary filesize: %d", binary_filesize) |
490 | - |
491 | - series = '16' |
492 | - |
493 | - snap_id = ws.get_snap_id_for_name(name, series) |
494 | - if snap_id is None: |
495 | - snap_id = _generate_new_snap_id() |
496 | - |
497 | - snap_blob = _create_snap_blob( |
498 | - name, |
499 | - description, |
500 | - summary, |
501 | - ) |
502 | - ws.register_snap_name_and_blob( |
503 | - snap_id, name, series, snap_blob, authority='local', |
504 | - stores=['ubuntu']) |
505 | - |
506 | - latest_revision = ws.get_latest_revision_for_snap_id(snap_id) |
507 | - next_revision = latest_revision + 1 if latest_revision else 1 |
508 | - binary_path = _store_snap(snap_path, name, next_revision) |
509 | - logger.info("Binary file path: %s", binary_path) |
510 | - registration_success = ws.create_revision( |
511 | - snap_id, |
512 | - next_revision, |
513 | - architectures, |
514 | - binary_path, |
515 | - binary_filesize, |
516 | - sha512, |
517 | - sha3_384, |
518 | - version, |
519 | - confinement, |
520 | - snap_yaml |
521 | - ) |
522 | - exploded_architectures = set(architectures) |
523 | - if 'all' in exploded_architectures: |
524 | - exploded_architectures.remove('all') |
525 | - exploded_architectures.update(SUPPORTED_ARCHITECTURES) |
526 | - if registration_success and args.channel: |
527 | - # TODO: Track/branch |
528 | - ws.release_revision( |
529 | - snap_id, name, series, [None, args.channel, None], |
530 | - list(exploded_architectures), next_revision) |
531 | - |
532 | - ws.create_or_update_assertions( |
533 | - snap_id, |
534 | - name, |
535 | - series, |
536 | - next_revision, |
537 | - sha3_384, |
538 | - binary_filesize |
539 | - ) |
540 | - |
541 | - |
542 | -def _parse_snap_yaml(workdir, snap_path): |
543 | - """Extract the contents of the snap yaml file, given the path to a .snap""" |
544 | - return yaml.safe_load(_read_snap_yaml(workdir, snap_path)) |
545 | - |
546 | - |
547 | -def _read_snap_yaml(workdir, snap_path): |
548 | - unsquash_path = os.path.join(workdir, 'unsquashed_snap') |
549 | - subprocess.check_call( |
550 | - ['unsquashfs', '-d', unsquash_path, snap_path], |
551 | - stdout=subprocess.DEVNULL) |
552 | - yaml_path = os.path.join(unsquash_path, 'meta', 'snap.yaml') |
553 | - with open(yaml_path, 'r') as yaml_file: |
554 | - return yaml_file.read() |
555 | - |
556 | - |
557 | -def _extract_sha3_384(file_path): |
558 | - blocksize = 2**20 |
559 | - with open(file_path, 'rb') as snap_file: |
560 | - hasher = hashlib.sha3_384() |
561 | - while True: |
562 | - buf = snap_file.read(blocksize) |
563 | - if not buf: |
564 | - break |
565 | - hasher.update(buf) |
566 | - return hasher.hexdigest() |
567 | - |
568 | - |
569 | -def _extract_sha512(file_path): |
570 | - blocksize = 2**20 |
571 | - with open(file_path, 'rb') as snap_file: |
572 | - hasher = hashlib.sha512() |
573 | - while True: |
574 | - buf = snap_file.read(blocksize) |
575 | - if not buf: |
576 | - break |
577 | - hasher.update(buf) |
578 | - return hasher.hexdigest() |
579 | - |
580 | - |
581 | -def _generate_new_snap_id(): |
582 | - """Generate a random ID to identify a snap entity. |
583 | - |
584 | - Snap ids are a sequence of 32 random characters taken out of an alphabet |
585 | - of 62 characters (uppercase letters + lowercase letters + numbers), for |
586 | - a total of 310 bits of space. These unique identifiers are generated |
587 | - centrally or delegated to known parties. |
588 | - |
589 | - This function does not check for duplicates. |
590 | - |
591 | - """ |
592 | - SNAP_ID_ALPHABET = string.ascii_letters + string.digits |
593 | - return ''.join( |
594 | - random.choice(SNAP_ID_ALPHABET) for _ in range(32)) |
595 | - |
596 | - |
597 | -def _create_snap_blob(snap_name, description, summary): |
598 | - """Generate the 'blob', as it's exported from SCA. |
599 | - |
600 | - Many of the fields in the SCA data export aren't used in any part of the |
601 | - snapv2 ecosystem, and have been omitted here. This script doesn't know |
602 | - about some of the fields, so dummy data has been inserted. |
603 | - """ |
604 | - # TODO: Fill in 'origin', 'developer_id', 'developer_name' and 'summary' |
605 | - # somehow |
606 | - return { |
607 | - "origin": "developername", |
608 | - "last_updated": None, |
609 | - "package_name": snap_name, |
610 | - "screenshot_url": None, |
611 | - "developer_id": "46MtBuBZaWy3g8picgdg6YkrCQo84J46", |
612 | - "ratings_average": 0.0, |
613 | - "title": "snap title here", |
614 | - "support_url": "", |
615 | - "icon_url": None, |
616 | - "developer_name": "developername", |
617 | - "screenshot_urls": [], |
618 | - "description": description, |
619 | - "price": 0.0, |
620 | - "translations": {}, |
621 | - "prices": {}, |
622 | - "publisher": "some publisher", |
623 | - "summary": summary, |
624 | - } |
625 | - |
626 | - |
627 | -def _store_snap(snap_path, snap_name, next_revision): |
628 | - """Store a snap in the storage directory. |
629 | - |
630 | - Returns the path that file was stored in, relative to the storage root. |
631 | - """ |
632 | - storage_dir = get_storage_root() |
633 | - target_filename = '%s_%d.snap' % (snap_name, next_revision) |
634 | - target_path = os.path.join(storage_dir, target_filename) |
635 | - shutil.copy(snap_path, target_path) |
636 | - return '/' + os.path.relpath(target_path, storage_dir) |
637 | diff --git a/siab_client/tests/__init__.py b/siab_client/tests/__init__.py |
638 | deleted file mode 100644 |
639 | index de20494..0000000 |
640 | --- a/siab_client/tests/__init__.py |
641 | +++ /dev/null |
642 | @@ -1,14 +0,0 @@ |
643 | -# Copyright 2017 Canonical Ltd. |
644 | - |
645 | -import os |
646 | -from unittest import TestLoader |
647 | - |
648 | -import siab_client |
649 | - |
650 | - |
651 | -def test_suite(): |
652 | - loader = TestLoader() |
653 | - start_dir = os.path.split(siab_client.__file__)[0] |
654 | - top_dir = os.path.join(start_dir, os.path.pardir) |
655 | - suite = loader.discover(start_dir, top_level_dir=top_dir) |
656 | - return suite |
657 | diff --git a/siab_client/tests/test_cli.py b/siab_client/tests/test_cli.py |
658 | deleted file mode 100644 |
659 | index 18b7b8d..0000000 |
660 | --- a/siab_client/tests/test_cli.py |
661 | +++ /dev/null |
662 | @@ -1,64 +0,0 @@ |
663 | -# Copyright 2017 Canonical Ltd. |
664 | - |
665 | -import logging |
666 | - |
667 | -import fixtures |
668 | -from testtools import TestCase |
669 | - |
670 | -from siab_client import cli |
671 | - |
672 | - |
673 | -class ConfigureLoggingTests(TestCase): |
674 | - |
675 | - def setUp(self): |
676 | - super().setUp() |
677 | - self.logger = logging.getLogger(__name__) |
678 | - self.addCleanup( |
679 | - self._restoreLogger, |
680 | - self.logger, self.logger.level, list(self.logger.handlers)) |
681 | - self.stdout = self.useFixture(fixtures.StringStream('stdout')).stream |
682 | - self.stdout.fileno = lambda: 1 |
683 | - self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) |
684 | - self.stderr = self.useFixture(fixtures.StringStream('stderr')).stream |
685 | - self.useFixture(fixtures.MonkeyPatch('sys.stderr', self.stderr)) |
686 | - |
687 | - @staticmethod |
688 | - def _restoreLogger(logger, level, handlers): |
689 | - logger.setLevel(logger.level) |
690 | - for handler in list(logger.handlers): |
691 | - logger.removeHandler(handler) |
692 | - for handler in handlers: |
693 | - logger.addHandler(handler) |
694 | - |
695 | - def test_log_levels(self): |
696 | - self.useFixture(fixtures.MonkeyPatch('os.isatty', lambda fd: True)) |
697 | - cli.configure_logging(__name__) |
698 | - self.assertEqual(logging.INFO, self.logger.level) |
699 | - self.logger.debug('Debug') |
700 | - self.logger.info('Info') |
701 | - self.logger.warning('Warning: %s', 'smoke') |
702 | - self.logger.error('Error: %s', 'fire') |
703 | - self.stdout.seek(0) |
704 | - self.assertEqual('Info\nWarning: smoke\n', self.stdout.read()) |
705 | - self.stderr.seek(0) |
706 | - self.assertEqual('\033[0;31mError: fire\033[0m\n', self.stderr.read()) |
707 | - |
708 | - def test_requests_log_level_default(self): |
709 | - cli.configure_logging(__name__) |
710 | - self.assertEqual(logging.WARNING, logging.getLogger('requests').level) |
711 | - |
712 | - def test_requests_log_level_debug(self): |
713 | - cli.configure_logging(__name__, logging.DEBUG) |
714 | - self.assertEqual(logging.DEBUG, logging.getLogger('requests').level) |
715 | - |
716 | - def test_requests_log_level_error(self): |
717 | - cli.configure_logging(__name__, logging.ERROR) |
718 | - self.assertEqual(logging.ERROR, logging.getLogger('requests').level) |
719 | - |
720 | - def test_no_tty(self): |
721 | - self.useFixture(fixtures.MonkeyPatch('os.isatty', lambda fd: False)) |
722 | - self.useFixture(fixtures.EnvironmentVariable('TERM', 'xterm')) |
723 | - cli.configure_logging(__name__) |
724 | - self.logger.error('Error: %s', 'fire') |
725 | - self.stderr.seek(0) |
726 | - self.assertEqual('Error: fire\n', self.stderr.read()) |
727 | diff --git a/siab_client/tests/test_config.py b/siab_client/tests/test_config.py |
728 | deleted file mode 100644 |
729 | index 7bf5271..0000000 |
730 | --- a/siab_client/tests/test_config.py |
731 | +++ /dev/null |
732 | @@ -1,72 +0,0 @@ |
733 | -# Copyright 2017 Canonical Ltd. |
734 | - |
735 | -from testtools import TestCase |
736 | - |
737 | -from siab_client.config import ConfigProvider |
738 | - |
739 | - |
740 | -class ConfigProviderTestCase(TestCase): |
741 | - |
742 | - def test_override_works_for_new_section_and_option(self): |
743 | - default = {} |
744 | - override = { |
745 | - 'new_section': { |
746 | - 'new_option': 'new_value' |
747 | - } |
748 | - } |
749 | - expected = override.copy() |
750 | - |
751 | - provider = ConfigProvider(default) |
752 | - provider.override_for_test(override) |
753 | - |
754 | - self.assertEqual(expected, provider()) |
755 | - |
756 | - def test_override_works_when_adding_new_option_to_existing_section(self): |
757 | - default = { |
758 | - 'section': { |
759 | - 'option': 'value' |
760 | - } |
761 | - } |
762 | - override = { |
763 | - 'section': { |
764 | - 'new_option': 'new_value' |
765 | - } |
766 | - } |
767 | - expected = { |
768 | - 'section': { |
769 | - 'option': 'value', |
770 | - 'new_option': 'new_value' |
771 | - } |
772 | - } |
773 | - |
774 | - provider = ConfigProvider(default) |
775 | - provider.override_for_test(override) |
776 | - |
777 | - self.assertEqual(expected, provider()) |
778 | - |
779 | - def test_override_works_when_changing_existing_option(self): |
780 | - default = { |
781 | - 'section': { |
782 | - 'option': 'value' |
783 | - } |
784 | - } |
785 | - override = { |
786 | - 'section': { |
787 | - 'option': 'new_value' |
788 | - } |
789 | - } |
790 | - expected = { |
791 | - 'section': { |
792 | - 'option': 'new_value', |
793 | - } |
794 | - } |
795 | - |
796 | - provider = ConfigProvider(default) |
797 | - provider.override_for_test(override) |
798 | - |
799 | - self.assertEqual(expected, provider()) |
800 | - |
801 | - |
802 | -def test_suite(): |
803 | - from unittest import TestLoader |
804 | - return TestLoader().loadTestsFromName(__name__) |
805 | diff --git a/siab_client/webservices.py b/siab_client/webservices.py |
806 | deleted file mode 100644 |
807 | index addfd0f..0000000 |
808 | --- a/siab_client/webservices.py |
809 | +++ /dev/null |
810 | @@ -1,278 +0,0 @@ |
811 | -# Copyright 2017 Canonical Ltd. |
812 | - |
813 | -import base64 |
814 | -import binascii |
815 | -import datetime |
816 | -import json |
817 | -import logging |
818 | -import os.path |
819 | -import urllib.parse |
820 | - |
821 | -import requests |
822 | - |
823 | -from siab_client import config |
824 | - |
825 | - |
826 | -logger = logging.getLogger(__name__) |
827 | - |
828 | - |
829 | -def get_snap_id_for_name(snap_name, series): |
830 | - """Return the snap_name associated with this snap_name. |
831 | - |
832 | - Returns None if the snap_name has not been associated with a snap_id yet. |
833 | - """ |
834 | - ident_root = config.read_config()['services']['snapident'] |
835 | - resp = requests.post( |
836 | - urllib.parse.urljoin(ident_root, '/snaps/filter'), |
837 | - json={ |
838 | - 'filters': [{ |
839 | - 'snap_name': snap_name, |
840 | - 'series': series |
841 | - }] |
842 | - } |
843 | - ) |
844 | - snap_results = resp.json()['snaps'] |
845 | - if not snap_results: |
846 | - return None |
847 | - assert len(snap_results) == 1 |
848 | - return snap_results[0]['snap_id'] |
849 | - |
850 | - |
851 | -def register_snap_name_and_blob(snap_id, snap_name, series, snap_blob, |
852 | - private=False, authority=None, |
853 | - publisher_id=None, published=True, |
854 | - stores=['ubuntu'], country_whitelist=None, |
855 | - country_blacklist=None): |
856 | - ident_root = config.read_config()['services']['snapident'] |
857 | - data = {'snap_id': snap_id, |
858 | - 'private': private, |
859 | - 'publisher_id': snap_blob['developer_id'], |
860 | - 'snap_name': snap_name, |
861 | - 'series': series, |
862 | - 'blob': snap_blob, |
863 | - 'authority': authority, |
864 | - 'status': 'published' if published else 'unpublished', |
865 | - 'stores': stores} |
866 | - if country_whitelist is not None: |
867 | - data['country_whitelist'] = country_whitelist |
868 | - if country_blacklist is not None: |
869 | - data['country_blacklist'] = country_blacklist |
870 | - resp = requests.post( |
871 | - urllib.parse.urljoin(ident_root, '/snaps/update'), |
872 | - json={'snaps': [data]}) |
873 | - if resp.status_code == 200: |
874 | - logger.info( |
875 | - "Snap '%s' (%s) registered for series '%s'.", |
876 | - snap_name, snap_id, series |
877 | - ) |
878 | - return False |
879 | - else: |
880 | - _print_error_message('register snap', resp) |
881 | - return True |
882 | - |
883 | - |
884 | -def release_revision(snap_id, snap_name, series, channel, arches, |
885 | - revision=None, redirect_channel=None, |
886 | - permanent_redirect=None): |
887 | - reqs = [] |
888 | - for arch in arches: |
889 | - d = { |
890 | - 'snap_id': snap_id, |
891 | - 'channel': channel, |
892 | - 'architecture': arch, |
893 | - 'series': series, |
894 | - } |
895 | - if revision is not None: |
896 | - d['revision'] = revision |
897 | - else: |
898 | - d['redirect_channel'] = redirect_channel |
899 | - d['permanent_redirect'] = permanent_redirect |
900 | - reqs.append(d) |
901 | - revs_root = config.read_config()['services']['snaprevs'] |
902 | - resp = requests.post( |
903 | - urllib.parse.urljoin(revs_root, '/channelmaps/update'), |
904 | - json={ |
905 | - 'developer_id': 'wgrant', |
906 | - 'release_requests': reqs, |
907 | - } |
908 | - ) |
909 | - if resp.status_code == 200: |
910 | - logger.info( |
911 | - "Snap '%s' (%s) revision %s released to series=%s, " |
912 | - "channel=%s, arches=%s", |
913 | - snap_name, snap_id, revision, series, channel, |
914 | - ','.join(arches) |
915 | - ) |
916 | - else: |
917 | - _print_error_message('release revision', resp) |
918 | - |
919 | - |
920 | -def get_latest_revision_for_snap_id(snap_id): |
921 | - """Return the latest revision for the given snap_id. |
922 | - |
923 | - If no revisions have been published yet, this function returns None. |
924 | - """ |
925 | - snaprevs_root = config.read_config()['services']['snaprevs'] |
926 | - resp = requests.post( |
927 | - urllib.parse.urljoin(snaprevs_root, '/revisions/filter'), |
928 | - json={'filters': [{'snap_id': snap_id}]} |
929 | - ) |
930 | - revisions = [s['revision'] for s in resp.json()['revisions']] |
931 | - return max(revisions) if revisions else None |
932 | - |
933 | - |
934 | -def create_revision(snap_id, revision, architectures, binary_path, filesize, |
935 | - sha512, sha3_384, version, confinement, snap_yaml): |
936 | - """Create a snap revision with snaprevs. |
937 | - |
938 | - Returns True on succcess, False otherwise. |
939 | - """ |
940 | - snaprevs_root = config.read_config()['services']['snaprevs'] |
941 | - # TODO: I suspect this is supposed to be UTC time, not local time? |
942 | - created_at = datetime.datetime.now().isoformat() |
943 | - # TODO: figure out what we're doing with developer ids. |
944 | - created_by = 'TODO' |
945 | - payload = { |
946 | - 'snap_id': snap_id, |
947 | - 'revision': revision, |
948 | - 'created_at': created_at, |
949 | - 'created_by': created_by, |
950 | - 'architectures': architectures, |
951 | - 'binary_path': binary_path, |
952 | - 'binary_filesize': filesize, |
953 | - 'binary_sha512': sha512, |
954 | - 'binary_sha3_384': sha3_384, |
955 | - 'version': version, |
956 | - 'confinement': confinement, |
957 | - 'snap_yaml': snap_yaml or '', |
958 | - 'epoch': 0, |
959 | - 'type': 'app', |
960 | - } |
961 | - resp = requests.post( |
962 | - urllib.parse.urljoin(snaprevs_root, '/revisions/create'), |
963 | - json=[payload] |
964 | - ) |
965 | - if resp.status_code != 201: |
966 | - _print_error_message('create revision', resp) |
967 | - return False |
968 | - |
969 | - logger.info("Revision %s of %s created.", revision, snap_id) |
970 | - return True |
971 | - |
972 | - |
973 | -def create_snapsections(payload): |
974 | - logger.info('Updating sections and snapsections...') |
975 | - |
976 | - snapfind_root = config.read_config()['services']['snapfind'] |
977 | - snapfind_snapsection_endpoint = urllib.parse.urljoin( |
978 | - snapfind_root, '/sections/snaps') |
979 | - |
980 | - response = requests.post(snapfind_snapsection_endpoint, json=payload) |
981 | - if response.ok: |
982 | - logger.info('Done.') |
983 | - else: |
984 | - logger.error(response.text) |
985 | - |
986 | - |
987 | -def get_assertion(root, type_, key): |
988 | - resp = requests.get( |
989 | - urllib.parse.urljoin(root, os.path.join('assertions', type_, *key)), |
990 | - headers={'Accept': 'application/x.ubuntu.assertion'}) |
991 | - if resp.status_code != 200: |
992 | - return None |
993 | - return resp.content |
994 | - |
995 | - |
996 | -def create_or_update_assertions( |
997 | - snap_id, snap_name, series, revision, binary_sha3_384, |
998 | - binary_filesize): |
999 | - assertions_root = config.read_config()['services']['assertions'] |
1000 | - authority = config.read_config()['assertions']['authority'] |
1001 | - timestamp = datetime.datetime.now().isoformat() + 'Z' |
1002 | - |
1003 | - if get_assertion( |
1004 | - assertions_root, 'snap-declaration', (series, snap_id)) is None: |
1005 | - logger.info("Creating snap-declaration ...") |
1006 | - assertion_headers = { |
1007 | - 'type': 'snap-declaration', |
1008 | - 'revision': '0', |
1009 | - 'authority-id': authority, |
1010 | - 'publisher-id': authority, |
1011 | - 'series': series, |
1012 | - 'snap-id': snap_id, |
1013 | - 'snap-name': snap_name, |
1014 | - 'timestamp': timestamp, |
1015 | - } |
1016 | - assertion = sign_assertion(assertion_headers) |
1017 | - save_assertion(assertion) |
1018 | - |
1019 | - logger.info("Creating snap-revision ...") |
1020 | - snap_sha3_384 = base64.urlsafe_b64encode( |
1021 | - binascii.a2b_hex(binary_sha3_384)).decode().rstrip("=") |
1022 | - snap_size = str(binary_filesize) |
1023 | - snap_revision = str(revision) |
1024 | - assertion_headers = { |
1025 | - 'type': 'snap-revision', |
1026 | - 'authority-id': authority, |
1027 | - 'developer-id': authority, |
1028 | - 'snap-sha3-384': snap_sha3_384, |
1029 | - 'snap-id': snap_id, |
1030 | - 'snap-size': snap_size, |
1031 | - 'snap-revision': snap_revision, |
1032 | - 'timestamp': timestamp, |
1033 | - } |
1034 | - assertion = sign_assertion(assertion_headers) |
1035 | - save_assertion(assertion) |
1036 | - |
1037 | - |
1038 | -def save_assertion(assertion): |
1039 | - assertions_root = config.read_config()['services']['assertions'] |
1040 | - request_headers = { |
1041 | - 'Content-Type': 'application/x.ubuntu.assertion', |
1042 | - } |
1043 | - resp = requests.post( |
1044 | - urllib.parse.urljoin(assertions_root, 'assertions'), |
1045 | - data=assertion, headers=request_headers) |
1046 | - |
1047 | - if resp.status_code != 201: |
1048 | - _print_error_message('save assertion', resp) |
1049 | - resp.raise_for_status() |
1050 | - |
1051 | - |
1052 | -def sign_assertion(assertion_headers): |
1053 | - assertions_root = config.read_config()['services']['assertions'] |
1054 | - signing_key_id = config.read_config()['assertions']['signing_key_id'] |
1055 | - request_headers = { |
1056 | - 'Content-Type': 'application/json', |
1057 | - } |
1058 | - data = { |
1059 | - 'key-id': signing_key_id, |
1060 | - 'headers': assertion_headers, |
1061 | - } |
1062 | - resp = requests.post( |
1063 | - urllib.parse.urljoin(assertions_root, 'sign'), |
1064 | - headers=request_headers, json=data) |
1065 | - |
1066 | - if resp.status_code != 200: |
1067 | - _print_error_message('sign assertion', resp) |
1068 | - resp.raise_for_status() |
1069 | - |
1070 | - return resp.text |
1071 | - |
1072 | - |
1073 | -def _print_error_message(action, response): |
1074 | - """Print failure messages from other services in a standard way.""" |
1075 | - logger.error("Failed to %s:", action) |
1076 | - if response.status_code == 500: |
1077 | - logger.error("Server sent 500 response.") |
1078 | - else: |
1079 | - try: |
1080 | - json_document = response.json() |
1081 | - for error in json_document['error_list']: |
1082 | - logger.error(error['message']) |
1083 | - except json.JSONDecodeError: |
1084 | - logger.error( |
1085 | - "Server sent non-json response, printed verbatim below:") |
1086 | - logger.error('=' * 20) |
1087 | - logger.error(response.content) |
1088 | - logger.error('=' * 20) |
1089 | diff --git a/snapstore b/snapstore |
1090 | index 5d66fe9..07794cf 100755 |
1091 | --- a/snapstore |
1092 | +++ b/snapstore |
1093 | @@ -7,9 +7,9 @@ |
1094 | import argparse |
1095 | import sys |
1096 | |
1097 | -from siab_client.cli import configure_logging |
1098 | -from siab_client.logic.upload import upload_snap |
1099 | -from siab_client.logic.dump import ( |
1100 | +from snapstore_client.cli import configure_logging |
1101 | +from snapstore_client.logic.upload import upload_snap |
1102 | +from snapstore_client.logic.dump import ( |
1103 | import_dump, |
1104 | import_snapsection_dump, |
1105 | ) |
1106 | diff --git a/snapstore_client/__init__.py b/snapstore_client/__init__.py |
1107 | new file mode 100644 |
1108 | index 0000000..e69df82 |
1109 | --- /dev/null |
1110 | +++ b/snapstore_client/__init__.py |
1111 | @@ -0,0 +1,18 @@ |
1112 | +# -*- coding: utf-8 -*- |
1113 | +# Copyright 2017 Canonical Ltd. |
1114 | + |
1115 | +import os |
1116 | + |
1117 | + |
1118 | +SUPPORTED_ARCHITECTURES = ( |
1119 | + 'amd64', 'arm64', 'armhf', 'i386', 'powerpc', 'ppc64el', 's390x') |
1120 | + |
1121 | + |
1122 | +def get_storage_root(): |
1123 | + return os.path.abspath( |
1124 | + os.path.join( |
1125 | + os.path.dirname(__file__), |
1126 | + '..', |
1127 | + 'snap_storage' |
1128 | + ) |
1129 | + ) |
1130 | diff --git a/snapstore_client/cli.py b/snapstore_client/cli.py |
1131 | new file mode 100644 |
1132 | index 0000000..0ee6e66 |
1133 | --- /dev/null |
1134 | +++ b/snapstore_client/cli.py |
1135 | @@ -0,0 +1,66 @@ |
1136 | +# Copyright 2017 Canonical Ltd. |
1137 | + |
1138 | +"""Command-line interface niceties for this service.""" |
1139 | + |
1140 | +import logging |
1141 | +import os |
1142 | +import sys |
1143 | + |
1144 | + |
1145 | +class _StdoutFilter(logging.Filter): |
1146 | + |
1147 | + def filter(self, record): |
1148 | + return record.levelno <= logging.WARNING |
1149 | + |
1150 | + |
1151 | +class _StderrFilter(logging.Filter): |
1152 | + |
1153 | + def filter(self, record): |
1154 | + return record.levelno >= logging.ERROR |
1155 | + |
1156 | + |
1157 | +def _is_dumb_terminal(): |
1158 | + """Return True if on a dumb terminal.""" |
1159 | + is_stdout_tty = os.isatty(sys.stdout.fileno()) |
1160 | + is_term_dumb = os.environ.get('TERM', '') == 'dumb' |
1161 | + return not is_stdout_tty or is_term_dumb |
1162 | + |
1163 | + |
1164 | +class _ColouredFormatter(logging.Formatter): |
1165 | + |
1166 | + _reset = '\033[0m' |
1167 | + _level_colours = { |
1168 | + 'ERROR': '\033[0;31m', # Dark red |
1169 | + } |
1170 | + |
1171 | + def format(self, record): |
1172 | + colour = self._level_colours.get(record.levelname) |
1173 | + log_message = super().format(record) |
1174 | + if colour is not None: |
1175 | + return colour + log_message + self._reset |
1176 | + else: |
1177 | + return log_message |
1178 | + |
1179 | + |
1180 | +def configure_logging(logger_name=None, log_level=logging.INFO): |
1181 | + stdout_handler = logging.StreamHandler(stream=sys.stdout) |
1182 | + stdout_handler.addFilter(_StdoutFilter()) |
1183 | + stderr_handler = logging.StreamHandler(stream=sys.stderr) |
1184 | + stderr_handler.addFilter(_StderrFilter()) |
1185 | + handlers = [stdout_handler, stderr_handler] |
1186 | + if _is_dumb_terminal(): |
1187 | + formatter = logging.Formatter(style='{') |
1188 | + else: |
1189 | + formatter = _ColouredFormatter(style='{') |
1190 | + logger = logging.getLogger(logger_name) |
1191 | + for handler in handlers: |
1192 | + handler.setFormatter(formatter) |
1193 | + logger.addHandler(handler) |
1194 | + logger.setLevel(log_level) |
1195 | + |
1196 | + # The requests library is too noisy at INFO level. |
1197 | + if log_level == logging.DEBUG: |
1198 | + logging.getLogger('requests').setLevel(log_level) |
1199 | + else: |
1200 | + logging.getLogger('requests').setLevel( |
1201 | + max(logging.WARNING, log_level)) |
1202 | diff --git a/snapstore_client/config.py b/snapstore_client/config.py |
1203 | new file mode 100644 |
1204 | index 0000000..eacf0b3 |
1205 | --- /dev/null |
1206 | +++ b/snapstore_client/config.py |
1207 | @@ -0,0 +1,94 @@ |
1208 | +# -*- coding: utf-8 -*- |
1209 | +# Copyright 2017 Canonical Ltd. |
1210 | + |
1211 | +"""Configuration provider for this service.""" |
1212 | + |
1213 | +import configparser |
1214 | +import os |
1215 | + |
1216 | + |
1217 | +ROOT = os.path.abspath( |
1218 | + os.path.join( |
1219 | + os.path.dirname(__file__), |
1220 | + '..' |
1221 | + ) |
1222 | +) |
1223 | + |
1224 | + |
1225 | +DEFAULT_TEST_CONFIG = { |
1226 | + 'services': { |
1227 | + 'storage': 'http://localhost:8005/' |
1228 | + } |
1229 | +} |
1230 | + |
1231 | + |
1232 | +def _production_read_config(): |
1233 | + root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) |
1234 | + config = configparser.ConfigParser(dict(ROOT=root)) |
1235 | + default_path = os.path.join(root, 'service.conf') |
1236 | + config.read(default_path) |
1237 | + return {sec: dict(config.items(sec)) for sec in config.sections()} |
1238 | + |
1239 | + |
1240 | +class ConfigProvider(object): |
1241 | + |
1242 | + """Provide a single entry point for reading config that can be altered. |
1243 | + |
1244 | + This class allows production code to call 'read_config' and get a copy of |
1245 | + test configuration during test runs, but production config during normal |
1246 | + operations. |
1247 | + """ |
1248 | + |
1249 | + def __init__(self, default_test_config): |
1250 | + self._test_override = [] |
1251 | + self._default_test_config = default_test_config |
1252 | + |
1253 | + def __call__(self): |
1254 | + if self._test_override: |
1255 | + return self._test_override[-1] |
1256 | + else: |
1257 | + return _production_read_config() |
1258 | + |
1259 | + def merge_config(self, config, new_config): |
1260 | + merged_config = config.copy() |
1261 | + for key in new_config.keys(): |
1262 | + if key in config.keys(): |
1263 | + if isinstance(config[key], dict) and \ |
1264 | + isinstance(new_config[key], dict): |
1265 | + merged_config[key] = self.merge_config( |
1266 | + config[key], new_config[key]) |
1267 | + else: |
1268 | + merged_config[key] = new_config[key] |
1269 | + else: |
1270 | + merged_config[key] = new_config[key] |
1271 | + return merged_config |
1272 | + |
1273 | + def override_for_test(self, new_config): |
1274 | + """Override configuration for a test. |
1275 | + |
1276 | + In a test, if you wanted to set a value, you can do this:: |
1277 | + |
1278 | + from snapstore_client.config import read_config
1279 | + |
1280 | + def test_something(self): |
1281 | + new_config = { |
1282 | + 'worker': { |
1283 | + 'some_setting': 'some_value' |
1284 | + } |
1285 | + } |
1286 | + self.addCleanup(read_config.override_for_test(new_config)) |
1287 | + |
1288 | + The configuration provided is merged on top of DEFAULT_TEST_CONFIG |
1289 | + (see above), so there's no need to specify values that are in the |
1290 | + default config. |
1291 | + """ |
1292 | + if self._test_override: |
1293 | + source = self._test_override[-1] |
1294 | + else: |
1295 | + source = self._default_test_config.copy() |
1296 | + config = self.merge_config(source, new_config) |
1297 | + self._test_override.append(config) |
1298 | + return self._test_override.pop |
1299 | + |
1300 | + |
1301 | +read_config = ConfigProvider(DEFAULT_TEST_CONFIG) |
1302 | diff --git a/snapstore_client/logic/__init__.py b/snapstore_client/logic/__init__.py |
1303 | new file mode 100644 |
1304 | index 0000000..d115bb8 |
1305 | --- /dev/null |
1306 | +++ b/snapstore_client/logic/__init__.py |
1307 | @@ -0,0 +1,2 @@ |
1308 | +# -*- coding: utf-8 -*- |
1309 | +# Copyright 2017 Canonical Ltd. |
1310 | diff --git a/snapstore_client/logic/dump.py b/snapstore_client/logic/dump.py |
1311 | new file mode 100644 |
1312 | index 0000000..d2c2a40 |
1313 | --- /dev/null |
1314 | +++ b/snapstore_client/logic/dump.py |
1315 | @@ -0,0 +1,193 @@ |
1316 | +# Copyright 2017 Canonical Ltd. |
1317 | + |
1318 | +import base64 |
1319 | +import codecs |
1320 | +import csv |
1321 | +import itertools |
1322 | +import json |
1323 | +import logging |
1324 | +import os.path |
1325 | + |
1326 | +import yaml |
1327 | + |
1328 | +from snapstore_client import ( |
1329 | + SUPPORTED_ARCHITECTURES, |
1330 | + config, |
1331 | +) |
1332 | +from snapstore_client import webservices as ws |
1333 | + |
1334 | + |
1335 | +logger = logging.getLogger(__name__) |
1336 | + |
1337 | + |
1338 | +RISK_ORDER = ['stable', 'candidate', 'beta', 'edge'] |
1339 | + |
1340 | + |
1341 | +def parse_channel(channel): |
1342 | + segments = channel.split('/') |
1343 | + if len(segments) == 1: |
1344 | + return None, segments[0], None |
1345 | + if len(segments) == 2: |
1346 | + return segments[0], segments[1], None |
1347 | + return segments |
1348 | + |
1349 | + |
1350 | +def import_dump(args): |
1351 | + logger.info('Importing from %s.', args.root) |
1352 | + for snap_id in args.snap_id: |
1353 | + snap_root = os.path.join(args.root, snap_id) |
1354 | + _import_snap(snap_root) |
1355 | + |
1356 | + |
1357 | +def _import_snap(dir): |
1358 | + with open(os.path.join(dir, 'snap.json')) as f: |
1359 | + snap_meta = json.loads(f.read()) |
1360 | + logger.info(' %s -> %s', snap_meta['snap_id'], snap_meta['name']) |
1361 | + |
1362 | + # XXX: Series and authority hardcoded. |
1363 | + error = ws.register_snap_name_and_blob( |
1364 | + snap_id=snap_meta['snap_id'], |
1365 | + snap_name=snap_meta['name'], |
1366 | + snap_blob=snap_meta['_cpi_data'], |
1367 | + private=snap_meta['private'], |
1368 | + series='16', |
1369 | + publisher_id=snap_meta['publisher_id'], |
1370 | + authority='canonical', |
1371 | + published=snap_meta['status'] == 'Published', |
1372 | + stores=snap_meta['stores'], |
1373 | + country_whitelist=snap_meta['country_whitelist'] or None, |
1374 | + country_blacklist=snap_meta['country_blacklist'] or None, |
1375 | + ) |
1376 | + if error: |
1377 | + return |
1378 | + |
1379 | + with open(os.path.join(dir, 'channelmap.json')) as f: |
1380 | + channelmaps = json.loads(f.read()) |
1381 | + |
1382 | + # Collect metadata for the interesting revisions. We currently only |
1383 | + # import those that are visible in channels now. |
1384 | + rev_metas = {} |
1385 | + for cm in channelmaps: |
1386 | + path = os.path.join( |
1387 | + dir, 'revisions', '{:d}.json'.format(cm['revision'])) |
1388 | + if not os.path.exists(path): |
1389 | + continue |
1390 | + with open(path) as f: |
1391 | + rev_meta = json.loads(f.read()) |
1392 | + assert rev_meta['revision'] == cm['revision'] |
1393 | + rev_metas[cm['revision']] = rev_meta |
1394 | + |
1395 | + # Collect the interesting revisions' SHA3-384s, translating them |
1396 | + # from hex to the snappy-style URL-safe base64. |
1397 | + sha3_384s = [ |
1398 | + base64.urlsafe_b64encode( |
1399 | + codecs.decode(rev_meta['binary_sha3_384'], 'hex')).decode('ascii') |
1400 | + for rev_meta in rev_metas.values()] |
1401 | + |
1402 | + # Ensure we have all relevant assertions locally before exposing new |
1403 | + # revisions to clients. |
1404 | + _sync_snap_assertions( |
1405 | + 'https://assertions.ubuntu.com/v1/', snap_meta['snap_id'], '16', |
1406 | + sha3_384s) |
1407 | + |
1408 | + seen_archs = set() |
1409 | + seen_tracks = set() |
1410 | + seen_track_risk_archs = set() |
1411 | + rev_created = [] |
1412 | + for cm in channelmaps: |
1413 | + rev_meta = rev_metas.get(cm['revision']) |
1414 | + if rev_meta is None: |
1415 | + continue |
1416 | + if cm['revision'] not in rev_created: |
1417 | + ws.create_revision( |
1418 | + snap_meta['snap_id'], cm['revision'], |
1419 | + rev_meta['architectures'], |
1420 | + '/remotes/ubuntu/{}'.format(rev_meta['binary_path']), |
1421 | + rev_meta['binary_filesize'], rev_meta['binary_sha512'], |
1422 | + rev_meta['binary_sha3_384'], rev_meta['version'], |
1423 | + rev_meta['confinement'], rev_meta['snap_yaml_raw']) |
1424 | + rev_created.append(cm['revision']) |
1425 | + track, risk, branch = parse_channel(cm['channel']) |
1426 | + exploded_architectures = set(rev_meta['architectures']) |
1427 | + seen_archs.update(exploded_architectures) |
1428 | + seen_tracks.add(track) |
1429 | + seen_track_risk_archs.update( |
1430 | + (track, risk, arch) for arch in exploded_architectures) |
1431 | + if 'all' in exploded_architectures: |
1432 | + exploded_architectures.remove('all') |
1433 | + exploded_architectures.update(SUPPORTED_ARCHITECTURES) |
1434 | + ws.release_revision( |
1435 | + snap_meta['snap_id'], snap_meta['name'], cm['series'], |
1436 | + [track, risk, branch], exploded_architectures, cm['revision']) |
1437 | + |
1438 | + # Fill in tracking. |
1439 | + all_track_risk_archs = set( |
1440 | + itertools.product(seen_tracks, RISK_ORDER[1:], seen_archs)) |
1441 | + for track, risk, arch in all_track_risk_archs - seen_track_risk_archs: |
1442 | + ws.release_revision( |
1443 | + snap_meta['snap_id'], snap_meta['name'], cm['series'], |
1444 | + [track, risk, None], [arch], |
1445 | + redirect_channel=[ |
1446 | + track, RISK_ORDER[RISK_ORDER.index(risk) - 1], None], |
1447 | + permanent_redirect=False) |
1448 | + |
1449 | + |
1450 | +def import_snapsection_dump(args): |
1451 | + """Import snapsection tsv dump. |
1452 | + |
1453 | + Assumes tsv takes form: |
1454 | + snap_id\tfeatured\tscore\tsection_name |
1455 | + """ |
1456 | + logger.info('Importing snapsections from %s.', args.tsv_file) |
1457 | + |
1458 | + sections = {} |
1459 | + with open(args.tsv_file, 'r') as tsvin: |
1460 | + tsv = csv.reader(tsvin, delimiter='\t') |
1461 | + for snap_id, featured, score, section_name in tsv: |
1462 | + if section_name not in sections: |
1463 | + sections[section_name] = [] |
1464 | + sections[section_name].append({ |
1465 | + 'snap_id': snap_id, |
1466 | + 'featured': bool(featured), |
1467 | + 'score': int(score), |
1468 | + 'series': '16', |
1469 | + }) |
1470 | + payload = {'sections': []} |
1471 | + for section in sections: |
1472 | + payload['sections'].append({ |
1473 | + 'section_name': section, |
1474 | + 'snaps': sections[section] |
1475 | + }) |
1476 | + ws.create_snapsections(payload) |
1477 | + |
1478 | + |
1479 | +def _parse_assertion(assertion): |
1480 | + return yaml.safe_load(assertion.split(b'\n\n')[0]) |
1481 | + |
1482 | + |
1483 | +def _sync_snap_assertions(remote, snap_id, series, sha3_384s): |
1484 | + local = config.read_config()['services']['assertions'] |
1485 | + |
1486 | + # Collect the snap-declaration and snap-revisions. |
1487 | + decl_raw = ws.get_assertion(remote, 'snap-declaration', (series, snap_id)) |
1488 | + rev_raws = [ |
1489 | + ws.get_assertion(remote, 'snap-revision', (sha3_384,)) |
1490 | + for sha3_384 in sha3_384s |
1491 | + if ws.get_assertion(local, 'snap-revision', (sha3_384,)) is None] |
1492 | + |
1493 | + # Collect dependencies (currently just accounts). |
1494 | + accounts = set() |
1495 | + accounts.add(_parse_assertion(decl_raw)['publisher-id']) |
1496 | + accounts.update( |
1497 | + _parse_assertion(rev_raw)['developer-id'] for rev_raw in rev_raws) |
1498 | + |
1499 | + # Import dependencies that don't exist. |
1500 | + for account in accounts: |
1501 | + if ws.get_assertion(local, 'account', (account,)) is not None: |
1502 | + continue |
1503 | + ws.save_assertion(ws.get_assertion(remote, 'account', (account,))) |
1504 | + |
1505 | + # Now save the main assertions. |
1506 | + ws.save_assertion(decl_raw) |
1507 | + for rev_raw in rev_raws: |
1508 | + ws.save_assertion(rev_raw) |
1509 | diff --git a/snapstore_client/logic/upload.py b/snapstore_client/logic/upload.py |
1510 | new file mode 100644 |
1511 | index 0000000..5dd5d1d |
1512 | --- /dev/null |
1513 | +++ b/snapstore_client/logic/upload.py |
1514 | @@ -0,0 +1,200 @@ |
1515 | +# Copyright 2017 Canonical Ltd. |
1516 | + |
1517 | +import hashlib |
1518 | +import logging |
1519 | +import os |
1520 | +import random |
1521 | +import shutil |
1522 | +import string |
1523 | +import subprocess |
1524 | +import sys |
1525 | +import tempfile |
1526 | + |
1527 | +import yaml |
1528 | + |
1529 | +from snapstore_client import ( |
1530 | + SUPPORTED_ARCHITECTURES, |
1531 | + get_storage_root, |
1532 | +) |
1533 | +from snapstore_client import webservices as ws |
1534 | + |
1535 | +if sys.version < '3.6': |
1536 | + import sha3 # noqa |
1537 | + |
1538 | + |
1539 | +logger = logging.getLogger(__name__) |
1540 | + |
1541 | + |
1542 | +def upload_snap(args): |
1543 | + snap_path = args.snap |
1544 | + with tempfile.TemporaryDirectory() as workdir: |
1545 | + snap_yaml = _parse_snap_yaml(workdir, snap_path) |
1546 | + |
1547 | + name = snap_yaml['name'] |
1548 | + version = str(snap_yaml['version']) |
1549 | + summary = snap_yaml['summary'] |
1550 | + description = snap_yaml['description'] |
1551 | + confinement = snap_yaml['confinement'] |
1552 | + architectures = snap_yaml['architectures'] |
1553 | + sha3_384 = _extract_sha3_384(snap_path) |
1554 | + sha512 = _extract_sha512(snap_path) |
1555 | + binary_filesize = os.path.getsize(snap_path) |
1556 | + |
1557 | + snap_yaml = _read_snap_yaml(workdir, snap_path) |
1558 | + |
1559 | + logger.info("Name: %s", name) |
1560 | + logger.info("Version: %s", version) |
1561 | + logger.info("Summary: %s", summary) |
1562 | + logger.info("Description: %s", description) |
1563 | + logger.info("Confinement: %s", confinement) |
1564 | + logger.info("Architectures: %s", ', '.join(architectures)) |
1565 | + logger.info("Sha3_384: %s", sha3_384) |
1566 | + logger.info("Sha512: %s", sha512) |
1567 | + logger.info("Binary filesize: %d", binary_filesize) |
1568 | + |
1569 | + series = '16' |
1570 | + |
1571 | + snap_id = ws.get_snap_id_for_name(name, series) |
1572 | + if snap_id is None: |
1573 | + snap_id = _generate_new_snap_id() |
1574 | + |
1575 | + snap_blob = _create_snap_blob( |
1576 | + name, |
1577 | + description, |
1578 | + summary, |
1579 | + ) |
1580 | + ws.register_snap_name_and_blob( |
1581 | + snap_id, name, series, snap_blob, authority='local', |
1582 | + stores=['ubuntu']) |
1583 | + |
1584 | + latest_revision = ws.get_latest_revision_for_snap_id(snap_id) |
1585 | + next_revision = latest_revision + 1 if latest_revision else 1 |
1586 | + binary_path = _store_snap(snap_path, name, next_revision) |
1587 | + logger.info("Binary file path: %s", binary_path) |
1588 | + registration_success = ws.create_revision( |
1589 | + snap_id, |
1590 | + next_revision, |
1591 | + architectures, |
1592 | + binary_path, |
1593 | + binary_filesize, |
1594 | + sha512, |
1595 | + sha3_384, |
1596 | + version, |
1597 | + confinement, |
1598 | + snap_yaml |
1599 | + ) |
1600 | + exploded_architectures = set(architectures) |
1601 | + if 'all' in exploded_architectures: |
1602 | + exploded_architectures.remove('all') |
1603 | + exploded_architectures.update(SUPPORTED_ARCHITECTURES) |
1604 | + if registration_success and args.channel: |
1605 | + # TODO: Track/branch |
1606 | + ws.release_revision( |
1607 | + snap_id, name, series, [None, args.channel, None], |
1608 | + list(exploded_architectures), next_revision) |
1609 | + |
1610 | + ws.create_or_update_assertions( |
1611 | + snap_id, |
1612 | + name, |
1613 | + series, |
1614 | + next_revision, |
1615 | + sha3_384, |
1616 | + binary_filesize |
1617 | + ) |
1618 | + |
1619 | + |
1620 | +def _parse_snap_yaml(workdir, snap_path): |
1621 | + """Extract the contents of the snap yaml file, given the path to a .snap""" |
1622 | + return yaml.safe_load(_read_snap_yaml(workdir, snap_path)) |
1623 | + |
1624 | + |
1625 | +def _read_snap_yaml(workdir, snap_path): |
1626 | + unsquash_path = os.path.join(workdir, 'unsquashed_snap') |
1627 | + subprocess.check_call( |
1628 | + ['unsquashfs', '-d', unsquash_path, snap_path], |
1629 | + stdout=subprocess.DEVNULL) |
1630 | + yaml_path = os.path.join(unsquash_path, 'meta', 'snap.yaml') |
1631 | + with open(yaml_path, 'r') as yaml_file: |
1632 | + return yaml_file.read() |
1633 | + |
1634 | + |
1635 | +def _extract_sha3_384(file_path): |
1636 | + blocksize = 2**20 |
1637 | + with open(file_path, 'rb') as snap_file: |
1638 | + hasher = hashlib.sha3_384() |
1639 | + while True: |
1640 | + buf = snap_file.read(blocksize) |
1641 | + if not buf: |
1642 | + break |
1643 | + hasher.update(buf) |
1644 | + return hasher.hexdigest() |
1645 | + |
1646 | + |
1647 | +def _extract_sha512(file_path): |
1648 | + blocksize = 2**20 |
1649 | + with open(file_path, 'rb') as snap_file: |
1650 | + hasher = hashlib.sha512() |
1651 | + while True: |
1652 | + buf = snap_file.read(blocksize) |
1653 | + if not buf: |
1654 | + break |
1655 | + hasher.update(buf) |
1656 | + return hasher.hexdigest() |
1657 | + |
1658 | + |
1659 | +def _generate_new_snap_id(): |
1660 | + """Generate a random ID to identify a snap entity. |
1661 | + |
1662 | + Snap ids are a sequence of 32 random characters taken out of an alphabet |
1663 | + of 62 characters (uppercase letters + lowercase letters + numbers), for |
1664 | + a total of about 190 bits of space. These unique identifiers are generated
1665 | + centrally or delegated to known parties. |
1666 | + |
1667 | + This function does not check for duplicates. |
1668 | + |
1669 | + """ |
1670 | + SNAP_ID_ALPHABET = string.ascii_letters + string.digits |
1671 | + return ''.join( |
1672 | + random.choice(SNAP_ID_ALPHABET) for _ in range(32)) |
1673 | + |
1674 | + |
1675 | +def _create_snap_blob(snap_name, description, summary): |
1676 | + """Generate the 'blob', as it's exported from SCA. |
1677 | + |
1678 | + Many of the fields in the SCA data export aren't used in any part of the |
1679 | + snapv2 ecosystem, and have been omitted here. This script doesn't know |
1680 | + about some of the fields, so dummy data has been inserted. |
1681 | + """ |
1682 | + # TODO: Fill in 'origin', 'developer_id', 'developer_name' and 'summary' |
1683 | + # somehow |
1684 | + return { |
1685 | + "origin": "developername", |
1686 | + "last_updated": None, |
1687 | + "package_name": snap_name, |
1688 | + "screenshot_url": None, |
1689 | + "developer_id": "46MtBuBZaWy3g8picgdg6YkrCQo84J46", |
1690 | + "ratings_average": 0.0, |
1691 | + "title": "snap title here", |
1692 | + "support_url": "", |
1693 | + "icon_url": None, |
1694 | + "developer_name": "developername", |
1695 | + "screenshot_urls": [], |
1696 | + "description": description, |
1697 | + "price": 0.0, |
1698 | + "translations": {}, |
1699 | + "prices": {}, |
1700 | + "publisher": "some publisher", |
1701 | + "summary": summary, |
1702 | + } |
1703 | + |
1704 | + |
1705 | +def _store_snap(snap_path, snap_name, next_revision): |
1706 | + """Store a snap in the storage directory. |
1707 | + |
1708 | + Returns the path that file was stored in, relative to the storage root. |
1709 | + """ |
1710 | + storage_dir = get_storage_root() |
1711 | + target_filename = '%s_%d.snap' % (snap_name, next_revision) |
1712 | + target_path = os.path.join(storage_dir, target_filename) |
1713 | + shutil.copy(snap_path, target_path) |
1714 | + return '/' + os.path.relpath(target_path, storage_dir) |
1715 | diff --git a/snapstore_client/tests/__init__.py b/snapstore_client/tests/__init__.py |
1716 | new file mode 100644 |
1717 | index 0000000..1c2c9eb |
1718 | --- /dev/null |
1719 | +++ b/snapstore_client/tests/__init__.py |
1720 | @@ -0,0 +1,14 @@ |
1721 | +# Copyright 2017 Canonical Ltd. |
1722 | + |
1723 | +import os |
1724 | +from unittest import TestLoader |
1725 | + |
1726 | +import snapstore_client |
1727 | + |
1728 | + |
1729 | +def test_suite(): |
1730 | + loader = TestLoader() |
1731 | + start_dir = os.path.split(snapstore_client.__file__)[0] |
1732 | + top_dir = os.path.join(start_dir, os.path.pardir) |
1733 | + suite = loader.discover(start_dir, top_level_dir=top_dir) |
1734 | + return suite |
1735 | diff --git a/snapstore_client/tests/test_cli.py b/snapstore_client/tests/test_cli.py |
1736 | new file mode 100644 |
1737 | index 0000000..001159d |
1738 | --- /dev/null |
1739 | +++ b/snapstore_client/tests/test_cli.py |
1740 | @@ -0,0 +1,64 @@ |
1741 | +# Copyright 2017 Canonical Ltd. |
1742 | + |
1743 | +import logging |
1744 | + |
1745 | +import fixtures |
1746 | +from testtools import TestCase |
1747 | + |
1748 | +from snapstore_client import cli |
1749 | + |
1750 | + |
1751 | +class ConfigureLoggingTests(TestCase): |
1752 | + |
1753 | + def setUp(self): |
1754 | + super().setUp() |
1755 | + self.logger = logging.getLogger(__name__) |
1756 | + self.addCleanup( |
1757 | + self._restoreLogger, |
1758 | + self.logger, self.logger.level, list(self.logger.handlers)) |
1759 | + self.stdout = self.useFixture(fixtures.StringStream('stdout')).stream |
1760 | + self.stdout.fileno = lambda: 1 |
1761 | + self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) |
1762 | + self.stderr = self.useFixture(fixtures.StringStream('stderr')).stream |
1763 | + self.useFixture(fixtures.MonkeyPatch('sys.stderr', self.stderr)) |
1764 | + |
1765 | + @staticmethod |
1766 | + def _restoreLogger(logger, level, handlers): |
1767 | + logger.setLevel(logger.level) |
1768 | + for handler in list(logger.handlers): |
1769 | + logger.removeHandler(handler) |
1770 | + for handler in handlers: |
1771 | + logger.addHandler(handler) |
1772 | + |
1773 | + def test_log_levels(self): |
1774 | + self.useFixture(fixtures.MonkeyPatch('os.isatty', lambda fd: True)) |
1775 | + cli.configure_logging(__name__) |
1776 | + self.assertEqual(logging.INFO, self.logger.level) |
1777 | + self.logger.debug('Debug') |
1778 | + self.logger.info('Info') |
1779 | + self.logger.warning('Warning: %s', 'smoke') |
1780 | + self.logger.error('Error: %s', 'fire') |
1781 | + self.stdout.seek(0) |
1782 | + self.assertEqual('Info\nWarning: smoke\n', self.stdout.read()) |
1783 | + self.stderr.seek(0) |
1784 | + self.assertEqual('\033[0;31mError: fire\033[0m\n', self.stderr.read()) |
1785 | + |
1786 | + def test_requests_log_level_default(self): |
1787 | + cli.configure_logging(__name__) |
1788 | + self.assertEqual(logging.WARNING, logging.getLogger('requests').level) |
1789 | + |
1790 | + def test_requests_log_level_debug(self): |
1791 | + cli.configure_logging(__name__, logging.DEBUG) |
1792 | + self.assertEqual(logging.DEBUG, logging.getLogger('requests').level) |
1793 | + |
1794 | + def test_requests_log_level_error(self): |
1795 | + cli.configure_logging(__name__, logging.ERROR) |
1796 | + self.assertEqual(logging.ERROR, logging.getLogger('requests').level) |
1797 | + |
1798 | + def test_no_tty(self): |
1799 | + self.useFixture(fixtures.MonkeyPatch('os.isatty', lambda fd: False)) |
1800 | + self.useFixture(fixtures.EnvironmentVariable('TERM', 'xterm')) |
1801 | + cli.configure_logging(__name__) |
1802 | + self.logger.error('Error: %s', 'fire') |
1803 | + self.stderr.seek(0) |
1804 | + self.assertEqual('Error: fire\n', self.stderr.read()) |
1805 | diff --git a/snapstore_client/tests/test_config.py b/snapstore_client/tests/test_config.py |
1806 | new file mode 100644 |
1807 | index 0000000..4e07079 |
1808 | --- /dev/null |
1809 | +++ b/snapstore_client/tests/test_config.py |
1810 | @@ -0,0 +1,72 @@ |
1811 | +# Copyright 2017 Canonical Ltd. |
1812 | + |
1813 | +from testtools import TestCase |
1814 | + |
1815 | +from snapstore_client.config import ConfigProvider |
1816 | + |
1817 | + |
1818 | +class ConfigProviderTestCase(TestCase): |
1819 | + |
1820 | + def test_override_works_for_new_section_and_option(self): |
1821 | + default = {} |
1822 | + override = { |
1823 | + 'new_section': { |
1824 | + 'new_option': 'new_value' |
1825 | + } |
1826 | + } |
1827 | + expected = override.copy() |
1828 | + |
1829 | + provider = ConfigProvider(default) |
1830 | + provider.override_for_test(override) |
1831 | + |
1832 | + self.assertEqual(expected, provider()) |
1833 | + |
1834 | + def test_override_works_when_adding_new_option_to_existing_section(self): |
1835 | + default = { |
1836 | + 'section': { |
1837 | + 'option': 'value' |
1838 | + } |
1839 | + } |
1840 | + override = { |
1841 | + 'section': { |
1842 | + 'new_option': 'new_value' |
1843 | + } |
1844 | + } |
1845 | + expected = { |
1846 | + 'section': { |
1847 | + 'option': 'value', |
1848 | + 'new_option': 'new_value' |
1849 | + } |
1850 | + } |
1851 | + |
1852 | + provider = ConfigProvider(default) |
1853 | + provider.override_for_test(override) |
1854 | + |
1855 | + self.assertEqual(expected, provider()) |
1856 | + |
1857 | + def test_override_works_when_changing_existing_option(self): |
1858 | + default = { |
1859 | + 'section': { |
1860 | + 'option': 'value' |
1861 | + } |
1862 | + } |
1863 | + override = { |
1864 | + 'section': { |
1865 | + 'option': 'new_value' |
1866 | + } |
1867 | + } |
1868 | + expected = { |
1869 | + 'section': { |
1870 | + 'option': 'new_value', |
1871 | + } |
1872 | + } |
1873 | + |
1874 | + provider = ConfigProvider(default) |
1875 | + provider.override_for_test(override) |
1876 | + |
1877 | + self.assertEqual(expected, provider()) |
1878 | + |
1879 | + |
1880 | +def test_suite(): |
1881 | + from unittest import TestLoader |
1882 | + return TestLoader().loadTestsFromName(__name__) |
1883 | diff --git a/snapstore_client/webservices.py b/snapstore_client/webservices.py |
1884 | new file mode 100644 |
1885 | index 0000000..dd42182 |
1886 | --- /dev/null |
1887 | +++ b/snapstore_client/webservices.py |
1888 | @@ -0,0 +1,278 @@ |
1889 | +# Copyright 2017 Canonical Ltd. |
1890 | + |
1891 | +import base64 |
1892 | +import binascii |
1893 | +import datetime |
1894 | +import json |
1895 | +import logging |
1896 | +import os.path |
1897 | +import urllib.parse |
1898 | + |
1899 | +import requests |
1900 | + |
1901 | +from snapstore_client import config |
1902 | + |
1903 | + |
1904 | +logger = logging.getLogger(__name__) |
1905 | + |
1906 | + |
1907 | +def get_snap_id_for_name(snap_name, series): |
1908 | + """Return the snap_name associated with this snap_name. |
1909 | + |
1910 | + Returns None if the snap_name has not been associated with a snap_id yet. |
1911 | + """ |
1912 | + ident_root = config.read_config()['services']['snapident'] |
1913 | + resp = requests.post( |
1914 | + urllib.parse.urljoin(ident_root, '/snaps/filter'), |
1915 | + json={ |
1916 | + 'filters': [{ |
1917 | + 'snap_name': snap_name, |
1918 | + 'series': series |
1919 | + }] |
1920 | + } |
1921 | + ) |
1922 | + snap_results = resp.json()['snaps'] |
1923 | + if not snap_results: |
1924 | + return None |
1925 | + assert len(snap_results) == 1 |
1926 | + return snap_results[0]['snap_id'] |
1927 | + |
1928 | + |
1929 | +def register_snap_name_and_blob(snap_id, snap_name, series, snap_blob, |
1930 | + private=False, authority=None, |
1931 | + publisher_id=None, published=True, |
1932 | + stores=['ubuntu'], country_whitelist=None, |
1933 | + country_blacklist=None): |
1934 | + ident_root = config.read_config()['services']['snapident'] |
1935 | + data = {'snap_id': snap_id, |
1936 | + 'private': private, |
1937 | + 'publisher_id': snap_blob['developer_id'], |
1938 | + 'snap_name': snap_name, |
1939 | + 'series': series, |
1940 | + 'blob': snap_blob, |
1941 | + 'authority': authority, |
1942 | + 'status': 'published' if published else 'unpublished', |
1943 | + 'stores': stores} |
1944 | + if country_whitelist is not None: |
1945 | + data['country_whitelist'] = country_whitelist |
1946 | + if country_blacklist is not None: |
1947 | + data['country_blacklist'] = country_blacklist |
1948 | + resp = requests.post( |
1949 | + urllib.parse.urljoin(ident_root, '/snaps/update'), |
1950 | + json={'snaps': [data]}) |
1951 | + if resp.status_code == 200: |
1952 | + logger.info( |
1953 | + "Snap '%s' (%s) registered for series '%s'.", |
1954 | + snap_name, snap_id, series |
1955 | + ) |
1956 | + return False |
1957 | + else: |
1958 | + _print_error_message('register snap', resp) |
1959 | + return True |
1960 | + |
1961 | + |
1962 | +def release_revision(snap_id, snap_name, series, channel, arches, |
1963 | + revision=None, redirect_channel=None, |
1964 | + permanent_redirect=None): |
1965 | + reqs = [] |
1966 | + for arch in arches: |
1967 | + d = { |
1968 | + 'snap_id': snap_id, |
1969 | + 'channel': channel, |
1970 | + 'architecture': arch, |
1971 | + 'series': series, |
1972 | + } |
1973 | + if revision is not None: |
1974 | + d['revision'] = revision |
1975 | + else: |
1976 | + d['redirect_channel'] = redirect_channel |
1977 | + d['permanent_redirect'] = permanent_redirect |
1978 | + reqs.append(d) |
1979 | + revs_root = config.read_config()['services']['snaprevs'] |
1980 | + resp = requests.post( |
1981 | + urllib.parse.urljoin(revs_root, '/channelmaps/update'), |
1982 | + json={ |
1983 | + 'developer_id': 'wgrant', |
1984 | + 'release_requests': reqs, |
1985 | + } |
1986 | + ) |
1987 | + if resp.status_code == 200: |
1988 | + logger.info( |
1989 | + "Snap '%s' (%s) revision %s released to series=%s, " |
1990 | + "channel=%s, arches=%s", |
1991 | + snap_name, snap_id, revision, series, channel, |
1992 | + ','.join(arches) |
1993 | + ) |
1994 | + else: |
1995 | + _print_error_message('release revision', resp) |
1996 | + |
1997 | + |
1998 | +def get_latest_revision_for_snap_id(snap_id): |
1999 | + """Return the latest revision for the given snap_id. |
2000 | + |
2001 | + If no revisions have been published yet, this function returns None. |
2002 | + """ |
2003 | + snaprevs_root = config.read_config()['services']['snaprevs'] |
2004 | + resp = requests.post( |
2005 | + urllib.parse.urljoin(snaprevs_root, '/revisions/filter'), |
2006 | + json={'filters': [{'snap_id': snap_id}]} |
2007 | + ) |
2008 | + revisions = [s['revision'] for s in resp.json()['revisions']] |
2009 | + return max(revisions) if revisions else None |
2010 | + |
2011 | + |
2012 | +def create_revision(snap_id, revision, architectures, binary_path, filesize, |
2013 | + sha512, sha3_384, version, confinement, snap_yaml): |
2014 | + """Create a snap revision with snaprevs. |
2015 | + |
2016 | + Returns True on succcess, False otherwise. |
2017 | + """ |
2018 | + snaprevs_root = config.read_config()['services']['snaprevs'] |
2019 | + # TODO: I suspect this is supposed to be UTC time, not local time? |
2020 | + created_at = datetime.datetime.now().isoformat() |
2021 | + # TODO: figure out what we're doing with developer ids. |
2022 | + created_by = 'TODO' |
2023 | + payload = { |
2024 | + 'snap_id': snap_id, |
2025 | + 'revision': revision, |
2026 | + 'created_at': created_at, |
2027 | + 'created_by': created_by, |
2028 | + 'architectures': architectures, |
2029 | + 'binary_path': binary_path, |
2030 | + 'binary_filesize': filesize, |
2031 | + 'binary_sha512': sha512, |
2032 | + 'binary_sha3_384': sha3_384, |
2033 | + 'version': version, |
2034 | + 'confinement': confinement, |
2035 | + 'snap_yaml': snap_yaml or '', |
2036 | + 'epoch': 0, |
2037 | + 'type': 'app', |
2038 | + } |
2039 | + resp = requests.post( |
2040 | + urllib.parse.urljoin(snaprevs_root, '/revisions/create'), |
2041 | + json=[payload] |
2042 | + ) |
2043 | + if resp.status_code != 201: |
2044 | + _print_error_message('create revision', resp) |
2045 | + return False |
2046 | + |
2047 | + logger.info("Revision %s of %s created.", revision, snap_id) |
2048 | + return True |
2049 | + |
2050 | + |
2051 | +def create_snapsections(payload): |
2052 | + logger.info('Updating sections and snapsections...') |
2053 | + |
2054 | + snapfind_root = config.read_config()['services']['snapfind'] |
2055 | + snapfind_snapsection_endpoint = urllib.parse.urljoin( |
2056 | + snapfind_root, '/sections/snaps') |
2057 | + |
2058 | + response = requests.post(snapfind_snapsection_endpoint, json=payload) |
2059 | + if response.ok: |
2060 | + logger.info('Done.') |
2061 | + else: |
2062 | + logger.error(response.text) |
2063 | + |
2064 | + |
2065 | +def get_assertion(root, type_, key): |
2066 | + resp = requests.get( |
2067 | + urllib.parse.urljoin(root, os.path.join('assertions', type_, *key)), |
2068 | + headers={'Accept': 'application/x.ubuntu.assertion'}) |
2069 | + if resp.status_code != 200: |
2070 | + return None |
2071 | + return resp.content |
2072 | + |
2073 | + |
2074 | +def create_or_update_assertions( |
2075 | + snap_id, snap_name, series, revision, binary_sha3_384, |
2076 | + binary_filesize): |
2077 | + assertions_root = config.read_config()['services']['assertions'] |
2078 | + authority = config.read_config()['assertions']['authority'] |
2079 | + timestamp = datetime.datetime.now().isoformat() + 'Z' |
2080 | + |
2081 | + if get_assertion( |
2082 | + assertions_root, 'snap-declaration', (series, snap_id)) is None: |
2083 | + logger.info("Creating snap-declaration ...") |
2084 | + assertion_headers = { |
2085 | + 'type': 'snap-declaration', |
2086 | + 'revision': '0', |
2087 | + 'authority-id': authority, |
2088 | + 'publisher-id': authority, |
2089 | + 'series': series, |
2090 | + 'snap-id': snap_id, |
2091 | + 'snap-name': snap_name, |
2092 | + 'timestamp': timestamp, |
2093 | + } |
2094 | + assertion = sign_assertion(assertion_headers) |
2095 | + save_assertion(assertion) |
2096 | + |
2097 | + logger.info("Creating snap-revision ...") |
2098 | + snap_sha3_384 = base64.urlsafe_b64encode( |
2099 | + binascii.a2b_hex(binary_sha3_384)).decode().rstrip("=") |
2100 | + snap_size = str(binary_filesize) |
2101 | + snap_revision = str(revision) |
2102 | + assertion_headers = { |
2103 | + 'type': 'snap-revision', |
2104 | + 'authority-id': authority, |
2105 | + 'developer-id': authority, |
2106 | + 'snap-sha3-384': snap_sha3_384, |
2107 | + 'snap-id': snap_id, |
2108 | + 'snap-size': snap_size, |
2109 | + 'snap-revision': snap_revision, |
2110 | + 'timestamp': timestamp, |
2111 | + } |
2112 | + assertion = sign_assertion(assertion_headers) |
2113 | + save_assertion(assertion) |
2114 | + |
2115 | + |
2116 | +def save_assertion(assertion): |
2117 | + assertions_root = config.read_config()['services']['assertions'] |
2118 | + request_headers = { |
2119 | + 'Content-Type': 'application/x.ubuntu.assertion', |
2120 | + } |
2121 | + resp = requests.post( |
2122 | + urllib.parse.urljoin(assertions_root, 'assertions'), |
2123 | + data=assertion, headers=request_headers) |
2124 | + |
2125 | + if resp.status_code != 201: |
2126 | + _print_error_message('save assertion', resp) |
2127 | + resp.raise_for_status() |
2128 | + |
2129 | + |
2130 | +def sign_assertion(assertion_headers): |
2131 | + assertions_root = config.read_config()['services']['assertions'] |
2132 | + signing_key_id = config.read_config()['assertions']['signing_key_id'] |
2133 | + request_headers = { |
2134 | + 'Content-Type': 'application/json', |
2135 | + } |
2136 | + data = { |
2137 | + 'key-id': signing_key_id, |
2138 | + 'headers': assertion_headers, |
2139 | + } |
2140 | + resp = requests.post( |
2141 | + urllib.parse.urljoin(assertions_root, 'sign'), |
2142 | + headers=request_headers, json=data) |
2143 | + |
2144 | + if resp.status_code != 200: |
2145 | + _print_error_message('sign assertion', resp) |
2146 | + resp.raise_for_status() |
2147 | + |
2148 | + return resp.text |
2149 | + |
2150 | + |
2151 | +def _print_error_message(action, response): |
2152 | + """Print failure messages from other services in a standard way.""" |
2153 | + logger.error("Failed to %s:", action) |
2154 | + if response.status_code == 500: |
2155 | + logger.error("Server sent 500 response.") |
2156 | + else: |
2157 | + try: |
2158 | + json_document = response.json() |
2159 | + for error in json_document['error_list']: |
2160 | + logger.error(error['message']) |
2161 | + except json.JSONDecodeError: |
2162 | + logger.error( |
2163 | + "Server sent non-json response, printed verbatim below:") |
2164 | + logger.error('=' * 20) |
2165 | + logger.error(response.content) |
2166 | + logger.error('=' * 20) |
LGTM