Merge lp:~ltrager/maas-images/read_stream_data into lp:maas-images

Proposed by Lee Trager
Status: Merged
Merged at revision: 238
Proposed branch: lp:~ltrager/maas-images/read_stream_data
Merge into: lp:maas-images
Diff against target: 118 lines (+65/-15)
1 file modified
meph2/commands/meph2_util.py (+65/-15)
To merge this branch: bzr merge lp:~ltrager/maas-images/read_stream_data
Reviewer Review Type Date Requested Status
maintainers of maas images Pending
Review via email: mp+272195@code.launchpad.net

Commit message

Parse existing stream data to prevent images from being regenerated

Description of the change

Parse existing simplestream data during the import and merge process. During the import process this allows us to prevent existing images from being regenerated. During the merge process this allows us to verify that existing images have the same SHA256 sum on both the source and target.

Currently this only supports reading simplestream data from the local filesystem.

To post a comment you must log in.
239. By Lee Trager

Add sys.exit when an error is detected

240. By Lee Trager

If the image exists during merge don't copy it

241. By Lee Trager

Output error to stderr and return 1

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'meph2/commands/meph2_util.py'
--- meph2/commands/meph2_util.py 2015-09-11 18:31:22 +0000
+++ meph2/commands/meph2_util.py 2015-09-24 17:14:55 +0000
@@ -14,10 +14,12 @@
14from meph2 import util14from meph2 import util
15from meph2.url_helper import geturl_text15from meph2.url_helper import geturl_text
1616
17from simplestreams import filters17from simplestreams import (
18from simplestreams import mirrors18 contentsource,
19from simplestreams import util as sutil19 filters,
20from simplestreams import objectstores20 mirrors,
21 util as sutil,
22 objectstores)
2123
22DEF_KEYRING = "/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg"24DEF_KEYRING = "/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg"
2325
@@ -362,6 +364,25 @@
362 return sha256.hexdigest()364 return sha256.hexdigest()
363365
364366
def load_product_streams(src):
    """Return the relative paths of the product streams listed in the
    simplestreams index found under *src*.

    src: root of a simplestreams mirror on the local filesystem.
    Returns an empty list when src has no streams index yet (e.g. a
    target directory that has not been populated).
    """
    index_path = os.path.join(src, STREAMS_D, "index.json")
    if not os.path.exists(index_path):
        return []
    with contentsource.UrlContentSource(index_path) as source:
        index = sutil.load_content(source.read())
        # Each index entry records the relative path of its product stream.
        return [entry['path'] for entry in index['index'].values()]
374
375
def load_products(path, product_streams):
    """Merge the 'products' mappings of all the given product streams.

    path: root of the simplestreams data on the local filesystem.
    product_streams: iterable of stream paths relative to *path*, as
        returned by load_product_streams().
    Returns a single dict mapping product id to product metadata.
    """
    merged = {}
    for stream in product_streams:
        stream_path = os.path.join(path, stream)
        with contentsource.UrlContentSource(stream_path) as source:
            listing = sutil.load_content(source.read())
            merged.update(listing['products'])
    return merged
384
385
365def main_insert(args):386def main_insert(args):
366 (src_url, src_path) = sutil.path_from_mirror_url(args.src, None)387 (src_url, src_path) = sutil.path_from_mirror_url(args.src, None)
367 filter_list = filters.get_filters(args.filters)388 filter_list = filters.get_filters(args.filters)
@@ -409,6 +430,9 @@
409 print("Error: Unable to find config file %s" % args.import_cfg)430 print("Error: Unable to find config file %s" % args.import_cfg)
410 os.exit(1)431 os.exit(1)
411432
433 target_product_streams = load_product_streams(args.target)
434 target_products = load_products(args.target, target_product_streams)
435
412 with open(cfg_path) as fp:436 with open(cfg_path) as fp:
413 cfgdata = yaml.load(fp)437 cfgdata = yaml.load(fp)
414438
@@ -427,6 +451,13 @@
427451
428 product_id = cfgdata['product_id'].format(452 product_id = cfgdata['product_id'].format(
429 version=release_info['version'], arch=arch)453 version=release_info['version'], arch=arch)
454
455 # If the product already exists don't regenerate the image, just copy
456 # its metadata
457 if product_id in target_products:
458 product_tree['products'][product_id] = target_products[product_id]
459 continue
460
430 product_tree['products'][product_id] = {461 product_tree['products'][product_id] = {
431 'subarches': 'generic',462 'subarches': 'generic',
432 'label': 'release',463 'label': 'release',
@@ -479,17 +510,36 @@
479510
480511
481def main_merge(args):512def main_merge(args):
482 for (dir, subdirs, files) in os.walk(args.src):513 src_product_streams = load_product_streams(args.src)
483 for file in files:514 target_product_streams = load_product_streams(args.target)
484 if file.endswith(".sjson"):515 src_products = load_products(args.src, src_product_streams)
485 continue516 target_products = load_products(args.target, target_product_streams)
486 src_path = os.path.join(dir, file)517
487 dest_path = os.path.join(518 for (product_name, product_info) in src_products.items():
488 args.target, '/'.join(src_path.split('/')[1:]))519 for (version, version_info) in product_info['versions'].items():
489 dest_dir = os.path.dirname(dest_path)520 for (item, item_info) in version_info['items'].items():
490 if not os.path.exists(dest_dir):521 if product_name in target_products:
491 os.makedirs(dest_dir)522 target_product = target_products[product_name]
492 shutil.copy2(src_path, dest_path)523 target_version = target_product['versions'][version]
524 target_item = target_version['items'][item]
525 if item_info['sha256'] != target_item['sha256']:
526 print(
527 "Error: SHA256 of %s and %s do not match!" %
528 (item['path'], target_item['path']),
529 file=sys.stderr)
530 sys.exit(1)
531 else:
532 continue
533 file_src = os.path.join(args.src, item_info['path'])
534 file_target = os.path.join(args.target, item_info['path'])
535 target_dir = os.path.dirname(file_target)
536 if not os.path.exists(target_dir):
537 os.makedirs(target_dir)
538 shutil.copy2(file_src, file_target)
539 for product_stream in src_product_streams:
540 shutil.copy2(
541 os.path.join(args.src, product_stream),
542 os.path.join(args.target, product_stream))
493543
494 md_d = os.path.join(args.target, 'streams', 'v1')544 md_d = os.path.join(args.target, 'streams', 'v1')
495 if not os.path.exists(md_d):545 if not os.path.exists(md_d):

Subscribers

People subscribed via source and target branches