Merge lp:~smoser/maas-images/trunk.meph-import-out into lp:maas-images

Proposed by Scott Moser
Status: Merged
Merged at revision: 333
Proposed branch: lp:~smoser/maas-images/trunk.meph-import-out
Merge into: lp:maas-images
Diff against target: 1036 lines (+504/-421)
5 files modified
bin/meph2-import (+27/-0)
meph2/commands/flags.py (+107/-0)
meph2/commands/meph2_util.py (+25/-421)
meph2/commands/mimport.py (+306/-0)
meph2/util.py (+39/-0)
To merge this branch: bzr merge lp:~smoser/maas-images/trunk.meph-import-out
Reviewer: Lee Trager (community)
Review status: Approve
Review via email: mp+306776@code.launchpad.net
334. By Scott Moser

fix usage of contentsource

Revision history for this message
Lee Trager (ltrager) wrote:

Looks good, things were just moved around a bit. One thing I noticed when I checked this out is that meph2-import doesn't have the executable bit set.

review: Approve
335. By Scott Moser

add -x to bin/meph2-import

Revision history for this message
Scott Moser (smoser) wrote:

Fixed the meph2-import executable bit; that was fallout of applying the changes with just "bzr diff | patch -p0" and such.

Preview Diff

1=== added file 'bin/meph2-import'
2--- bin/meph2-import 1970-01-01 00:00:00 +0000
3+++ bin/meph2-import 2016-09-26 19:23:59 +0000
4@@ -0,0 +1,27 @@
5+#!/usr/bin/env python3
6+
7+import os
8+import sys
9+
10+
11+def call_entry_point(name):
12+ (istr, dot, ent) = name.rpartition('.')
13+ try:
14+ __import__(istr)
15+ except ImportError:
16+ # if that import failed, retry with dirname(__file__)/.. on sys.path
17+ # to support ./bin/program with modules in the top of the checkout
18+ _tdir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
19+ sys.path.insert(0, _tdir)
20+ try:
21+ __import__(istr)
22+ except ImportError as e:
23+ sys.stderr.write("Unable to find %s: %s\n" % (name, e))
24+ sys.exit(2)
25+
26+ sys.exit(getattr(sys.modules[istr], ent)())
27+
28+if __name__ == '__main__':
29+ call_entry_point("meph2.commands.mimport.main")
30+
31+# vi: ts=4 expandtab syntax=python
32
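The new bin/meph2-import wrapper resolves a dotted entry-point string to a callable, falling back to the checkout's parent directory on sys.path so ./bin/meph2-import runs without installation. A minimal sketch of the same resolution using importlib (names here are illustrative, not part of the branch):

```python
import importlib

def resolve(name):
    # "meph2.commands.mimport.main" -> module "meph2.commands.mimport", attr "main"
    modname, _, attr = name.rpartition('.')
    module = importlib.import_module(modname)
    return getattr(module, attr)

# e.g.: sys.exit(resolve("meph2.commands.mimport.main")())
```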
33=== added file 'meph2/commands/flags.py'
34--- meph2/commands/flags.py 1970-01-01 00:00:00 +0000
35+++ meph2/commands/flags.py 2016-09-26 19:23:59 +0000
36@@ -0,0 +1,107 @@
37+DEF_KEYRING = "/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg"
38+
39+LABELS = ('alpha1', 'alpha2', 'alpha3',
40+ 'beta1', 'beta2', 'beta3',
41+ 'rc', 'release')
42+
43+COMMON_ARGS = []
44+COMMON_FLAGS = {
45+ 'dry-run': (('-n', '--dry-run'),
46+ {'help': 'only report what would be done',
47+ 'action': 'store_true', 'default': False}),
48+ 'no-sign': (('-u', '--no-sign'),
49+ {'help': 'do not re-sign files',
50+ 'action': 'store_true', 'default': False}),
51+ 'max': (('--max',),
52+ {'help': 'keep at most N versions per product',
53+ 'default': 2, 'type': int}),
54+ 'orphan-data': (('orphan_data',), {'help': 'the orphan data file'}),
55+ 'src': (('src',), {'help': 'the source streams directory'}),
56+ 'target': (('target',), {'help': 'the target streams directory'}),
57+ 'data_d': (('data_d',),
58+ {'help': ('the base data directory '
59+ '("path"s are relative to this)')}),
60+ 'keyring': (('--keyring',),
61+ {'help': 'gpg keyring to check sjson',
62+ 'default': DEF_KEYRING}),
63+}
64+
65+SUBCOMMANDS = {
66+ 'insert': {
67+ 'help': 'add new items from one stream into another',
68+ 'opts': [
69+ COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
70+ COMMON_FLAGS['keyring'],
71+ COMMON_FLAGS['src'], COMMON_FLAGS['target'],
72+ ('filters', {'nargs': '*', 'default': []}),
73+ ]
74+ },
75+ 'import': {
76+ 'help': 'import an image from the specified config into a stream',
77+ 'opts': [
78+ COMMON_FLAGS['no-sign'], COMMON_FLAGS['keyring'],
79+ ('import_cfg', {'help':
80+ 'The config file for the image to import.'}),
81+ COMMON_FLAGS['target'],
82+ ]
83+ },
84+ 'merge': {
85+ 'help': 'merge two product streams together',
86+ 'opts': [
87+ COMMON_FLAGS['no-sign'],
88+ COMMON_FLAGS['src'], COMMON_FLAGS['target'],
89+ ]
90+ },
91+ 'promote': {
92+ 'help': 'promote a product/version from daily to release',
93+ 'opts': [
94+ COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
95+ COMMON_FLAGS['keyring'],
96+ (('-l', '--label'),
97+ {'default': 'release', 'choices': LABELS,
98+ 'help': 'the label to use'}),
99+ (('--skip-file-copy',),
100+ {'help': 'do not copy files, only metadata [TEST_ONLY]',
101+ 'action': 'store_true', 'default': False}),
102+ COMMON_FLAGS['src'], COMMON_FLAGS['target'],
103+ ('version', {'help': 'the version_id to promote.'}),
104+ ('filters', {'nargs': '+', 'default': []}),
105+ ]
106+ },
107+ 'clean-md': {
108+ 'help': 'clean streams metadata only to keep "max" items',
109+ 'opts': [
110+ COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
111+ COMMON_FLAGS['keyring'],
112+ ('max', {'type': int}), ('target', {}),
113+ ('filters', {'nargs': '*', 'default': []}),
114+ ]
115+ },
116+ 'find-orphans': {
117+ 'help': 'find files in data_d not referenced in a "path"',
118+ 'opts': [
119+ COMMON_FLAGS['orphan-data'], COMMON_FLAGS['data_d'],
120+ COMMON_FLAGS['keyring'],
121+ ('streams_dirs', {'nargs': '*', 'default': []}),
122+ ],
123+ },
124+ 'reap-orphans': {
125+ 'help': 'reap orphans listed in orphan-data from data_d',
126+ 'opts': [
127+ COMMON_FLAGS['orphan-data'], COMMON_FLAGS['dry-run'],
128+ COMMON_FLAGS['data_d'],
129+ ('--older', {'default': '3d',
130+ 'help': ('only remove files orphaned longer than '
131+ 'this. If no unit is given, the default is days.')
132+ }),
133+ ],
134+ },
135+ 'sign': {
136+ 'help': 'Regenerate index.json and sign the stream',
137+ 'opts': [
138+ COMMON_FLAGS['data_d'], COMMON_FLAGS['no-sign'],
139+ ],
140+ },
141+}
142+
143+# vi: ts=4 expandtab syntax=python
144
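flags.py turns the CLI definition into data: each subcommand lists its options as (args, kwargs) pairs that can be handed straight to argparse, so meph2-util and meph2-import share one source of truth. A sketch of how such a table can drive parser construction (assumed wiring; the real meph2_util.main() may differ in details):

```python
import argparse

from meph2.commands.flags import SUBCOMMANDS

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='subcmd')
for name, subc in sorted(SUBCOMMANDS.items()):
    sub = subparsers.add_parser(name, help=subc['help'])
    for args, kwargs in subc['opts']:
        if isinstance(args, str):   # bare positional like 'target'
            args = [args]
        sub.add_argument(*args, **kwargs)
```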
145=== modified file 'meph2/commands/meph2_util.py'
146--- meph2/commands/meph2_util.py 2016-09-23 16:19:38 +0000
147+++ meph2/commands/meph2_util.py 2016-09-26 19:23:59 +0000
148@@ -1,139 +1,21 @@
149 #!/usr/bin/python3
150
151 import argparse
152-from datetime import datetime
153 import copy
154 import os
155 from functools import partial
156-import hashlib
157-import re
158 import shutil
159 import sys
160-import subprocess
161-import yaml
162
163 from meph2 import util
164-from meph2.url_helper import geturl_text
165-from meph2.commands.dpkg import (
166- get_package,
167- extract_files_from_packages,
168-)
169+from meph2.commands.flags import COMMON_ARGS, SUBCOMMANDS
170
171 from simplestreams import (
172- contentsource,
173 filters,
174 mirrors,
175 util as sutil,
176 objectstores)
177
178-DEF_KEYRING = "/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg"
179-
180-STREAMS_D = "streams/v1/"
181-
182-LABELS = ('alpha1', 'alpha2', 'alpha3',
183- 'beta1', 'beta2', 'beta3',
184- 'rc', 'release')
185-
186-COMMON_ARGS = []
187-COMMON_FLAGS = {
188- 'dry-run': (('-n', '--dry-run'),
189- {'help': 'only report what would be done',
190- 'action': 'store_true', 'default': False}),
191- 'no-sign': (('-u', '--no-sign'),
192- {'help': 'do not re-sign files',
193- 'action': 'store_true', 'default': False}),
194- 'max': (('--max',),
195- {'help': 'keep at most N versions per product',
196- 'default': 2, 'type': int}),
197- 'orphan-data': (('orphan_data',), {'help': 'the orphan data file'}),
198- 'src': (('src',), {'help': 'the source streams directory'}),
199- 'target': (('target',), {'help': 'the target streams directory'}),
200- 'data_d': (('data_d',),
201- {'help': ('the base data directory'
202- '("path"s are relative to this)')}),
203- 'keyring': (('--keyring',),
204- {'help': 'gpg keyring to check sjson',
205- 'default': DEF_KEYRING}),
206-}
207-
208-SUBCOMMANDS = {
209- 'insert': {
210- 'help': 'add new items from one stream into another',
211- 'opts': [
212- COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
213- COMMON_FLAGS['keyring'],
214- COMMON_FLAGS['src'], COMMON_FLAGS['target'],
215- ('filters', {'nargs': '*', 'default': []}),
216- ]
217- },
218- 'import': {
219- 'help': 'import an image from the specified config into a stream',
220- 'opts': [
221- COMMON_FLAGS['no-sign'], COMMON_FLAGS['keyring'],
222- ('import_cfg', {'help':
223- 'The config file for the image to import.'}),
224- COMMON_FLAGS['target'],
225- ]
226- },
227- 'merge': {
228- 'help': 'merge two product streams together',
229- 'opts': [
230- COMMON_FLAGS['no-sign'],
231- COMMON_FLAGS['src'], COMMON_FLAGS['target'],
232- ]
233- },
234- 'promote': {
235- 'help': 'promote a product/version from daily to release',
236- 'opts': [
237- COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
238- COMMON_FLAGS['keyring'],
239- (('-l', '--label'),
240- {'default': 'release', 'choices': LABELS,
241- 'help': 'the label to use'}),
242- (('--skip-file-copy',),
243- {'help': 'do not copy files, only metadata [TEST_ONLY]',
244- 'action': 'store_true', 'default': False}),
245- COMMON_FLAGS['src'], COMMON_FLAGS['target'],
246- ('version', {'help': 'the version_id to promote.'}),
247- ('filters', {'nargs': '+', 'default': []}),
248- ]
249- },
250- 'clean-md': {
251- 'help': 'clean streams metadata only to keep "max" items',
252- 'opts': [
253- COMMON_FLAGS['dry-run'], COMMON_FLAGS['no-sign'],
254- COMMON_FLAGS['keyring'],
255- ('max', {'type': int}), ('target', {}),
256- ('filters', {'nargs': '*', 'default': []}),
257- ]
258- },
259- 'find-orphans': {
260- 'help': 'find files in data_d not referenced in a "path"',
261- 'opts': [
262- COMMON_FLAGS['orphan-data'], COMMON_FLAGS['data_d'],
263- COMMON_FLAGS['keyring'],
264- ('streams_dirs', {'nargs': '*', 'default': []}),
265- ],
266- },
267- 'reap-orphans': {
268- 'help': 'reap orphans listed in orphan-data from data_d',
269- 'opts': [
270- COMMON_FLAGS['orphan-data'], COMMON_FLAGS['dry-run'],
271- COMMON_FLAGS['data_d'],
272- ('--older', {'default': '3d',
273- 'help': ('only remove files orphaned longer than'
274- 'this. if no unit given, default is days.')
275- }),
276- ],
277- },
278- 'sign': {
279- 'help': 'Regenerate index.json and sign the stream',
280- 'opts': [
281- COMMON_FLAGS['data_d'], COMMON_FLAGS['no-sign'],
282- ],
283- },
284-}
285-
286
287 class BareMirrorWriter(mirrors.ObjectFilterMirror):
288 # this explicitly avoids reference counting and .data/ storage
289@@ -332,109 +214,6 @@
290 self.removed_versions.append((self.tcontent_id, pedigree,))
291
292
293-def get_sha256_meta_images(url):
294- """ Given a URL to a SHA256SUM file return a dictionary of filenames and
295- SHA256 checksums keyed off the file version found as a date string in
296- the filename. This is used in cases where simplestream data isn't
297- avalible.
298- """
299- ret = dict()
300- content = geturl_text(url)
301- # http://cloud.centos.org/centos/ contains images using two version
302- # strings. The first is only used on older images and uses the format
303- # YYYYMMDD_XX. The second is used on images generated monthly using the
304- # format YYMM. We know the second format is referencing the year and month
305- # by looking at the timestamp of each image.
306- prog = re.compile('([\d]{8}(_[\d]+))|(\d{4})')
307-
308- for i in content.split('\n'):
309- try:
310- sha256, img_name = i.split()
311- except ValueError:
312- continue
313- if (not img_name.endswith('qcow2.xz') and
314- not img_name.endswith('qcow2')):
315- continue
316- m = prog.search(img_name)
317- if m is None:
318- continue
319- img_version = m.group(0)
320-
321- # Turn the short version string into a long version string so that MAAS
322- # uses the latest version, not the longest
323- if len(img_version) == 4:
324- img_version = "20%s01_01" % img_version
325-
326- # Prefer compressed image over uncompressed
327- if (img_version in ret and
328- ret[img_version]['img_name'].endswith('qcow2.xz')):
329- continue
330- ret[img_version] = {
331- 'img_name': img_name,
332- 'sha256': sha256,
333- }
334- return ret
335-
336-
337-def import_qcow2(url, expected_sha256, out, curtin_files=None):
338- """ Call the maas-qcow2targz script to convert a qcow2 or qcow2.xz file at
339- a given URL or local path. Return the SHA256SUM of the outputted file.
340- """
341- # Assume maas-qcow2targz is in the path
342- qcow2targz_cmd = ["maas-qcow2targz", url, expected_sha256, out]
343- if curtin_files:
344- curtin_path = os.path.join(
345- os.path.dirname(__file__), "..", "..", "curtin")
346- qcow2targz_cmd.append(curtin_files.format(curtin_path=curtin_path))
347- proc = subprocess.Popen(qcow2targz_cmd)
348- proc.communicate()
349- if proc.wait() != 0:
350- raise subprocess.CalledProcessError(
351- cmd=qcow2targz_cmd, returncode=proc.returncode)
352-
353- sha256 = hashlib.sha256()
354- with open(out, 'rb') as fp:
355- while True:
356- chunk = fp.read(2**20)
357- if not chunk:
358- break
359- sha256.update(chunk)
360- return sha256.hexdigest()
361-
362-
363-def load_product_streams(src):
364- index_path = os.path.join(src, STREAMS_D, "index.json")
365- if not os.path.exists(index_path):
366- return []
367- with contentsource.UrlContentSource(index_path) as tcs:
368- index = sutil.load_content(tcs.read())
369- return [product['path'] for product in index['index'].values()]
370-
371-
372-def load_products(path, product_streams):
373- products = {}
374- for product_stream in product_streams:
375- product_stream_path = os.path.join(path, product_stream)
376- if os.path.exists(product_stream_path):
377- with contentsource.UrlContentSource(
378- product_stream_path) as tcs:
379- product_listing = sutil.load_content(tcs.read())
380- products.update(product_listing['products'])
381- return products
382-
383-
384-def gen_index_and_sign(data_d, sign=True):
385- md_d = os.path.join(data_d, "streams", "v1")
386- if not os.path.exists(md_d):
387- os.makedirs(md_d)
388- index = util.create_index(md_d, files=None)
389- with open(os.path.join(md_d, "index.json"), "wb") as fp:
390- fp.write(util.dump_data(index))
391-
392- if sign:
393- util.sign_streams_d(md_d)
394-
395-
396 def main_insert(args):
397 (src_url, src_path) = sutil.path_from_mirror_url(args.src, None)
398 filter_list = filters.get_filters(args.filters)
399@@ -461,205 +240,15 @@
400 tmirror = InsertBareMirrorWriter(config=mirror_config, objectstore=tstore)
401 tmirror.sync(smirror, src_path)
402
403- gen_index_and_sign(args.target, sign=not args.no_sign)
404+ util.gen_index_and_sign(args.target, sign=not args.no_sign)
405 return 0
406
407
408-def import_sha256(args, product_tree, cfgdata):
409- for (release, release_info) in cfgdata['versions'].items():
410- if 'arch' in release_info:
411- arch = release_info['arch']
412- else:
413- arch = cfgdata['arch']
414- if 'os' in release_info:
415- os_name = release_info['os']
416- else:
417- os_name = cfgdata['os']
418- if 'path_version' in release_info:
419- path_version = release_info['path_version']
420- else:
421- path_version = release_info['version']
422- product_id = cfgdata['product_id'].format(
423- version=release_info['version'], arch=arch)
424- url = cfgdata['sha256_meta_data_path'].format(version=path_version)
425- images = get_sha256_meta_images(url)
426- base_url = os.path.dirname(url)
427-
428- if product_tree['products'].get(product_id) is None:
429- print("Creating new product %s" % product_id)
430- product_tree['products'][product_id] = {
431- 'subarches': 'generic',
432- 'label': 'daily',
433- 'subarch': 'generic',
434- 'arch': arch,
435- 'os': os_name,
436- 'version': release_info['version'],
437- 'release': release,
438- 'versions': {},
439- }
440-
441- for (image, image_info) in images.items():
442- if (
443- product_id in product_tree['products'] and
444- image in product_tree['products'][product_id]['versions']):
445- print(
446- "Product %s at version %s exists, skipping" % (
447- product_id, image))
448- continue
449- print(
450- "Downloading and creating %s version %s" % (
451- (product_id, image)))
452- image_path = '/'.join([release, arch, image, 'root-tgz'])
453- real_image_path = os.path.join(
454- os.path.realpath(args.target), image_path)
455- sha256 = import_qcow2(
456- '/'.join([base_url, image_info['img_name']]),
457- image_info['sha256'], real_image_path,
458- release_info.get('curtin_files'))
459- product_tree['products'][product_id]['versions'][image] = {
460- 'items': {
461- 'root-image.gz': {
462- 'ftype': 'root-tgz',
463- 'sha256': sha256,
464- 'path': image_path,
465- 'size': os.path.getsize(real_image_path),
466- }
467- }
468- }
469-
470-
471-def import_bootloaders(args, product_tree, cfgdata):
472- for firmware_platform in cfgdata['bootloaders']:
473- product_id = cfgdata['product_id'].format(
474- os=firmware_platform['os'],
475- firmware_platform=firmware_platform['firmware-platform'],
476- arch=firmware_platform['arch'])
477- # Compile a list of the latest packages in the archive this bootloader
478- # pulls files from
479- src_packages = {}
480- for package in firmware_platform['packages']:
481- package_info = get_package(
482- firmware_platform['archive'], package,
483- firmware_platform['arch'], firmware_platform['release'])
484- # Some source packages include the package version in the source
485- # name. Only take the name, not the version.
486- src_package_name = package_info['Source'].split(' ')[0]
487- src_packages[src_package_name] = {
488- 'src_version': package_info['Version'],
489- 'src_release': firmware_platform['release'],
490- 'found': False,
491- }
492- # Check if the bootloader has been built from the latest version of
493- # the packages in the archive
494- if product_id in product_tree['products']:
495- versions = product_tree['products'][product_id]['versions']
496- for data in versions.values():
497- for item in data['items'].values():
498- src_package = src_packages.get(item['src_package'])
499- if (
500- src_package is not None and
501- src_package['src_version'] == item['src_version']
502- and
503- src_package['src_release'] == item['src_release']):
504- src_packages[item['src_package']]['found'] = True
505- bootloader_uptodate = True
506- for src_package in src_packages.values():
507- if not src_package['found']:
508- bootloader_uptodate = False
509- # Bootloader built from the latest packages already in stream
510- if bootloader_uptodate:
511- print(
512- "Product %s built from the latest package set, skipping"
513- % product_id)
514- continue
515- # Find an unused version
516- today = datetime.utcnow().strftime('%Y%m%d')
517- point = 0
518- while True:
519- version = "%s.%d" % (today, point)
520- products = product_tree['products']
521- if (
522- product_id not in products or
523- version not in products[product_id]['versions'].keys()):
524- break
525- point += 1
526- if product_tree['products'].get(product_id) is None:
527- print("Creating new product %s" % product_id)
528- product_tree['products'][product_id] = {
529- 'label': 'daily',
530- 'arch': firmware_platform['arch'],
531- 'arches': firmware_platform['arches'],
532- 'os': firmware_platform['os'],
533- 'bootloader-type': firmware_platform['firmware-platform'],
534- 'versions': {},
535- }
536- path = os.path.join(
537- 'bootloaders', firmware_platform['firmware-platform'],
538- firmware_platform['arch'], version)
539- dest = os.path.join(args.target, path)
540- os.makedirs(dest)
541- grub_format = firmware_platform.get('grub_format')
542- if grub_format is not None:
543- dest = os.path.join(dest, firmware_platform['grub_output'])
544- print(
545- "Downloading and creating %s version %s" % (
546- product_id, version))
547- items = extract_files_from_packages(
548- firmware_platform['archive'], firmware_platform['packages'],
549- firmware_platform['arch'], firmware_platform['files'],
550- firmware_platform['release'], args.target, path, grub_format,
551- firmware_platform.get('grub_config'),
552- firmware_platform.get('grub_output'))
553- product_tree['products'][product_id]['versions'][version] = {
554- 'items': items
555- }
556-
557-
558-def main_import(args):
559- cfg_path = os.path.join(
560- os.path.dirname(__file__), "..", "..", "conf", args.import_cfg)
561- if not os.path.exists(cfg_path):
562- if os.path.exists(args.import_cfg):
563- cfg_path = args.import_cfg
564- else:
565- print("Error: Unable to find config file %s" % args.import_cfg)
566- sys.exit(1)
567-
568- with open(cfg_path) as fp:
569- cfgdata = yaml.load(fp)
570-
571- target_product_stream = os.path.join(
572- 'streams', 'v1', cfgdata['content_id'] + '.json')
573-
574- product_tree = util.empty_iid_products(cfgdata['content_id'])
575- product_tree['products'] = load_products(
576- args.target, [target_product_stream])
577- product_tree['updated'] = sutil.timestamp()
578- product_tree['datatype'] = 'image-downloads'
579-
580- if cfgdata.get('sha256_meta_data_path', None) is not None:
581- import_sha256(args, product_tree, cfgdata)
582- elif cfgdata.get('bootloaders', None) is not None:
583- import_bootloaders(args, product_tree, cfgdata)
584- else:
585- sys.stderr.write('Unsupported import yaml!')
586- sys.exit(1)
587-
588- md_d = os.path.join(args.target, 'streams', 'v1')
589- if not os.path.exists(md_d):
590- os.makedirs(md_d)
591-
592- with open(os.path.join(args.target, target_product_stream), 'wb') as fp:
593- fp.write(util.dump_data(product_tree))
594-
595- gen_index_and_sign(args.target, not args.no_sign)
596-
597-
598 def main_merge(args):
599- src_product_streams = load_product_streams(args.src)
600- target_product_streams = load_product_streams(args.target)
601- src_products = load_products(args.src, src_product_streams)
602- target_products = load_products(args.target, target_product_streams)
603+ src_product_streams = util.load_product_streams(args.src)
604+ target_product_streams = util.load_product_streams(args.target)
605+ src_products = util.load_products(args.src, src_product_streams)
606+ target_products = util.load_products(args.target, target_product_streams)
607
608 for (product_name, product_info) in src_products.items():
609 for (version, version_info) in product_info['versions'].items():
610@@ -686,7 +275,7 @@
611 os.path.join(args.src, product_stream),
612 os.path.join(args.target, product_stream))
613
614- gen_index_and_sign(args.target, not args.no_sign)
615+ util.gen_index_and_sign(args.target, not args.no_sign)
616
617
618 def main_promote(args):
619@@ -719,7 +308,7 @@
620 label=args.label)
621 tmirror.sync(smirror, src_path)
622
623- gen_index_and_sign(args.target, not args.no_sign)
624+ util.gen_index_and_sign(args.target, not args.no_sign)
625 return 0
626
627
628@@ -747,7 +336,7 @@
629 tmirror = BareMirrorWriter(config=mirror_config, objectstore=tstore)
630 tmirror.sync(smirror, mirror_path)
631
632- gen_index_and_sign(mirror_url, not args.no_sign)
633+ util.gen_index_and_sign(mirror_url, not args.no_sign)
634 return 0
635
636
637@@ -810,10 +399,25 @@
638
639
640 def main_sign(args):
641- gen_index_and_sign(args.data_d)
642+ util.gen_index_and_sign(args.data_d)
643 return 0
644
645
646+def main_import(args):
647+ """meph2-util import wraps the preferred command 'meph2-import'.
648+
649+ 'meph2-util import' is left for backwards compatibility, but relies
650+ on modules not in the standard library in python3.2 (specifically lzma).
651+ meph2-util needs to run with only dependencies available in the
652+ Ubuntu 12.04 (precise) distro."""
653+
654+ sys.stderr.write(
655+ "=== WARNING: DEPRECATED ===\n" + main_import.__doc__ + "\n")
656+
657+ from meph2.commands import mimport
658+ return mimport.main_import(args)
659+
660+
661 def main():
662 parser = argparse.ArgumentParser()
663
664
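Note the deliberately deferred import in the main_import shim above: importing meph2.commands.mimport at module level would make every meph2-util subcommand depend on lzma, which python3.2 on precise lacks. A minimal sketch of the pattern (hypothetical names):

```python
def deprecated_import(args):
    # Deferred so that merely loading meph2_util never touches lzma;
    # the cost is confined to the deprecated 'import' subcommand.
    from meph2.commands import mimport
    return mimport.main_import(args)
```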
665=== added file 'meph2/commands/mimport.py'
666--- meph2/commands/mimport.py 1970-01-01 00:00:00 +0000
667+++ meph2/commands/mimport.py 2016-09-26 19:23:59 +0000
668@@ -0,0 +1,306 @@
669+#!/usr/bin/python3
670+
671+import argparse
672+from datetime import datetime
673+import hashlib
674+import os
675+import re
676+import subprocess
677+import sys
678+import yaml
679+
680+from meph2 import util
681+from meph2.commands.dpkg import (
682+ get_package,
683+ extract_files_from_packages,
684+)
685+
686+from meph2.commands.flags import COMMON_ARGS, SUBCOMMANDS
687+from meph2.url_helper import geturl_text
688+
689+
690+def import_sha256(args, product_tree, cfgdata):
691+ for (release, release_info) in cfgdata['versions'].items():
692+ if 'arch' in release_info:
693+ arch = release_info['arch']
694+ else:
695+ arch = cfgdata['arch']
696+ if 'os' in release_info:
697+ os_name = release_info['os']
698+ else:
699+ os_name = cfgdata['os']
700+ if 'path_version' in release_info:
701+ path_version = release_info['path_version']
702+ else:
703+ path_version = release_info['version']
704+ product_id = cfgdata['product_id'].format(
705+ version=release_info['version'], arch=arch)
706+ url = cfgdata['sha256_meta_data_path'].format(version=path_version)
707+ images = get_sha256_meta_images(url)
708+ base_url = os.path.dirname(url)
709+
710+ if product_tree['products'].get(product_id) is None:
711+ print("Creating new product %s" % product_id)
712+ product_tree['products'][product_id] = {
713+ 'subarches': 'generic',
714+ 'label': 'daily',
715+ 'subarch': 'generic',
716+ 'arch': arch,
717+ 'os': os_name,
718+ 'version': release_info['version'],
719+ 'release': release,
720+ 'versions': {},
721+ }
722+
723+ for (image, image_info) in images.items():
724+ if (
725+ product_id in product_tree['products'] and
726+ image in product_tree['products'][product_id]['versions']):
727+ print(
728+ "Product %s at version %s exists, skipping" % (
729+ product_id, image))
730+ continue
731+ print(
732+ "Downloading and creating %s version %s" % (
733+ (product_id, image)))
734+ image_path = '/'.join([release, arch, image, 'root-tgz'])
735+ real_image_path = os.path.join(
736+ os.path.realpath(args.target), image_path)
737+ sha256 = import_qcow2(
738+ '/'.join([base_url, image_info['img_name']]),
739+ image_info['sha256'], real_image_path,
740+ release_info.get('curtin_files'))
741+ product_tree['products'][product_id]['versions'][image] = {
742+ 'items': {
743+ 'root-image.gz': {
744+ 'ftype': 'root-tgz',
745+ 'sha256': sha256,
746+ 'path': image_path,
747+ 'size': os.path.getsize(real_image_path),
748+ }
749+ }
750+ }
751+
752+
753+def import_bootloaders(args, product_tree, cfgdata):
754+ for firmware_platform in cfgdata['bootloaders']:
755+ product_id = cfgdata['product_id'].format(
756+ os=firmware_platform['os'],
757+ firmware_platform=firmware_platform['firmware-platform'],
758+ arch=firmware_platform['arch'])
759+ # Compile a list of the latest packages in the archive this bootloader
760+ # pulls files from
761+ src_packages = {}
762+ for package in firmware_platform['packages']:
763+ package_info = get_package(
764+ firmware_platform['archive'], package,
765+ firmware_platform['arch'], firmware_platform['release'])
766+ # Some source packages include the package version in the source
767+ # name. Only take the name, not the version.
768+ src_package_name = package_info['Source'].split(' ')[0]
769+ src_packages[src_package_name] = {
770+ 'src_version': package_info['Version'],
771+ 'src_release': firmware_platform['release'],
772+ 'found': False,
773+ }
774+ # Check if the bootloader has been built from the latest version of
775+ # the packages in the archive
776+ if product_id in product_tree['products']:
777+ versions = product_tree['products'][product_id]['versions']
778+ for data in versions.values():
779+ for item in data['items'].values():
780+ src_package = src_packages.get(item['src_package'])
781+ if (
782+ src_package is not None and
783+ src_package['src_version'] == item['src_version']
784+ and
785+ src_package['src_release'] == item['src_release']):
786+ src_packages[item['src_package']]['found'] = True
787+ bootloader_uptodate = True
788+ for src_package in src_packages.values():
789+ if not src_package['found']:
790+ bootloader_uptodate = False
791+ # Bootloader built from the latest packages already in stream
792+ if bootloader_uptodate:
793+ print(
794+ "Product %s built from the latest package set, skipping"
795+ % product_id)
796+ continue
797+ # Find an unused version
798+ today = datetime.utcnow().strftime('%Y%m%d')
799+ point = 0
800+ while True:
801+ version = "%s.%d" % (today, point)
802+ products = product_tree['products']
803+ if (
804+ product_id not in products or
805+ version not in products[product_id]['versions'].keys()):
806+ break
807+ point += 1
808+ if product_tree['products'].get(product_id) is None:
809+ print("Creating new product %s" % product_id)
810+ product_tree['products'][product_id] = {
811+ 'label': 'daily',
812+ 'arch': firmware_platform['arch'],
813+ 'arches': firmware_platform['arches'],
814+ 'os': firmware_platform['os'],
815+ 'bootloader-type': firmware_platform['firmware-platform'],
816+ 'versions': {},
817+ }
818+ path = os.path.join(
819+ 'bootloaders', firmware_platform['firmware-platform'],
820+ firmware_platform['arch'], version)
821+ dest = os.path.join(args.target, path)
822+ os.makedirs(dest)
823+ grub_format = firmware_platform.get('grub_format')
824+ if grub_format is not None:
825+ dest = os.path.join(dest, firmware_platform['grub_output'])
826+ print(
827+ "Downloading and creating %s version %s" % (
828+ product_id, version))
829+ items = extract_files_from_packages(
830+ firmware_platform['archive'], firmware_platform['packages'],
831+ firmware_platform['arch'], firmware_platform['files'],
832+ firmware_platform['release'], args.target, path, grub_format,
833+ firmware_platform.get('grub_config'),
834+ firmware_platform.get('grub_output'))
835+ product_tree['products'][product_id]['versions'][version] = {
836+ 'items': items
837+ }
838+
839+
840+def get_sha256_meta_images(url):
841+ """ Given a URL to a SHA256SUM file return a dictionary of filenames and
842+ SHA256 checksums keyed off the file version found as a date string in
843+ the filename. This is used in cases where simplestream data isn't
844+ available.
845+ """
846+ ret = dict()
847+ content = geturl_text(url)
848+ # http://cloud.centos.org/centos/ contains images using two version
849+ # strings. The first is only used on older images and uses the format
850+ # YYYYMMDD_XX. The second is used on images generated monthly using the
851+ # format YYMM. We know the second format is referencing the year and month
852+ # by looking at the timestamp of each image.
853+ prog = re.compile(r'([\d]{8}(_[\d]+))|(\d{4})')
854+
855+ for i in content.split('\n'):
856+ try:
857+ sha256, img_name = i.split()
858+ except ValueError:
859+ continue
860+ if (not img_name.endswith('qcow2.xz') and
861+ not img_name.endswith('qcow2')):
862+ continue
863+ m = prog.search(img_name)
864+ if m is None:
865+ continue
866+ img_version = m.group(0)
867+
868+ # Turn the short version string into a long version string so that MAAS
869+ # uses the latest version, not the longest
870+ if len(img_version) == 4:
871+ img_version = "20%s01_01" % img_version
872+
873+ # Prefer compressed image over uncompressed
874+ if (img_version in ret and
875+ ret[img_version]['img_name'].endswith('qcow2.xz')):
876+ continue
877+ ret[img_version] = {
878+ 'img_name': img_name,
879+ 'sha256': sha256,
880+ }
881+ return ret
882+
883+
884+def import_qcow2(url, expected_sha256, out, curtin_files=None):
885+ """ Call the maas-qcow2targz script to convert a qcow2 or qcow2.xz file at
886+ a given URL or local path. Return the SHA256 checksum of the resulting file.
887+ """
888+ # Assume maas-qcow2targz is in the path
889+ qcow2targz_cmd = ["maas-qcow2targz", url, expected_sha256, out]
890+ if curtin_files:
891+ curtin_path = os.path.join(
892+ os.path.dirname(__file__), "..", "..", "curtin")
893+ qcow2targz_cmd.append(curtin_files.format(curtin_path=curtin_path))
894+ proc = subprocess.Popen(qcow2targz_cmd)
895+ proc.communicate()
896+ if proc.wait() != 0:
897+ raise subprocess.CalledProcessError(
898+ cmd=qcow2targz_cmd, returncode=proc.returncode)
899+
900+ sha256 = hashlib.sha256()
901+ with open(out, 'rb') as fp:
902+ while True:
903+ chunk = fp.read(2**20)
904+ if not chunk:
905+ break
906+ sha256.update(chunk)
907+ return sha256.hexdigest()
908+
909+
910+def main_import(args):
911+ cfg_path = os.path.join(
912+ os.path.dirname(__file__), "..", "..", "conf", args.import_cfg)
913+ if not os.path.exists(cfg_path):
914+ if os.path.exists(args.import_cfg):
915+ cfg_path = args.import_cfg
916+ else:
917+ print("Error: Unable to find config file %s" % args.import_cfg)
918+ sys.exit(1)
919+
920+ with open(cfg_path) as fp:
921+ cfgdata = yaml.load(fp)
922+
923+ target_product_stream = os.path.join(
924+ 'streams', 'v1', cfgdata['content_id'] + '.json')
925+
926+ product_tree = util.empty_iid_products(cfgdata['content_id'])
927+ product_tree['products'] = util.load_products(
928+ args.target, [target_product_stream])
929+ product_tree['updated'] = util.timestamp()
930+ product_tree['datatype'] = 'image-downloads'
931+
932+ if cfgdata.get('sha256_meta_data_path', None) is not None:
933+ import_sha256(args, product_tree, cfgdata)
934+ elif cfgdata.get('bootloaders', None) is not None:
935+ import_bootloaders(args, product_tree, cfgdata)
936+ else:
937+ sys.stderr.write('Unsupported import yaml!\n')
938+ sys.exit(1)
939+
940+ md_d = os.path.join(args.target, 'streams', 'v1')
941+ if not os.path.exists(md_d):
942+ os.makedirs(md_d)
943+
944+ with open(os.path.join(args.target, target_product_stream), 'wb') as fp:
945+ fp.write(util.dump_data(product_tree))
946+
947+ util.gen_index_and_sign(args.target, not args.no_sign)
948+
949+
950+def main():
951+ subc = SUBCOMMANDS['import']
952+ parser = argparse.ArgumentParser(description=subc['help'])
953+
954+ # Top level args
955+ for (args, kwargs) in COMMON_ARGS:
956+ parser.add_argument(*args, **kwargs)
957+
958+ for (args, kwargs) in subc['opts']:
959+ if isinstance(args, str):
960+ args = [args]
961+ parser.add_argument(*args, **kwargs)
962+ parser.set_defaults(action=main_import)
963+
964+ args = parser.parse_args()
965+ if not getattr(args, 'action', None):
966+ # http://bugs.python.org/issue16308
967+ parser.print_help()
968+ return 1
969+
970+ return args.action(args)
971+
972+
973+if __name__ == '__main__':
974+ sys.exit(main())
975
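The version extraction in get_sha256_meta_images is the subtle part: it must normalize CentOS's short YYMM versions so lexical comparison picks the newest image, not the longest string. A quick illustration with made-up image names (hypothetical inputs, not from the branch):

```python
import re

prog = re.compile(r'([\d]{8}(_[\d]+))|(\d{4})')
for name in ('CentOS-7-x86_64-GenericCloud-20141129_01.qcow2.xz',
             'CentOS-7-x86_64-GenericCloud-1503.qcow2'):
    version = prog.search(name).group(0)
    if len(version) == 4:               # short YYMM form
        version = "20%s01_01" % version
    print(name, '->', version)
# -> 20141129_01 and 20150301_01, both comparable as long-form strings
```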
976=== modified file 'meph2/util.py'
977--- meph2/util.py 2016-01-25 18:35:31 +0000
978+++ meph2/util.py 2016-09-26 19:23:59 +0000
979@@ -15,6 +15,7 @@
980 # You should have received a copy of the GNU Affero General Public License
981 # along with Simplestreams. If not, see <http://www.gnu.org/licenses/>.
982
983+from simplestreams import contentsource as scontentsource
984 from simplestreams import util as sutil
985 from simplestreams import mirrors
986
987@@ -28,6 +29,11 @@
988 import sys
989 import tempfile
990
991+# for callers' convenience
992+timestamp = sutil.timestamp
993+
994+STREAMS_D = "streams/v1/"
995+
996
997 def create_index(target_d, files=None, path_prefix="streams/v1/"):
998 if files is None:
999@@ -253,4 +259,37 @@
1000 return bytestr
1001
1002
1003+def load_products(path, product_streams):
1004+ products = {}
1005+ for product_stream in product_streams:
1006+ product_stream_path = os.path.join(path, product_stream)
1007+ if os.path.exists(product_stream_path):
1008+ with scontentsource.UrlContentSource(
1009+ product_stream_path) as tcs:
1010+ product_listing = sutil.load_content(tcs.read())
1011+ products.update(product_listing['products'])
1012+ return products
1013+
1014+
1015+def load_product_streams(src):
1016+ index_path = os.path.join(src, STREAMS_D, "index.json")
1017+ if not os.path.exists(index_path):
1018+ return []
1019+ with scontentsource.UrlContentSource(index_path) as tcs:
1020+ index = sutil.load_content(tcs.read())
1021+ return [product['path'] for product in index['index'].values()]
1022+
1023+
1024+def gen_index_and_sign(data_d, sign=True):
1025+ md_d = os.path.join(data_d, "streams", "v1")
1026+ if not os.path.exists(md_d):
1027+ os.makedirs(md_d)
1028+ index = create_index(md_d, files=None)
1029+ with open(os.path.join(md_d, "index.json"), "wb") as fp:
1030+ fp.write(dump_data(index))
1031+
1032+ if sign:
1033+ sign_streams_d(md_d)
1034+
1035+
1036 # vi: ts=4 expandtab syntax=python
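With load_product_streams, load_products, and gen_index_and_sign now living in meph2/util.py, both tools can share them. A hypothetical end-to-end use against a local streams tree (the ./out path is illustrative):

```python
from meph2 import util

streams = util.load_product_streams('out')      # product paths from index.json
products = util.load_products('out', streams)   # merged 'products' dicts
util.gen_index_and_sign('out', sign=False)      # rebuild index.json; skip GPG signing
```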
