Merge lp:~mvo/ubuntu-system-image/server-raw-device-tarball into lp:~registry/ubuntu-system-image/client

Proposed by Michael Vogt
Status: Superseded
Proposed branch: lp:~mvo/ubuntu-system-image/server-raw-device-tarball
Merge into: lp:~registry/ubuntu-system-image/client
Diff against target: 7652 lines (+7510/-0) (has conflicts)
25 files modified
.bzrignore (+9/-0)
README (+23/-0)
bin/copy-image (+308/-0)
bin/generate-keyrings (+87/-0)
bin/generate-keys (+61/-0)
bin/import-images (+305/-0)
bin/set-phased-percentage (+90/-0)
bin/si-shell (+79/-0)
etc/config.example (+48/-0)
lib/systemimage/config.py (+206/-0)
lib/systemimage/diff.py (+245/-0)
lib/systemimage/generators.py (+1287/-0)
lib/systemimage/gpg.py (+239/-0)
lib/systemimage/tools.py (+373/-0)
lib/systemimage/tree.py (+1013/-0)
tests/generate-keys (+52/-0)
tests/run (+60/-0)
tests/test_config.py (+281/-0)
tests/test_diff.py (+279/-0)
tests/test_generators.py (+1134/-0)
tests/test_gpg.py (+164/-0)
tests/test_static.py (+78/-0)
tests/test_tools.py (+303/-0)
tests/test_tree.py (+689/-0)
utils/check-latest (+97/-0)
Conflict adding file .bzrignore.  Moved existing file to .bzrignore.moved.
To merge this branch: bzr merge lp:~mvo/ubuntu-system-image/server-raw-device-tarball
Reviewer: Registry Administrators (status: Pending)
Review via email: mp+241776@code.launchpad.net

This proposal has been superseded by a proposal from 2014-11-14.

Description of the change

This adds a new cdimage-device-raw generator that can import the ubuntu-core flat device tarball.
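
For context, here is a sketch of how the new generator could be wired into an auto channel, following the file_* syntax from etc/config.example in this branch. The section name and the daily-preinstalled/trusty arguments are copied from that example purely for illustration, not a tested configuration; only the cdimage-device-raw generator name comes from this proposal:

[channel_trusty-proposed]
type = auto
versionbase = 1
fullcount = 20
files = device, version
file_device = cdimage-device-raw;daily-preinstalled;trusty,import=any
file_version = version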

Unmerged revisions

258. By Michael Vogt

fix pep8

257. By Michael Vogt

add tests for device-raw

256. By Michael Vogt

lib/systemimage/tools.py: close input files

255. By Michael Vogt

make tests work with py3 (still lot of warnings though)

254. By Michael Vogt

update README and add SKIP_SLOW_TESTS environment

253. By Michael Vogt

fix tests

252. By Michael Vogt

refactor and extract common code into list_versions()

251. By Michael Vogt

add raw-device generator

250. By Stéphane Graber

Prevent change_channel_alias on a redirect channel

249. By Stéphane Graber

Introduce list_devices

Preview Diff

1=== added file '.bzrignore'
2--- .bzrignore 1970-01-01 00:00:00 +0000
3+++ .bzrignore 2014-11-14 10:18:00 +0000
4@@ -0,0 +1,9 @@
5+etc/config
6+lib/systemimage/__pycache__
7+secret/gpg/keyrings/*
8+secret/gpg/keys/*
9+secret/ssh/*
10+tests/coverage
11+tests/keys/*
12+www/*
13+state/*
14
15=== renamed file '.bzrignore' => '.bzrignore.moved'
16=== added file 'README'
17--- README 1970-01-01 00:00:00 +0000
18+++ README 2014-11-14 10:18:00 +0000
19@@ -0,0 +1,23 @@
20+Runtime dependencies:
21+ - pxz | xz-utils
22+ - python3, python3-gpgme | python, python-gpgme
23+ - e2fsprogs
24+ - android-tools-fsutils
25+ - abootimg
26+
27+Test dependencies:
28+ - python-mock, python3-mock
29+ - python-coverage, python3-coverage
30+ - pep8
31+ - pyflakes3, pyflakes
32+
33+
34+Run once:
35+$ ./tests/generate-keys
36+
37+Then run the tests with:
38+$ ./tests/run
39+or
40+$ SKIP_SLOW_TESTS=1 ./tests/run
41+or
42+$ PYTHONPATH=lib python -m unittest tests.test_generators.GeneratorsTests
43
44=== added directory 'bin'
45=== added file 'bin/copy-image'
46--- bin/copy-image 1970-01-01 00:00:00 +0000
47+++ bin/copy-image 2014-11-14 10:18:00 +0000
48@@ -0,0 +1,308 @@
49+#!/usr/bin/python
50+# -*- coding: utf-8 -*-
51+
52+# Copyright (C) 2013 Canonical Ltd.
53+# Author: Stéphane Graber <stgraber@ubuntu.com>
54+
55+# This program is free software: you can redistribute it and/or modify
56+# it under the terms of the GNU General Public License as published by
57+# the Free Software Foundation; version 3 of the License.
58+#
59+# This program is distributed in the hope that it will be useful,
60+# but WITHOUT ANY WARRANTY; without even the implied warranty of
61+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
62+# GNU General Public License for more details.
63+#
64+# You should have received a copy of the GNU General Public License
65+# along with this program. If not, see <http://www.gnu.org/licenses/>.
66+
67+import json
68+import os
69+import sys
70+sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
71+
72+from systemimage import config, generators, tools, tree
73+
74+import argparse
75+import fcntl
76+import logging
77+
78+if __name__ == '__main__':
79+ parser = argparse.ArgumentParser(description="image copier")
80+ parser.add_argument("source_channel", metavar="SOURCE-CHANNEL")
81+ parser.add_argument("destination_channel", metavar="DESTINATION-CHANNEL")
82+ parser.add_argument("device", metavar="DEVICE")
83+ parser.add_argument("version", metavar="VERSION", type=int)
84+ parser.add_argument("-k", "--keep-version", action="store_true",
85+ help="Keep the original version number")
86+ parser.add_argument("--verbose", "-v", action="count", default=0)
87+
88+ args = parser.parse_args()
89+
90+ # Setup logging
91+ formatter = logging.Formatter(
92+ "%(asctime)s %(levelname)s %(message)s")
93+
94+ levels = {1: logging.ERROR,
95+ 2: logging.WARNING,
96+ 3: logging.INFO,
97+ 4: logging.DEBUG}
98+
99+ if args.verbose > 0:
100+ stdoutlogger = logging.StreamHandler(sys.stdout)
101+ stdoutlogger.setFormatter(formatter)
102+ logging.root.setLevel(levels[min(4, args.verbose)])
103+ logging.root.addHandler(stdoutlogger)
104+ else:
105+ logging.root.addHandler(logging.NullHandler())
106+
107+ # Load the configuration
108+ conf = config.Config()
109+
110+ # Try to acquire a global lock
111+ lock_file = os.path.join(conf.state_path, "global.lock")
112+ lock_fd = open(lock_file, 'w')
113+
114+ try:
115+ fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
116+ except IOError:
117+ print("Something else holds the global lock. Exiting.")
118+ sys.exit(0)
119+
120+ # Load the tree
121+ pub = tree.Tree(conf)
122+
123+ # Do some checks
124+ if args.source_channel not in pub.list_channels():
125+ parser.error("Invalid source channel: %s" % args.source_channel)
126+
127+ if args.destination_channel not in pub.list_channels():
128+ parser.error("Invalid destination channel: %s" %
129+ args.destination_channel)
130+
131+ if args.device not in pub.list_channels()[args.source_channel]['devices']:
132+ parser.error("Invalid device for source channel: %s" %
133+ args.device)
134+
135+ if args.device not in \
136+ pub.list_channels()[args.destination_channel]['devices']:
137+ parser.error("Invalid device for destination channel: %s" %
138+ args.device)
139+
140+ if "alias" in pub.list_channels()[args.source_channel] and \
141+ pub.list_channels()[args.source_channel]['alias'] \
142+ != args.source_channel:
143+ parser.error("Source channel is an alias.")
144+
145+ if "alias" in pub.list_channels()[args.destination_channel] and \
146+ pub.list_channels()[args.destination_channel]['alias'] \
147+ != args.destination_channel:
148+ parser.error("Destination channel is an alias.")
149+
150+ if "redirect" in pub.list_channels()[args.source_channel]:
151+ parser.error("Source channel is a redirect.")
152+
153+ if "redirect" in pub.list_channels()[args.destination_channel]:
154+ parser.error("Destination channel is a redirect.")
155+
156+ source_device = pub.get_device(args.source_channel, args.device)
157+ destination_device = pub.get_device(args.destination_channel, args.device)
158+
159+ if args.keep_version:
160+ images = [image for image in destination_device.list_images()
161+ if image['version'] == args.version]
162+ if images:
163+ parser.error("Version number is already used: %s" % args.version)
164+
165+ # Assign a new version number
166+ new_version = args.version
167+ if not args.keep_version:
168+ # Find the next available version
169+ new_version = 1
170+ for image in destination_device.list_images():
171+ if image['version'] >= new_version:
172+ new_version = image['version'] + 1
173+ logging.debug("Version for next image: %s" % new_version)
174+
175+ # Extract the build we want to copy
176+ images = [image for image in source_device.list_images()
177+ if image['type'] == "full" and image['version'] == args.version]
178+ if not images:
179+ parser.error("Can't find version: %s" % args.version)
180+ source_image = images[0]
181+
182+ # Extract the list of existing full images
183+ full_images = {image['version']: image
184+ for image in destination_device.list_images()
185+ if image['type'] == "full"}
186+
187+ # Check that the last full and the new image aren't one and the same
188+ source_files = [entry['path'].split("/")[-1]
189+ for entry in source_image['files']
190+ if not entry['path'].split("/")[-1].startswith("version-")]
191+ destination_files = []
192+ if full_images:
193+ latest_full = sorted(full_images.values(),
194+ key=lambda image: image['version'])[-1]
195+ destination_files = [entry['path'].split("/")[-1]
196+ for entry in latest_full['files']
197+ if not entry['path'].split(
198+ "/")[-1].startswith("version-")]
199+ if source_files == destination_files:
200+ parser.error("Source image is already latest full in "
201+ "destination channel.")
202+
203+ # Generate a list of required deltas
204+ delta_base = []
205+
206+ if args.destination_channel in conf.channels:
207+ for base_channel in conf.channels[args.destination_channel].deltabase:
208+ # Skip missing channels
209+ if base_channel not in pub.list_channels():
210+ continue
211+
212+ # Skip missing devices
213+ if args.device not in (pub.list_channels()
214+ [base_channel]['devices']):
215+ continue
216+
217+ # Extract the latest full image
218+ base_device = pub.get_device(base_channel, args.device)
219+ base_images = sorted([image
220+ for image in base_device.list_images()
221+ if image['type'] == "full"],
222+ key=lambda image: image['version'])
223+
224+ # Check if the version is valid and add it
225+ if base_images and base_images[-1]['version'] in full_images:
226+ if (full_images[base_images[-1]['version']]
227+ not in delta_base):
228+ delta_base.append(full_images
229+ [base_images[-1]['version']])
230+ logging.debug("Source version for delta: %s" %
231+ base_images[-1]['version'])
232+
233+ # Create new empty entries
234+ new_images = {'full': {'files': []}}
235+ for delta in delta_base:
236+ new_images["delta_%s" % delta['version']] = {'files': []}
237+
238+ # Extract current version_detail and files
239+ version_detail = ""
240+ for entry in source_image['files']:
241+ path = os.path.realpath("%s/%s" % (conf.publish_path, entry['path']))
242+
243+ filename = path.split("/")[-1]
244+
245+ # Look for version-X.tar.xz
246+ if filename == "version-%s.tar.xz" % args.version:
247+ # Extract the metadata
248+ if os.path.exists(path.replace(".tar.xz", ".json")):
249+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
250+ metadata = json.loads(fd.read())
251+ if "channel.ini" in metadata:
252+ version_detail = metadata['channel.ini'].get(
253+ "version_detail", None)
254+ else:
255+ new_images['full']['files'].append(path)
256+ logging.debug("Source version_detail is: %s" % version_detail)
257+
258+ # Generate new version tarball
259+ environment = {}
260+ environment['channel_name'] = args.destination_channel
261+ environment['device'] = destination_device
262+ environment['device_name'] = args.device
263+ environment['version'] = new_version
264+ environment['version_detail'] = [entry
265+ for entry in version_detail.split(",")
266+ if not entry.startswith("version=")]
267+ environment['new_files'] = new_images['full']['files']
268+
269+ logging.info("Generating new version tarball for '%s' (%s)"
270+ % (new_version, ",".join(environment['version_detail'])))
271+ version_path = generators.generate_file(conf, "version", [], environment)
272+ if version_path:
273+ new_images['full']['files'].append(version_path)
274+
275+ # Generate deltas
276+ for abspath in new_images['full']['files']:
277+ prefix = abspath.split("/")[-1].rsplit("-", 1)[0]
278+ for delta in delta_base:
279+ # Extract the source
280+ src_path = None
281+ for file_dict in delta['files']:
282+ if (file_dict['path'].split("/")[-1]
283+ .startswith(prefix)):
284+ src_path = "%s/%s" % (conf.publish_path,
285+ file_dict['path'])
286+ break
287+
288+ # Check that it's not the current file
289+ if src_path:
290+ src_path = os.path.realpath(src_path)
291+
292+ # FIXME: the keyring- is a big hack...
293+ if src_path == abspath and "keyring-" not in src_path:
294+ continue
295+
296+ # Generators are allowed to return None when no delta
297+ # exists at all.
298+ logging.info("Generating delta from '%s' for '%s'" %
299+ (delta['version'],
300+ prefix))
301+ delta_path = generators.generate_delta(conf, src_path,
302+ abspath)
303+ else:
304+ delta_path = abspath
305+
306+ if not delta_path:
307+ continue
308+
309+ # Get the full and relative paths
310+ delta_abspath, delta_relpath = tools.expand_path(
311+ delta_path, conf.publish_path)
312+
313+ new_images['delta_%s' % delta['version']]['files'] \
314+ .append(delta_abspath)
315+
316+ # Add full image
317+ logging.info("Publishing new image '%s' (%s) with %s files."
318+ % (new_version, ",".join(environment['version_detail']),
319+ len(new_images['full']['files'])))
320+ destination_device.create_image("full", new_version,
321+ ",".join(environment['version_detail']),
322+ new_images['full']['files'])
323+
324+ # Add delta images
325+ for delta in delta_base:
326+ files = new_images["delta_%s" % delta['version']]['files']
327+ logging.info("Publishing new delta from '%s' (%s)"
328+ " to '%s' (%s) with %s files" %
329+ (delta['version'], delta.get("description", ""),
330+ new_version, ",".join(environment['version_detail']),
331+ len(files)))
332+
333+ destination_device.create_image(
334+ "delta", new_version,
335+ ",".join(environment['version_detail']),
336+ files,
337+ base=delta['version'])
338+
339+ # Expire images
340+ if args.destination_channel in conf.channels:
341+ if conf.channels[args.destination_channel].fullcount > 0:
342+ logging.info("Expiring old images")
343+ destination_device.expire_images(
344+ conf.channels[args.destination_channel].fullcount)
345+
346+ # Sync all channel aliases
347+ logging.info("Syncing any existing alias")
348+ pub.sync_aliases(args.destination_channel)
349+
350+ # Remove any orphaned file
351+ logging.info("Removing orphaned files from the pool")
352+ pub.cleanup_tree()
353+
354+ # Sync the mirrors
355+ logging.info("Triggering a mirror sync")
356+ tools.sync_mirrors(conf)
357
358=== added file 'bin/generate-keyrings'
359--- bin/generate-keyrings 1970-01-01 00:00:00 +0000
360+++ bin/generate-keyrings 2014-11-14 10:18:00 +0000
361@@ -0,0 +1,87 @@
362+#!/usr/bin/python
363+# -*- coding: utf-8 -*-
364+
365+# Copyright (C) 2013 Canonical Ltd.
366+# Author: Stéphane Graber <stgraber@ubuntu.com>
367+
368+# This program is free software: you can redistribute it and/or modify
369+# it under the terms of the GNU General Public License as published by
370+# the Free Software Foundation; version 3 of the License.
371+#
372+# This program is distributed in the hope that it will be useful,
373+# but WITHOUT ANY WARRANTY; without even the implied warranty of
374+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
375+# GNU General Public License for more details.
376+#
377+# You should have received a copy of the GNU General Public License
378+# along with this program. If not, see <http://www.gnu.org/licenses/>.
379+
380+import os
381+import sys
382+import time
383+sys.path.insert(0, 'lib')
384+
385+from systemimage import config
386+from systemimage import gpg
387+from systemimage import tools
388+
389+conf = config.Config()
390+
391+# archive-master keyring
392+if os.path.exists(os.path.join(conf.gpg_key_path, "archive-master")):
393+ archive_master = gpg.Keyring(conf, "archive-master")
394+ archive_master.set_metadata("archive-master")
395+ archive_master.import_keys(os.path.join(conf.gpg_key_path,
396+ "archive-master"))
397+ path = archive_master.generate_tarball()
398+ tools.xz_compress(path)
399+ os.remove(path)
400+ gpg.sign_file(conf, "archive-master", "%s.xz" % path)
401+
402+# image-master keyring
403+if os.path.exists(os.path.join(conf.gpg_key_path, "image-master")) and \
404+ os.path.exists(os.path.join(conf.gpg_key_path, "archive-master")):
405+ image_master = gpg.Keyring(conf, "image-master")
406+ image_master.set_metadata("image-master")
407+ image_master.import_keys(os.path.join(conf.gpg_key_path, "image-master"))
408+ path = image_master.generate_tarball()
409+ tools.xz_compress(path)
410+ os.remove(path)
411+ gpg.sign_file(conf, "archive-master", "%s.xz" % path)
412+
413+# image-signing keyring
414+if os.path.exists(os.path.join(conf.gpg_key_path, "image-signing")) and \
415+ os.path.exists(os.path.join(conf.gpg_key_path, "image-master")):
416+ image_signing = gpg.Keyring(conf, "image-signing")
417+ image_signing.set_metadata("image-signing",
418+ int(time.strftime("%s",
419+ time.localtime())) + 63072000)
420+ image_signing.import_keys(os.path.join(conf.gpg_key_path, "image-signing"))
421+ path = image_signing.generate_tarball()
422+ tools.xz_compress(path)
423+ os.remove(path)
424+ gpg.sign_file(conf, "image-master", "%s.xz" % path)
425+
426+# device-signing keyring
427+if os.path.exists(os.path.join(conf.gpg_key_path, "device-signing")) and \
428+ os.path.exists(os.path.join(conf.gpg_key_path, "image-signing")):
429+ device_signing = gpg.Keyring(conf, "device-signing")
430+ device_signing.set_metadata("device-signing",
431+ int(time.strftime("%s",
432+ time.localtime())) + 2678400)
433+ device_signing.import_keys(os.path.join(conf.gpg_key_path,
434+ "device-signing"))
435+ path = device_signing.generate_tarball()
436+ tools.xz_compress(path)
437+ os.remove(path)
438+ gpg.sign_file(conf, "image-signing", "%s.xz" % path)
439+
440+# blacklist keyring
441+if os.path.exists(os.path.join(conf.gpg_key_path, "blacklist")) and \
442+ os.path.exists(os.path.join(conf.gpg_key_path, "image-master")):
443+ blacklist = gpg.Keyring(conf, "blacklist")
444+ blacklist.set_metadata("blacklist")
445+ path = blacklist.generate_tarball()
446+ tools.xz_compress(path)
447+ os.remove(path)
448+ gpg.sign_file(conf, "image-master", "%s.xz" % path)
449
450=== added file 'bin/generate-keys'
451--- bin/generate-keys 1970-01-01 00:00:00 +0000
452+++ bin/generate-keys 2014-11-14 10:18:00 +0000
453@@ -0,0 +1,61 @@
454+#!/usr/bin/python
455+# -*- coding: utf-8 -*-
456+#
457+# Copyright (C) 2014 Canonical Ltd.
458+# Author: Timothy Chavez <timothy.chavez@canonical.com>
459+#
460+# This program is free software: you can redistribute it and/or modify
461+# it under the terms of the GNU General Public License as published by
462+# the Free Software Foundation; version 3 of the License.
463+#
464+# This program is distributed in the hope that it will be useful,
465+# but WITHOUT ANY WARRANTY; without even the implied warranty of
466+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
467+# GNU General Public License for more details.
468+#
469+# You should have received a copy of the GNU General Public License
470+# along with this program. If not, see <http://www.gnu.org/licenses/>.
471+
472+import argparse
473+import os
474+import sys
475+
476+sys.path.insert(0, 'lib')
477+from systemimage import config
478+from systemimage.gpg import generate_signing_key
479+
480+
481+KEYS = {
482+ "archive-master": ("{0} Archive Master key", 0),
483+ "image-master": ("{0} Image Master key", 0),
484+ "device-signing": ("{0} Device Signing key", "2y"),
485+ "image-signing": ("{0} Image Signing key", "2y")
486+}
487+
488+
489+def main():
490+ parser = argparse.ArgumentParser(description='Generate signing keys.')
491+ parser.add_argument("--email", dest="email", required=True,
492+ help="An email address to associate with the keys")
493+ parser.add_argument("--prefix", dest="prefix", required=True,
494+ help="A prefix to include in the key name")
495+ args = parser.parse_args()
496+
497+ conf = config.Config()
498+
499+ print("I: Generating signing keys...")
500+
501+ for key_id, (key_name, key_expiry) in KEYS.items():
502+ key_path = os.path.join(conf.gpg_key_path, key_id)
503+ if os.path.exists(key_path):
504+ print("W: The key \"{0}\" already exists".format(key_id))
505+ continue
506+ os.makedirs(key_path)
507+ generate_signing_key(
508+ key_path, key_name.format(args.prefix), args.email, key_expiry)
509+
510+ print("I: Done")
511+
512+
513+if __name__ == "__main__":
514+ main()
515
516=== added file 'bin/import-images'
517--- bin/import-images 1970-01-01 00:00:00 +0000
518+++ bin/import-images 2014-11-14 10:18:00 +0000
519@@ -0,0 +1,305 @@
520+#!/usr/bin/python
521+# -*- coding: utf-8 -*-
522+
523+# Copyright (C) 2013 Canonical Ltd.
524+# Author: Stéphane Graber <stgraber@ubuntu.com>
525+
526+# This program is free software: you can redistribute it and/or modify
527+# it under the terms of the GNU General Public License as published by
528+# the Free Software Foundation; version 3 of the License.
529+#
530+# This program is distributed in the hope that it will be useful,
531+# but WITHOUT ANY WARRANTY; without even the implied warranty of
532+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
533+# GNU General Public License for more details.
534+#
535+# You should have received a copy of the GNU General Public License
536+# along with this program. If not, see <http://www.gnu.org/licenses/>.
537+
538+import os
539+import sys
540+sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
541+
542+from systemimage import config, generators, tools, tree
543+
544+import argparse
545+import fcntl
546+import logging
547+
548+if __name__ == '__main__':
549+ parser = argparse.ArgumentParser(description="image importer")
550+ parser.add_argument("--verbose", "-v", action="count", default=0)
551+ args = parser.parse_args()
552+
553+ # Setup logging
554+ formatter = logging.Formatter(
555+ "%(asctime)s %(levelname)s %(message)s")
556+
557+ levels = {1: logging.ERROR,
558+ 2: logging.WARNING,
559+ 3: logging.INFO,
560+ 4: logging.DEBUG}
561+
562+ if args.verbose > 0:
563+ stdoutlogger = logging.StreamHandler(sys.stdout)
564+ stdoutlogger.setFormatter(formatter)
565+ logging.root.setLevel(levels[min(4, args.verbose)])
566+ logging.root.addHandler(stdoutlogger)
567+ else:
568+ logging.root.addHandler(logging.NullHandler())
569+
570+ # Load the configuration
571+ conf = config.Config()
572+
573+ # Try to acquire a global lock
574+ lock_file = os.path.join(conf.state_path, "global.lock")
575+ lock_fd = open(lock_file, 'w')
576+
577+ try:
578+ fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
579+ except IOError:
580+ logging.info("Something else holds the global lock. Exiting.")
581+ sys.exit(0)
582+
583+ # Load the tree
584+ pub = tree.Tree(conf)
585+
586+ # Iterate through the channels
587+ for channel_name, channel in conf.channels.items():
588+ # We're only interested in automated channels
589+ if channel.type != "auto":
590+ logging.debug("Skipping non-auto channel: %s" % channel_name)
591+ continue
592+
593+ logging.info("Processing channel: %s" % channel_name)
594+
595+ # Check the channel exists
596+ if channel_name not in pub.list_channels():
597+ logging.error("Invalid channel name: %s" % channel_name)
598+ continue
599+
600+ # Iterate through the devices
601+ for device_name in pub.list_channels()[channel_name]['devices']:
602+ logging.info("Processing device: %s" % device_name)
603+
604+ device = pub.get_device(channel_name, device_name)
605+
606+ # Extract last full version
607+ full_images = {image['version']: image
608+ for image in device.list_images()
609+ if image['type'] == "full"}
610+
611+ last_full = None
612+ if full_images:
613+ last_full = sorted(full_images.values(),
614+ key=lambda image: image['version'])[-1]
615+ logging.debug("Last full image: %s" % last_full['version'])
616+ else:
617+ logging.debug("This is the first full image.")
618+
619+ # Extract all delta base versions
620+ delta_base = []
621+
622+ for base_channel in channel.deltabase:
623+ # Skip missing channels
624+ if base_channel not in pub.list_channels():
625+ logging.warn("Invalid base channel: %s" % base_channel)
626+ continue
627+
628+ # Skip missing devices
629+ if device_name not in (pub.list_channels()
630+ [base_channel]['devices']):
631+ logging.warn("Missing device in base channel: %s in %s" %
632+ (device_name, base_channel))
633+ continue
634+
635+ # Extract the latest full image
636+ base_device = pub.get_device(base_channel, device_name)
637+ base_images = sorted([image
638+ for image in base_device.list_images()
639+ if image['type'] == "full"],
640+ key=lambda image: image['version'])
641+
642+ # Check if the version is valid and add it
643+ if base_images and base_images[-1]['version'] in full_images:
644+ if (full_images[base_images[-1]['version']]
645+ not in delta_base):
646+ delta_base.append(full_images
647+ [base_images[-1]['version']])
648+ logging.debug("Source version for delta: %s" %
649+ base_images[-1]['version'])
650+
651+ # Allocate new version number
652+ new_version = channel.versionbase
653+ if last_full:
654+ new_version = last_full['version'] + 1
655+ logging.debug("Version for next image: %s" % new_version)
656+
657+ # And the list used to generate version_detail
658+ version_detail = []
659+
660+ # And a list of new files
661+ new_files = []
662+
663+ # Keep track of what files we've processed
664+ processed_files = []
665+
666+ # Create new empty entries
667+ new_images = {}
668+ new_images['full'] = {'files': []}
669+ for delta in delta_base:
670+ new_images["delta_%s" % delta['version']] = {'files': []}
671+
672+ # Iterate through the files
673+ for file_entry in channel.files:
674+ # Deal with device specific overrides
675+ if "," in file_entry['name']:
676+ file_name, file_device = file_entry['name'].split(',', 1)
677+ if file_device != device_name:
678+ logging.debug("Skipping '%s' because the device name"
679+ "doesn't match" % file_entry['name'])
680+ continue
681+ else:
682+ file_name = file_entry['name']
683+
684+ if file_name in processed_files:
685+ logging.debug("Skipping '%s' because a more specific"
686+ "generator was already called."
687+ % file_entry['name'])
688+ continue
689+
690+ processed_files.append(file_name)
691+
692+ # Generate the environment
693+ environment = {}
694+ environment['channel_name'] = channel_name
695+ environment['device'] = device
696+ environment['device_name'] = device_name
697+ environment['version'] = new_version
698+ environment['version_detail'] = version_detail
699+ environment['new_files'] = new_files
700+
701+ # Call file generator
702+ logging.info("Calling '%s' generator for a new file"
703+ % file_entry['generator'])
704+ path = generators.generate_file(conf,
705+ file_entry['generator'],
706+ file_entry['arguments'],
707+ environment)
708+
709+ # Generators are allowed to return None when no build
710+ # exists at all. This cancels the whole image.
711+ if not path:
712+ new_files = []
713+ logging.info("No image will be produced because the "
714+ "'%s' generator returned None" %
715+ file_entry['generator'])
716+ break
717+
718+ # Get the full and relative paths
719+ abspath, relpath = tools.expand_path(path, conf.publish_path)
720+ urlpath = "/%s" % "/".join(relpath.split(os.sep))
721+
722+ # FIXME: Extract the prefix, used later for matching between
723+ # full images. This forces a specific filename format.
724+ prefix = abspath.split("/")[-1].rsplit("-", 1)[0]
725+
726+ # Add the file to the full image
727+ new_images['full']['files'].append(abspath)
728+
729+ # Check if same as current
730+ new_file = True
731+ if last_full:
732+ for file_dict in last_full['files']:
733+ if file_dict['path'] == urlpath:
734+ new_file = False
735+ break
736+
737+ if new_file:
738+ logging.info("New file from '%s': %s" %
739+ (file_entry['generator'], relpath))
740+ new_files.append(abspath)
741+ else:
742+ logging.info("File from '%s' is already current" %
743+ (file_entry['generator']))
744+
745+ # Generate deltas
746+ for delta in delta_base:
747+ # Extract the source
748+ src_path = None
749+ for file_dict in delta['files']:
750+ if (file_dict['path'].split("/")[-1]
751+ .startswith(prefix)):
752+ src_path = "%s/%s" % (conf.publish_path,
753+ file_dict['path'])
754+ break
755+
756+ # Check that it's not the current file
757+ if src_path:
758+ src_path = os.path.realpath(src_path)
759+
760+ # FIXME: the keyring- is a big hack...
761+ if src_path == abspath and "keyring-" not in src_path:
762+ continue
763+
764+ # Generators are allowed to return None when no delta
765+ # exists at all.
766+ logging.info("Generating delta from '%s' for '%s'" %
767+ (delta['version'],
768+ file_entry['generator']))
769+ delta_path = generators.generate_delta(conf, src_path,
770+ abspath)
771+ else:
772+ delta_path = abspath
773+
774+ if not delta_path:
775+ continue
776+
777+ # Get the full and relative paths
778+ delta_abspath, delta_relpath = tools.expand_path(
779+ delta_path, conf.publish_path)
780+
781+ new_images['delta_%s' % delta['version']]['files'] \
782+ .append(delta_abspath)
783+
784+ # Check if we've got a new image
785+ if len(new_files):
786+ # Publish full image
787+ logging.info("Publishing new image '%s' (%s) with %s files."
788+ % (new_version,
789+ ",".join(environment['version_detail']),
790+ len(new_images['full']['files'])))
791+ device.create_image("full", new_version,
792+ ",".join(environment['version_detail']),
793+ new_images['full']['files'])
794+ # Publish deltas
795+ for delta in delta_base:
796+ files = new_images["delta_%s" % delta['version']]['files']
797+ logging.info("Publishing new delta from '%s' (%s)"
798+ " to '%s' (%s) with %s files" %
799+ (delta['version'],
800+ delta.get("description", ""),
801+ new_version,
802+ ",".join(environment['version_detail']),
803+ len(files)))
804+ device.create_image(
805+ "delta", new_version,
806+ ",".join(environment['version_detail']), files,
807+ base=delta['version'])
808+
809+ # Expire images
810+ if channel.fullcount > 0:
811+ logging.info("Expiring old images")
812+ device.expire_images(channel.fullcount)
813+
814+ # Sync all channel aliases
815+ logging.info("Syncing any existing alias")
816+ pub.sync_aliases(channel_name)
817+
818+ # Remove any orphaned file
819+ logging.info("Removing orphaned files from the pool")
820+ pub.cleanup_tree()
821+
822+ # Sync the mirrors
823+ logging.info("Triggering a mirror sync")
824+ tools.sync_mirrors(conf)
825
826=== added file 'bin/set-phased-percentage'
827--- bin/set-phased-percentage 1970-01-01 00:00:00 +0000
828+++ bin/set-phased-percentage 2014-11-14 10:18:00 +0000
829@@ -0,0 +1,90 @@
830+#!/usr/bin/python
831+# -*- coding: utf-8 -*-
832+
833+# Copyright (C) 2013 Canonical Ltd.
834+# Author: Stéphane Graber <stgraber@ubuntu.com>
835+
836+# This program is free software: you can redistribute it and/or modify
837+# it under the terms of the GNU General Public License as published by
838+# the Free Software Foundation; version 3 of the License.
839+#
840+# This program is distributed in the hope that it will be useful,
841+# but WITHOUT ANY WARRANTY; without even the implied warranty of
842+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
843+# GNU General Public License for more details.
844+#
845+# You should have received a copy of the GNU General Public License
846+# along with this program. If not, see <http://www.gnu.org/licenses/>.
847+
848+import os
849+import sys
850+sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
851+
852+from systemimage import config, tools, tree
853+
854+import argparse
855+import logging
856+
857+if __name__ == '__main__':
858+ parser = argparse.ArgumentParser(description="set phased percentage")
859+ parser.add_argument("channel", metavar="CHANNEL")
860+ parser.add_argument("device", metavar="DEVICE")
861+ parser.add_argument("version", metavar="VERSION", type=int)
862+ parser.add_argument("percentage", metavar="PERCENTAGE", type=int)
863+ parser.add_argument("--verbose", "-v", action="count", default=0)
864+
865+ args = parser.parse_args()
866+
867+ # Setup logging
868+ formatter = logging.Formatter(
869+ "%(asctime)s %(levelname)s %(message)s")
870+
871+ levels = {1: logging.ERROR,
872+ 2: logging.WARNING,
873+ 3: logging.INFO,
874+ 4: logging.DEBUG}
875+
876+ if args.verbose > 0:
877+ stdoutlogger = logging.StreamHandler(sys.stdout)
878+ stdoutlogger.setFormatter(formatter)
879+ logging.root.setLevel(levels[min(4, args.verbose)])
880+ logging.root.addHandler(stdoutlogger)
881+ else:
882+ logging.root.addHandler(logging.NullHandler())
883+
884+ # Load the configuration
885+ conf = config.Config()
886+
887+ # Load the tree
888+ pub = tree.Tree(conf)
889+
890+ # Do some checks
891+ if args.channel not in pub.list_channels():
892+ parser.error("Invalid channel: %s" % args.channel)
893+
894+ if args.device not in pub.list_channels()[args.channel]['devices']:
895+ parser.error("Invalid device for channel: %s" %
896+ args.device)
897+
898+ if args.percentage < 0 or args.percentage > 100:
899+ parser.error("Invalid value: %s" % args.percentage)
900+
901+ if "alias" in pub.list_channels()[args.channel] and \
902+ pub.list_channels()[args.channel]['alias'] != args.channel:
903+ parser.error("Channel is an alias.")
904+
905+ if "redirect" in pub.list_channels()[args.channel]:
906+ parser.error("Channel is a redirect.")
907+
908+ dev = pub.get_device(args.channel, args.device)
909+ logging.info("Setting phased-percentage of '%s' to %s%%" %
910+ (args.version, args.percentage))
911+ dev.set_phased_percentage(args.version, args.percentage)
912+
913+ # Sync all channel aliases
914+ logging.info("Syncing any existing alias")
915+ pub.sync_aliases(args.channel)
916+
917+ # Sync the mirrors
918+ logging.info("Triggering a mirror sync")
919+ tools.sync_mirrors(conf)
920
921=== added file 'bin/si-shell'
922--- bin/si-shell 1970-01-01 00:00:00 +0000
923+++ bin/si-shell 2014-11-14 10:18:00 +0000
924@@ -0,0 +1,79 @@
925+#!/usr/bin/python
926+# -*- coding: utf-8 -*-
927+
928+# Copyright (C) 2013 Canonical Ltd.
929+# Author: Stéphane Graber <stgraber@ubuntu.com>
930+
931+# This program is free software: you can redistribute it and/or modify
932+# it under the terms of the GNU General Public License as published by
933+# the Free Software Foundation; version 3 of the License.
934+#
935+# This program is distributed in the hope that it will be useful,
936+# but WITHOUT ANY WARRANTY; without even the implied warranty of
937+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
938+# GNU General Public License for more details.
939+#
940+# You should have received a copy of the GNU General Public License
941+# along with this program. If not, see <http://www.gnu.org/licenses/>.
942+
943+import code
944+import logging
945+import os
946+import sys
947+sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
948+
949+from systemimage import config, tree
950+
951+import argparse
952+
953+if __name__ == '__main__':
954+ parser = argparse.ArgumentParser(description="system-image shell")
955+ parser.add_argument("--verbose", "-v", action="count", default=0)
956+
957+ args = parser.parse_args()
958+
959+ # Setup logging
960+ formatter = logging.Formatter(
961+ "%(asctime)s %(levelname)s %(message)s")
962+
963+ levels = {1: logging.ERROR,
964+ 2: logging.WARNING,
965+ 3: logging.INFO,
966+ 4: logging.DEBUG}
967+
968+ if args.verbose > 0:
969+ stdoutlogger = logging.StreamHandler(sys.stdout)
970+ stdoutlogger.setFormatter(formatter)
971+ stdoutlogger.setLevel(levels[min(4, args.verbose)])
972+ logging.root.addHandler(stdoutlogger)
973+ else:
974+ logging.root.addHandler(logging.NullHandler())
975+
976+ # Load the configuration
977+ conf = config.Config()
978+
979+ # Load the tree
980+ pub = tree.Tree(conf)
981+
982+ # Start the shell
983+ banner = """Welcome to the system-image shell.
984+The configuration is available as: conf
985+The system-image tree is available as: pub
986+"""
987+
988+ class CompleterConsole(code.InteractiveConsole):
989+ def __init__(self):
990+ local = {'conf': conf,
991+ 'pub': pub}
992+ code.InteractiveConsole.__init__(self, locals=local)
993+ try:
994+ import readline
995+ except ImportError:
996+ print('I: readline module not available.')
997+ else:
998+ import rlcompleter
999+ rlcompleter # Silence pyflakes
1000+ readline.parse_and_bind("tab: complete")
1001+
1002+ console = CompleterConsole()
1003+ console.interact(banner)
1004
1005=== added directory 'etc'
1006=== added file 'etc/config.example'
1007--- etc/config.example 1970-01-01 00:00:00 +0000
1008+++ etc/config.example 2014-11-14 10:18:00 +0000
1009@@ -0,0 +1,48 @@
1010+[global]
1011+base_path = /some/fs/path
1012+channels = trusty, trusty-proposed, trusty-customized
1013+gpg_key_path = secret/gpg/keys/
1014+gpg_keyring_path = secret/gpg/keyrings/
1015+publish_path = www/
1016+state_path = state/
1017+mirrors = a, b
1018+public_fqdn = system-image.example.net
1019+public_http_port = 80
1020+public_https_port = 443
1021+
1022+[channel_trusty]
1023+type = manual
1024+versionbase = 1
1025+fullcount = 10
1026+
1027+[channel_trusty-proposed]
1028+type = auto
1029+versionbase = 1
1030+fullcount = 20
1031+deltabase = trusty, trusty-proposed
1032+files = ubuntu, device, version
1033+file_ubuntu = cdimage-ubuntu;daily-preinstalled;trusty,import=any
1034+file_device = cdimage-device;daily-preinstalled;trusty,import=any
1035+file_version = version
1036+
1037+[channel_trusty-customized]
1038+type = auto
1039+versionbase = 1
1040+fullcount = 15
1041+files = ubuntu, device, custom, version
1042+file_ubuntu = system-image;trusty;file=ubuntu
1043+file_device = system-image;trusty;file=device
1044+file_custom = http;http://www.example.net/custom/custom.tar.xz;name=custom,monitor=http://www.example.net/custom/build_number
1045+file_version = version
1046+
1047+[mirror_default]
1048+ssh_user = mirror
1049+ssh_key = secret/ssh/mirror
1050+ssh_port = 22
1051+ssh_command = sync-mirror
1052+
1053+[mirror_a]
1054+ssh_host = a.example.com
1055+
1056+[mirror_b]
1057+ssh_host = b.example.com
1058
1059=== added directory 'lib'
1060=== added directory 'lib/systemimage'
1061=== added file 'lib/systemimage/__init__.py'
1062=== added file 'lib/systemimage/config.py'
1063--- lib/systemimage/config.py 1970-01-01 00:00:00 +0000
1064+++ lib/systemimage/config.py 2014-11-14 10:18:00 +0000
1065@@ -0,0 +1,206 @@
1066+# -*- coding: utf-8 -*-
1067+
1068+# Copyright (C) 2013 Canonical Ltd.
1069+# Author: Stéphane Graber <stgraber@ubuntu.com>
1070+
1071+# This program is free software: you can redistribute it and/or modify
1072+# it under the terms of the GNU General Public License as published by
1073+# the Free Software Foundation; version 3 of the License.
1074+#
1075+# This program is distributed in the hope that it will be useful,
1076+# but WITHOUT ANY WARRANTY; without even the implied warranty of
1077+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
1078+# GNU General Public License for more details.
1079+#
1080+# You should have received a copy of the GNU General Public License
1081+# along with this program. If not, see <http://www.gnu.org/licenses/>.
1082+
1083+import os
1084+
1085+try:
1086+ from configparser import ConfigParser
1087+except ImportError: # pragma: no cover
1088+ from ConfigParser import ConfigParser
1089+
1090+
1091+def parse_config(path):
1092+ config = {}
1093+
1094+ configp = ConfigParser()
1095+ try:
1096+ configp.read(path)
1097+ except Exception:
1098+ return config
1099+
1100+ for section in configp.sections():
1101+ config_section = {}
1102+ for option in configp.options(section):
1103+ value = configp.get(section, option)
1104+ if ", " in value:
1105+ value = [entry.strip('"').strip()
1106+ for entry in value.split(", ")]
1107+ else:
1108+ value = value.strip('"').strip()
1109+ config_section[option] = value
1110+ config[section] = config_section
1111+
1112+ return config
1113+
1114+
1115+class Config:
1116+ def __init__(self, path=None):
1117+ if not path:
1118+ path = "%s/etc/config" % os.environ.get("SYSTEM_IMAGE_ROOT",
1119+ os.getcwd())
1120+ if not os.path.exists(path):
1121+ path = os.path.realpath(os.path.join(os.path.dirname(__file__),
1122+ "../../etc/config"))
1123+
1124+ self.load_config(path)
1125+
1126+ def load_config(self, path):
1127+ if not os.path.exists(path):
1128+ raise Exception("Configuration file doesn't exist: %s" % path)
1129+
1130+ # Read the config
1131+ config = parse_config(path)
1132+
1133+ if 'global' not in config:
1134+ config['global'] = {}
1135+
1136+ # Set defaults
1137+ self.base_path = config['global'].get(
1138+ "base_path", os.environ.get("SYSTEM_IMAGE_ROOT", os.getcwd()))
1139+
1140+ self.gpg_key_path = config['global'].get(
1141+ "gpg_key_path", os.path.join(self.base_path,
1142+ "secret", "gpg", "keys"))
1143+ if not self.gpg_key_path.startswith("/"):
1144+ self.gpg_key_path = os.path.join(self.base_path, self.gpg_key_path)
1145+
1146+ self.gpg_keyring_path = config['global'].get(
1147+ "gpg_keyring_path", os.path.join(self.base_path,
1148+ "secret", "gpg", "keyrings"))
1149+ if not self.gpg_keyring_path.startswith("/"):
1150+ self.gpg_keyring_path = os.path.join(self.base_path,
1151+ self.gpg_keyring_path)
1152+
1153+ self.publish_path = config['global'].get(
1154+ "publish_path", os.path.join(self.base_path, "www"))
1155+ if not self.publish_path.startswith("/"):
1156+ self.publish_path = os.path.join(self.base_path, self.publish_path)
1157+
1158+ self.state_path = config['global'].get(
1159+ "state_path", os.path.join(self.base_path, "state"))
1160+ if not self.state_path.startswith("/"):
1161+ self.state_path = os.path.join(self.base_path, self.state_path)
1162+
1163+ # Export some more keys as-is
1164+ for key in ("public_fqdn", "public_http_port", "public_https_port"):
1165+ if key not in config['global']:
1166+ continue
1167+
1168+ setattr(self, key, config['global'][key])
1169+
1170+ # Parse the mirror configuration
1171+ self.mirrors = {}
1172+ if "mirrors" in config['global']:
1173+ if not isinstance(config['global']['mirrors'], list):
1174+ config['global']['mirrors'] = [config['global']['mirrors']]
1175+
1176+ if len(config['global']['mirrors']) != 0:
1177+ if "mirror_default" not in config:
1178+ raise KeyError("Missing mirror_default section.")
1179+
1180+ for key in ("ssh_user", "ssh_key", "ssh_port", "ssh_command"):
1181+ if key not in config['mirror_default']:
1182+ raise KeyError("Missing key in mirror_default: %s" %
1183+ key)
1184+
1185+ for entry in config['global']['mirrors']:
1186+ dict_entry = "mirror_%s" % entry
1187+ if dict_entry not in config:
1188+ raise KeyError("Missing mirror section: %s" %
1189+ dict_entry)
1190+
1191+ mirror = type("Mirror", (object,), {})
1192+
1193+ if "ssh_host" not in config[dict_entry]:
1194+ raise KeyError("Missing key in %s: ssh_host" %
1195+ dict_entry)
1196+ else:
1197+ mirror.ssh_host = config[dict_entry]['ssh_host']
1198+
1199+ mirror.ssh_user = config[dict_entry].get(
1200+ "ssh_user", config['mirror_default']['ssh_user'])
1201+ mirror.ssh_key = config[dict_entry].get(
1202+ "ssh_key", config['mirror_default']['ssh_key'])
1203+ if not mirror.ssh_key.startswith("/"):
1204+ mirror.ssh_key = os.path.join(self.base_path,
1205+ mirror.ssh_key)
1206+ mirror.ssh_port = int(config[dict_entry].get(
1207+ "ssh_port", config['mirror_default']['ssh_port']))
1208+ mirror.ssh_command = config[dict_entry].get(
1209+ "ssh_command", config['mirror_default']['ssh_command'])
1210+
1211+ self.mirrors[entry] = mirror
1212+
1213+ # Parse the channel configuration
1214+ self.channels = {}
1215+ if "channels" in config['global']:
1216+ if not isinstance(config['global']['channels'], list):
1217+ config['global']['channels'] = \
1218+ [config['global']['channels']]
1219+
1220+ if len(config['global']['channels']) != 0:
1221+ for entry in config['global']['channels']:
1222+ dict_entry = "channel_%s" % entry
1223+ if dict_entry not in config:
1224+ raise KeyError("Missing channel section: %s" %
1225+ dict_entry)
1226+
1227+ channel = type("Channel", (object,), {})
1228+
1229+ channel.versionbase = int(config[dict_entry].get(
1230+ 'versionbase', 1))
1231+
1232+ channel.type = config[dict_entry].get(
1233+ "type", "manual")
1234+
1235+ channel.fullcount = int(config[dict_entry].get(
1236+ "fullcount", 0))
1237+
1238+ channel.deltabase = [entry]
1239+ if "deltabase" in config[dict_entry]:
1240+ if isinstance(config[dict_entry]["deltabase"],
1241+ list):
1242+ channel.deltabase = \
1243+ config[dict_entry]["deltabase"]
1244+ else:
1245+ channel.deltabase = \
1246+ [config[dict_entry]["deltabase"]]
1247+
1248+ # Parse the file list
1249+ files = config[dict_entry].get("files", [])
1250+ if isinstance(files, str):
1251+ files = [files]
1252+
1253+ channel.files = []
1254+ for file_entry in files:
1255+ if "file_%s" % file_entry not in config[dict_entry]:
1256+ raise KeyError("Missing file entry: %s" %
1257+ "file_%s" % file_entry)
1258+
1259+ fields = (config[dict_entry]
1260+ ["file_%s" % file_entry].split(";"))
1261+
1262+ file_dict = {}
1263+ file_dict['name'] = file_entry
1264+ file_dict['generator'] = fields[0]
1265+ file_dict['arguments'] = []
1266+ if len(fields) > 1:
1267+ file_dict['arguments'] = fields[1:]
1268+
1269+ channel.files.append(file_dict)
1270+
1271+ self.channels[entry] = channel
1272
1273=== added file 'lib/systemimage/diff.py'
1274--- lib/systemimage/diff.py 1970-01-01 00:00:00 +0000
1275+++ lib/systemimage/diff.py 2014-11-14 10:18:00 +0000
1276@@ -0,0 +1,245 @@
1277+# -*- coding: utf-8 -*-
1278+
1279+# Copyright (C) 2013 Canonical Ltd.
1280+# Author: Stéphane Graber <stgraber@ubuntu.com>
1281+
1282+# This program is free software: you can redistribute it and/or modify
1283+# it under the terms of the GNU General Public License as published by
1284+# the Free Software Foundation; version 3 of the License.
1285+#
1286+# This program is distributed in the hope that it will be useful,
1287+# but WITHOUT ANY WARRANTY; without even the implied warranty of
1288+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
1289+# GNU General Public License for more details.
1290+#
1291+# You should have received a copy of the GNU General Public License
1292+# along with this program. If not, see <http://www.gnu.org/licenses/>.
1293+
1294+import os
1295+import sys
1296+import tarfile
1297+import time
1298+
1299+from io import BytesIO
1300+
1301+
1302+def compare_files(fd_source, fd_target):
1303+ """
1304+ Compare two files.
1305+
1306+ Returns True if their content matches.
1307+ Returns False if they don't match.
1308+ Returns None if the files can't be compared.
1309+ """
1310+
1311+ if fd_source == fd_target:
1312+ return True
1313+
1314+ if not fd_source or not fd_target:
1315+ return False
1316+
1317+ return fd_source.read() == fd_target.read()
1318+
1319+
1320+def list_tarfile(tarfile):
1321+ """
1322+ Walk through a tarfile and generate a list of the content.
1323+
1324+ Returns a tuple containing a set and a dict.
1325+ The set is typically used for simple diffs between tarballs.
1326+ The dict is used to easily grab the details of a specific entry.
1327+ """
1328+
1329+ set_content = set()
1330+ dict_content = {}
1331+
1332+ for entry in tarfile:
1333+ if entry.isdir():
1334+ set_content.add((entry.path, 'dir', None))
1335+ dict_content[entry.path] = ('dir', None)
1336+ else:
1337+ fhash = ("%s" % entry.mode,
1338+ "%s" % entry.devmajor,
1339+ "%s" % entry.devminor,
1340+ "%s" % entry.type.decode('utf-8'),
1341+ "%s" % entry.uid,
1342+ "%s" % entry.gid,
1343+ "%s" % entry.size,
1344+ "%s" % entry.mtime)
1345+
1346+ set_content.add((entry.path, 'file', fhash))
1347+ dict_content[entry.path] = ('file', fhash)
1348+
1349+ return (set_content, dict_content)
1350+
1351+
1352+class ImageDiff:
1353+ source_content = None
1354+ target_content = None
1355+ diff = None
1356+
1357+ def __init__(self, source, target):
1358+ self.source_file = tarfile.open(source, 'r:')
1359+ self.target_file = tarfile.open(target, 'r:')
1360+
1361+ def scan_content(self, image):
1362+ """
1363+ Scan the content of an image and return the image tuple.
1364+ This also caches the content for further use.
1365+ """
1366+
1367+ if image not in ("source", "target"):
1368+ raise KeyError("Invalid image '%s'." % image)
1369+
1370+ image_file = getattr(self, "%s_file" % image)
1371+
1372+ content = list_tarfile(image_file)
1373+
1374+ setattr(self, "%s_content" % image, content)
1375+ return content
1376+
1377+ def compare_images(self):
1378+ """
1379+ Compare the file listing of two images and return a set.
1380+ This also caches the diff for further use.
1381+
1382+ The set contains tuples of (path, changetype).
1383+ """
1384+ if not self.source_content:
1385+ self.scan_content("source")
1386+
1387+ if not self.target_content:
1388+ self.scan_content("target")
1389+
1390+ # Find the changes in the two trees
1391+ changes = set()
1392+ for change in self.source_content[0] \
1393+ .symmetric_difference(self.target_content[0]):
1394+ if change[0] not in self.source_content[1]:
1395+ changetype = "add"
1396+ elif change[0] not in self.target_content[1]:
1397+ changetype = "del"
1398+ else:
1399+ changetype = "mod"
1400+ changes.add((change[0], changetype))
1401+
1402+ # Ignore files that only vary in mtime
1403+ # (separate loop to run after de-dupe)
1404+ for change in sorted(changes):
1405+ if change[1] == "mod":
1406+ fstat_source = self.source_content[1][change[0]][1]
1407+ fstat_target = self.target_content[1][change[0]][1]
1408+
1409+ # Skip differences between directories and files
1410+ if not fstat_source or not fstat_target: # pragma: no cover
1411+ continue
1412+
1413+ # Deal with switched hardlinks
1414+ if (fstat_source[0:2] == fstat_target[0:2] and
1415+ fstat_source[3] != fstat_target[3] and
1416+ (fstat_source[3] == "1" or fstat_target[3] == "1") and
1417+ fstat_source[4:5] == fstat_target[4:5] and
1418+ fstat_source[7] == fstat_target[7]):
1419+ source_file = self.source_file.getmember(change[0])
1420+ target_file = self.target_file.getmember(change[0])
1421+ if compare_files(
1422+ self.source_file.extractfile(change[0]),
1423+ self.target_file.extractfile(change[0])):
1424+ changes.remove(change)
1425+ continue
1426+
1427+ # Deal with regular files
1428+ if fstat_source[0:7] == fstat_target[0:7]:
1429+ source_file = self.source_file.getmember(change[0])
1430+ target_file = self.target_file.getmember(change[0])
1431+
1432+ if (source_file.linkpath
1433+ and source_file.linkpath == target_file.linkpath):
1434+ changes.remove(change)
1435+ continue
1436+
1437+ if (source_file.isfile() and target_file.isfile()
1438+ and compare_files(
1439+ self.source_file.extractfile(change[0]),
1440+ self.target_file.extractfile(change[0]))):
1441+ changes.remove(change)
1442+ continue
1443+
1444+ self.diff = changes
1445+ return changes
1446+
1447+ def print_changes(self):
1448+ """
1449+ Simply print the list of changes.
1450+ """
1451+
1452+ if not self.diff:
1453+ self.compare_images()
1454+
1455+ for change in sorted(self.diff):
1456+ print(" - %s (%s)" % (change[0], change[1]))
1457+
1458+ def generate_diff_tarball(self, path):
1459+ """
1460+ Generate a tarball containing all files that are
1461+ different between the source and target iamge as well
1462+ as a file listing all removals.
1463+ """
1464+
1465+ if not self.diff:
1466+ self.compare_images()
1467+
1468+ output = tarfile.open(path, 'w:')
1469+
1470+ # List both deleted files and modified files in the removal list
1471+ # that's needed to allow file type change (e.g. directory to symlink)
1472+ removed_files_list = sorted([entry[0] for entry in self.diff
1473+ if entry[1] in ("del", "mod")])
1474+
1475+ removed_files = "%s\n" % "\n".join(removed_files_list)
1476+
1477+ if sys.version_info.major > 2: # pragma: no cover
1478+ removed_files = removed_files.encode('utf-8')
1479+
1480+ removals = tarfile.TarInfo()
1481+ removals.name = "removed"
1482+ removals.size = len(removed_files)
1483+ removals.mtime = int(time.strftime("%s", time.localtime()))
1484+ removals.uname = "root"
1485+ removals.gname = "root"
1486+
1487+ output.addfile(removals, BytesIO(removed_files))
1488+
1489+ # Copy all the added and modified
1490+ added = []
1491+ for name, action in sorted(self.diff):
1492+ if action == 'del':
1493+ continue
1494+
1495+ if name in added:
1496+ continue
1497+
1498+ newfile = self.target_file.getmember(name)
1499+ if newfile.islnk():
1500+ if newfile.linkname.startswith("system/"):
1501+ targetfile_path = newfile.linkname
1502+ else:
1503+ targetfile_path = os.path.normpath(os.path.join(
1504+ os.path.dirname(newfile.name), newfile.linkname))
1505+
1506+ targetfile = self.target_file.getmember(targetfile_path)
1507+
1508+ if ((targetfile_path, 'add') in self.diff or
1509+ (targetfile_path, 'mod') in self.diff) and \
1510+ targetfile_path not in added:
1511+ fileptr = self.target_file.extractfile(targetfile)
1512+ output.addfile(targetfile, fileptr)
1513+ added.append(targetfile.name)
1514+
1515+ fileptr = None
1516+ if newfile.isfile():
1517+ fileptr = self.target_file.extractfile(name)
1518+ output.addfile(newfile, fileobj=fileptr)
1519+ added.append(newfile.name)
1520+
1521+ output.close()
1522
1523=== added file 'lib/systemimage/generators.py'
1524--- lib/systemimage/generators.py 1970-01-01 00:00:00 +0000
1525+++ lib/systemimage/generators.py 2014-11-14 10:18:00 +0000
1526@@ -0,0 +1,1287 @@
1527+# -*- coding: utf-8 -*-
1528+
1529+# Copyright (C) 2013 Canonical Ltd.
1530+# Author: Stéphane Graber <stgraber@ubuntu.com>
1531+
1532+# This program is free software: you can redistribute it and/or modify
1533+# it under the terms of the GNU General Public License as published by
1534+# the Free Software Foundation; version 3 of the License.
1535+#
1536+# This program is distributed in the hope that it will be useful,
1537+# but WITHOUT ANY WARRANTY; without even the implied warranty of
1538+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
1539+# GNU General Public License for more details.
1540+#
1541+# You should have received a copy of the GNU General Public License
1542+# along with this program. If not, see <http://www.gnu.org/licenses/>.
1543+
1544+from hashlib import sha256
1545+from systemimage import diff, gpg, tree, tools
1546+import json
1547+import os
1548+import socket
1549+import shutil
1550+import subprocess
1551+import tarfile
1552+import tempfile
1553+import time
1554+
1555+try:
1556+ from urllib.request import urlopen, urlretrieve
1557+except ImportError: # pragma: no cover
1558+ from urllib import urlopen, urlretrieve
1559+
1560+# Global
1561+CACHE = {}
1562+
1563+
1564+def list_versions(cdimage_path):
1565+ return sorted([version for version in os.listdir(cdimage_path)
1566+ if version not in ("pending", "current")],
1567+ reverse=True)
1568+
1569+
1570+def root_ownership(tarinfo):
1571+ tarinfo.mode = 0o644
1572+ tarinfo.mtime = int(time.strftime("%s", time.localtime()))
1573+ tarinfo.uname = "root"
1574+ tarinfo.gname = "root"
1575+ return tarinfo
1576+
1577+
1578+def unpack_arguments(arguments):
1579+ """
1580+ Takes a string representing comma-separated key=value options and
1581+ returns a dict.
1582+ """
1583+ arg_dict = {}
1584+
1585+ for option in arguments.split(","):
1586+ fields = option.split("=")
1587+ if len(fields) != 2:
1588+ continue
1589+
1590+ arg_dict[fields[0]] = fields[1]
1591+
1592+ return arg_dict
1593+
1594+
1595+def generate_delta(conf, source_path, target_path):
1596+ """
1596+ Take two .tar.xz files and generate a third file, stored in the pool.
1598+ The path to the pool file is then returned and <path>.asc is also
1599+ generated using the default signing key.
1600+ """
1601+ source_filename = source_path.split("/")[-1].replace(".tar.xz", "")
1602+ target_filename = target_path.split("/")[-1].replace(".tar.xz", "")
1603+
1604+ # FIXME: This is a bit of a hack, it'd be better not to have to hardcode
1605+ # that kind of stuff...
1606+ if (source_filename.startswith("version-")
1607+ and target_filename.startswith("version-")):
1608+ return target_path
1609+
1610+ if (source_filename.startswith("keyring-")
1611+ and target_filename.startswith("keyring-")):
1612+ return target_path
1613+
1614+ # Now for everything else
1615+ path = os.path.realpath(os.path.join(conf.publish_path, "pool",
1616+ "%s.delta-%s.tar.xz" %
1617+ (target_filename, source_filename)))
1618+
1619+ # Return pre-existing entries
1620+ if os.path.exists(path):
1621+ return path
1622+
1623+ # Create the pool if it doesn't exist
1624+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
1625+ os.makedirs(os.path.join(conf.publish_path, "pool"))
1626+
1627+ # Generate the diff
1628+ tempdir = tempfile.mkdtemp()
1629+ tools.xz_uncompress(source_path, os.path.join(tempdir, "source.tar"))
1630+ tools.xz_uncompress(target_path, os.path.join(tempdir, "target.tar"))
1631+
1632+ imagediff = diff.ImageDiff(os.path.join(tempdir, "source.tar"),
1633+ os.path.join(tempdir, "target.tar"))
1634+
1635+ imagediff.generate_diff_tarball(os.path.join(tempdir, "output.tar"))
1636+ tools.xz_compress(os.path.join(tempdir, "output.tar"), path)
1637+ shutil.rmtree(tempdir)
1638+
1639+ # Sign the result
1640+ gpg.sign_file(conf, "image-signing", path)
1641+
1642+ # Generate the metadata file
1643+ metadata = {}
1644+ metadata['generator'] = "delta"
1645+ metadata['source'] = {}
1646+ metadata['target'] = {}
1647+
1648+ if os.path.exists(source_path.replace(".tar.xz", ".json")):
1649+ with open(source_path.replace(".tar.xz", ".json"), "r") as fd:
1650+ metadata['source'] = json.loads(fd.read())
1651+
1652+ if os.path.exists(target_path.replace(".tar.xz", ".json")):
1653+ with open(target_path.replace(".tar.xz", ".json"), "r") as fd:
1654+ metadata['target'] = json.loads(fd.read())
1655+
1656+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
1657+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
1658+ indent=4, separators=(',', ': ')))
1659+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
1660+
1661+ return path
1662+
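# Example (illustrative, hashes shortened): a delta between two pool
# tarballs is named after both source and target, and is published
# alongside its signature and metadata:
#
#   source: pool/ubuntu-aaaa.tar.xz
#   target: pool/ubuntu-bbbb.tar.xz
#   delta:  pool/ubuntu-bbbb.delta-ubuntu-aaaa.tar.xz (+ .asc, .json)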
1663+
1664+def generate_file(conf, generator, arguments, environment):
1665+ """
1666+ Dispatcher for the various generators and importers.
1667+ It calls the right generator, which signs the generated file
1668+ before returning the path.
1669+ """
1670+
1671+ if generator == "version":
1672+ path = generate_file_version(conf, arguments, environment)
1673+ elif generator == "cdimage-device":
1674+ path = generate_file_cdimage_device_android(
1675+ conf, arguments, environment)
1676+ elif generator == "cdimage-ubuntu":
1677+ path = generate_file_cdimage_ubuntu(conf, arguments, environment)
1678+ elif generator == "cdimage-custom":
1679+ path = generate_file_cdimage_custom(conf, arguments, environment)
1680+ elif generator == "cdimage-device-raw":
1681+ path = generate_file_cdimage_device_raw(conf, arguments, environment)
1682+ elif generator == "http":
1683+ path = generate_file_http(conf, arguments, environment)
1684+ elif generator == "keyring":
1685+ path = generate_file_keyring(conf, arguments, environment)
1686+ elif generator == "system-image":
1687+ path = generate_file_system_image(conf, arguments, environment)
1688+ elif generator == "remote-system-image":
1689+ path = generate_file_remote_system_image(conf, arguments, environment)
1690+ else:
1691+ raise Exception("Invalid generator: %s" % generator)
1692+
1693+ return path
1694+
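# Example (illustrative): a hypothetical dispatcher call for the new
# raw device generator; the arguments list is generator-specific:
#
#   path = generate_file(conf, "cdimage-device-raw",
#                        ["/srv/cdimage/ubuntu-core/daily-preinstalled",
#                         "vivid"], environment)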
1695+
1696+def generate_file_cdimage_device_android(conf, arguments, environment):
1697+ """
1698+ Scan a cdimage tree for new device files.
1699+ """
1700+
1701+ # We need at least a path and a series
1702+ if len(arguments) < 2:
1703+ return None
1704+
1705+ # Read the arguments
1706+ cdimage_path = arguments[0]
1707+ series = arguments[1]
1708+
1709+ options = {}
1710+ if len(arguments) > 2:
1711+ options = unpack_arguments(arguments[2])
1712+
1713+ boot_arch = "armhf"
1714+ recovery_arch = "armel"
1715+ system_arch = "armel"
1716+ if environment['device_name'] in ("generic_x86", "generic_i386"):
1717+ boot_arch = "i386"
1718+ recovery_arch = "i386"
1719+ system_arch = "i386"
1720+ elif environment['device_name'] in ("generic_amd64",):
1721+ boot_arch = "amd64"
1722+ recovery_arch = "amd64"
1723+ system_arch = "amd64"
1724+
1725+ # Check that the directory exists
1726+ if not os.path.exists(cdimage_path):
1727+ return None
1728+
1729+ for version in list_versions(cdimage_path):
1730+ # Skip directories without checksums
1731+ if not os.path.exists(os.path.join(cdimage_path, version,
1732+ "SHA256SUMS")):
1733+ continue
1734+
1735+ # Check for all the ANDROID files
1736+ boot_path = os.path.join(cdimage_path, version,
1737+ "%s-preinstalled-boot-%s+%s.img" %
1738+ (series, boot_arch,
1739+ environment['device_name']))
1740+ if not os.path.exists(boot_path):
1741+ continue
1742+
1743+ recovery_path = os.path.join(cdimage_path, version,
1744+ "%s-preinstalled-recovery-%s+%s.img" %
1745+ (series, recovery_arch,
1746+ environment['device_name']))
1747+ if not os.path.exists(recovery_path):
1748+ continue
1749+
1750+ system_path = os.path.join(cdimage_path, version,
1751+ "%s-preinstalled-system-%s+%s.img" %
1752+ (series, system_arch,
1753+ environment['device_name']))
1754+ if not os.path.exists(system_path):
1755+ continue
1756+
1757+ # Check if we should only import tested images
1758+ if options.get("import", "any") == "good":
1759+ if not os.path.exists(os.path.join(cdimage_path, version,
1760+ ".marked_good")):
1761+ continue
1762+
1763+ # Set the version_detail string
1764+ version_detail = "device=%s" % version
1765+
1766+ # Extract the hashes
1767+ boot_hash = None
1768+ recovery_hash = None
1769+ system_hash = None
1770+ with open(os.path.join(cdimage_path, version,
1771+ "SHA256SUMS"), "r") as fd:
1772+ for line in fd:
1773+ line = line.strip()
1774+ if line.endswith(boot_path.split("/")[-1]):
1775+ boot_hash = line.split()[0]
1776+ elif line.endswith(recovery_path.split("/")[-1]):
1777+ recovery_hash = line.split()[0]
1778+ elif line.endswith(system_path.split("/")[-1]):
1779+ system_hash = line.split()[0]
1780+
1781+ if boot_hash and recovery_hash and system_hash:
1782+ break
1783+
1784+ if not boot_hash or not recovery_hash or not system_hash:
1785+ continue
1786+
1787+ hash_string = "%s/%s/%s" % (boot_hash, recovery_hash, system_hash)
1788+ global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
1789+
1790+ # Generate the path
1791+ path = os.path.join(conf.publish_path, "pool",
1792+ "device-%s.tar.xz" % global_hash)
1793+
1794+ # Return pre-existing entries
1795+ if os.path.exists(path):
1796+ # Get the real version number (in case it got copied)
1797+ if os.path.exists(path.replace(".tar.xz", ".json")):
1798+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
1799+ metadata = json.loads(fd.read())
1800+
1801+ if "version_detail" in metadata:
1802+ version_detail = metadata['version_detail']
1803+
1804+ environment['version_detail'].append(version_detail)
1805+ return path
1806+
1807+ temp_dir = tempfile.mkdtemp()
1808+
1809+ # Generate a new tarball
1810+ target_tarball = tarfile.open(os.path.join(temp_dir, "target.tar"),
1811+ "w:")
1812+
1813+ # system image
1814+ # # convert to raw image
1815+ system_img = os.path.join(temp_dir, "system.img")
1816+ with open(os.path.devnull, "w") as devnull:
1817+ subprocess.call(["simg2img", system_path, system_img],
1818+ stdout=devnull)
1819+
1820+ # # shrink to minimal size
1821+ with open(os.path.devnull, "w") as devnull:
1822+ subprocess.call(["resize2fs", "-M", system_img],
1823+ stdout=devnull, stderr=devnull)
1824+
1825+ # # include in tarball
1826+ target_tarball.add(system_img,
1827+ arcname="system/var/lib/lxc/android/system.img",
1828+ filter=root_ownership)
1829+
1830+ # boot image
1831+ target_tarball.add(boot_path, arcname="partitions/boot.img",
1832+ filter=root_ownership)
1833+
1834+ # recovery image
1835+ target_tarball.add(recovery_path,
1836+ arcname="partitions/recovery.img",
1837+ filter=root_ownership)
1838+
1839+ target_tarball.close()
1840+
1841+ # Create the pool if it doesn't exist
1842+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
1843+ os.makedirs(os.path.join(conf.publish_path, "pool"))
1844+
1845+ # Compress the target tarball and sign it
1846+ tools.xz_compress(os.path.join(temp_dir, "target.tar"), path)
1847+ gpg.sign_file(conf, "image-signing", path)
1848+
1849+ # Generate the metadata file
1850+ metadata = {}
1851+ metadata['generator'] = "cdimage-device"
1852+ metadata['version'] = version
1853+ metadata['version_detail'] = version_detail
1854+ metadata['series'] = series
1855+ metadata['device'] = environment['device_name']
1856+ metadata['boot_path'] = boot_path
1857+ metadata['boot_checksum'] = boot_hash
1858+ metadata['recovery_path'] = recovery_path
1859+ metadata['recovery_checksum'] = recovery_hash
1860+ metadata['system_path'] = system_path
1861+ metadata['system_checksum'] = system_hash
1862+
1863+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
1864+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
1865+ indent=4, separators=(',', ': ')))
1866+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
1867+
1868+ # Cleanup
1869+ shutil.rmtree(temp_dir)
1870+
1871+ environment['version_detail'].append(version_detail)
1872+ return path
1873+
1874+ return None
1875+
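# Example (illustrative): for series "vivid" and the hypothetical
# device_name "mako", each dated directory is expected to provide:
#
#   SHA256SUMS
#   vivid-preinstalled-boot-armhf+mako.img
#   vivid-preinstalled-recovery-armel+mako.img
#   vivid-preinstalled-system-armel+mako.img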
1876+
1877+def generate_file_cdimage_ubuntu(conf, arguments, environment):
1878+ """
1879+ Scan a cdimage tree for new ubuntu files.
1880+ """
1881+
1882+ # We need at least a path and a series
1883+ if len(arguments) < 2:
1884+ return None
1885+
1886+ # Read the arguments
1887+ cdimage_path = arguments[0]
1888+ series = arguments[1]
1889+
1890+ options = {}
1891+ if len(arguments) > 2:
1892+ options = unpack_arguments(arguments[2])
1893+
1894+ arch = "armhf"
1895+ if environment['device_name'] in ("generic_x86", "generic_i386"):
1896+ arch = "i386"
1897+ elif environment['device_name'] in ("generic_amd64",):
1898+ arch = "amd64"
1899+
1900+ # Check that the directory exists
1901+ if not os.path.exists(cdimage_path):
1902+ return None
1903+
1904+ for version in list_versions(cdimage_path):
1905+ # Skip directories without checksums
1906+ if not os.path.exists(os.path.join(cdimage_path, version,
1907+ "SHA256SUMS")):
1908+ continue
1909+
1910+ # Check for the rootfs
1911+ rootfs_path = os.path.join(cdimage_path, version,
1912+ "%s-preinstalled-%s-%s.tar.gz" %
1913+ (series, options.get("product", "touch"),
1914+ arch))
1915+ if not os.path.exists(rootfs_path):
1916+ continue
1917+
1918+ # Check if we should only import tested images
1919+ if options.get("import", "any") == "good":
1920+ if not os.path.exists(os.path.join(cdimage_path, version,
1921+ ".marked_good")):
1922+ continue
1923+
1924+ # Set the version_detail string
1925+ version_detail = "ubuntu=%s" % version
1926+
1927+ # Extract the hash
1928+ rootfs_hash = None
1929+ with open(os.path.join(cdimage_path, version,
1930+ "SHA256SUMS"), "r") as fd:
1931+ for line in fd:
1932+ line = line.strip()
1933+ if line.endswith(rootfs_path.split("/")[-1]):
1934+ rootfs_hash = line.split()[0]
1935+ break
1936+
1937+ if not rootfs_hash:
1938+ continue
1939+
1940+ # Generate the path
1941+ path = os.path.join(conf.publish_path, "pool",
1942+ "ubuntu-%s.tar.xz" % rootfs_hash)
1943+
1944+ # Return pre-existing entries
1945+ if os.path.exists(path):
1946+ # Get the real version number (in case it got copied)
1947+ if os.path.exists(path.replace(".tar.xz", ".json")):
1948+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
1949+ metadata = json.loads(fd.read())
1950+
1951+ if "version_detail" in metadata:
1952+ version_detail = metadata['version_detail']
1953+
1954+ environment['version_detail'].append(version_detail)
1955+ return path
1956+
1957+ temp_dir = tempfile.mkdtemp()
1958+
1959+ # Unpack the source tarball
1960+ tools.gzip_uncompress(rootfs_path, os.path.join(temp_dir,
1961+ "source.tar"))
1962+
1963+ # Generate a new shifted tarball
1964+ source_tarball = tarfile.open(os.path.join(temp_dir, "source.tar"),
1965+ "r:")
1966+ target_tarball = tarfile.open(os.path.join(temp_dir, "target.tar"),
1967+ "w:")
1968+
1969+ added = []
1970+ for entry in source_tarball:
1971+ # FIXME: Will need to be done on the real rootfs
1972+ # Skip some files
1973+ if entry.name in ("SWAP.swap", "etc/mtab"):
1974+ continue
1975+
1976+ fileptr = None
1977+ if entry.isfile():
1978+ try:
1979+ fileptr = source_tarball.extractfile(entry.name)
1980+ except KeyError: # pragma: no cover
1981+ pass
1982+
1983+ # Update hardlinks to point to the right target
1984+ if entry.islnk():
1985+ entry.linkname = "system/%s" % entry.linkname
1986+
1987+ entry.name = "system/%s" % entry.name
1988+ target_tarball.addfile(entry, fileobj=fileptr)
1989+ added.append(entry.name)
1990+
1991+ if options.get("product", "touch") == "touch":
1992+ # FIXME: Will need to be done on the real rootfs
1993+ # Add some symlinks and directories
1994+ # # /android
1995+ new_file = tarfile.TarInfo()
1996+ new_file.type = tarfile.DIRTYPE
1997+ new_file.name = "system/android"
1998+ new_file.mode = 0o755
1999+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2000+ new_file.uname = "root"
2001+ new_file.gname = "root"
2002+ target_tarball.addfile(new_file)
2003+
2004+ # # Android partitions
2005+ for android_path in ("cache", "data", "factory", "firmware",
2006+ "persist", "system"):
2007+ new_file = tarfile.TarInfo()
2008+ new_file.type = tarfile.SYMTYPE
2009+ new_file.name = "system/%s" % android_path
2010+ new_file.linkname = "/android/%s" % android_path
2011+ new_file.mode = 0o755
2012+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2013+ new_file.uname = "root"
2014+ new_file.gname = "root"
2015+ target_tarball.addfile(new_file)
2016+
2017+ # # /vendor
2018+ new_file = tarfile.TarInfo()
2019+ new_file.type = tarfile.SYMTYPE
2020+ new_file.name = "system/vendor"
2021+ new_file.linkname = "/android/system/vendor"
2022+ new_file.mode = 0o755
2023+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2024+ new_file.uname = "root"
2025+ new_file.gname = "root"
2026+ target_tarball.addfile(new_file)
2027+
2028+ # # /userdata
2029+ new_file = tarfile.TarInfo()
2030+ new_file.type = tarfile.DIRTYPE
2031+ new_file.name = "system/userdata"
2032+ new_file.mode = 0o755
2033+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2034+ new_file.uname = "root"
2035+ new_file.gname = "root"
2036+ target_tarball.addfile(new_file)
2037+
2038+ # # /etc/mtab
2039+ new_file = tarfile.TarInfo()
2040+ new_file.type = tarfile.SYMTYPE
2041+ new_file.name = "system/etc/mtab"
2042+ new_file.linkname = "/proc/mounts"
2043+ new_file.mode = 0o444
2044+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2045+ new_file.uname = "root"
2046+ new_file.gname = "root"
2047+ target_tarball.addfile(new_file)
2048+
2049+ # # /lib/modules
2050+ new_file = tarfile.TarInfo()
2051+ new_file.type = tarfile.DIRTYPE
2052+ new_file.name = "system/lib/modules"
2053+ new_file.mode = 0o755
2054+ new_file.mtime = int(time.strftime("%s", time.localtime()))
2055+ new_file.uname = "root"
2056+ new_file.gname = "root"
2057+ target_tarball.addfile(new_file)
2058+
2059+ source_tarball.close()
2060+ target_tarball.close()
2061+
2062+ # Create the pool if it doesn't exist
2063+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
2064+ os.makedirs(os.path.join(conf.publish_path, "pool"))
2065+
2066+ # Compress the target tarball and sign it
2067+ tools.xz_compress(os.path.join(temp_dir, "target.tar"), path)
2068+ gpg.sign_file(conf, "image-signing", path)
2069+
2070+ # Generate the metadata file
2071+ metadata = {}
2072+ metadata['generator'] = "cdimage-ubuntu"
2073+ metadata['version'] = version
2074+ metadata['version_detail'] = version_detail
2075+ metadata['series'] = series
2076+ metadata['rootfs_path'] = rootfs_path
2077+ metadata['rootfs_checksum'] = rootfs_hash
2078+
2079+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2080+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2081+ indent=4, separators=(',', ': ')))
2082+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2083+
2084+ # Cleanup
2085+ shutil.rmtree(temp_dir)
2086+
2087+ environment['version_detail'].append(version_detail)
2088+ return path
2089+
2090+ return None
2091+
2092+
2093+def generate_file_cdimage_custom(conf, arguments, environment):
2094+ """
2095+ Scan a cdimage tree for new custom files.
2096+ """
2097+
2098+ # We need at least a path and a series
2099+ if len(arguments) < 2:
2100+ return None
2101+
2102+ # Read the arguments
2103+ cdimage_path = arguments[0]
2104+ series = arguments[1]
2105+
2106+ options = {}
2107+ if len(arguments) > 2:
2108+ options = unpack_arguments(arguments[2])
2109+
2110+ arch = "armhf"
2111+ if environment['device_name'] in ("generic_x86", "generic_i386"):
2112+ arch = "i386"
2113+ elif environment['device_name'] in ("generic_amd64",):
2114+ arch = "amd64"
2115+
2116+ # Check that the directory exists
2117+ if not os.path.exists(cdimage_path):
2118+ return None
2119+
2120+ for version in list_versions(cdimage_path):
2121+ # Skip directories without checksums
2122+ if not os.path.exists(os.path.join(cdimage_path, version,
2123+ "SHA256SUMS")):
2124+ continue
2125+
2126+ # Check for the custom tarball
2127+ custom_path = os.path.join(cdimage_path, version,
2128+ "%s-preinstalled-%s-%s.custom.tar.gz" %
2129+ (series, options.get("product", "touch"),
2130+ arch))
2131+ if not os.path.exists(custom_path):
2132+ continue
2133+
2134+ # Check if we should only import tested images
2135+ if options.get("import", "any") == "good":
2136+ if not os.path.exists(os.path.join(cdimage_path, version,
2137+ ".marked_good")):
2138+ continue
2139+
2140+ # Set the version_detail string
2141+ version_detail = "custom=%s" % version
2142+
2143+ # Extract the hash
2144+ custom_hash = None
2145+ with open(os.path.join(cdimage_path, version,
2146+ "SHA256SUMS"), "r") as fd:
2147+ for line in fd:
2148+ line = line.strip()
2149+ if line.endswith(custom_path.split("/")[-1]):
2150+ custom_hash = line.split()[0]
2151+ break
2152+
2153+ if not custom_hash:
2154+ continue
2155+
2156+ # Generate the path
2157+ path = os.path.join(conf.publish_path, "pool",
2158+ "custom-%s.tar.xz" % custom_hash)
2159+
2160+ # Return pre-existing entries
2161+ if os.path.exists(path):
2162+ # Get the real version number (in case it got copied)
2163+ if os.path.exists(path.replace(".tar.xz", ".json")):
2164+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
2165+ metadata = json.loads(fd.read())
2166+
2167+ if "version_detail" in metadata:
2168+ version_detail = metadata['version_detail']
2169+
2170+ environment['version_detail'].append(version_detail)
2171+ return path
2172+
2173+ temp_dir = tempfile.mkdtemp()
2174+
2175+ # Unpack the source tarball
2176+ tools.gzip_uncompress(custom_path, os.path.join(temp_dir,
2177+ "source.tar"))
2178+
2179+ # Create the pool if it doesn't exist
2180+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
2181+ os.makedirs(os.path.join(conf.publish_path, "pool"))
2182+
2183+ # Compress the target tarball and sign it
2184+ tools.xz_compress(os.path.join(temp_dir, "source.tar"), path)
2185+ gpg.sign_file(conf, "image-signing", path)
2186+
2187+ # Generate the metadata file
2188+ metadata = {}
2189+ metadata['generator'] = "cdimage-custom"
2190+ metadata['version'] = version
2191+ metadata['version_detail'] = version_detail
2192+ metadata['series'] = series
2193+ metadata['custom_path'] = custom_path
2194+ metadata['custom_checksum'] = custom_hash
2195+
2196+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2197+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2198+ indent=4, separators=(',', ': ')))
2199+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2200+
2201+ # Cleanup
2202+ shutil.rmtree(temp_dir)
2203+
2204+ environment['version_detail'].append(version_detail)
2205+ return path
2206+
2207+ return None
2208+
2209+
2210+def generate_file_cdimage_device_raw(conf, arguments, environment):
2211+ """
2212+ Scan a cdimage tree for new device files that can be unpacked as-is.
2213+ """
2214+
2215+ # We need at least a path and a series
2216+ if len(arguments) < 2:
2217+ return None
2218+
2219+ # Read the arguments
2220+ cdimage_path = arguments[0]
2221+ series = arguments[1]
2222+
2223+ options = {}
2224+ if len(arguments) > 2:
2225+ options = unpack_arguments(arguments[2])
2226+
2227+ arch = "armhf"
2228+ if environment['device_name'] in ("generic_x86", "generic_i386"):
2229+ arch = "i386"
2230+ elif environment['device_name'] in ("generic_amd64",):
2231+ arch = "amd64"
2232+
2233+ # Check that the directory exists
2234+ if not os.path.exists(cdimage_path):
2235+ return None
2236+
2237+ for version in list_versions(cdimage_path):
2238+ # Skip directories without checksums
2239+ if not os.path.exists(os.path.join(cdimage_path, version,
2240+ "SHA256SUMS")):
2241+ continue
2242+
2243+ # Check for the raw device tarball
2244+ raw_device_path = os.path.join(cdimage_path, version,
2245+ "%s-preinstalled-%s-%s.device.tar.gz" %
2246+ (series, options.get("product", "core"),
2247+ arch))
2248+ if not os.path.exists(raw_device_path):
2249+ continue
2250+
2251+ # Check if we should only import tested images
2252+ if options.get("import", "any") == "good":
2253+ if not os.path.exists(os.path.join(cdimage_path, version,
2254+ ".marked_good")):
2255+ continue
2256+
2257+ # Set the version_detail string
2258+ version_detail = "raw-device=%s" % version
2259+
2260+ # Extract the hash
2261+ raw_device_hash = None
2262+ with open(os.path.join(cdimage_path, version,
2263+ "SHA256SUMS"), "r") as fd:
2264+ for line in fd:
2265+ line = line.strip()
2266+ if line.endswith(raw_device_path.split("/")[-1]):
2267+ raw_device_hash = line.split()[0]
2268+ break
2269+
2270+ if not raw_device_hash:
2271+ continue
2272+
2273+ # Generate the path
2274+ path = os.path.join(conf.publish_path, "pool",
2275+ "device-%s.tar.xz" % raw_device_hash)
2276+
2277+ # Return pre-existing entries
2278+ if os.path.exists(path):
2279+ # Get the real version number (in case it got copied)
2280+ if os.path.exists(path.replace(".tar.xz", ".json")):
2281+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
2282+ metadata = json.loads(fd.read())
2283+
2284+ if "version_detail" in metadata:
2285+ version_detail = metadata['version_detail']
2286+
2287+ environment['version_detail'].append(version_detail)
2288+ return path
2289+
2290+ temp_dir = tempfile.mkdtemp()
2291+
2292+ # Unpack the source tarball
2293+ tools.gzip_uncompress(raw_device_path, os.path.join(temp_dir,
2294+ "source.tar"))
2295+
2296+ # Create the pool if it doesn't exist
2297+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
2298+ os.makedirs(os.path.join(conf.publish_path, "pool"))
2299+
2300+ # Compress the target tarball and sign it
2301+ tools.xz_compress(os.path.join(temp_dir, "source.tar"), path)
2302+ gpg.sign_file(conf, "image-signing", path)
2303+
2304+ # Generate the metadata file
2305+ metadata = {}
2306+ metadata['generator'] = "cdimage-device-raw"
2307+ metadata['version'] = version
2308+ metadata['version_detail'] = version_detail
2309+ metadata['series'] = series
2310+ metadata['raw_device_path'] = raw_device_path
2311+ metadata['raw_device_checksum'] = raw_device_hash
2312+
2313+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2314+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2315+ indent=4, separators=(',', ': ')))
2316+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2317+
2318+ # Cleanup
2319+ shutil.rmtree(temp_dir)
2320+
2321+ environment['version_detail'].append(version_detail)
2322+ return path
2323+
2324+ return None
2325+
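# Example (illustrative): with the default "core" product, series
# "vivid" and device_name "generic_amd64", the generator imports:
#
#   vivid-preinstalled-core-amd64.device.tar.gz
#
# and republishes it as pool/device-<sha256>.tar.xz plus .asc/.json.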
2326+
2327+def generate_file_http(conf, arguments, environment):
2328+ """
2329+ Grab, cache and return a file using http/https.
2330+ """
2331+
2332+ # We need at least a URL
2333+ if len(arguments) == 0:
2334+ return None
2335+
2336+ # Read the arguments
2337+ url = arguments[0]
2338+
2339+ options = {}
2340+ if len(arguments) > 1:
2341+ options = unpack_arguments(arguments[1])
2342+
2343+ path = None
2344+ version = None
2345+
2346+ if "http_%s" % url in CACHE:
2347+ version = CACHE['http_%s' % url]
2348+
2349+ # Get the version/build number
2350+ if "monitor" in options or version:
2351+ if not version:
2352+ # Grab the current version number
2353+ old_timeout = socket.getdefaulttimeout()
2354+ socket.setdefaulttimeout(5)
2355+ try:
2356+ version = urlopen(options['monitor']).read().decode().strip()
2357+ except socket.timeout:
2358+ return None
2359+ except IOError:
2360+ return None
2361+ socket.setdefaulttimeout(old_timeout)
2362+
2363+ # Validate the version number
2364+ if not version or len(version.split("\n")) > 1:
2365+ return None
2366+
2367+ # Push the result in the cache
2368+ CACHE['http_%s' % url] = version
2369+
2370+ # Set version_detail
2371+ version_detail = "%s=%s" % (options.get("name", "http"), version)
2372+
2373+ # FIXME: can be dropped once all the non-hashed tarballs are gone
2374+ old_path = os.path.realpath(os.path.join(conf.publish_path, "pool",
2375+ "%s-%s.tar.xz" %
2376+ (options.get("name", "http"),
2377+ version)))
2378+ if os.path.exists(old_path):
2379+ # Get the real version number (in case it got copied)
2380+ if os.path.exists(old_path.replace(".tar.xz", ".json")):
2381+ with open(old_path.replace(".tar.xz", ".json"), "r") as fd:
2382+ metadata = json.loads(fd.read())
2383+
2384+ if "version_detail" in metadata:
2385+ version_detail = metadata['version_detail']
2386+
2387+ environment['version_detail'].append(version_detail)
2388+ return old_path
2389+
2390+ # Build the path, hashing together the URL and version
2391+ hash_string = "%s:%s" % (url, version)
2392+ global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
2393+ path = os.path.realpath(os.path.join(conf.publish_path, "pool",
2394+ "%s-%s.tar.xz" %
2395+ (options.get("name", "http"),
2396+ global_hash)))
2397+
2398+ # Return pre-existing entries
2399+ if os.path.exists(path):
2400+ # Get the real version number (in case it got copied)
2401+ if os.path.exists(path.replace(".tar.xz", ".json")):
2402+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
2403+ metadata = json.loads(fd.read())
2404+
2405+ if "version_detail" in metadata:
2406+ version_detail = metadata['version_detail']
2407+
2408+ environment['version_detail'].append(version_detail)
2409+ return path
2410+
2411+ # Grab the real thing
2412+ tempdir = tempfile.mkdtemp()
2413+ old_timeout = socket.getdefaulttimeout()
2414+ socket.setdefaulttimeout(5)
2415+ try:
2416+ urlretrieve(url, os.path.join(tempdir, "download"))
2417+ except socket.timeout:
2418+ shutil.rmtree(tempdir)
2419+ return None
2420+ except IOError:
2421+ shutil.rmtree(tempdir)
2422+ return None
2423+ socket.setdefaulttimeout(old_timeout)
2424+
2425+ # Hash it if we don't have a version number
2426+ if not version:
2427+ # Hash the file
2428+ with open(os.path.join(tempdir, "download"), "rb") as fd:
2429+ version = sha256(fd.read()).hexdigest()
2430+
2431+ # Set version_detail
2432+ version_detail = "%s=%s" % (options.get("name", "http"), version)
2433+
2434+ # Push the result in the cache
2435+ CACHE['http_%s' % url] = version
2436+
2437+ # Build the path
2438+ path = os.path.realpath(os.path.join(conf.publish_path, "pool",
2439+ "%s-%s.tar.xz" %
2440+ (options.get("name", "http"),
2441+ version)))
2442+ # Return pre-existing entries
2443+ if os.path.exists(path):
2444+ # Get the real version number (in case it got copied)
2445+ if os.path.exists(path.replace(".tar.xz", ".json")):
2446+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
2447+ metadata = json.loads(fd.read())
2448+
2449+ if "version_detail" in metadata:
2450+ version_detail = metadata['version_detail']
2451+
2452+ environment['version_detail'].append(version_detail)
2453+ shutil.rmtree(tempdir)
2454+ return path
2455+
2456+ # Create the pool if it doesn't exist
2457+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
2458+ os.makedirs(os.path.join(conf.publish_path, "pool"))
2459+
2460+ # Move the file to the pool and sign it
2461+ shutil.move(os.path.join(tempdir, "download"), path)
2462+ gpg.sign_file(conf, "image-signing", path)
2463+
2464+ # Generate the metadata file
2465+ metadata = {}
2466+ metadata['generator'] = "http"
2467+ metadata['version'] = version
2468+ metadata['version_detail'] = version_detail
2469+ metadata['url'] = url
2470+
2471+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2472+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2473+ indent=4, separators=(',', ': ')))
2474+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2475+
2476+ # Cleanup
2477+ shutil.rmtree(tempdir)
2478+
2479+ environment['version_detail'].append(version_detail)
2480+ return path
2481+
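# Example (illustrative, URLs hypothetical): the http generator takes
# a URL plus an optional "monitor=<url>,name=<prefix>" option string:
#
#   arguments = ["http://example.net/device.tar.xz",
#                "monitor=http://example.net/build_number,name=device"]
#   path = generate_file_http(conf, arguments, environment)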
2482+
2483+def generate_file_keyring(conf, arguments, environment):
2484+ """
2485+ Generate a keyring tarball or return a pre-existing one.
2486+ """
2487+
2488+ # Don't generate keyring tarballs when nothing changed
2489+ if len(environment['new_files']) == 0:
2490+ return None
2491+
2492+ # We need a keyring name
2493+ if len(arguments) == 0:
2494+ return None
2495+
2496+ # Read the arguments
2497+ keyring_name = arguments[0]
2498+ keyring_path = os.path.join(conf.gpg_keyring_path, keyring_name)
2499+
2500+ # Fail on missing keyring
2501+ if not os.path.exists("%s.tar.xz" % keyring_path) or \
2502+ not os.path.exists("%s.tar.xz.asc" % keyring_path):
2503+ return None
2504+
2505+ with open("%s.tar.xz" % keyring_path, "rb") as fd:
2506+ hash_tarball = sha256(fd.read()).hexdigest()
2507+
2508+ with open("%s.tar.xz.asc" % keyring_path, "rb") as fd:
2509+ hash_signature = sha256(fd.read()).hexdigest()
2510+
2511+ hash_string = "%s/%s" % (hash_tarball, hash_signature)
2512+ global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
2513+
2514+ # Build the path
2515+ path = os.path.realpath(os.path.join(conf.publish_path, "pool",
2516+ "keyring-%s.tar.xz" %
2517+ global_hash))
2518+
2519+ # Set the version_detail string
2520+ environment['version_detail'].append("keyring=%s" % keyring_name)
2521+
2522+ # Don't bother re-generating a file if it already exists
2523+ if os.path.exists(path):
2524+ return path
2525+
2526+ # Create temporary directory
2527+ tempdir = tempfile.mkdtemp()
2528+
2529+ # Generate the tarball
2530+ tarball = tarfile.open(os.path.join(tempdir, "output.tar"), "w:")
2531+ tarball.add("%s.tar.xz" % keyring_path,
2532+ arcname="/system/etc/system-image/archive-master.tar.xz",
2533+ filter=root_ownership)
2534+ tarball.add("%s.tar.xz.asc" % keyring_path,
2535+ arcname="/system/etc/system-image/archive-master.tar.xz.asc",
2536+ filter=root_ownership)
2537+ tarball.close()
2538+
2539+ # Create the pool if it doesn't exist
2540+ if not os.path.exists(os.path.join(conf.publish_path, "pool")):
2541+ os.makedirs(os.path.join(conf.publish_path, "pool"))
2542+
2543+ # Compress and sign it
2544+ tools.xz_compress(os.path.join(tempdir, "output.tar"), path)
2545+ gpg.sign_file(conf, "image-signing", path)
2546+
2547+ # Generate the metadata file
2548+ metadata = {}
2549+ metadata['generator'] = "keyring"
2550+ metadata['version'] = global_hash
2551+ metadata['version_detail'] = "keyring=%s" % keyring_name
2552+ metadata['path'] = keyring_path
2553+
2554+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2555+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2556+ indent=4, separators=(',', ': ')))
2557+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2558+
2559+ # Cleanup
2560+ shutil.rmtree(tempdir)
2561+
2562+ return path
2563+
2564+
2565+def generate_file_remote_system_image(conf, arguments, environment):
2566+ """
2567+ Import files from a remote system-image server.
2568+ """
2569+
2570+ # We need at least a base URL, a channel name and a file prefix
2571+ if len(arguments) < 3:
2572+ return None
2573+
2574+ # Read the arguments
2575+ base_url = arguments[0]
2576+ channel_name = arguments[1]
2577+ prefix = arguments[2]
2578+
2579+ options = {}
2580+ if len(arguments) > 3:
2581+ options = unpack_arguments(arguments[3])
2582+
2583+ device_name = environment['device_name']
2584+ if 'device' in options:
2585+ device_name = options['device']
2586+
2587+ # Fetch and validate the remote channels.json
2588+ old_timeout = socket.getdefaulttimeout()
2589+ socket.setdefaulttimeout(5)
2590+ try:
2591+ channel_json = json.loads(urlopen("%s/channels.json" %
2592+ base_url).read().decode().strip())
2593+ except socket.timeout:
2594+ return None
2595+ except IOError:
2596+ return None
2597+ socket.setdefaulttimeout(old_timeout)
2598+
2599+ if channel_name not in channel_json:
2600+ return None
2601+
2602+ if "devices" not in channel_json[channel_name]:
2603+ return None
2604+
2605+ if device_name not in channel_json[channel_name]['devices']:
2606+ return None
2607+
2608+ if "index" not in (channel_json[channel_name]['devices']
2609+ [device_name]):
2610+ return None
2611+
2612+ index_url = "%s/%s" % (base_url, channel_json[channel_name]['devices']
2613+ [device_name]['index'])
2614+
2615+ # Fetch and validate the remote index.json
2616+ old_timeout = socket.getdefaulttimeout()
2617+ socket.setdefaulttimeout(5)
2618+ try:
2619+ index_json = json.loads(urlopen(index_url).read().decode())
2620+ except socket.timeout:
2621+ return None
2622+ except IOError:
2623+ return None
2624+ socket.setdefaulttimeout(old_timeout)
2625+
2626+ # Grab the list of full images
2627+ full_images = sorted([image for image in index_json['images']
2628+ if image['type'] == "full"],
2629+ key=lambda image: image['version'])
2630+
2631+ # No images
2632+ if not full_images:
2633+ return None
2634+
2635+ # Found an image, so let's try to find a match
2636+ for file_entry in full_images[-1]['files']:
2637+ file_name = file_entry['path'].split("/")[-1]
2638+ file_prefix = file_name.rsplit("-", 1)[0]
2639+ if file_prefix == prefix:
2640+ path = os.path.realpath("%s/%s" % (conf.publish_path,
2641+ file_entry['path']))
2642+ if os.path.exists(path):
2643+ return path
2644+
2645+ # Create the target if needed
2646+ if not os.path.exists(os.path.dirname(path)):
2647+ os.makedirs(os.path.dirname(path))
2648+
2649+ # Grab the file
2650+ file_url = "%s/%s" % (base_url, file_entry['path'])
2651+ socket.setdefaulttimeout(5)
2652+ try:
2653+ urlretrieve(file_url, path)
2654+ except socket.timeout:
2655+ if os.path.exists(path):
2656+ os.remove(path)
2657+ return None
2658+ except IOError:
2659+ if os.path.exists(path):
2660+ os.remove(path)
2661+ return None
2662+ socket.setdefaulttimeout(old_timeout)
2663+
2664+ if "keyring" in options:
2665+ if not tools.repack_recovery_keyring(conf, path,
2666+ options['keyring']):
2667+ if os.path.exists(path):
2668+ os.remove(path)
2669+ return None
2670+
2671+ gpg.sign_file(conf, "image-signing", path)
2672+
2673+ # Attempt to grab an associated json
2674+ socket.setdefaulttimeout(5)
2675+ json_path = path.replace(".tar.xz", ".json")
2676+ json_url = file_url.replace(".tar.xz", ".json")
2677+ try:
2678+ urlretrieve(json_url, json_path)
2679+ except socket.timeout:
2680+ if os.path.exists(json_path):
2681+ os.remove(json_path)
2682+ except IOError:
2683+ if os.path.exists(json_path):
2684+ os.remove(json_path)
2685+ socket.setdefaulttimeout(old_timeout)
2686+
2687+ if os.path.exists(json_path):
2688+ gpg.sign_file(conf, "image-signing", json_path)
2689+ with open(json_path, "r") as fd:
2690+ metadata = json.loads(fd.read())
2691+
2692+ if "version_detail" in metadata:
2693+ environment['version_detail'].append(
2694+ metadata['version_detail'])
2695+
2696+ return path
2697+
2698+ return None
2699+
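# Example (illustrative, values hypothetical): mirror the latest
# "device" tarball of a channel from another system-image server:
#
#   arguments = ["https://system-image.example.net",
#                "ubuntu-touch/devel", "device"]
#   path = generate_file_remote_system_image(conf, arguments,
#                                            environment)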
2700+
2701+def generate_file_system_image(conf, arguments, environment):
2702+ """
2703+ Copy a file from another channel.
2704+ """
2705+
2706+ # We need at least a channel name and a file prefix
2707+ if len(arguments) < 2:
2708+ return None
2709+
2710+ # Read the arguments
2711+ channel_name = arguments[0]
2712+ prefix = arguments[1]
2713+
2714+ # Run some checks
2715+ pub = tree.Tree(conf)
2716+ if channel_name not in pub.list_channels():
2717+ return None
2718+
2719+ if (not environment['device_name'] in
2720+ pub.list_channels()[channel_name]['devices']):
2721+ return None
2722+
2723+ # Try to find the file
2724+ device = pub.get_device(channel_name, environment['device_name'])
2725+
2726+ full_images = sorted([image for image in device.list_images()
2727+ if image['type'] == "full"],
2728+ key=lambda image: image['version'])
2729+
2730+ # No images
2731+ if not full_images:
2732+ return None
2733+
2734+ # Found an image, so let's try to find a match
2735+ for file_entry in full_images[-1]['files']:
2736+ file_name = file_entry['path'].split("/")[-1]
2737+ file_prefix = file_name.rsplit("-", 1)[0]
2738+ if file_prefix == prefix:
2739+ path = os.path.realpath("%s/%s" % (conf.publish_path,
2740+ file_entry['path']))
2741+
2742+ if os.path.exists(path.replace(".tar.xz", ".json")):
2743+ with open(path.replace(".tar.xz", ".json"), "r") as fd:
2744+ metadata = json.loads(fd.read())
2745+
2746+ if "version_detail" in metadata:
2747+ environment['version_detail'].append(
2748+ metadata['version_detail'])
2749+
2750+ return path
2751+
2752+ return None
2753+
2754+
2755+def generate_file_version(conf, arguments, environment):
2756+ """
2757+ Generate a version tarball or return a pre-existing one.
2758+ """
2759+
2760+ # Don't generate version tarballs when nothing changed
2761+ if len(environment['new_files']) == 0:
2762+ return None
2763+
2764+ path = os.path.realpath(os.path.join(environment['device'].path,
2765+ "version-%s.tar.xz" % environment['version']))
2766+
2767+ # Set the version_detail string
2768+ environment['version_detail'].append("version=%s" % environment['version'])
2769+
2770+ # Don't bother re-generating a file if it already exists
2771+ if os.path.exists(path):
2772+ return path
2773+
2774+ # Generate version_detail
2775+ version_detail = ",".join(environment['version_detail'])
2776+
2777+ # Create temporary directory
2778+ tempdir = tempfile.mkdtemp()
2779+
2780+ # Generate the tarball
2781+ tools.generate_version_tarball(
2782+ conf, environment['channel_name'], environment['device_name'],
2783+ str(environment['version']),
2784+ os.path.join(tempdir, "version"), version_detail=version_detail)
2785+
2786+ # Create the pool if it doesn't exist
2787+ if not os.path.exists(os.path.join(environment['device'].path)):
2788+ os.makedirs(os.path.join(environment['device'].path))
2789+
2790+ # Compress and sign it
2791+ tools.xz_compress(os.path.join(tempdir, "version"), path)
2792+ gpg.sign_file(conf, "image-signing", path)
2793+
2794+ # Generate the metadata file
2795+ metadata = {}
2796+ metadata['generator'] = "version"
2797+ metadata['version'] = environment['version']
2798+ metadata['version_detail'] = "version=%s" % environment['version']
2799+ metadata['channel.ini'] = {}
2800+ metadata['channel.ini']['channel'] = environment['channel_name']
2801+ metadata['channel.ini']['device'] = environment['device_name']
2802+ metadata['channel.ini']['version'] = str(environment['version'])
2803+ metadata['channel.ini']['version_detail'] = version_detail
2804+
2805+ with open(path.replace(".tar.xz", ".json"), "w+") as fd:
2806+ fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
2807+ indent=4, separators=(',', ': ')))
2808+ gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
2809+
2810+ # Cleanup
2811+ shutil.rmtree(tempdir)
2812+
2813+ return path
2814
2815=== added file 'lib/systemimage/gpg.py'
2816--- lib/systemimage/gpg.py 1970-01-01 00:00:00 +0000
2817+++ lib/systemimage/gpg.py 2014-11-14 10:18:00 +0000
2818@@ -0,0 +1,239 @@
2819+# -*- coding: utf-8 -*-
2820+
2821+# Copyright (C) 2013 Canonical Ltd.
2822+# Author: Stéphane Graber <stgraber@ubuntu.com>
2823+
2824+# This program is free software: you can redistribute it and/or modify
2825+# it under the terms of the GNU General Public License as published by
2826+# the Free Software Foundation; version 3 of the License.
2827+#
2828+# This program is distributed in the hope that it will be useful,
2829+# but WITHOUT ANY WARRANTY; without even the implied warranty of
2830+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
2831+# GNU General Public License for more details.
2832+#
2833+# You should have received a copy of the GNU General Public License
2834+# along with this program. If not, see <http://www.gnu.org/licenses/>.
2835+
2836+import json
2837+import gpgme
2838+import os
2839+import tarfile
2840+
2841+from io import BytesIO
2842+
2843+
2844+def generate_signing_key(keyring_path, key_name, key_email, key_expiry):
2845+ """
2846+ Generate a new 2048-bit RSA signing key.
2847+ """
2848+
2849+ if not os.path.isdir(keyring_path):
2850+ raise Exception("Keyring path doesn't exist: %s" % keyring_path)
2851+
2852+ key_params = """<GnupgKeyParms format="internal">
2853+Key-Type: RSA
2854+Key-Length: 2048
2855+Key-Usage: sign
2856+Name-Real: %s
2857+Name-Email: %s
2858+Expire-Date: %s
2859+</GnupgKeyParms>
2860+""" % (key_name, key_email, key_expiry)
2861+
2862+ os.environ['GNUPGHOME'] = keyring_path
2863+
2864+ ctx = gpgme.Context()
2865+ result = ctx.genkey(key_params)
2866+ key = ctx.get_key(result.fpr, True)
2867+ [uid] = key.uids
2868+
2869+ return uid
2870+
2871+
2872+def sign_file(config, key, path, destination=None, detach=True, armor=True):
2873+ """
2874+ Sign a file and publish the signature.
2875+ The key parameter must be a valid key under config.gpg_key_path.
2876+ The path must be that of a valid file.
2877+ The destination defaults to <path>.gpg (non-armored) or
2878+ <path>.asc (armored).
2879+ The detach and armor parameters respectively control the use of
2880+ detached signatures and base64 armoring.
2881+ """
2882+
2883+ key_path = "%s/%s" % (config.gpg_key_path, key)
2884+
2885+ if not os.path.isdir(key_path):
2886+ raise IndexError("Invalid GPG key name '%s'." % key)
2887+
2888+ if not os.path.isfile(path):
2889+ raise Exception("Invalid path '%s'." % path)
2890+
2891+ if not destination:
2892+ if armor:
2893+ destination = "%s.asc" % path
2894+ elif detach:
2895+ destination = "%s.sig" % path
2896+ else:
2897+ destination = "%s.gpg" % path
2898+
2899+ if os.path.exists(destination):
2900+ raise Exception("destination already exists.")
2901+
2902+ os.environ['GNUPGHOME'] = key_path
2903+
2904+ # Create a GPG context, assuming no passphrase
2905+ ctx = gpgme.Context()
2906+ ctx.armor = armor
2907+ [key] = ctx.keylist()
2908+ ctx.signers = [key]
2909+
2910+ with open(path, "rb") as fd_in, open(destination, "wb+") as fd_out:
2911+ if detach:
2912+ retval = ctx.sign(fd_in, fd_out, gpgme.SIG_MODE_DETACH)
2913+ else:
2914+ retval = ctx.sign(fd_in, fd_out, gpgme.SIG_MODE_NORMAL)
2915+
2916+ return retval
2917+
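# Example (illustrative, path hypothetical): sign a pool tarball with
# the image-signing key; the defaults produce a detached, armored
# signature next to the input:
#
#   sign_file(config, "image-signing", "/srv/www/pool/foo.tar.xz")
#   -> writes /srv/www/pool/foo.tar.xz.asc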
2918+
2919+class Keyring:
2920+ """
2921+ Represents a keyring; lets you list/add/remove keys and change
2922+ some of the keyring properties (type, expiration, target hardware)
2923+ """
2924+
2925+ keyring_name = None
2926+ keyring_type = None
2927+ keyring_expiry = None
2928+ keyring_model = None
2929+ keyring_path = None
2930+
2931+ def __init__(self, config, keyring_name):
2932+ keyring_path = "%s/%s" % (config.gpg_keyring_path, keyring_name)
2933+
2934+ if not os.path.isdir(keyring_path):
2935+ os.makedirs(keyring_path)
2936+
2937+ self.keyring_name = keyring_name
2938+ self.keyring_path = keyring_path
2939+
2940+ if os.path.exists("%s/keyring.json" % keyring_path):
2941+ with open("%s/keyring.json" % keyring_path, "r") as fd:
2942+ keyring_json = json.loads(fd.read())
2943+
2944+ self.keyring_type = keyring_json.get('type', None)
2945+ self.keyring_expiry = keyring_json.get('expiry', None)
2946+ self.keyring_model = keyring_json.get('model', None)
2947+ else:
2948+ open("%s/pubring.gpg" % keyring_path, "w+").close()
2949+
2950+ def generate_tarball(self, destination=None):
2951+ """
2952+ Generate a tarball of the keyring and its json metadata.
2953+ Returns the path to the tarball.
2954+ """
2955+
2956+ if not destination:
2957+ destination = "%s.tar" % self.keyring_path
2958+
2959+ if os.path.isfile(destination):
2960+ os.remove(destination)
2961+
2962+ tarball = tarfile.open(destination, "w:")
2963+ tarball.add("%s/keyring.json" % self.keyring_path,
2964+ arcname="keyring.json")
2965+ tarball.add("%s/pubring.gpg" % self.keyring_path,
2966+ arcname="keyring.gpg")
2967+ tarball.close()
2968+
2969+ return destination
2970+
2971+ def set_metadata(self, keyring_type, keyring_expiry=None,
2972+ keyring_model=None):
2973+ """
2974+ Generate a new keyring.json file.
2975+ """
2976+
2977+ keyring_json = {}
2978+ if keyring_type:
2979+ self.keyring_type = keyring_type
2980+ keyring_json['type'] = keyring_type
2981+
2982+ if keyring_expiry:
2983+ self.keyring_expiry = keyring_expiry
2984+ keyring_json['expiry'] = keyring_expiry
2985+
2986+ if keyring_model:
2987+ self.keyring_model = keyring_model
2988+ keyring_json['model'] = keyring_model
2989+
2990+ with open("%s/keyring.json" % self.keyring_path, "w+") as fd:
2991+ fd.write("%s\n" % json.dumps(keyring_json, sort_keys=True,
2992+ indent=4, separators=(',', ': ')))
2993+
2994+ def list_keys(self):
2995+ os.environ['GNUPGHOME'] = self.keyring_path
2996+
2997+ keys = []
2998+
2999+ ctx = gpgme.Context()
3000+ for key in ctx.keylist():
3001+ keys.append((key.subkeys[0].keyid, key.subkeys[0].length,
3002+ [uid.uid for uid in key.uids]))
3003+
3004+ return keys
3005+
3006+ def export_key(self, path, key, armor=True):
3007+ os.environ['GNUPGHOME'] = self.keyring_path
3008+
3009+ ctx = gpgme.Context()
3010+ ctx.armor = armor
3011+
3012+ gpg_key = ctx.get_key(key)
3013+
3014+ with open(path, "wb+") as fd:
3015+ for subkey in gpg_key.subkeys:
3016+ ctx.export(str(subkey.keyid), fd)
3017+
3018+ def import_key(self, path, armor=True):
3019+ os.environ['GNUPGHOME'] = self.keyring_path
3020+
3021+ ctx = gpgme.Context()
3022+ ctx.armor = armor
3023+
3024+ with open(path, "rb") as fd:
3025+ ctx.import_(fd)
3026+
3027+ def import_keys(self, path):
3028+ """
3029+ Import all the keys from the specified keyring.
3030+ """
3031+
3032+ os.environ['GNUPGHOME'] = path
3033+
3034+ ctx = gpgme.Context()
3035+
3036+ keys = []
3037+ for key in list(ctx.keylist()):
3038+ for subkey in key.subkeys:
3039+ content = BytesIO()
3040+ ctx.export(str(subkey.keyid), content)
3041+ keys.append(content)
3042+
3043+ os.environ['GNUPGHOME'] = self.keyring_path
3044+ ctx = gpgme.Context()
3045+
3046+ for key in keys:
3047+ key.seek(0)
3048+ ctx.import_(key)
3049+
3050+ def del_key(self, key):
3051+ os.environ['GNUPGHOME'] = self.keyring_path
3052+
3053+ ctx = gpgme.Context()
3054+
3055+ gpg_key = ctx.get_key(key)
3056+
3057+ ctx.delete(gpg_key)
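# Example (illustrative, names and values hypothetical): a sketch of
# the Keyring API, from creation to generating the tarball:
#
#   keyring = Keyring(config, "image-signing")
#   keyring.set_metadata("signing", keyring_expiry="2015-12-31")
#   keyring.import_key("/secret/gpg/keys/image-signing.asc")
#   tarball_path = keyring.generate_tarball()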
3058
3059=== added file 'lib/systemimage/tools.py'
3060--- lib/systemimage/tools.py 1970-01-01 00:00:00 +0000
3061+++ lib/systemimage/tools.py 2014-11-14 10:18:00 +0000
3062@@ -0,0 +1,373 @@
3063+# -*- coding: utf-8 -*-
3064+
3065+# Copyright (C) 2013 Canonical Ltd.
3066+# Author: Stéphane Graber <stgraber@ubuntu.com>
3067+
3068+# This program is free software: you can redistribute it and/or modify
3069+# it under the terms of the GNU General Public License as published by
3070+# the Free Software Foundation; version 3 of the License.
3071+#
3072+# This program is distributed in the hope that it will be useful,
3073+# but WITHOUT ANY WARRANTY; without even the implied warranty of
3074+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
3075+# GNU General Public License for more details.
3076+#
3077+# You should have received a copy of the GNU General Public License
3078+# along with this program. If not, see <http://www.gnu.org/licenses/>.
3079+
3080+from io import BytesIO
3081+
3082+import gzip
3083+import os
3084+import re
3085+import shutil
3086+import subprocess
3087+import tarfile
3088+import tempfile
3089+import time
3090+
3091+
3092+def expand_path(path, base="/"):
3093+ """
3094+ Takes a path and returns a tuple containing the absolute path
3095+ and a relative path (relative to base).
3096+ """
3097+
3098+ if path.startswith(base):
3099+ path = re.sub('^%s' % re.escape(base), "", path)
3100+
3101+ if path.startswith(os.sep):
3102+ relpath = path[1:]
3103+ else:
3104+ relpath = path
3105+
3106+ abspath = os.path.realpath(os.path.join(base, relpath))
3107+
3108+ return abspath, relpath
3109+
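# Example (illustrative):
#
#   expand_path("/srv/www/pool/device.tar.xz", base="/srv/www")
#   -> ("/srv/www/pool/device.tar.xz", "pool/device.tar.xz")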
3110+
3111+# Imported from cdimage.osextras
3112+def find_on_path(command):
3113+ """Is command on the executable search path?"""
3114+
3115+ if 'PATH' not in os.environ:
3116+ return False
3117+ path = os.environ['PATH']
3118+ for element in path.split(os.pathsep):
3119+ if not element:
3120+ continue
3121+ filename = os.path.join(element, command)
3122+ if os.path.isfile(filename) and os.access(filename, os.X_OK):
3123+ return True
3124+ return False
3125+
3126+
3127+def generate_version_tarball(config, channel, device, version, path,
3128+ build_path="system/etc/ubuntu-build",
3129+ channel_path="system/etc/system-image/"
3130+ "channel.ini",
3131+ version_detail=None,
3132+ channel_target=None):
3133+ """
3134+ Generates a tarball which contains two files
3135+ (build_path and channel_path).
3136+ The first contains the build id, the second a .ini config file.
3137+ The resulting tarball is written at the provided location (path).
3138+ """
3139+
3140+ tarball = tarfile.open(path, 'w:')
3141+
3142+ version_file = tarfile.TarInfo()
3143+ version_file.size = len(version) + 1
3144+ version_file.mtime = int(time.strftime("%s", time.localtime()))
3145+ version_file.name = build_path
3146+
3147+ # Append a line break
3148+ version += "\n"
3149+
3150+ tarball.addfile(version_file, BytesIO(version.encode('utf-8')))
3151+
3152+ http_port = config.public_http_port
3153+ https_port = config.public_https_port
3154+
3155+ if http_port == 0:
3156+ http_port = "disabled"
3157+
3158+ if https_port == 0:
3159+ https_port = "disabled"
3160+
3161+ channel = """[service]
3162+base: %s
3163+http_port: %s
3164+https_port: %s
3165+channel: %s
3166+device: %s
3167+build_number: %s
3168+""" % (config.public_fqdn, http_port, https_port,
3169+ channel, device, version.strip())
3170+
3171+ if channel_target:
3172+ channel += "channel_target: %s\n" % channel_target
3173+
3174+ if version_detail:
3175+ channel += "version_detail: %s\n" % version_detail
3176+
3177+ channel_file = tarfile.TarInfo()
3178+ channel_file.size = len(channel)
3179+ channel_file.mtime = int(time.strftime("%s", time.localtime()))
3180+ channel_file.name = channel_path
3181+
3182+ tarball.addfile(channel_file, BytesIO(channel.encode('utf-8')))
3183+
3184+ tarball.close()
3185+
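# Example (illustrative, values hypothetical): the channel.ini written
# into the version tarball would look like:
#
#   [service]
#   base: system-image.example.net
#   http_port: 80
#   https_port: 443
#   channel: ubuntu-touch/devel
#   device: mako
#   build_number: 42
#   version_detail: ubuntu=20141112,device=20141110,version=42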
3186+
3187+def gzip_compress(path, destination=None, level=9):
3188+ """
3189+ Compress a file (path) using gzip.
3190+ By default, creates a .gz version of the file in the same directory.
3191+ An alternate destination path may be provided.
3192+ The compression level is 9 by default but can be overridden.
3193+ """
3194+
3195+ if not destination:
3196+ destination = "%s.gz" % path
3197+
3198+ if os.path.exists(destination):
3199+ raise Exception("destination already exists.")
3200+
3201+ uncompressed = open(path, "rb")
3202+ compressed = gzip.open(destination, "wb+", level)
3203+ compressed.writelines(uncompressed)
3204+ compressed.close()
3205+ uncompressed.close()
3206+
3207+ return destination
3208+
3209+
3210+def gzip_uncompress(path, destination=None):
3211+ """
3212+ Uncompress a file (path) using gzip.
3213+ By default, uses the source path without the .gz suffix as the target.
3214+ An alternate destination path may be provided.
3215+ """
3216+
3217+ if not destination and path[-3:] != ".gz":
3218+ raise Exception("unspecified destination and path doesn't end"
3219+ " with .gz")
3220+
3221+ if not destination:
3222+ destination = path[:-3]
3223+
3224+ if os.path.exists(destination):
3225+ raise Exception("destination already exists.")
3226+
3227+ compressed = gzip.open(path, "rb")
3228+ uncompressed = open(destination, "wb+")
3229+ uncompressed.writelines(compressed)
3230+ uncompressed.close()
3231+ compressed.close()
3232+
3233+ return destination
3234+
3235+
3236+def xz_compress(path, destination=None, level=9):
3237+ """
3238+ Compress a file (path) using xz.
3239+ By default, creates a .xz version of the file in the same directory.
3240+ An alternate destination path may be provided.
3241+ The compression level is 9 by default but can be overridden.
3242+ """
3243+
3244+ # NOTE: Once we can drop support for < 3.3, the new lzma module can be used
3245+
3246+ if not destination:
3247+ destination = "%s.xz" % path
3248+
3249+ if os.path.exists(destination):
3250+ raise Exception("destination already exists.")
3251+
3252+ if find_on_path("pxz"):
3253+ xz_command = "pxz"
3254+ else:
3255+ xz_command = "xz"
3256+
3257+ with open(destination, "wb+") as fd:
3258+ retval = subprocess.call([xz_command, '-z', '-%s' % level, '-c', path],
3259+ stdout=fd)
3260+ return retval
3261+
3262+
3263+def xz_uncompress(path, destination=None):
3264+ """
3265+ Uncompress a file (path) using xz.
3266+ By default, uses the source path without the .xz suffix as the target.
3267+ An alternate destination path may be provided.
3268+ """
3269+
3270+ # NOTE: Once we can drop support for < 3.3, the new lzma module can be used
3271+
3272+ if not destination and path[-3:] != ".xz":
3273+ raise Exception("unspecified destination and path doesn't end"
3274+ " with .xz")
3275+
3276+ if not destination:
3277+ destination = path[:-3]
3278+
3279+ if os.path.exists(destination):
3280+ raise Exception("destination already exists.")
3281+
3282+ with open(destination, "wb+") as fd:
3283+ retval = subprocess.call(['xz', '-d', '-c', path],
3284+ stdout=fd)
3285+
3286+ return retval
3287+
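# Example (illustrative, paths hypothetical): a compress/uncompress
# round-trip through the pool:
#
#   xz_compress("/tmp/target.tar", "/srv/www/pool/device-1234.tar.xz")
#   xz_uncompress("/srv/www/pool/device-1234.tar.xz", "/tmp/source.tar")
#
# Note that, unlike the gzip helpers, both return the external
# command's exit status (0 on success) rather than the destination.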
3288+
3289+def trigger_mirror(host, port, username, key, command):
3290+ return subprocess.call(['ssh',
3291+ '-i', key,
3292+ '-l', username,
3293+ '-p', str(port),
3294+ host,
3295+ command])
3296+
3297+
3298+def sync_mirrors(config):
3299+ for mirror in sorted(config.mirrors.values(),
3300+ key=lambda mirror: mirror.ssh_host):
3301+ trigger_mirror(mirror.ssh_host, mirror.ssh_port, mirror.ssh_user,
3302+ mirror.ssh_key, mirror.ssh_command)
3303+
3304+
3305+# FIXME: keyring_name is not used
3306+def repack_recovery_keyring(conf, path, keyring_name):
3307+ tempdir = tempfile.mkdtemp()
3308+
3309+ xz_uncompress(path, os.path.join(tempdir, "input.tar"))
3310+
3311+ input_tarball = tarfile.open(os.path.join(tempdir, "input.tar"), "r:")
3312+
3313+ # Make sure the partition is in there
3314+ if "partitions/recovery.img" not in input_tarball.getnames():
3315+ shutil.rmtree(tempdir)
3316+ return False
3317+
3318+ input_tarball.extract("partitions/recovery.img", tempdir)
3319+
3320+ # Extract the content of the .img
3321+ os.mkdir(os.path.join(tempdir, "img"))
3322+ old_pwd = os.getcwd()
3323+ os.chdir(os.path.join(tempdir, "img"))
3324+ cmd = ["abootimg",
3325+ "-x", os.path.join(tempdir, "partitions", "recovery.img")]
3326+
3327+ with open(os.path.devnull, "w") as devnull:
3328+ subprocess.call(cmd, stdout=devnull, stderr=devnull)
3329+
3330+ os.chdir(old_pwd)
3331+
3332+ # Extract the content of the initrd
3333+ os.mkdir(os.path.join(tempdir, "initrd"))
3334+ state_path = os.path.join(tempdir, "fakeroot_state")
3335+ old_pwd = os.getcwd()
3336+ os.chdir(os.path.join(tempdir, "initrd"))
3337+
3338+ gzip_uncompress(os.path.join(tempdir, "img", "initrd.img"),
3339+ os.path.join(tempdir, "img", "initrd"))
3340+
3341+ with open(os.path.join(tempdir, "img", "initrd"), "rb") as fd:
3342+ with open(os.path.devnull, "w") as devnull:
3343+ subprocess.call(['fakeroot', '-s', state_path, 'cpio', '-i'],
3344+ stdin=fd, stdout=devnull, stderr=devnull)
3345+
3346+ os.chdir(old_pwd)
3347+
3348+ # Swap the files
3349+ keyring_path = os.path.join(conf.gpg_keyring_path, "archive-master")
3350+
3351+ shutil.copy("%s.tar.xz" % keyring_path,
3352+ os.path.join(tempdir, "initrd", "etc", "system-image",
3353+ "archive-master.tar.xz"))
3354+
3355+ shutil.copy("%s.tar.xz.asc" % keyring_path,
3356+ os.path.join(tempdir, "initrd", "etc", "system-image",
3357+ "archive-master.tar.xz.asc"))
3358+
3359+ # Re-generate the initrd
3360+ old_pwd = os.getcwd()
3361+ os.chdir(os.path.join(tempdir, "initrd"))
3362+
3363+ find = subprocess.Popen(["find", "."], stdout=subprocess.PIPE)
3364+ with open(os.path.join(tempdir, "img", "initrd"), "w+") as fd:
3365+ with open(os.path.devnull, "w") as devnull:
3366+ subprocess.call(['fakeroot', '-i', state_path, 'cpio',
3367+ '-o', '--format=newc'],
3368+ stdin=find.stdout,
3369+ stdout=fd,
3370+ stderr=devnull)
3371+
3372+ os.chdir(old_pwd)
3373+
3374+ os.rename(os.path.join(tempdir, "img", "initrd.img"),
3375+ os.path.join(tempdir, "img", "initrd.img.bak"))
3376+ gzip_compress(os.path.join(tempdir, "img", "initrd"),
3377+ os.path.join(tempdir, "img", "initrd.img"))
3378+
3379+ # Rewrite bootimg.cfg
3380+ content = ""
3381+ with open(os.path.join(tempdir, "img", "bootimg.cfg"), "r") as source:
3382+ for line in source:
3383+ if line.startswith("bootsize"):
3384+ line = "bootsize=0x900000\n"
3385+ content += line
3386+
3387+ with open(os.path.join(tempdir, "img", "bootimg.cfg"), "w+") as dest:
3388+ dest.write(content)
3389+
3390+ # Update the partition image with the new bootimg.cfg
3391+ with open(os.path.devnull, "w") as devnull:
3392+ subprocess.call(['abootimg', '-u',
3393+ os.path.join(tempdir, "partitions", "recovery.img"),
3394+ "-f", os.path.join(tempdir, "img", "bootimg.cfg")],
3395+ stdout=devnull, stderr=devnull)
3396+
3397+ # Update the partition image with the new initrd
3398+ with open(os.path.devnull, "w") as devnull:
3399+ subprocess.call(['abootimg', '-u',
3400+ os.path.join(tempdir, "partitions", "recovery.img"),
3401+ "-r", os.path.join(tempdir, "img", "initrd.img")],
3402+ stdout=devnull, stderr=devnull)
3403+
3404+ # Generate a new tarball
3405+ output_tarball = tarfile.open(os.path.join(tempdir, "output.tar"), "w:")
3406+ for entry in input_tarball:
3407+ fileptr = None
3408+ if entry.isfile():
3409+ try:
3410+ if entry.name == "partitions/recovery.img":
3411+ with open(os.path.join(tempdir, "partitions",
3412+ "recovery.img"), "rb") as fd:
3413+ fileptr = BytesIO(fd.read())
3414+ entry.size = os.stat(
3415+ os.path.join(tempdir, "partitions",
3416+ "recovery.img")).st_size
3417+ else:
3418+ fileptr = input_tarball.extractfile(entry.name)
3419+ except KeyError: # pragma: no cover
3420+ pass
3421+
3422+ output_tarball.addfile(entry, fileobj=fileptr)
3423+ if fileptr:
3424+ fileptr.close()
3425+ fileptr = None
3426+
3427+ output_tarball.close()
3428+ input_tarball.close()
3429+
3430+ os.remove(path)
3431+ xz_compress(os.path.join(tempdir, "output.tar"), path)
3432+
3433+ shutil.rmtree(tempdir)
3434+
3435+ return True
3436
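As a quick sanity check of repack_recovery_keyring() above, here is a minimal
sketch of driving it directly. The config and tarball paths are placeholders
and not part of this branch; the function returns False when the tarball does
not contain partitions/recovery.img:

  # Hypothetical smoke test; the paths below are placeholders.
  from systemimage import config, tools

  conf = config.Config("etc/config")  # assumes gpg_keyring_path is set up
  if tools.repack_recovery_keyring(conf, "/tmp/device.tar.xz", "archive-master"):
      print("recovery.img repacked with the archive-master keyring")
  else:
      print("tarball has no partitions/recovery.img")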
3437=== added file 'lib/systemimage/tree.py'
3438--- lib/systemimage/tree.py 1970-01-01 00:00:00 +0000
3439+++ lib/systemimage/tree.py 2014-11-14 10:18:00 +0000
3440@@ -0,0 +1,1013 @@
3441+# -*- coding: utf-8 -*-
3442+
3443+# Copyright (C) 2013 Canonical Ltd.
3444+# Author: Stéphane Graber <stgraber@ubuntu.com>
3445+
3446+# This program is free software: you can redistribute it and/or modify
3447+# it under the terms of the GNU General Public License as published by
3448+# the Free Software Foundation; version 3 of the License.
3449+#
3450+# This program is distributed in the hope that it will be useful,
3451+# but WITHOUT ANY WARRANTY; without even the implied warranty of
3452+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
3453+# GNU General Public License for more details.
3454+#
3455+# You should have received a copy of the GNU General Public License
3456+# along with this program. If not, see <http://www.gnu.org/licenses/>.
3457+
3458+import copy
3459+import json
3460+import os
3461+import shutil
3462+import time
3463+
3464+from contextlib import contextmanager
3465+from hashlib import sha256
3466+from systemimage import gpg, tools
3467+
3468+
3469+# Context managers
3470+@contextmanager
3471+def channels_json(config, path, commit=False):
3472+ """
3473+ Context function (to be used with "with") that will open a
3474+ channels.json file, parse it, validate it and yield the
3475+ decoded version.
3476+
3477+ If commit is True, the file will then be updated (or created) on
3478+ exit.
3479+ """
3480+
3481+ # If the file doesn't exist, just yield an empty dict
3482+ json_content = {}
3483+ if os.path.exists(path):
3484+ with open(path, "r") as fd:
3485+ content = fd.read()
3486+ if content:
3487+ json_content = json.loads(content)
3488+
3489+ # Validation
3490+ if not isinstance(json_content, dict):
3491+ raise TypeError("Invalid channels.json, not a dict.")
3492+
3493+ if commit:
3494+ orig_json_content = copy.deepcopy(json_content)
3495+
3496+ # Yield the decoded value and save on exit
3497+ try:
3498+ yield json_content
3499+ finally:
3500+ if commit and (orig_json_content != json_content or
3501+ not os.path.exists(path)):
3502+ new_path = "%s.new" % path
3503+ with open(new_path, "w+") as fd:
3504+ fd.write("%s\n" % json.dumps(json_content, sort_keys=True,
3505+ indent=4, separators=(',', ': ')))
3506+
3507+ # Move the signature
3508+ gpg.sign_file(config, "image-signing", new_path)
3509+ if os.path.exists("%s.asc" % path):
3510+ os.remove("%s.asc" % path)
3511+ os.rename("%s.asc" % new_path, "%s.asc" % path)
3512+
3513+ # Move the channels file into place
3514+ if os.path.exists(path):
3515+ os.remove(path)
3516+ os.rename(new_path, path)
3517+
3518+
3519+@contextmanager
3520+def index_json(config, path, commit=False):
3521+ """
3522+ Context function (to be used with "with") that will open an
3523+ index.json file, parse it, validate it and yield the
3524+ decoded version.
3525+
3526+ If commit is True, the file will then be updated (or created) on
3527+ exit.
3528+ """
3529+
3530+ # If the file doesn't exist, just yield an empty dict
3531+ json_content = {}
3532+ json_content['global'] = {}
3533+ json_content['images'] = []
3534+
3535+ if os.path.exists(path):
3536+ with open(path, "r") as fd:
3537+ content = fd.read()
3538+ if content:
3539+ json_content = json.loads(content)
3540+
3541+ # Validation
3542+ if not isinstance(json_content, dict):
3543+ raise TypeError("Invalid index.json, not a dict.")
3544+
3545+ if commit:
3546+ orig_json_content = copy.deepcopy(json_content)
3547+
3548+ # Yield the decoded value and save on exit
3549+ try:
3550+ yield json_content
3551+ finally:
3552+ # Strip attributes that are only valid on the latest image
3553+ versions = sorted({image['version']
3554+ for image in json_content['images']})
3555+ if versions:
3556+ last_version = versions[-1]
3557+
3558+ # Remove phased-percentage from any old image
3559+ for image in json_content['images']:
3560+ if image['version'] != last_version and \
3561+ "phased-percentage" in image:
3562+ image.pop("phased-percentage")
3563+
3564+ # Save to disk
3565+ if commit and (orig_json_content != json_content or
3566+ not os.path.exists(path)):
3567+ json_content['global']['generated_at'] = time.strftime(
3568+ "%a %b %d %H:%M:%S UTC %Y", time.gmtime())
3569+
3570+ new_path = "%s.new" % path
3571+ with open(new_path, "w+") as fd:
3572+ fd.write("%s\n" % json.dumps(json_content, sort_keys=True,
3573+ indent=4, separators=(',', ': ')))
3574+
3575+ # Move the signature
3576+ gpg.sign_file(config, "image-signing", new_path)
3577+ if os.path.exists("%s.asc" % path):
3578+ os.remove("%s.asc" % path)
3579+ os.rename("%s.asc" % new_path, "%s.asc" % path)
3580+
3581+ # Move the index
3582+ if os.path.exists(path):
3583+ os.remove(path)
3584+ os.rename(new_path, path)
3585+
3586+
3587+class Tree:
3588+ def __init__(self, config, path=None):
3589+ if not path:
3590+ path = config.publish_path
3591+
3592+ if not os.path.isdir(path):
3593+ raise Exception("Invalid path: %s" % path)
3594+
3595+ self.config = config
3596+ self.path = path
3597+ self.indexpath = os.path.join(path, "channels.json")
3598+
3599+ def __list_existing(self):
3600+ """
3601+ Returns a set of all files present in the tree and a set of
3602+ empty directories that can be removed.
3603+ """
3604+
3605+ existing_files = set()
3606+ empty_dirs = set()
3607+
3608+ for dirpath, dirnames, filenames in os.walk(self.path):
3609+ if dirpath == os.path.join(self.path, "gpg"):
3610+ continue
3611+
3612+ if not filenames and not dirnames:
3613+ empty_dirs.add(dirpath)
3614+
3615+ for entry in filenames:
3616+ existing_files.add(os.path.join(dirpath, entry))
3617+
3618+ return (existing_files, empty_dirs)
3619+
3620+ def __list_referenced(self):
3621+ """
3622+ Returns a set of all files that are referenced by the
3623+ various indexes and should be present in the tree.
3624+ """
3625+
3626+ listed_files = set()
3627+ listed_files.add(os.path.join(self.path, "channels.json"))
3628+ listed_files.add(os.path.join(self.path, "channels.json.asc"))
3629+
3630+ for channel, metadata in self.list_channels().items():
3631+ devices = metadata['devices']
3632+ for device in devices:
3633+ if 'keyring' in devices[device]:
3634+ listed_files.add(os.path.join(
3635+ self.path, devices[device]['keyring']['path'][1:]))
3636+ listed_files.add(os.path.join(
3637+ self.path,
3638+ devices[device]['keyring']['signature'][1:]))
3639+
3640+ device_entry = self.get_device(channel, device)
3641+
3642+ listed_files.add(os.path.join(device_entry.path, "index.json"))
3643+ listed_files.add(os.path.join(device_entry.path,
3644+ "index.json.asc"))
3645+
3646+ for image in device_entry.list_images():
3647+ for entry in image['files']:
3648+ listed_files.add(os.path.join(self.path,
3649+ entry['path'][1:]))
3650+ listed_files.add(os.path.join(self.path,
3651+ entry['signature'][1:]))
3652+
3653+ return listed_files
3654+
3655+ def change_channel_alias(self, channel_name, target_name):
3656+ """
3657+ Change the target of an alias.
3658+ """
3659+
3660+ with channels_json(self.config, self.indexpath) as channels:
3661+ if channel_name not in channels:
3662+ raise KeyError("Couldn't find channel: %s" % channel_name)
3663+
3664+ if "redirect" in channels[channel_name]:
3665+ raise KeyError("Channel is a redirect: %s" % channel_name)
3666+
3667+ if "alias" not in channels[channel_name] or \
3668+ channels[channel_name]['alias'] == channel_name:
3669+ raise KeyError("Channel isn't an alias: %s" % channel_name)
3670+
3671+ if target_name not in channels:
3672+ raise KeyError("Couldn't find target channel: %s" %
3673+ target_name)
3674+
3675+ self.remove_channel(channel_name)
3676+ self.create_channel_alias(channel_name, target_name)
3677+
3678+ return True
3679+
3680+ def cleanup_tree(self):
3681+ """
3682+ Remove any orphaned file from the tree.
3683+ """
3684+
3685+ for entry in self.list_orphaned_files():
3686+ if os.path.isdir(entry):
3687+ os.rmdir(entry)
3688+ else:
3689+ os.remove(entry)
3690+
3691+ return True
3692+
3693+ def create_channel(self, channel_name):
3694+ """
3695+ Creates a new channel entry in the tree.
3696+ """
3697+
3698+ with channels_json(self.config, self.indexpath, True) as channels:
3699+ if channel_name in channels:
3700+ raise KeyError("Channel already exists: %s" % channel_name)
3701+
3702+ channels[channel_name] = {'devices': {}}
3703+
3704+ return True
3705+
3706+ def create_channel_alias(self, channel_name, target_name):
3707+ """
3708+ Creates a new channel as an alias for an existing one.
3709+ """
3710+
3711+ with channels_json(self.config, self.indexpath, True) as channels:
3712+ if channel_name in channels:
3713+ raise KeyError("Channel already exists: %s" % channel_name)
3714+
3715+ if target_name not in channels:
3716+ raise KeyError("Couldn't find target channel: %s" %
3717+ target_name)
3718+
3719+ channels[channel_name] = {'devices': {},
3720+ 'alias': target_name}
3721+
3722+ return self.sync_alias(channel_name)
3723+
3724+ def create_channel_redirect(self, channel_name, target_name):
3725+ """
3726+ Creates a new channel redirect.
3727+ """
3728+
3729+ with channels_json(self.config, self.indexpath, True) as channels:
3730+ if channel_name in channels:
3731+ raise KeyError("Channel already exists: %s" % channel_name)
3732+
3733+ if target_name not in channels:
3734+ raise KeyError("Couldn't find target channel: %s" %
3735+ target_name)
3736+
3737+ channels[channel_name] = dict(channels[target_name])
3738+ channels[channel_name]['redirect'] = target_name
3739+
3740+ self.hide_channel(channel_name)
3741+
3742+ return True
3743+
3744+ def create_device(self, channel_name, device_name, keyring_path=None):
3745+ """
3746+ Creates a new device entry in the tree.
3747+ """
3748+
3749+ with channels_json(self.config, self.indexpath, True) as channels:
3750+ if channel_name not in channels:
3751+ raise KeyError("Couldn't find channel: %s" % channel_name)
3752+
3753+ if device_name in channels[channel_name]['devices']:
3754+ raise KeyError("Device already exists: %s" % device_name)
3755+
3756+ device_path = os.path.join(self.path, channel_name, device_name)
3757+ if not os.path.exists(device_path):
3758+ os.makedirs(device_path)
3759+
3760+ # Create an empty index if it doesn't exist, if it does,
3761+ # just validate it
3762+ with index_json(self.config, os.path.join(device_path,
3763+ "index.json"), True):
3764+ pass
3765+
3766+ device = {}
3767+ device['index'] = "/%s/%s/index.json" % (channel_name, device_name)
3768+
3769+ channels[channel_name]['devices'][device_name] = device
3770+
3771+ if keyring_path:
3772+ self.set_device_keyring(channel_name, device_name, keyring_path)
3773+
3774+ self.sync_aliases(channel_name)
3775+ self.sync_redirects(channel_name)
3776+
3777+ return True
3778+
3779+ def generate_index(self, magic=False):
3780+ """
3781+ Re-generate the channels.json file based on the current content of
3782+ the tree.
3783+
3784+ This function is only present for emergency purposes and will
3785+ completely rebuild the tree based on what's on the filesystem,
3786+ looking into some well known locations to guess things like device
3787+ keyring paths.
3788+
3789+ Call this function with magic="I know what I'm doing" to actually
3790+ trigger it.
3791+ """
3792+
3793+ if magic != "I know what I'm doing":
3794+ raise Exception("Invalid magic value, please read the help.")
3795+
3796+ if os.path.exists(self.indexpath):
3797+ os.remove(self.indexpath)
3798+
3799+ for channel_name in [entry for entry in os.listdir(self.path)
3800+ if os.path.isdir(os.path.join(self.path,
3801+ entry))
3802+ and entry not in ('gpg',)]:
3803+ self.create_channel(channel_name)
3804+
3805+ for device_name in os.listdir(os.path.join(self.path,
3806+ channel_name)):
3807+
3808+ path = os.path.join(self.path, channel_name, device_name)
3809+ if not os.path.exists(os.path.join(path, "index.json")):
3810+ continue
3811+
3812+ keyring_path = os.path.join(path, "device.tar.xz")
3813+ if (os.path.exists(keyring_path)
3814+ and os.path.exists("%s.asc" % keyring_path)):
3815+ self.create_device(channel_name, device_name, keyring_path)
3816+ else:
3817+ self.create_device(channel_name, device_name)
3818+
3819+ return True
3820+
3821+ def get_device(self, channel_name, device_name):
3822+ """
3823+ Returns a Device instance.
3824+ """
3825+
3826+ with channels_json(self.config, self.indexpath) as channels:
3827+ if channel_name not in channels:
3828+ raise KeyError("Couldn't find channel: %s" % channel_name)
3829+
3830+ if device_name not in channels[channel_name]['devices']:
3831+ raise KeyError("Couldn't find device: %s" % device_name)
3832+
3833+ device_path = os.path.dirname(channels[channel_name]['devices']
3834+ [device_name]['index'])
3835+
3836+ return Device(self.config, os.path.normpath("%s/%s" % (self.path,
3837+ device_path)))
3838+
3839+ def hide_channel(self, channel_name):
3840+ """
3841+ Hide a channel from the client's list.
3842+ """
3843+
3844+ with channels_json(self.config, self.indexpath, True) as channels:
3845+ if channel_name not in channels:
3846+ raise KeyError("Couldn't find channel: %s" % channel_name)
3847+
3848+ channels[channel_name]['hidden'] = True
3849+
3850+ return True
3851+
3852+ def list_channels(self):
3853+ """
3854+ Returns a dict of all existing channels and devices for each of
3855+ those.
3856+ This is simply a decoded version of channels.json
3857+ """
3858+
3859+ with channels_json(self.config, self.indexpath) as channels:
3860+ return channels
3861+
3862+ def list_devices(self, channel_name):
3863+ """
3864+ Returns the list of device names for the channel.
3865+ """
3866+
3867+ with channels_json(self.config, self.indexpath) as channels:
3868+ if channel_name not in channels:
3869+ raise KeyError("Couldn't find channel: %s" % channel_name)
3870+
3871+ return list(channels[channel_name]['devices'].keys())
3872+
3873+ def list_missing_files(self):
3874+ """
3875+ Returns a list of absolute paths that should exist but aren't
3876+ present on the filesystem.
3877+ """
3878+
3879+ all_files, empty_dirs = self.__list_existing()
3880+ referenced_files = self.__list_referenced()
3881+
3882+ return sorted(referenced_files - all_files)
3883+
3884+ def list_orphaned_files(self):
3885+ """
3886+ Returns a list of absolute paths to files that are present in the
3887+ tree but aren't referenced anywhere.
3888+ """
3889+
3890+ orphaned_files = set()
3891+
3892+ all_files, empty_dirs = self.__list_existing()
3893+ referenced_files = self.__list_referenced()
3894+
3895+ orphaned_files.update(all_files - referenced_files)
3896+ orphaned_files.update(empty_dirs)
3897+
3898+ for entry in list(orphaned_files):
3899+ if entry.endswith(".json"):
3900+ tarname = entry.replace(".json", ".tar.xz")
3901+ if tarname in referenced_files:
3902+ orphaned_files.remove(entry)
3903+
3904+ if entry.endswith(".json.asc"):
3905+ tarname = entry.replace(".json.asc", ".tar.xz")
3906+ if tarname in referenced_files:
3907+ orphaned_files.remove(entry)
3908+
3909+ return sorted(orphaned_files)
3910+
3911+ def publish_keyring(self, keyring_name):
3912+ """
3913+ Publish the keyring under gpg/
3914+ """
3915+
3916+ gpg_path = os.path.join(self.config.publish_path, "gpg")
3917+
3918+ if not os.path.exists(gpg_path):
3919+ os.mkdir(gpg_path)
3920+
3921+ keyring_path = os.path.join(self.config.gpg_keyring_path, keyring_name)
3922+
3923+ if not os.path.exists("%s.tar.xz" % keyring_path):
3924+ raise Exception("Missing keyring: %s.tar.xz" % keyring_path)
3925+
3926+ if not os.path.exists("%s.tar.xz.asc" % keyring_path):
3927+ raise Exception("Missing keyring signature: %s.tar.xz.asc" %
3928+ keyring_path)
3929+
3930+ shutil.copy("%s.tar.xz" % keyring_path, gpg_path)
3931+ shutil.copy("%s.tar.xz.asc" % keyring_path, gpg_path)
3932+
3933+ return True
3934+
3935+ def remove_channel(self, channel_name):
3936+ """
3937+ Remove a channel and everything it contains.
3938+ """
3939+
3940+ with channels_json(self.config, self.indexpath, True) as channels:
3941+ if channel_name not in channels:
3942+ raise KeyError("Couldn't find channel: %s" % channel_name)
3943+
3944+ channel_path = os.path.join(self.path, channel_name)
3945+ if os.path.exists(channel_path) and \
3946+ "alias" not in channels[channel_name] and \
3947+ "redirect" not in channels[channel_name]:
3948+ shutil.rmtree(channel_path)
3949+ channels.pop(channel_name)
3950+
3951+ return True
3952+
3953+ def remove_device(self, channel_name, device_name):
3954+ """
3955+ Remove a device and everything it contains.
3956+ """
3957+
3958+ with channels_json(self.config, self.indexpath, True) as channels:
3959+ if channel_name not in channels:
3960+ raise KeyError("Couldn't find channel: %s" % channel_name)
3961+
3962+ if device_name not in channels[channel_name]['devices']:
3963+ raise KeyError("Couldn't find device: %s" % device_name)
3964+
3965+ device_path = os.path.join(self.path, channel_name, device_name)
3966+ if os.path.exists(device_path):
3967+ shutil.rmtree(device_path)
3968+ channels[channel_name]['devices'].pop(device_name)
3969+
3970+ self.sync_aliases(channel_name)
3971+ self.sync_redirects(channel_name)
3972+
3973+ return True
3974+
3975+ def rename_channel(self, old_name, new_name):
3976+ """
3977+ Rename a channel.
3978+ """
3979+
3980+ with channels_json(self.config, self.indexpath, True) as channels:
3981+ if old_name not in channels:
3982+ raise KeyError("Couldn't find channel: %s" % old_name)
3983+
3984+ if new_name in channels:
3985+ raise KeyError("Channel already exists: %s" % new_name)
3986+
3987+ old_channel_path = os.path.join(self.path, old_name)
3988+ new_channel_path = os.path.join(self.path, new_name)
3989+ if "redirect" not in channels[old_name]:
3990+ if os.path.exists(new_channel_path):
3991+ raise Exception("Channel path already exists: %s" %
3992+ new_channel_path)
3993+
3994+ if not os.path.exists(os.path.dirname(new_channel_path)):
3995+ os.makedirs(os.path.dirname(new_channel_path))
3996+ if os.path.exists(old_channel_path):
3997+ os.rename(old_channel_path, new_channel_path)
3998+
3999+ channels[new_name] = dict(channels[old_name])
4000+
4001+ if "redirect" not in channels[new_name]:
4002+ for device_name in channels[new_name]['devices']:
4003+ index_path = "/%s/%s/index.json" % (new_name, device_name)
4004+ channels[new_name]['devices'][device_name]['index'] = \
4005+ index_path
4006+
4007+ with index_json(self.config, "%s/%s" %
4008+ (self.path, index_path), True) as index:
4009+ for image in index['images']:
4010+ for entry in image['files']:
4011+ entry['path'] = entry['path'] \
4012+ .replace("/%s/" % old_name,
4013+ "/%s/" % new_name)
4014+ entry['signature'] = entry['signature'] \
4015+ .replace("/%s/" % old_name,
4016+ "/%s/" % new_name)
4017+
4018+ channels.pop(old_name)
4019+
4020+ return True
4021+
4022+ def show_channel(self, channel_name):
4023+ """
4024+ Show a previously hidden channel in the client's list again.
4025+ """
4026+
4027+ with channels_json(self.config, self.indexpath, True) as channels:
4028+ if channel_name not in channels:
4029+ raise KeyError("Couldn't find channel: %s" % channel_name)
4030+
4031+ if "hidden" in channels[channel_name]:
4032+ channels[channel_name].pop("hidden")
4033+
4034+ return True
4035+
4036+ def set_device_keyring(self, channel_name, device_name, path):
4037+ """
4038+ Update the keyring entry for the given channel and device.
4039+ Passing None as the path will unset any existing value.
4040+ """
4041+
4042+ with channels_json(self.config, self.indexpath, True) as channels:
4043+ if channel_name not in channels:
4044+ raise KeyError("Couldn't find channel: %s" % channel_name)
4045+
4046+ if device_name not in channels[channel_name]['devices']:
4047+ raise KeyError("Couldn't find device: %s" % device_name)
4048+
4049+ abspath, relpath = tools.expand_path(path, self.path)
4050+
4051+ if not os.path.exists(abspath):
4052+ raise Exception("Specified GPG keyring doesn't exists: %s" %
4053+ abspath)
4054+
4055+ if not os.path.exists("%s.asc" % abspath):
4056+ raise Exception("The GPG keyring signature doesn't exists: "
4057+ "%s.asc" % abspath)
4058+
4059+ keyring = {}
4060+ keyring['path'] = "/%s" % "/".join(relpath.split(os.sep))
4061+ keyring['signature'] = "/%s.asc" % "/".join(relpath.split(os.sep))
4062+
4063+ channels[channel_name]['devices'][device_name]['keyring'] = keyring
4064+
4065+ return True
4066+
4067+ def sync_alias(self, channel_name):
4068+ """
4069+ Update a channel with data from its parent.
4070+ """
4071+
4072+ with channels_json(self.config, self.indexpath) as channels:
4073+ if channel_name not in channels:
4074+ raise KeyError("Couldn't find channel: %s" % channel_name)
4075+
4076+ if "alias" not in channels[channel_name] or \
4077+ channels[channel_name]['alias'] == channel_name:
4078+ raise TypeError("Not a channel alias")
4079+
4080+ target_name = channels[channel_name]['alias']
4081+
4082+ if target_name not in channels:
4083+ raise KeyError("Couldn't find target channel: %s" %
4084+ target_name)
4085+
4086+ # Start by looking for added/removed devices
4087+ devices = set(channels[channel_name]['devices'].keys())
4088+ target_devices = set(channels[target_name]['devices'].keys())
4089+
4090+ # # Remove devices dropped from the target channel
4091+ for device in devices - target_devices:
4092+ self.remove_device(channel_name, device)
4093+
4094+ # # Add any missing device
4095+ for device in target_devices - devices:
4096+ self.create_device(channel_name, device)
4097+
4098+ # Iterate through all the devices to import builds
4099+ for device_name in target_devices:
4100+ device = self.get_device(channel_name, device_name)
4101+ target_device = self.get_device(target_name, device_name)
4102+
4103+ # Extract all the current builds
4104+ device_images = {(image['version'], image.get('base', None),
4105+ image['type'])
4106+ for image in device.list_images()}
4107+
4108+ target_images = {(image['version'], image.get('base', None),
4109+ image['type'])
4110+ for image in target_device.list_images()}
4111+
4112+ # Remove images dropped from the target device
4113+ for image in device_images - target_images:
4114+ device.remove_image(image[2], image[0], base=image[1])
4115+
4116+ # Create the path if it doesn't exist
4117+ if not os.path.exists(device.path):
4118+ os.makedirs(device.path)
4119+
4120+ # Add any missing image
4121+ with index_json(self.config, device.indexpath, True) as index:
4122+ for image in sorted(target_images - device_images):
4123+ orig = [entry for entry in target_device.list_images()
4124+ if entry['type'] == image[2] and
4125+ entry['version'] == image[0] and
4126+ entry.get('base', None) == image[1]]
4127+
4128+ entry = copy.deepcopy(orig[0])
4129+
4130+ # Remove the current version tarball
4131+ version_detail = None
4132+ version_index = len(entry['files'])
4133+ for fentry in entry['files']:
4134+ if fentry['path'].endswith("version-%s.tar.xz" %
4135+ entry['version']):
4136+
4137+ version_path = "%s/%s" % (
4138+ self.config.publish_path, fentry['path'])
4139+
4140+ if os.path.exists(
4141+ version_path.replace(".tar.xz",
4142+ ".json")):
4143+ with open(
4144+ version_path.replace(
4145+ ".tar.xz", ".json")) as fd:
4146+ metadata = json.loads(fd.read())
4147+ if "channel.ini" in metadata:
4148+ version_detail = \
4149+ metadata['channel.ini'].get(
4150+ "version_detail", None)
4151+
4152+ version_index = fentry['order']
4153+ entry['files'].remove(fentry)
4154+ break
4155+
4156+ # Generate a new one
4157+ path = os.path.join(device.path,
4158+ "version-%s.tar.xz" %
4159+ entry['version'])
4160+ abspath, relpath = tools.expand_path(path,
4161+ device.pub_path)
4162+ if not os.path.exists(abspath):
4163+ tools.generate_version_tarball(
4164+ self.config, channel_name, device_name,
4165+ str(entry['version']),
4166+ abspath.replace(".xz", ""),
4167+ version_detail=version_detail,
4168+ channel_target=target_name)
4169+ tools.xz_compress(abspath.replace(".xz", ""))
4170+ os.remove(abspath.replace(".xz", ""))
4171+ gpg.sign_file(self.config, "image-signing",
4172+ abspath)
4173+
4174+ with open(abspath, "rb") as fd:
4175+ checksum = sha256(fd.read()).hexdigest()
4176+
4177+ # Generate the new file entry
4178+ version = {}
4179+ version['order'] = version_index
4180+ version['path'] = "/%s" % "/".join(
4181+ relpath.split(os.sep))
4182+ version['signature'] = "/%s.asc" % "/".join(
4183+ relpath.split(os.sep))
4184+ version['checksum'] = checksum
4185+ version['size'] = int(os.stat(abspath).st_size)
4186+
4187+ # And add it
4188+ entry['files'].append(version)
4189+ index['images'].append(entry)
4190+
4191+ # Sync phased-percentage
4192+ versions = sorted({entry[0] for entry in target_images})
4193+ if versions:
4194+ device.set_phased_percentage(
4195+ versions[-1],
4196+ target_device.get_phased_percentage(versions[-1]))
4197+
4198+ return True
4199+
4200+ def sync_aliases(self, channel_name):
4201+ """
4202+ Update any channel that's an alias of the current one.
4203+ """
4204+
4205+ with channels_json(self.config, self.indexpath) as channels:
4206+ if channel_name not in channels:
4207+ raise KeyError("Couldn't find channel: %s" % channel_name)
4208+
4209+ alias_channels = [name
4210+ for name, channel
4211+ in self.list_channels().items()
4212+ if channel.get("alias", None) == channel_name
4213+ and name != channel_name]
4214+
4215+ for alias_name in alias_channels:
4216+ self.sync_alias(alias_name)
4217+
4218+ return True
4219+
4220+ def sync_redirects(self, channel_name):
4221+ """
4222+ Update any channel that's a redirect of the current one.
4223+ """
4224+
4225+ with channels_json(self.config, self.indexpath) as channels:
4226+ if channel_name not in channels:
4227+ raise KeyError("Couldn't find channel: %s" % channel_name)
4228+
4229+ redirect_channels = [name
4230+ for name, channel
4231+ in self.list_channels().items()
4232+ if channel.get("redirect", None) == channel_name]
4233+
4234+ for redirect_name in redirect_channels:
4235+ self.remove_channel(redirect_name)
4236+ self.create_channel_redirect(redirect_name, channel_name)
4237+
4238+ return True
4239+
4240+
4241+class Device:
4242+ def __init__(self, config, path):
4243+ self.config = config
4244+ self.pub_path = self.config.publish_path
4245+ self.path = path
4246+ self.indexpath = os.path.join(path, "index.json")
4247+
4248+ def create_image(self, entry_type, version, description, paths,
4249+ base=None, bootme=False, minversion=None):
4250+ """
4251+ Add a new image to the index.
4252+ """
4253+
4254+ if len(paths) == 0:
4255+ raise Exception("No file passed for this image.")
4256+
4257+ files = []
4258+ count = 0
4259+
4260+ with index_json(self.config, self.indexpath, True) as index:
4261+ for path in paths:
4262+ abspath, relpath = tools.expand_path(path, self.pub_path)
4263+
4264+ if not os.path.exists(abspath):
4265+ raise Exception("Specified file doesn't exists: %s"
4266+ % abspath)
4267+
4268+ if not os.path.exists("%s.asc" % abspath):
4269+ raise Exception("The GPG file signature doesn't exists: "
4270+ "%s.asc" % abspath)
4271+
4272+ with open(abspath, "rb") as fd:
4273+ checksum = sha256(fd.read()).hexdigest()
4274+
4275+ files.append({'order': count,
4276+ 'path': "/%s" % "/".join(relpath.split(os.sep)),
4277+ 'checksum': checksum,
4278+ 'signature': "/%s.asc" % "/".join(
4279+ relpath.split(os.sep)),
4280+ 'size': int(os.stat(abspath).st_size)})
4281+
4282+ count += 1
4283+
4284+ image = {}
4285+
4286+ if entry_type == "delta":
4287+ if not base:
4288+ raise KeyError("Missing base version for delta image.")
4289+ image['base'] = int(base)
4290+ elif base:
4291+ raise KeyError("Base version set for full image.")
4292+
4293+ if bootme:
4294+ image['bootme'] = bootme
4295+
4296+ if minversion:
4297+ if entry_type == "delta":
4298+ raise KeyError("Minimum version set for delta image.")
4299+ image['minversion'] = minversion
4300+
4301+ image['description'] = description
4302+ image['files'] = files
4303+ image['type'] = entry_type
4304+ image['version'] = version
4305+ index['images'].append(image)
4306+
4307+ return True
4308+
4309+ def expire_images(self, max_images):
4310+ """
4311+ Expire images keeping the last <max_images> full images and
4312+ their deltas. Also remove any delta that has an expired image
4313+ as its base.
4314+ """
4315+
4316+ full_images = sorted([image for image in self.list_images()
4317+ if image['type'] == "full"],
4318+ key=lambda image: image['version'])
4319+
4320+ to_remove = len(full_images) - max_images
4321+ if to_remove <= 0:
4322+ return True
4323+
4324+ full_remove = full_images[:to_remove]
4325+ remove_version = [image['version'] for image in full_remove]
4326+
4327+ for image in self.list_images():
4328+ if image['type'] == "full":
4329+ if image['version'] in remove_version:
4330+ self.remove_image(image['type'], image['version'])
4331+ else:
4332+ if (image['version'] in remove_version
4333+ or image['base'] in remove_version):
4334+ self.remove_image(image['type'], image['version'],
4335+ image['base'])
4336+
4337+ return True
4338+
4339+ def get_image(self, entry_type, version, base=None):
4340+ """
4341+ Look for an image and return a dict representation of it.
4342+ """
4343+
4344+ if entry_type not in ("full", "delta"):
4345+ raise ValueError("Invalid image type: %s" % entry_type)
4346+
4347+ if entry_type == "delta" and not base:
4348+ raise ValueError("Missing base version for delta image.")
4349+
4350+ with index_json(self.config, self.indexpath) as index:
4351+ match = []
4352+ for image in index['images']:
4353+ if (image['type'] == entry_type and image['version'] == version
4354+ and (image['type'] == "full" or
4355+ image['base'] == base)):
4356+ match.append(image)
4357+
4358+ if len(match) != 1:
4359+ raise IndexError("Couldn't find a match.")
4360+
4361+ return match[0]
4362+
4363+ def get_phased_percentage(self, version):
4364+ """
4365+ Returns the phasing percentage for a given version.
4366+ """
4367+
4368+ for entry in self.list_images():
4369+ if entry['version'] == version:
4370+ if "phased-percentage" in entry:
4371+ return entry['phased-percentage']
4372+ else:
4373+ return 100
4374+ else:
4375+ raise IndexError("Invalid version number: %s" % version)
4376+
4377+ def list_images(self):
4378+ """
4379+ Returns a list of all existing images, each image is a dict.
4380+ This is simply a decoded version of the image array in index.json
4381+ """
4382+
4383+ with index_json(self.config, self.indexpath) as index:
4384+ return index['images']
4385+
4386+ def remove_image(self, entry_type, version, base=None):
4387+ """
4388+ Remove an image.
4389+ """
4390+
4391+ image = self.get_image(entry_type, version, base)
4392+ with index_json(self.config, self.indexpath, True) as index:
4393+ index['images'].remove(image)
4394+
4395+ return True
4396+
4397+ def set_description(self, entry_type, version, description,
4398+ translations={}, base=None):
4399+ """
4400+ Set or update an image description.
4401+ """
4402+
4403+ if translations and not isinstance(translations, dict):
4404+ raise TypeError("translations must be a dict.")
4405+
4406+ image = self.get_image(entry_type, version, base)
4407+
4408+ with index_json(self.config, self.indexpath, True) as index:
4409+ for entry in index['images']:
4410+ if entry != image:
4411+ continue
4412+
4413+ entry['description'] = description
4414+ for langid, value in translations.items():
4415+ entry['description_%s' % langid] = value
4416+
4417+ break
4418+
4419+ return True
4420+
4421+ def set_phased_percentage(self, version, percentage):
4422+ """
4423+ Set the phasing percentage on an image version.
4424+ """
4425+
4426+ if not isinstance(percentage, int):
4427+ raise TypeError("percentage must be an integer.")
4428+
4429+ if percentage < 0 or percentage > 100:
4430+ raise ValueError("percentage must be >= 0 and <= 100.")
4431+
4432+ with index_json(self.config, self.indexpath, True) as index:
4433+ versions = sorted({entry['version'] for entry in index['images']})
4434+
4435+ last_version = None
4436+ if versions:
4437+ last_version = versions[-1]
4438+
4439+ if version not in versions:
4440+ raise IndexError("Version doesn't exist: %s" % version)
4441+
4442+ if version != last_version:
4443+ raise Exception("Phased percentage can only be set on the "
4444+ "latest image")
4445+
4446+ for entry in index['images']:
4447+ if entry['version'] == version:
4448+ if percentage == 100 and "phased-percentage" in entry:
4449+ entry.pop("phased-percentage")
4450+ elif percentage != 100:
4451+ entry['phased-percentage'] = percentage
4452+
4453+ return True
4454
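To make the tree API above concrete, here is a hedged walk-through of the main
entry points. It assumes an existing publish directory and working signing
keys; the channel and device names are purely illustrative:

  # Hypothetical usage of systemimage.tree; names below are made up.
  from systemimage import config, tree

  conf = config.Config("etc/config")  # publish_path must already exist
  t = tree.Tree(conf)

  t.create_channel("devel")
  t.create_device("devel", "mako")  # writes an empty, signed index.json
  t.create_channel_alias("devel-proposed", "devel")

  device = t.get_device("devel", "mako")
  print(t.list_devices("devel"))  # ['mako']
  print(device.list_images())     # [] until images are imported

  # Phasing can only be set on the newest image version, e.g.:
  # device.set_phased_percentage(20141114, 50)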
4455=== added directory 'secret'
4456=== added directory 'secret/gpg'
4457=== added directory 'secret/gpg/keyrings'
4458=== added directory 'secret/gpg/keys'
4459=== added directory 'secret/ssh'
4460=== added directory 'state'
4461=== added directory 'tests'
4462=== added file 'tests/__init__.py'
4463=== added file 'tests/generate-keys'
4464--- tests/generate-keys 1970-01-01 00:00:00 +0000
4465+++ tests/generate-keys 2014-11-14 10:18:00 +0000
4466@@ -0,0 +1,52 @@
4467+#!/usr/bin/python
4468+# -*- coding: utf-8 -*-
4469+
4470+# Copyright (C) 2013 Canonical Ltd.
4471+# Author: Stéphane Graber <stgraber@ubuntu.com>
4472+
4473+# This program is free software: you can redistribute it and/or modify
4474+# it under the terms of the GNU General Public License as published by
4475+# the Free Software Foundation; version 3 of the License.
4476+#
4477+# This program is distributed in the hope that it will be useful,
4478+# but WITHOUT ANY WARRANTY; without even the implied warranty of
4479+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4480+# GNU General Public License for more details.
4481+#
4482+# You should have received a copy of the GNU General Public License
4483+# along with this program. If not, see <http://www.gnu.org/licenses/>.
4484+
4485+import os
4486+import shutil
4487+
4488+import sys
4489+sys.path.insert(0, 'lib')
4490+
4491+from systemimage import gpg
4492+
4493+target_dir = "tests/keys/"
4494+if not os.path.exists(target_dir):
4495+ raise Exception("Missing tests/keys directory")
4496+
4497+keys = (("archive-master", "[TESTING] Ubuntu Archive Master Signing Key",
4498+ "ftpmaster@ubuntu.com", 0),
4499+ ("image-master", "[TESTING] Ubuntu System Image Master Signing Key",
4500+ "system-image@ubuntu.com", 0),
4501+ ("image-signing", "[TESTING] Ubuntu System Image Signing Key (YYYY)",
4502+ "system-image@ubuntu.com", "2y"),
4503+ ("device-signing", "[TESTING] Random OEM Signing Key (YYYY)",
4504+ "system-image@ubuntu.com", "2y"))
4505+
4506+for key_name, key_description, key_email, key_expiry in keys:
4507+ key_dir = "%s/%s/" % (target_dir, key_name)
4508+ if os.path.exists(key_dir):
4509+ shutil.rmtree(key_dir)
4510+ os.makedirs(key_dir)
4511+
4512+ uid = gpg.generate_signing_key(key_dir, key_description, key_email,
4513+ key_expiry)
4514+
4515+ print("%s <%s>" % (uid.name, uid.email))
4516+
4517+# All done, let's mark it as done
4518+open("tests/keys/generated", "w+").close()
4519
4520=== added directory 'tests/keys'
4521=== added file 'tests/run'
4522--- tests/run 1970-01-01 00:00:00 +0000
4523+++ tests/run 2014-11-14 10:18:00 +0000
4524@@ -0,0 +1,60 @@
4525+#!/usr/bin/python
4526+# -*- coding: utf-8 -*-
4527+
4528+# Copyright (C) 2013 Canonical Ltd.
4529+# Author: Stéphane Graber <stgraber@ubuntu.com>
4530+
4531+# This program is free software: you can redistribute it and/or modify
4532+# it under the terms of the GNU General Public License as published by
4533+# the Free Software Foundation; version 3 of the License.
4534+#
4535+# This program is distributed in the hope that it will be useful,
4536+# but WITHOUT ANY WARRANTY; without even the implied warranty of
4537+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4538+# GNU General Public License for more details.
4539+#
4540+# You should have received a copy of the GNU General Public License
4541+# along with this program. If not, see <http://www.gnu.org/licenses/>.
4542+
4543+# Dependencies:
4544+# - python2 (>= 2.7): python-gpgme, python-coverage
4545+# - python3 (>= 3.2): python3-gpgme
4546+
4547+import glob
4548+import os
4549+import re
4550+import shutil
4551+import sys
4552+import unittest
4553+
4554+have_coverage = True
4555+try:
4556+ from coverage import coverage
4557+ cov = coverage()
4558+ cov.start()
4559+except ImportError:
4560+ print("No coverage report, make sure python-coverage is installed")
4561+ have_coverage = False
4562+
4563+sys.path.insert(0, 'lib')
4564+
4565+if len(sys.argv) > 1:
4566+ test_filter = sys.argv[1]
4567+else:
4568+ test_filter = ''
4569+
4570+tests = [t[:-3] for t in os.listdir('tests')
4571+ if t.startswith('test_') and t.endswith('.py') and
4572+ re.search(test_filter, t)]
4573+tests.sort()
4574+suite = unittest.TestLoader().loadTestsFromNames(tests)
4575+res = unittest.TextTestRunner(verbosity=2).run(suite)
4576+
4577+if have_coverage:
4578+ if os.path.exists('tests/coverage'):
4579+ shutil.rmtree('tests/coverage')
4580+ cov.stop()
4581+ cov.html_report(include=glob.glob("lib/systemimage/*.py"),
4582+ directory='tests/coverage')
4583+ print("")
4584+ cov.report(include=glob.glob("lib/systemimage/*.py"))
4585
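Since tests/run treats its first command line argument as a regular expression
matched against the test file names, a subset of the suites can be selected:

  $ ./tests/run tree          # only tests/test_tree.py
  $ ./tests/run 'config|gpg'  # tests/test_config.py and tests/test_gpg.py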
4586=== added file 'tests/test_config.py'
4587--- tests/test_config.py 1970-01-01 00:00:00 +0000
4588+++ tests/test_config.py 2014-11-14 10:18:00 +0000
4589@@ -0,0 +1,281 @@
4590+# -*- coding: utf-8 -*-
4591+
4592+# Copyright (C) 2013 Canonical Ltd.
4593+# Author: Stéphane Graber <stgraber@ubuntu.com>
4594+
4595+# This program is free software: you can redistribute it and/or modify
4596+# it under the terms of the GNU General Public License as published by
4597+# the Free Software Foundation; version 3 of the License.
4598+#
4599+# This program is distributed in the hope that it will be useful,
4600+# but WITHOUT ANY WARRANTY; without even the implied warranty of
4601+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4602+# GNU General Public License for more details.
4603+#
4604+# You should have received a copy of the GNU General Public License
4605+# along with this program. If not, see <http://www.gnu.org/licenses/>.
4606+
4607+import os
4608+import shutil
4609+import tempfile
4610+import unittest
4611+
4612+from systemimage import config
4613+from systemimage import tools
4614+
4615+try:
4616+ from unittest import mock
4617+except ImportError:
4618+ import mock
4619+
4620+
4621+class ConfigTests(unittest.TestCase):
4622+ def setUp(self):
4623+ temp_directory = tempfile.mkdtemp()
4624+ self.temp_directory = temp_directory
4625+
4626+ def tearDown(self):
4627+ shutil.rmtree(self.temp_directory)
4628+
4629+ @mock.patch("subprocess.call")
4630+ def test_config(self, mock_call):
4631+ # Good complete config
4632+ config_path = os.path.join(self.temp_directory, "config")
4633+ key_path = os.path.join(self.temp_directory, "key")
4634+
4635+ with open(config_path, "w+") as fd:
4636+ fd.write("""[global]
4637+base_path = %s
4638+mirrors = a, b
4639+
4640+[mirror_default]
4641+ssh_user = user
4642+ssh_key = key
4643+ssh_port = 22
4644+ssh_command = command
4645+
4646+[mirror_a]
4647+ssh_host = hosta
4648+
4649+[mirror_b]
4650+ssh_host = hostb
4651+""" % self.temp_directory)
4652+
4653+ conf = config.Config(config_path)
4654+
4655+ # Test ssh sync
4656+ tools.sync_mirrors(conf)
4657+ expected_calls = [((['ssh', '-i', key_path, '-l', 'user',
4658+ '-p', '22', 'hosta', 'command'],), {}),
4659+ ((['ssh', '-i', key_path, '-l', 'user',
4660+ '-p', '22', 'hostb', 'command'],), {})]
4661+ self.assertEqual(mock_call.call_args_list, expected_calls)
4662+
4663+ # Invalid config
4664+ invalid_config_path = os.path.join(self.temp_directory,
4665+ "invalid_config")
4666+ with open(invalid_config_path, "w+") as fd:
4667+ fd.write("""invalid""")
4668+
4669+ self.assertEqual(config.parse_config(invalid_config_path), {})
4670+
4671+ self.assertRaises(
4672+ Exception, config.Config, os.path.join(self.temp_directory,
4673+ "invalid"))
4674+
4675+ # Test loading config from default location
4676+ config_file = os.path.join(os.path.dirname(config.__file__),
4677+ "../../etc/config")
4678+
4679+ old_pwd = os.getcwd()
4680+ os.chdir(self.temp_directory)
4681+ if not os.path.exists(config_file):
4682+ self.assertRaises(Exception, config.Config)
4683+ else:
4684+ self.assertTrue(config.Config())
4685+ os.chdir(old_pwd)
4686+
4687+ # Empty config
4688+ empty_config_path = os.path.join(self.temp_directory,
4689+ "empty_config")
4690+ with open(empty_config_path, "w+") as fd:
4691+ fd.write("")
4692+
4693+ conf = config.Config(empty_config_path)
4694+ self.assertEqual(conf.base_path, os.getcwd())
4695+
4696+ # Single mirror config
4697+ single_mirror_config_path = os.path.join(self.temp_directory,
4698+ "single_mirror_config")
4699+ with open(single_mirror_config_path, "w+") as fd:
4700+ fd.write("""[global]
4701+mirrors = a
4702+
4703+[mirror_default]
4704+ssh_user = user
4705+ssh_key = key
4706+ssh_port = 22
4707+ssh_command = command
4708+
4709+[mirror_a]
4710+ssh_host = host
4711+""")
4712+
4713+ conf = config.Config(single_mirror_config_path)
4714+ self.assertEqual(conf.mirrors['a'].ssh_command, "command")
4715+
4716+ # Missing mirror_default
4717+ missing_default_config_path = os.path.join(self.temp_directory,
4718+ "missing_default_config")
4719+ with open(missing_default_config_path, "w+") as fd:
4720+ fd.write("""[global]
4721+mirrors = a
4722+
4723+[mirror_a]
4724+ssh_host = host
4725+""")
4726+
4727+ self.assertRaises(KeyError, config.Config, missing_default_config_path)
4728+
4729+ # Missing mirror key
4730+ missing_key_config_path = os.path.join(self.temp_directory,
4731+ "missing_key_config")
4732+ with open(missing_key_config_path, "w+") as fd:
4733+ fd.write("""[global]
4734+mirrors = a
4735+
4736+[mirror_default]
4737+ssh_user = user
4738+ssh_port = 22
4739+ssh_command = command
4740+
4741+[mirror_a]
4742+ssh_host = host
4743+""")
4744+
4745+ self.assertRaises(KeyError, config.Config, missing_key_config_path)
4746+
4747+ # Missing mirror
4748+ missing_mirror_config_path = os.path.join(self.temp_directory,
4749+ "missing_mirror_config")
4750+ with open(missing_mirror_config_path, "w+") as fd:
4751+ fd.write("""[global]
4752+mirrors = a
4753+
4754+[mirror_default]
4755+ssh_user = user
4756+ssh_port = 22
4757+ssh_command = command
4758+ssh_key = key
4759+""")
4760+
4761+ self.assertRaises(KeyError, config.Config, missing_mirror_config_path)
4762+
4763+ # Missing ssh_host
4764+ missing_host_config_path = os.path.join(self.temp_directory,
4765+ "missing_host_config")
4766+ with open(missing_host_config_path, "w+") as fd:
4767+ fd.write("""[global]
4768+mirrors = a
4769+
4770+[mirror_default]
4771+ssh_user = user
4772+ssh_port = 22
4773+ssh_command = command
4774+ssh_key = key
4775+
4776+[mirror_a]
4777+ssh_user = other-user
4778+""")
4779+
4780+ self.assertRaises(KeyError, config.Config, missing_host_config_path)
4781+
4782+ # Test with env path
4783+ test_path = os.path.join(self.temp_directory, "a", "b")
4784+ os.makedirs(os.path.join(test_path, "etc"))
4785+ with open(os.path.join(test_path, "etc", "config"), "w+") as fd:
4786+ fd.write("[global]\nbase_path = a/b/c")
4787+ os.environ['SYSTEM_IMAGE_ROOT'] = test_path
4788+ test_config = config.Config()
4789+ self.assertEqual(test_config.base_path, "a/b/c")
4790+
4791+ # Test the channels config
4792+ # # Multiple channels
4793+ channel_config_path = os.path.join(self.temp_directory,
4794+ "channel_config")
4795+ with open(channel_config_path, "w+") as fd:
4796+ fd.write("""[global]
4797+channels = a, b
4798+
4799+[channel_a]
4800+type = manual
4801+fullcount = 10
4802+
4803+[channel_b]
4804+type = auto
4805+versionbase = 5
4806+deltabase = a, b
4807+files = a, b
4808+file_a = test;arg1;arg2
4809+file_b = test;arg3;arg4
4810+""")
4811+
4812+ conf = config.Config(channel_config_path)
4813+ self.assertEqual(
4814+ conf.channels['b'].files,
4815+ [{'name': 'a', 'generator': 'test',
4816+ 'arguments': ['arg1', 'arg2']},
4817+ {'name': 'b', 'generator': 'test',
4818+ 'arguments': ['arg3', 'arg4']}])
4819+
4820+ self.assertEqual(conf.channels['a'].fullcount, 10)
4821+ self.assertEqual(conf.channels['a'].versionbase, 1)
4822+ self.assertEqual(conf.channels['a'].deltabase, ['a'])
4823+
4824+ self.assertEqual(conf.channels['b'].fullcount, 0)
4825+ self.assertEqual(conf.channels['b'].versionbase, 5)
4826+ self.assertEqual(conf.channels['b'].deltabase, ["a", "b"])
4827+
4828+ # # Single channel
4829+ single_channel_config_path = os.path.join(self.temp_directory,
4830+ "single_channel_config")
4831+ with open(single_channel_config_path, "w+") as fd:
4832+ fd.write("""[global]
4833+channels = a
4834+
4835+[channel_a]
4836+deltabase = a
4837+versionbase = 1
4838+files = a
4839+file_a = test;arg1;arg2
4840+""")
4841+
4842+ conf = config.Config(single_channel_config_path)
4843+ self.assertEqual(
4844+ conf.channels['a'].files,
4845+ [{'name': 'a', 'generator': 'test',
4846+ 'arguments': ['arg1', 'arg2']}])
4847+
4848+ # # Invalid channel
4849+ invalid_channel_config_path = os.path.join(self.temp_directory,
4850+ "invalid_channel_config")
4851+ with open(invalid_channel_config_path, "w+") as fd:
4852+ fd.write("""[global]
4853+channels = a
4854+""")
4855+
4856+ self.assertRaises(KeyError, config.Config, invalid_channel_config_path)
4857+
4858+ # # Invalid file
4859+ invalid_file_channel_config_path = os.path.join(
4860+ self.temp_directory, "invalid_file_channel_config")
4861+ with open(invalid_file_channel_config_path, "w+") as fd:
4862+ fd.write("""[global]
4863+channels = a
4864+
4865+[channel_a]
4866+files = a
4867+""")
4868+
4869+ self.assertRaises(KeyError, config.Config,
4870+ invalid_file_channel_config_path)
4871
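For reference, the channel grammar exercised by the tests above is: "files"
lists the logical payload names, and each "file_<name>" value is a semicolon
separated generator name followed by its arguments, which config.Config parses
into {'name': ..., 'generator': ..., 'arguments': [...]}. A minimal channel
stanza therefore looks like this (placeholder values, as in the tests):

  [global]
  channels = a

  [channel_a]
  deltabase = a
  versionbase = 1
  files = a
  file_a = test;arg1;arg2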
4872=== added file 'tests/test_diff.py'
4873--- tests/test_diff.py 1970-01-01 00:00:00 +0000
4874+++ tests/test_diff.py 2014-11-14 10:18:00 +0000
4875@@ -0,0 +1,279 @@
4876+# -*- coding: utf-8 -*-
4877+
4878+# Copyright (C) 2013 Canonical Ltd.
4879+# Author: Stéphane Graber <stgraber@ubuntu.com>
4880+
4881+# This program is free software: you can redistribute it and/or modify
4882+# it under the terms of the GNU General Public License as published by
4883+# the Free Software Foundation; version 3 of the License.
4884+#
4885+# This program is distributed in the hope that it will be useful,
4886+# but WITHOUT ANY WARRANTY; without even the implied warranty of
4887+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4888+# GNU General Public License for more details.
4889+#
4890+# You should have received a copy of the GNU General Public License
4891+# along with this program. If not, see <http://www.gnu.org/licenses/>.
4892+
4893+import shutil
4894+import sys
4895+import tarfile
4896+import tempfile
4897+import unittest
4898+
4899+from io import BytesIO, StringIO
4900+from systemimage.diff import ImageDiff, compare_files
4901+
4902+
4903+class DiffTests(unittest.TestCase):
4904+ def setUp(self):
4905+ temp_directory = tempfile.mkdtemp()
4906+
4907+ source_tarball_path = "%s/source.tar" % temp_directory
4908+ target_tarball_path = "%s/target.tar" % temp_directory
4909+
4910+ source_tarball = tarfile.open(source_tarball_path, "w")
4911+ target_tarball = tarfile.open(target_tarball_path, "w")
4912+
4913+ # Standard file
4914+ a = tarfile.TarInfo()
4915+ a.name = "a"
4916+ a.size = 4
4917+
4918+ # Standard file
4919+ b = tarfile.TarInfo()
4920+ b.name = "b"
4921+ b.size = 4
4922+
4923+ # Standard directory
4924+ c_dir = tarfile.TarInfo()
4925+ c_dir.name = "c"
4926+ c_dir.type = tarfile.DIRTYPE
4927+ c_dir.mode = 0o755
4928+
4929+ # Standard file
4930+ c = tarfile.TarInfo()
4931+ c.name = "c/c"
4932+ c.size = 4
4933+
4934+ # Standard file
4935+ d_source = tarfile.TarInfo()
4936+ d_source.name = "c/d"
4937+ d_source.size = 8
4938+ d_source.mtime = 1000
4939+
4940+ # Standard file
4941+ d_target = tarfile.TarInfo()
4942+ d_target.name = "c/d"
4943+ d_target.size = 8
4944+ d_target.mtime = 1234
4945+
4946+ # Symlink
4947+ e = tarfile.TarInfo()
4948+ e.name = "e"
4949+ e.type = tarfile.SYMTYPE
4950+ e.linkname = "a"
4951+
4952+ # Hard link
4953+ f = tarfile.TarInfo()
4954+ f.name = "f"
4955+ f.type = tarfile.LNKTYPE
4956+ f.linkname = "a"
4957+
4958+ # Standard file
4959+ g_source = tarfile.TarInfo()
4960+ g_source.name = "c/g"
4961+ g_source.size = 4
4962+ g_source.mtime = 1000
4963+
4964+ # Standard file
4965+ g_target = tarfile.TarInfo()
4966+ g_target.name = "c/g"
4967+ g_target.size = 4
4968+ g_target.mtime = 1001
4969+
4970+ # Hard link
4971+ h_source = tarfile.TarInfo()
4972+ h_source.name = "c/h"
4973+ h_source.type = tarfile.LNKTYPE
4974+ h_source.linkname = "d"
4975+ h_source.mtime = 1000
4976+
4977+ # Hard link
4978+ h_target = tarfile.TarInfo()
4979+ h_target.name = "c/h"
4980+ h_target.type = tarfile.LNKTYPE
4981+ h_target.linkname = "d"
4982+ h_target.mtime = 1001
4983+
4984+ # Hard link
4985+ i = tarfile.TarInfo()
4986+ i.name = "c/a_i"
4987+ i.type = tarfile.LNKTYPE
4988+ i.linkname = "c"
4989+
4990+ # Dangling symlink
4991+ j = tarfile.TarInfo()
4992+ j.name = "c/j"
4993+ j.type = tarfile.SYMTYPE
4994+ j.linkname = "j_non-existent"
4995+
4996+ # Standard directory
4997+ k_dir = tarfile.TarInfo()
4998+ k_dir.name = "dir"
4999+ k_dir.type = tarfile.DIRTYPE
5000+ k_dir.mode = 0o755
The diff has been truncated for viewing.
