Merge lp:~cjwatson/ubuntu-system-image/cdimage-custom into lp:~registry/ubuntu-system-image/client

Proposed by Colin Watson
Status: Superseded
Proposed branch: lp:~cjwatson/ubuntu-system-image/cdimage-custom
Merge into: lp:~registry/ubuntu-system-image/client
Diff against target: 7377 lines (+7236/-0) (has conflicts)
25 files modified
.bzrignore (+9/-0)
README (+12/-0)
bin/copy-image (+308/-0)
bin/generate-keyrings (+87/-0)
bin/generate-keys (+61/-0)
bin/import-images (+305/-0)
bin/set-phased-percentage (+90/-0)
bin/si-shell (+79/-0)
etc/config.example (+48/-0)
lib/systemimage/config.py (+206/-0)
lib/systemimage/diff.py (+242/-0)
lib/systemimage/generators.py (+1173/-0)
lib/systemimage/gpg.py (+239/-0)
lib/systemimage/tools.py (+367/-0)
lib/systemimage/tree.py (+999/-0)
tests/generate-keys (+52/-0)
tests/run (+60/-0)
tests/test_config.py (+281/-0)
tests/test_diff.py (+265/-0)
tests/test_generators.py (+1039/-0)
tests/test_gpg.py (+163/-0)
tests/test_static.py (+78/-0)
tests/test_tools.py (+297/-0)
tests/test_tree.py (+679/-0)
utils/check-latest (+97/-0)
Conflict adding file .bzrignore.  Moved existing file to .bzrignore.moved.
To merge this branch: bzr merge lp:~cjwatson/ubuntu-system-image/cdimage-custom
Reviewer: Registry Administrators
Status: Pending
Review via email: mp+237941@code.launchpad.net

This proposal has been superseded by a proposal from 2014-10-10.

Commit message

Add a new cdimage-custom generator.

Description of the change

Add a new cdimage-custom generator.

This is essentially a clone-and-hack of cdimage-ubuntu, somewhat simplified. It goes along with recent changes to ubuntu-cdimage, all with the aim of fixing bug 1367332 (moving some click packages to /custom) in a single step for the community Ubuntu images.
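
For reference, a channel would pick the new generator up through its file list in etc/config. A hypothetical entry (path, series and channel name invented for illustration, assuming cdimage-custom takes the same cdimage-path/series/options arguments as the cdimage-ubuntu generator it was cloned from):

    [channel_trusty-custom]
    type = auto
    files = ubuntu, device, custom, version
    file_custom = cdimage-custom;/srv/cdimage/ubuntu-touch-custom/daily-preinstalled;trusty,import=good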


Unmerged revisions

246. By Colin Watson

Add a new cdimage-custom generator.

245. By Stéphane Graber

Drop system/android/cache/recovery from core image.

244. By Stéphane Graber

Hash http filepaths by default using a combination of the URL and version string, update code to pass current pep-8 test.

243. By Stéphane Graber

Fix variable name conflicts.

242. By Stéphane Graber

Use a comma as the separator to avoid ini parsing errors.

241. By Stéphane Graber

Add support for device overrides.

240. By Stéphane Graber

Add device name to version tarball.

239. By Stéphane Graber

Fix incorrect path for download cache in core image

238. By Stéphane Graber

Add /android/cache/recovery to core builds for now.

237. By Stéphane Graber

Skip android bits for non-touch

Preview Diff

=== added file '.bzrignore'
--- .bzrignore 1970-01-01 00:00:00 +0000
+++ .bzrignore 2014-10-10 11:11:17 +0000
@@ -0,0 +1,9 @@
1etc/config
2lib/phablet/__pycache__
3secret/gpg/keyrings/*
4secret/gpg/keys/*
5secret/ssh/*
6tests/coverage
7tests/keys/*
8www/*
9state/*
010
=== renamed file '.bzrignore' => '.bzrignore.moved'
=== added file 'README'
--- README 1970-01-01 00:00:00 +0000
+++ README 2014-10-10 11:11:17 +0000
@@ -0,0 +1,12 @@
1Runtime dependencies:
2 - pxz | xz-utils
3 - python3, python3-gpgme | python, python-gpgme
4 - e2fsprogs
5 - android-tools-fsutils
6 - abootimg
7
8Test dependencies:
9 - python-mock, python3-mock
10 - python-coverage, python3-coverage
11 - pep8
12 - pyflakes3, pyflakes
013
=== added directory 'bin'
=== added file 'bin/copy-image'
--- bin/copy-image 1970-01-01 00:00:00 +0000
+++ bin/copy-image 2014-10-10 11:11:17 +0000
@@ -0,0 +1,308 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import json
20import os
21import sys
22sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
23
24from systemimage import config, generators, tools, tree
25
26import argparse
27import fcntl
28import logging
29
30if __name__ == '__main__':
31 parser = argparse.ArgumentParser(description="image copier")
32 parser.add_argument("source_channel", metavar="SOURCE-CHANNEL")
33 parser.add_argument("destination_channel", metavar="DESTINATION-CHANNEL")
34 parser.add_argument("device", metavar="DEVICE")
35 parser.add_argument("version", metavar="VERSION", type=int)
36 parser.add_argument("-k", "--keep-version", action="store_true",
37                        help="Keep the original version number")
38 parser.add_argument("--verbose", "-v", action="count", default=0)
39
40 args = parser.parse_args()
41
42 # Setup logging
43 formatter = logging.Formatter(
44 "%(asctime)s %(levelname)s %(message)s")
45
46 levels = {1: logging.ERROR,
47 2: logging.WARNING,
48 3: logging.INFO,
49 4: logging.DEBUG}
50
51 if args.verbose > 0:
52 stdoutlogger = logging.StreamHandler(sys.stdout)
53 stdoutlogger.setFormatter(formatter)
54 logging.root.setLevel(levels[min(4, args.verbose)])
55 logging.root.addHandler(stdoutlogger)
56 else:
57 logging.root.addHandler(logging.NullHandler())
58
59 # Load the configuration
60 conf = config.Config()
61
62 # Try to acquire a global lock
63 lock_file = os.path.join(conf.state_path, "global.lock")
64 lock_fd = open(lock_file, 'w')
65
66 try:
67 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
68 except IOError:
69 print("Something else holds the global lock. exiting.")
70 sys.exit(0)
71
72 # Load the tree
73 pub = tree.Tree(conf)
74
75 # Do some checks
76 if args.source_channel not in pub.list_channels():
77 parser.error("Invalid source channel: %s" % args.source_channel)
78
79 if args.destination_channel not in pub.list_channels():
80 parser.error("Invalid destination channel: %s" %
81 args.destination_channel)
82
83 if args.device not in pub.list_channels()[args.source_channel]['devices']:
84 parser.error("Invalid device for source channel: %s" %
85 args.device)
86
87 if args.device not in \
88 pub.list_channels()[args.destination_channel]['devices']:
89 parser.error("Invalid device for destination channel: %s" %
90 args.device)
91
92 if "alias" in pub.list_channels()[args.source_channel] and \
93 pub.list_channels()[args.source_channel]['alias'] \
94 != args.source_channel:
95 parser.error("Source channel is an alias.")
96
97 if "alias" in pub.list_channels()[args.destination_channel] and \
98 pub.list_channels()[args.destination_channel]['alias'] \
99 != args.destination_channel:
100 parser.error("Destination channel is an alias.")
101
102 if "redirect" in pub.list_channels()[args.source_channel]:
103 parser.error("Source channel is a redirect.")
104
105 if "redirect" in pub.list_channels()[args.destination_channel]:
106 parser.error("Destination channel is a redirect.")
107
108 source_device = pub.get_device(args.source_channel, args.device)
109 destination_device = pub.get_device(args.destination_channel, args.device)
110
111 if args.keep_version:
112 images = [image for image in destination_device.list_images()
113 if image['version'] == args.version]
114 if images:
115 parser.error("Version number is already used: %s" % args.version)
116
117 # Assign a new version number
118 new_version = args.version
119 if not args.keep_version:
120 # Find the next available version
121 new_version = 1
122 for image in destination_device.list_images():
123 if image['version'] >= new_version:
124 new_version = image['version'] + 1
125 logging.debug("Version for next image: %s" % new_version)
126
127 # Extract the build we want to copy
128 images = [image for image in source_device.list_images()
129 if image['type'] == "full" and image['version'] == args.version]
130 if not images:
131 parser.error("Can't find version: %s" % args.version)
132 source_image = images[0]
133
134 # Extract the list of existing full images
135 full_images = {image['version']: image
136 for image in destination_device.list_images()
137 if image['type'] == "full"}
138
139 # Check that the last full and the new image aren't one and the same
140 source_files = [entry['path'].split("/")[-1]
141 for entry in source_image['files']
142 if not entry['path'].split("/")[-1].startswith("version-")]
143 destination_files = []
144 if full_images:
145 latest_full = sorted(full_images.values(),
146 key=lambda image: image['version'])[-1]
147 destination_files = [entry['path'].split("/")[-1]
148 for entry in latest_full['files']
149 if not entry['path'].split(
150 "/")[-1].startswith("version-")]
151 if source_files == destination_files:
152 parser.error("Source image is already latest full in "
153 "destination channel.")
154
155 # Generate a list of required deltas
156 delta_base = []
157
158 if args.destination_channel in conf.channels:
159 for base_channel in conf.channels[args.destination_channel].deltabase:
160 # Skip missing channels
161 if base_channel not in pub.list_channels():
162 continue
163
164 # Skip missing devices
165 if args.device not in (pub.list_channels()
166 [base_channel]['devices']):
167 continue
168
169 # Extract the latest full image
170 base_device = pub.get_device(base_channel, args.device)
171 base_images = sorted([image
172 for image in base_device.list_images()
173 if image['type'] == "full"],
174 key=lambda image: image['version'])
175
176 # Check if the version is valid and add it
177 if base_images and base_images[-1]['version'] in full_images:
178 if (full_images[base_images[-1]['version']]
179 not in delta_base):
180 delta_base.append(full_images
181 [base_images[-1]['version']])
182 logging.debug("Source version for delta: %s" %
183 base_images[-1]['version'])
184
185 # Create new empty entries
186 new_images = {'full': {'files': []}}
187 for delta in delta_base:
188 new_images["delta_%s" % delta['version']] = {'files': []}
189
190 # Extract current version_detail and files
191 version_detail = ""
192 for entry in source_image['files']:
193 path = os.path.realpath("%s/%s" % (conf.publish_path, entry['path']))
194
195 filename = path.split("/")[-1]
196
197 # Look for version-X.tar.xz
198 if filename == "version-%s.tar.xz" % args.version:
199 # Extract the metadata
200 if os.path.exists(path.replace(".tar.xz", ".json")):
201 with open(path.replace(".tar.xz", ".json"), "r") as fd:
202 metadata = json.loads(fd.read())
203 if "channel.ini" in metadata:
204 version_detail = metadata['channel.ini'].get(
205 "version_detail", None)
206 else:
207 new_images['full']['files'].append(path)
208 logging.debug("Source version_detail is: %s" % version_detail)
209
210 # Generate new version tarball
211 environment = {}
212 environment['channel_name'] = args.destination_channel
213 environment['device'] = destination_device
214 environment['device_name'] = args.device
215 environment['version'] = new_version
216 environment['version_detail'] = [entry
217 for entry in version_detail.split(",")
218 if not entry.startswith("version=")]
219 environment['new_files'] = new_images['full']['files']
220
221 logging.info("Generating new version tarball for '%s' (%s)"
222                 % (new_version, ",".join(environment['version_detail'])))
223 version_path = generators.generate_file(conf, "version", [], environment)
224 if version_path:
225 new_images['full']['files'].append(version_path)
226
227 # Generate deltas
228 for abspath in new_images['full']['files']:
229 prefix = abspath.split("/")[-1].rsplit("-", 1)[0]
230 for delta in delta_base:
231 # Extract the source
232 src_path = None
233 for file_dict in delta['files']:
234 if (file_dict['path'].split("/")[-1]
235 .startswith(prefix)):
236 src_path = "%s/%s" % (conf.publish_path,
237 file_dict['path'])
238 break
239
240 # Check that it's not the current file
241 if src_path:
242 src_path = os.path.realpath(src_path)
243
244 # FIXME: the keyring- is a big hack...
245 if src_path == abspath and "keyring-" not in src_path:
246 continue
247
248 # Generators are allowed to return None when no delta
249 # exists at all.
250 logging.info("Generating delta from '%s' for '%s'" %
251 (delta['version'],
252 prefix))
253 delta_path = generators.generate_delta(conf, src_path,
254 abspath)
255 else:
256 delta_path = abspath
257
258 if not delta_path:
259 continue
260
261 # Get the full and relative paths
262 delta_abspath, delta_relpath = tools.expand_path(
263 delta_path, conf.publish_path)
264
265 new_images['delta_%s' % delta['version']]['files'] \
266 .append(delta_abspath)
267
268 # Add full image
269 logging.info("Publishing new image '%s' (%s) with %s files."
270 % (new_version, ",".join(environment['version_detail']),
271 len(new_images['full']['files'])))
272 destination_device.create_image("full", new_version,
273 ",".join(environment['version_detail']),
274 new_images['full']['files'])
275
276 # Add delta images
277 for delta in delta_base:
278 files = new_images["delta_%s" % delta['version']]['files']
279 logging.info("Publishing new delta from '%s' (%s)"
280 " to '%s' (%s) with %s files" %
281 (delta['version'], delta.get("description", ""),
282 new_version, ",".join(environment['version_detail']),
283 len(files)))
284
285 destination_device.create_image(
286 "delta", new_version,
287 ",".join(environment['version_detail']),
288 files,
289 base=delta['version'])
290
291 # Expire images
292 if args.destination_channel in conf.channels:
293 if conf.channels[args.destination_channel].fullcount > 0:
294 logging.info("Expiring old images")
295 destination_device.expire_images(
296 conf.channels[args.destination_channel].fullcount)
297
298 # Sync all channel aliases
299 logging.info("Syncing any existing alias")
300 pub.sync_aliases(args.destination_channel)
301
302 # Remove any orphaned file
303 logging.info("Removing orphaned files from the pool")
304 pub.cleanup_tree()
305
306 # Sync the mirrors
307 logging.info("Triggering a mirror sync")
308 tools.sync_mirrors(conf)
0309
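
Both copy-image and import-images serialize on the same state_path/global.lock file with a non-blocking fcntl lock, so a run started while another is in progress exits immediately rather than working on a half-updated tree. A minimal standalone sketch of the pattern (lock path invented):

    import fcntl
    import sys

    # Take an exclusive lock without blocking; if another process already
    # holds the lock, lockf() raises IOError and we exit cleanly.
    lock_fd = open("/tmp/global.lock", "w")
    try:
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print("Something else holds the global lock. exiting.")
        sys.exit(0)

    # ... do the work; the lock is released when the process exits ...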
=== added file 'bin/generate-keyrings'
--- bin/generate-keyrings 1970-01-01 00:00:00 +0000
+++ bin/generate-keyrings 2014-10-10 11:11:17 +0000
@@ -0,0 +1,87 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import os
20import sys
21import time
22sys.path.insert(0, 'lib')
23
24from systemimage import config
25from systemimage import gpg
26from systemimage import tools
27
28conf = config.Config()
29
30# archive-master keyring
31if os.path.exists(os.path.join(conf.gpg_key_path, "archive-master")):
32 archive_master = gpg.Keyring(conf, "archive-master")
33 archive_master.set_metadata("archive-master")
34 archive_master.import_keys(os.path.join(conf.gpg_key_path,
35 "archive-master"))
36 path = archive_master.generate_tarball()
37 tools.xz_compress(path)
38 os.remove(path)
39 gpg.sign_file(conf, "archive-master", "%s.xz" % path)
40
41# image-master keyring
42if os.path.exists(os.path.join(conf.gpg_key_path, "image-master")) and \
43 os.path.exists(os.path.join(conf.gpg_key_path, "archive-master")):
44 image_master = gpg.Keyring(conf, "image-master")
45 image_master.set_metadata("image-master")
46 image_master.import_keys(os.path.join(conf.gpg_key_path, "image-master"))
47 path = image_master.generate_tarball()
48 tools.xz_compress(path)
49 os.remove(path)
50 gpg.sign_file(conf, "archive-master", "%s.xz" % path)
51
52# image-signing keyring
53if os.path.exists(os.path.join(conf.gpg_key_path, "image-signing")) and \
54 os.path.exists(os.path.join(conf.gpg_key_path, "image-master")):
55 image_signing = gpg.Keyring(conf, "image-signing")
56 image_signing.set_metadata("image-signing",
57 int(time.strftime("%s",
58 time.localtime())) + 63072000)
59 image_signing.import_keys(os.path.join(conf.gpg_key_path, "image-signing"))
60 path = image_signing.generate_tarball()
61 tools.xz_compress(path)
62 os.remove(path)
63 gpg.sign_file(conf, "image-master", "%s.xz" % path)
64
65# device-signing keyring
66if os.path.exists(os.path.join(conf.gpg_key_path, "device-signing")) and \
67 os.path.exists(os.path.join(conf.gpg_key_path, "image-signing")):
68 device_signing = gpg.Keyring(conf, "device-signing")
69 device_signing.set_metadata("device-signing",
70 int(time.strftime("%s",
71 time.localtime())) + 2678400)
72 device_signing.import_keys(os.path.join(conf.gpg_key_path,
73 "device-signing"))
74 path = device_signing.generate_tarball()
75 tools.xz_compress(path)
76 os.remove(path)
77 gpg.sign_file(conf, "image-signing", "%s.xz" % path)
78
79# blacklist keyring
80if os.path.exists(os.path.join(conf.gpg_key_path, "blacklist")) and \
81 os.path.exists(os.path.join(conf.gpg_key_path, "image-master")):
82 blacklist = gpg.Keyring(conf, "blacklist")
83 blacklist.set_metadata("blacklist")
84 path = blacklist.generate_tarball()
85 tools.xz_compress(path)
86 os.remove(path)
87 gpg.sign_file(conf, "image-master", "%s.xz" % path)
088
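
The expiry arguments passed to set_metadata() above are absolute UNIX timestamps: now plus two years (63072000 seconds) for image-signing, and now plus 31 days (2678400 seconds) for device-signing; the master and blacklist keyrings never expire. A sketch of the same arithmetic, spelled out (not part of the script):

    import time

    TWO_YEARS = 2 * 365 * 24 * 60 * 60  # 63072000 seconds
    ONE_MONTH = 31 * 24 * 60 * 60       # 2678400 seconds

    # int(time.strftime("%s", time.localtime())) is just the current epoch
    # time, so these expressions are equivalent to the ones above:
    image_signing_expiry = int(time.time()) + TWO_YEARS
    device_signing_expiry = int(time.time()) + ONE_MONTH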
=== added file 'bin/generate-keys'
--- bin/generate-keys 1970-01-01 00:00:00 +0000
+++ bin/generate-keys 2014-10-10 11:11:17 +0000
@@ -0,0 +1,61 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3#
4# Copyright (C) 2014 Canonical Ltd.
5# Author: Timothy Chavez <timothy.chavez@canonical.com>
6#
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import argparse
20import os
21import sys
22
23sys.path.insert(0, 'lib')
24from systemimage import config
25from systemimage.gpg import generate_signing_key
26
27
28KEYS = {
29 "archive-master": ("{0} Archive Master key", 0),
30 "image-master": ("{0} Image Master key", 0),
31 "device-signing": ("{0} Device Signing key", "2y"),
32 "image-signing": ("{0} Image Signing key", "2y")
33}
34
35
36def main():
37    parser = argparse.ArgumentParser(description='Generate signing keys.')
38 parser.add_argument("--email", dest="email", required=True,
39 help="An email address to associate with the keys")
40 parser.add_argument("--prefix", dest="prefix", required=True,
41 help="A prefix to include in the key name")
42 args = parser.parse_args()
43
44 conf = config.Config()
45
46 print("I: Generating signing keys...")
47
48    for key_id, (key_name, key_expiry) in KEYS.items():
49 key_path = os.path.join(conf.gpg_key_path, key_id)
50 if os.path.exists(key_path):
51 print("W: The key \"{0}\" already exists".format(key_id))
52 continue
53 os.makedirs(key_path)
54 generate_signing_key(
55 key_path, key_name.format(args.prefix), args.email, key_expiry)
56
57 print("I: Done")
58
59
60if __name__ == "__main__":
61 main()
062
=== added file 'bin/import-images'
--- bin/import-images 1970-01-01 00:00:00 +0000
+++ bin/import-images 2014-10-10 11:11:17 +0000
@@ -0,0 +1,305 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import os
20import sys
21sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
22
23from systemimage import config, generators, tools, tree
24
25import argparse
26import fcntl
27import logging
28
29if __name__ == '__main__':
30 parser = argparse.ArgumentParser(description="image importer")
31 parser.add_argument("--verbose", "-v", action="count", default=0)
32 args = parser.parse_args()
33
34 # Setup logging
35 formatter = logging.Formatter(
36 "%(asctime)s %(levelname)s %(message)s")
37
38 levels = {1: logging.ERROR,
39 2: logging.WARNING,
40 3: logging.INFO,
41 4: logging.DEBUG}
42
43 if args.verbose > 0:
44 stdoutlogger = logging.StreamHandler(sys.stdout)
45 stdoutlogger.setFormatter(formatter)
46 logging.root.setLevel(levels[min(4, args.verbose)])
47 logging.root.addHandler(stdoutlogger)
48 else:
49 logging.root.addHandler(logging.NullHandler())
50
51 # Load the configuration
52 conf = config.Config()
53
54 # Try to acquire a global lock
55 lock_file = os.path.join(conf.state_path, "global.lock")
56 lock_fd = open(lock_file, 'w')
57
58 try:
59 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
60 except IOError:
61 logging.info("Something else holds the global lock. exiting.")
62 sys.exit(0)
63
64 # Load the tree
65 pub = tree.Tree(conf)
66
67 # Iterate through the channels
68 for channel_name, channel in conf.channels.items():
69 # We're only interested in automated channels
70 if channel.type != "auto":
71 logging.debug("Skipping non-auto channel: %s" % channel_name)
72 continue
73
74 logging.info("Processing channel: %s" % channel_name)
75
76 # Check the channel exists
77 if channel_name not in pub.list_channels():
78 logging.error("Invalid channel name: %s" % channel_name)
79 continue
80
81 # Iterate through the devices
82 for device_name in pub.list_channels()[channel_name]['devices']:
83 logging.info("Processing device: %s" % device_name)
84
85 device = pub.get_device(channel_name, device_name)
86
87 # Extract last full version
88 full_images = {image['version']: image
89 for image in device.list_images()
90 if image['type'] == "full"}
91
92 last_full = None
93 if full_images:
94 last_full = sorted(full_images.values(),
95 key=lambda image: image['version'])[-1]
96 logging.debug("Last full image: %s" % last_full['version'])
97 else:
98 logging.debug("This is the first full image.")
99
100 # Extract all delta base versions
101 delta_base = []
102
103 for base_channel in channel.deltabase:
104 # Skip missing channels
105 if base_channel not in pub.list_channels():
106 logging.warn("Invalid base channel: %s" % base_channel)
107 continue
108
109 # Skip missing devices
110 if device_name not in (pub.list_channels()
111 [base_channel]['devices']):
112 logging.warn("Missing device in base channel: %s in %s" %
113 (device_name, base_channel))
114 continue
115
116 # Extract the latest full image
117 base_device = pub.get_device(base_channel, device_name)
118 base_images = sorted([image
119 for image in base_device.list_images()
120 if image['type'] == "full"],
121 key=lambda image: image['version'])
122
123 # Check if the version is valid and add it
124 if base_images and base_images[-1]['version'] in full_images:
125 if (full_images[base_images[-1]['version']]
126 not in delta_base):
127 delta_base.append(full_images
128 [base_images[-1]['version']])
129 logging.debug("Source version for delta: %s" %
130 base_images[-1]['version'])
131
132 # Allocate new version number
133 new_version = channel.versionbase
134 if last_full:
135 new_version = last_full['version'] + 1
136 logging.debug("Version for next image: %s" % new_version)
137
138 # And the list used to generate version_detail
139 version_detail = []
140
141 # And a list of new files
142 new_files = []
143
144 # Keep track of what files we've processed
145 processed_files = []
146
147 # Create new empty entries
148 new_images = {}
149 new_images['full'] = {'files': []}
150 for delta in delta_base:
151 new_images["delta_%s" % delta['version']] = {'files': []}
152
153 # Iterate through the files
154 for file_entry in channel.files:
155 # Deal with device specific overrides
156 if "," in file_entry['name']:
157 file_name, file_device = file_entry['name'].split(',', 1)
158 if file_device != device_name:
159 logging.debug("Skipping '%s' because the device name"
160                                   " doesn't match" % file_entry['name'])
161 continue
162 else:
163 file_name = file_entry['name']
164
165 if file_name in processed_files:
166 logging.debug("Skipping '%s' because a more specific"
167                               " generator was already called."
168 % file_entry['name'])
169 continue
170
171 processed_files.append(file_name)
172
173 # Generate the environment
174 environment = {}
175 environment['channel_name'] = channel_name
176 environment['device'] = device
177 environment['device_name'] = device_name
178 environment['version'] = new_version
179 environment['version_detail'] = version_detail
180 environment['new_files'] = new_files
181
182 # Call file generator
183 logging.info("Calling '%s' generator for a new file"
184 % file_entry['generator'])
185 path = generators.generate_file(conf,
186 file_entry['generator'],
187 file_entry['arguments'],
188 environment)
189
190 # Generators are allowed to return None when no build
191 # exists at all. This cancels the whole image.
192 if not path:
193 new_files = []
194 logging.info("No image will be produced because the "
195 "'%s' generator returned None" %
196 file_entry['generator'])
197 break
198
199 # Get the full and relative paths
200 abspath, relpath = tools.expand_path(path, conf.publish_path)
201 urlpath = "/%s" % "/".join(relpath.split(os.sep))
202
203 # FIXME: Extract the prefix, used later for matching between
204 # full images. This forces a specific filename format.
205 prefix = abspath.split("/")[-1].rsplit("-", 1)[0]
206
207 # Add the file to the full image
208 new_images['full']['files'].append(abspath)
209
210 # Check if same as current
211 new_file = True
212 if last_full:
213 for file_dict in last_full['files']:
214 if file_dict['path'] == urlpath:
215 new_file = False
216 break
217
218 if new_file:
219 logging.info("New file from '%s': %s" %
220 (file_entry['generator'], relpath))
221 new_files.append(abspath)
222 else:
223 logging.info("File from '%s' is already current" %
224 (file_entry['generator']))
225
226 # Generate deltas
227 for delta in delta_base:
228 # Extract the source
229 src_path = None
230 for file_dict in delta['files']:
231 if (file_dict['path'].split("/")[-1]
232 .startswith(prefix)):
233 src_path = "%s/%s" % (conf.publish_path,
234 file_dict['path'])
235 break
236
237 # Check that it's not the current file
238 if src_path:
239 src_path = os.path.realpath(src_path)
240
241 # FIXME: the keyring- is a big hack...
242 if src_path == abspath and "keyring-" not in src_path:
243 continue
244
245 # Generators are allowed to return None when no delta
246 # exists at all.
247 logging.info("Generating delta from '%s' for '%s'" %
248 (delta['version'],
249 file_entry['generator']))
250 delta_path = generators.generate_delta(conf, src_path,
251 abspath)
252 else:
253 delta_path = abspath
254
255 if not delta_path:
256 continue
257
258 # Get the full and relative paths
259 delta_abspath, delta_relpath = tools.expand_path(
260 delta_path, conf.publish_path)
261
262 new_images['delta_%s' % delta['version']]['files'] \
263 .append(delta_abspath)
264
265 # Check if we've got a new image
266 if len(new_files):
267 # Publish full image
268 logging.info("Publishing new image '%s' (%s) with %s files."
269 % (new_version,
270 ",".join(environment['version_detail']),
271 len(new_images['full']['files'])))
272 device.create_image("full", new_version,
273 ",".join(environment['version_detail']),
274 new_images['full']['files'])
275 # Publish deltas
276 for delta in delta_base:
277 files = new_images["delta_%s" % delta['version']]['files']
278 logging.info("Publishing new delta from '%s' (%s)"
279 " to '%s' (%s) with %s files" %
280 (delta['version'],
281 delta.get("description", ""),
282 new_version,
283 ",".join(environment['version_detail']),
284 len(files)))
285 device.create_image(
286 "delta", new_version,
287 ",".join(environment['version_detail']), files,
288 base=delta['version'])
289
290 # Expire images
291 if channel.fullcount > 0:
292 logging.info("Expiring old images")
293 device.expire_images(channel.fullcount)
294
295 # Sync all channel aliases
296 logging.info("Syncing any existing alias")
297 pub.sync_aliases(channel_name)
298
299 # Remove any orphaned file
300 logging.info("Removing orphaned files from the pool")
301 pub.cleanup_tree()
302
303 # Sync the mirrors
304 logging.info("Triggering a mirror sync")
305 tools.sync_mirrors(conf)
0306
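
The device-override handling above (from revision 241) keys off a comma in the entry name: an entry named "ubuntu,mako" applies only to the mako device and, because it is listed before the generic "ubuntu" entry, shadows it for that device. A minimal sketch of the resolution rule, with invented entries:

    def resolve_entries(file_entries, device_name):
        # Yield the entries that apply to device_name, honouring
        # "name,device" overrides the same way bin/import-images does.
        processed = []
        for entry in file_entries:
            if "," in entry["name"]:
                name, device = entry["name"].split(",", 1)
                if device != device_name:
                    continue  # override aimed at another device
            else:
                name = entry["name"]
            if name in processed:
                continue  # a more specific entry already matched
            processed.append(name)
            yield entry

    entries = [{"name": "ubuntu,mako"}, {"name": "ubuntu"}]
    print([e["name"] for e in resolve_entries(entries, "mako")])  # ['ubuntu,mako']
    print([e["name"] for e in resolve_entries(entries, "flo")])   # ['ubuntu']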
=== added file 'bin/set-phased-percentage'
--- bin/set-phased-percentage 1970-01-01 00:00:00 +0000
+++ bin/set-phased-percentage 2014-10-10 11:11:17 +0000
@@ -0,0 +1,90 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import os
20import sys
21sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
22
23from systemimage import config, tools, tree
24
25import argparse
26import logging
27
28if __name__ == '__main__':
29 parser = argparse.ArgumentParser(description="set phased percentage")
30 parser.add_argument("channel", metavar="CHANNEL")
31 parser.add_argument("device", metavar="DEVICE")
32 parser.add_argument("version", metavar="VERSION", type=int)
33 parser.add_argument("percentage", metavar="PERCENTAGE", type=int)
34    parser.add_argument("--verbose", "-v", action="count", default=0)
35
36 args = parser.parse_args()
37
38 # Setup logging
39 formatter = logging.Formatter(
40 "%(asctime)s %(levelname)s %(message)s")
41
42 levels = {1: logging.ERROR,
43 2: logging.WARNING,
44 3: logging.INFO,
45 4: logging.DEBUG}
46
47 if args.verbose > 0:
48 stdoutlogger = logging.StreamHandler(sys.stdout)
49 stdoutlogger.setFormatter(formatter)
50 logging.root.setLevel(levels[min(4, args.verbose)])
51 logging.root.addHandler(stdoutlogger)
52 else:
53 logging.root.addHandler(logging.NullHandler())
54
55 # Load the configuration
56 conf = config.Config()
57
58 # Load the tree
59 pub = tree.Tree(conf)
60
61 # Do some checks
62 if args.channel not in pub.list_channels():
63 parser.error("Invalid channel: %s" % args.channel)
64
65 if args.device not in pub.list_channels()[args.channel]['devices']:
66 parser.error("Invalid device for source channel: %s" %
67 args.device)
68
69 if args.percentage < 0 or args.percentage > 100:
70 parser.error("Invalid value: %s" % args.percentage)
71
72 if "alias" in pub.list_channels()[args.channel] and \
73 pub.list_channels()[args.channel]['alias'] != args.channel:
74 parser.error("Channel is an alias.")
75
76 if "redirect" in pub.list_channels()[args.channel]:
77 parser.error("Channel is a redirect.")
78
79 dev = pub.get_device(args.channel, args.device)
80 logging.info("Setting phased-percentage of '%s' to %s%%" %
81 (args.version, args.percentage))
82 dev.set_phased_percentage(args.version, args.percentage)
83
84 # Sync all channel aliases
85 logging.info("Syncing any existing alias")
86 pub.sync_aliases(args.channel)
87
88 # Sync the mirrors
89 logging.info("Triggering a mirror sync")
90 tools.sync_mirrors(conf)
091
=== added file 'bin/si-shell'
--- bin/si-shell 1970-01-01 00:00:00 +0000
+++ bin/si-shell 2014-10-10 11:11:17 +0000
@@ -0,0 +1,79 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import code
20import logging
21import os
22import sys
23sys.path.insert(0, os.path.join(sys.path[0], os.pardir, "lib"))
24
25from systemimage import config, tree
26
27import argparse
28
29if __name__ == '__main__':
30 parser = argparse.ArgumentParser(description="system-image shell")
31 parser.add_argument("--verbose", "-v", action="count", default=0)
32
33 args = parser.parse_args()
34
35 # Setup logging
36 formatter = logging.Formatter(
37 "%(asctime)s %(levelname)s %(message)s")
38
39 levels = {1: logging.ERROR,
40 2: logging.WARNING,
41 3: logging.INFO,
42 4: logging.DEBUG}
43
44 if args.verbose > 0:
45 stdoutlogger = logging.StreamHandler(sys.stdout)
46 stdoutlogger.setFormatter(formatter)
47 stdoutlogger.setLevel(levels[min(4, args.verbose)])
48 logging.root.addHandler(stdoutlogger)
49 else:
50 logging.root.addHandler(logging.NullHandler())
51
52 # Load the configuration
53 conf = config.Config()
54
55 # Load the tree
56 pub = tree.Tree(conf)
57
58 # Start the shell
59 banner = """Welcome to the system-image shell.
60The configuration is available as: conf
61The system-image tree is available as: pub
62"""
63
64 class CompleterConsole(code.InteractiveConsole):
65 def __init__(self):
66 local = {'conf': conf,
67 'pub': pub}
68 code.InteractiveConsole.__init__(self, locals=local)
69 try:
70 import readline
71 except ImportError:
72 print('I: readline module not available.')
73 else:
74 import rlcompleter
75 rlcompleter # Silence pyflakes
76 readline.parse_and_bind("tab: complete")
77
78 console = CompleterConsole()
79 console.interact(banner)
080
=== added directory 'etc'
=== added file 'etc/config.example'
--- etc/config.example 1970-01-01 00:00:00 +0000
+++ etc/config.example 2014-10-10 11:11:17 +0000
@@ -0,0 +1,48 @@
1[global]
2base_path = /some/fs/path
3channels = trusty, trusty-proposed, trusty-customized
4gpg_key_path = secret/gpg/keys/
5gpg_keyring_path = secret/gpg/keyrings/
6publish_path = www/
7state_path = state/
8mirrors = a, b
9public_fqdn = system-image.example.net
10public_http_port = 80
11public_https_port = 443
12
13[channel_trusty]
14type = manual
15versionbase = 1
16fullcount = 10
17
18[channel_trusty-proposed]
19type = auto
20versionbase = 1
21fullcount = 20
22deltabase = trusty, trusty-proposed
23files = ubuntu, device, version
24file_ubuntu = cdimage-ubuntu;daily-preinstalled;trusty,import=any
25file_device = cdimage-device;daily-preinstalled;trusty,import=any
26file_version = version
27
28[channel_trusty-customized]
29type = auto
30versionbase = 1
31fullcount = 15
32files = ubuntu, device, custom, version
33file_ubuntu = system-image;trusty;file=ubuntu
34file_device = system-image;trusty;file=device
35file_custom = http;http://www.example.net/custom/custom.tar.xz;name=custom,monitor=http://www.example.net/custom/build_number
36file_version = version
37
38[mirror_default]
39ssh_user = mirror
40ssh_key = secret/ssh/mirror
41ssh_port = 22
42ssh_command = sync-mirror
43
44[mirror_a]
45ssh_host = a.example.com
46
47[mirror_b]
48ssh_host = b.example.com
049
=== added directory 'lib'
=== added directory 'lib/systemimage'
=== added file 'lib/systemimage/__init__.py'
=== added file 'lib/systemimage/config.py'
--- lib/systemimage/config.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/config.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,206 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import os
19
20try:
21 from configparser import ConfigParser
22except ImportError: # pragma: no cover
23 from ConfigParser import ConfigParser
24
25
26def parse_config(path):
27 config = {}
28
29 configp = ConfigParser()
30 try:
31 configp.read(path)
32 except:
33 return config
34
35 for section in configp.sections():
36 config_section = {}
37 for option in configp.options(section):
38 value = configp.get(section, option)
39 if ", " in value:
40 value = [entry.strip('"').strip()
41 for entry in value.split(", ")]
42 else:
43 value = value.strip('"').strip()
44 config_section[option] = value
45 config[section] = config_section
46
47 return config
48
49
50class Config:
51 def __init__(self, path=None):
52 if not path:
53 path = "%s/etc/config" % os.environ.get("SYSTEM_IMAGE_ROOT",
54 os.getcwd())
55 if not os.path.exists(path):
56 path = os.path.realpath(os.path.join(os.path.dirname(__file__),
57 "../../etc/config"))
58
59 self.load_config(path)
60
61 def load_config(self, path):
62 if not os.path.exists(path):
63 raise Exception("Configuration file doesn't exist: %s" % path)
64
65 # Read the config
66 config = parse_config(path)
67
68 if 'global' not in config:
69 config['global'] = {}
70
71 # Set defaults
72 self.base_path = config['global'].get(
73 "base_path", os.environ.get("SYSTEM_IMAGE_ROOT", os.getcwd()))
74
75 self.gpg_key_path = config['global'].get(
76 "gpg_key_path", os.path.join(self.base_path,
77 "secret", "gpg", "keys"))
78 if not self.gpg_key_path.startswith("/"):
79 self.gpg_key_path = os.path.join(self.base_path, self.gpg_key_path)
80
81 self.gpg_keyring_path = config['global'].get(
82 "gpg_keyring_path", os.path.join(self.base_path,
83 "secret", "gpg", "keyrings"))
84 if not self.gpg_keyring_path.startswith("/"):
85 self.gpg_keyring_path = os.path.join(self.base_path,
86 self.gpg_keyring_path)
87
88 self.publish_path = config['global'].get(
89 "publish_path", os.path.join(self.base_path, "www"))
90 if not self.publish_path.startswith("/"):
91 self.publish_path = os.path.join(self.base_path, self.publish_path)
92
93 self.state_path = config['global'].get(
94 "state_path", os.path.join(self.base_path, "state"))
95 if not self.state_path.startswith("/"):
96 self.state_path = os.path.join(self.base_path, self.state_path)
97
98 # Export some more keys as-is
99 for key in ("public_fqdn", "public_http_port", "public_https_port"):
100 if key not in config['global']:
101 continue
102
103 setattr(self, key, config['global'][key])
104
105 # Parse the mirror configuration
106 self.mirrors = {}
107 if "mirrors" in config['global']:
108 if not isinstance(config['global']['mirrors'], list):
109 config['global']['mirrors'] = [config['global']['mirrors']]
110
111 if len(config['global']['mirrors']) != 0:
112 if "mirror_default" not in config:
113 raise KeyError("Missing mirror_default section.")
114
115 for key in ("ssh_user", "ssh_key", "ssh_port", "ssh_command"):
116 if key not in config['mirror_default']:
117 raise KeyError("Missing key in mirror_default: %s" %
118 key)
119
120 for entry in config['global']['mirrors']:
121 dict_entry = "mirror_%s" % entry
122 if dict_entry not in config:
123 raise KeyError("Missing mirror section: %s" %
124 dict_entry)
125
126 mirror = type("Mirror", (object,), {})
127
128 if "ssh_host" not in config[dict_entry]:
129 raise KeyError("Missing key in %s: ssh_host" %
130 dict_entry)
131 else:
132 mirror.ssh_host = config[dict_entry]['ssh_host']
133
134 mirror.ssh_user = config[dict_entry].get(
135 "ssh_user", config['mirror_default']['ssh_user'])
136 mirror.ssh_key = config[dict_entry].get(
137 "ssh_key", config['mirror_default']['ssh_key'])
138 if not mirror.ssh_key.startswith("/"):
139 mirror.ssh_key = os.path.join(self.base_path,
140 mirror.ssh_key)
141 mirror.ssh_port = int(config[dict_entry].get(
142 "ssh_port", config['mirror_default']['ssh_port']))
143 mirror.ssh_command = config[dict_entry].get(
144 "ssh_command", config['mirror_default']['ssh_command'])
145
146 self.mirrors[entry] = mirror
147
148 # Parse the channel configuration
149 self.channels = {}
150 if "channels" in config['global']:
151 if not isinstance(config['global']['channels'], list):
152 config['global']['channels'] = \
153 [config['global']['channels']]
154
155 if len(config['global']['channels']) != 0:
156 for entry in config['global']['channels']:
157 dict_entry = "channel_%s" % entry
158 if dict_entry not in config:
159 raise KeyError("Missing channel section: %s" %
160 dict_entry)
161
162 channel = type("Channel", (object,), {})
163
164 channel.versionbase = int(config[dict_entry].get(
165 'versionbase', 1))
166
167 channel.type = config[dict_entry].get(
168 "type", "manual")
169
170 channel.fullcount = int(config[dict_entry].get(
171 "fullcount", 0))
172
173 channel.deltabase = [entry]
174 if "deltabase" in config[dict_entry]:
175 if isinstance(config[dict_entry]["deltabase"],
176 list):
177 channel.deltabase = \
178 config[dict_entry]["deltabase"]
179 else:
180 channel.deltabase = \
181 [config[dict_entry]["deltabase"]]
182
183 # Parse the file list
184 files = config[dict_entry].get("files", [])
185 if isinstance(files, str):
186 files = [files]
187
188 channel.files = []
189 for file_entry in files:
190 if "file_%s" % file_entry not in config[dict_entry]:
191 raise KeyError("Missing file entry: %s" %
192 "file_%s" % file_entry)
193
194 fields = (config[dict_entry]
195 ["file_%s" % file_entry].split(";"))
196
197 file_dict = {}
198 file_dict['name'] = file_entry
199 file_dict['generator'] = fields[0]
200 file_dict['arguments'] = []
201 if len(fields) > 1:
202 file_dict['arguments'] = fields[1:]
203
204 channel.files.append(file_dict)
205
206 self.channels[entry] = channel
0207
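
Values in the config that contain ", " are split into lists by parse_config(), and each file_* value is then split on ";": the first field names the generator and the remaining fields become its positional arguments (option strings such as "trusty,import=any" are unpacked later by the generator itself). The trusty-proposed ubuntu entry from etc/config.example therefore parses into:

    value = "cdimage-ubuntu;daily-preinstalled;trusty,import=any"

    fields = value.split(";")
    file_dict = {
        "name": "ubuntu",
        "generator": fields[0],   # "cdimage-ubuntu"
        "arguments": fields[1:],  # ["daily-preinstalled", "trusty,import=any"]
    }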
=== added file 'lib/systemimage/diff.py'
--- lib/systemimage/diff.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/diff.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,242 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import os
19import tarfile
20import time
21
22from io import BytesIO
23
24
25def compare_files(fd_source, fd_target):
26 """
27 Compare two files.
28
29 Returns True if their content matches.
30 Returns False if they don't match.
31 Returns None if the files can't be compared.
32 """
33
34 if fd_source == fd_target:
35 return True
36
37 if not fd_source or not fd_target:
38 return False
39
40 return fd_source.read() == fd_target.read()
41
42
43def list_tarfile(tarfile):
44 """
45 Walk through a tarfile and generate a list of the content.
46
47 Returns a tuple containing a set and a dict.
48 The set is typically used for simple diffs between tarballs.
49 The dict is used to easily grab the details of a specific entry.
50 """
51
52 set_content = set()
53 dict_content = {}
54
55 for entry in tarfile:
56 if entry.isdir():
57 set_content.add((entry.path, 'dir', None))
58 dict_content[entry.path] = ('dir', None)
59 else:
60 fhash = ("%s" % entry.mode,
61 "%s" % entry.devmajor,
62 "%s" % entry.devminor,
63 "%s" % entry.type.decode('utf-8'),
64 "%s" % entry.uid,
65 "%s" % entry.gid,
66 "%s" % entry.size,
67 "%s" % entry.mtime)
68
69 set_content.add((entry.path, 'file', fhash))
70 dict_content[entry.path] = ('file', fhash)
71
72 return (set_content, dict_content)
73
74
75class ImageDiff:
76 source_content = None
77 target_content = None
78 diff = None
79
80 def __init__(self, source, target):
81 self.source_file = tarfile.open(source, 'r:')
82 self.target_file = tarfile.open(target, 'r:')
83
84 def scan_content(self, image):
85 """
86 Scan the content of an image and return the image tuple.
87 This also caches the content for further use.
88 """
89
90 if image not in ("source", "target"):
91 raise KeyError("Invalid image '%s'." % image)
92
93 image_file = getattr(self, "%s_file" % image)
94
95 content = list_tarfile(image_file)
96
97 setattr(self, "%s_content" % image, content)
98 return content
99
100 def compare_images(self):
101 """
102 Compare the file listing of two images and return a set.
103 This also caches the diff for further use.
104
105 The set contains tuples of (path, changetype).
106 """
107 if not self.source_content:
108 self.scan_content("source")
109
110 if not self.target_content:
111 self.scan_content("target")
112
113 # Find the changes in the two trees
114 changes = set()
115 for change in self.source_content[0] \
116 .symmetric_difference(self.target_content[0]):
117 if change[0] not in self.source_content[1]:
118 changetype = "add"
119 elif change[0] not in self.target_content[1]:
120 changetype = "del"
121 else:
122 changetype = "mod"
123 changes.add((change[0], changetype))
124
125 # Ignore files that only vary in mtime
126 # (separate loop to run after de-dupe)
127 for change in sorted(changes):
128 if change[1] == "mod":
129 fstat_source = self.source_content[1][change[0]][1]
130 fstat_target = self.target_content[1][change[0]][1]
131
132 # Skip differences between directories and files
133 if not fstat_source or not fstat_target: # pragma: no cover
134 continue
135
136 # Deal with switched hardlinks
137 if (fstat_source[0:2] == fstat_target[0:2] and
138 fstat_source[3] != fstat_target[3] and
139 (fstat_source[3] == "1" or fstat_target[3] == "1") and
140 fstat_source[4:5] == fstat_target[4:5] and
141 fstat_source[7] == fstat_target[7]):
142 source_file = self.source_file.getmember(change[0])
143 target_file = self.target_file.getmember(change[0])
144 if compare_files(
145 self.source_file.extractfile(change[0]),
146 self.target_file.extractfile(change[0])):
147 changes.remove(change)
148 continue
149
150 # Deal with regular files
151 if fstat_source[0:7] == fstat_target[0:7]:
152 source_file = self.source_file.getmember(change[0])
153 target_file = self.target_file.getmember(change[0])
154
155 if (source_file.linkpath
156 and source_file.linkpath == target_file.linkpath):
157 changes.remove(change)
158 continue
159
160 if (source_file.isfile() and target_file.isfile()
161 and compare_files(
162 self.source_file.extractfile(change[0]),
163 self.target_file.extractfile(change[0]))):
164 changes.remove(change)
165 continue
166
167 self.diff = changes
168 return changes
169
170 def print_changes(self):
171 """
172 Simply print the list of changes.
173 """
174
175 if not self.diff:
176 self.compare_images()
177
178 for change in sorted(self.diff):
179 print(" - %s (%s)" % (change[0], change[1]))
180
181 def generate_diff_tarball(self, path):
182 """
183 Generate a tarball containing all files that are
184        different between the source and target image as well
185 as a file listing all removals.
186 """
187
188 if not self.diff:
189 self.compare_images()
190
191 output = tarfile.open(path, 'w:')
192
193 # List both deleted files and modified files in the removal list
194 # that's needed to allow file type change (e.g. directory to symlink)
195 removed_files_list = [entry[0] for entry in self.diff
196 if entry[1] in ("del", "mod")]
197
198 removed_files = "\n".join(removed_files_list)
199        removed_files = ("%s\n" % removed_files).encode('utf-8')
200
201 removals = tarfile.TarInfo()
202 removals.name = "removed"
203 removals.size = len(removed_files)
204 removals.mtime = int(time.strftime("%s", time.localtime()))
205 removals.uname = "root"
206 removals.gname = "root"
207
208        output.addfile(removals, BytesIO(removed_files))
209
210 # Copy all the added and modified
211 added = []
212 for name, action in sorted(self.diff):
213 if action == 'del':
214 continue
215
216 if name in added:
217 continue
218
219 newfile = self.target_file.getmember(name)
220 if newfile.islnk():
221 if newfile.linkname.startswith("system/"):
222 targetfile_path = newfile.linkname
223 else:
224 targetfile_path = os.path.normpath(os.path.join(
225 os.path.dirname(newfile.name), newfile.linkname))
226
227 targetfile = self.target_file.getmember(targetfile_path)
228
229 if ((targetfile_path, 'add') in self.diff or
230 (targetfile_path, 'mod') in self.diff) and \
231 targetfile_path not in added:
232 fileptr = self.target_file.extractfile(targetfile)
233 output.addfile(targetfile, fileptr)
234 added.append(targetfile.name)
235
236 fileptr = None
237 if newfile.isfile():
238 fileptr = self.target_file.extractfile(name)
239 output.addfile(newfile, fileobj=fileptr)
240 added.append(newfile.name)
241
242 output.close()
0243
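
ImageDiff works on uncompressed tarballs (both files are opened with mode 'r:'), which is why generate_delta() in generators.py below xz-uncompresses the source and target images before diffing them. A minimal usage sketch, with invented file names:

    from systemimage import diff

    # Both inputs must be plain (uncompressed) tar files.
    imagediff = diff.ImageDiff("source.tar", "target.tar")
    imagediff.print_changes()  # prints " - <path> (add|del|mod)" per change
    imagediff.generate_diff_tarball("output.tar")  # changed files plus "removed" list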
=== added file 'lib/systemimage/generators.py'
--- lib/systemimage/generators.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/generators.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,1173 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18from hashlib import sha256
19from systemimage import diff, gpg, tree, tools
20import json
21import os
22import socket
23import shutil
24import subprocess
25import tarfile
26import tempfile
27import time
28
29try:
30 from urllib.request import urlopen, urlretrieve
31except ImportError: # pragma: no cover
32 from urllib import urlopen, urlretrieve
33
34# Global
35CACHE = {}
36
37
38def root_ownership(tarinfo):
39 tarinfo.mode = 0o644
40 tarinfo.mtime = int(time.strftime("%s", time.localtime()))
41 tarinfo.uname = "root"
42 tarinfo.gname = "root"
43 return tarinfo
44
45
46def unpack_arguments(arguments):
47 """
48    Takes a string representing comma-separated key=value options and
49 returns a dict.
50 """
51 arg_dict = {}
52
53 for option in arguments.split(","):
54 fields = option.split("=")
55 if len(fields) != 2:
56 continue
57
58 arg_dict[fields[0]] = fields[1]
59
60 return arg_dict
61
62
63def generate_delta(conf, source_path, target_path):
64 """
65 Take two .tar.xz file and generate a third file, stored in the pool.
66 The path to the pool file is then returned and <path>.asc is also
67 generated using the default signing key.
68 """
69 source_filename = source_path.split("/")[-1].replace(".tar.xz", "")
70 target_filename = target_path.split("/")[-1].replace(".tar.xz", "")
71
72 # FIXME: This is a bit of an hack, it'd be better not to have to hardcode
73 # that kind of stuff...
74 if (source_filename.startswith("version-")
75 and target_filename.startswith("version-")):
76 return target_path
77
78 if (source_filename.startswith("keyring-")
79 and target_filename.startswith("keyring-")):
80 return target_path
81
82 # Now for everything else
83 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
84 "%s.delta-%s.tar.xz" %
85 (target_filename, source_filename)))
86
87 # Return pre-existing entries
88 if os.path.exists(path):
89 return path
90
91 # Create the pool if it doesn't exist
92 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
93 os.makedirs(os.path.join(conf.publish_path, "pool"))
94
95 # Generate the diff
96 tempdir = tempfile.mkdtemp()
97 tools.xz_uncompress(source_path, os.path.join(tempdir, "source.tar"))
98 tools.xz_uncompress(target_path, os.path.join(tempdir, "target.tar"))
99
100 imagediff = diff.ImageDiff(os.path.join(tempdir, "source.tar"),
101 os.path.join(tempdir, "target.tar"))
102
103 imagediff.generate_diff_tarball(os.path.join(tempdir, "output.tar"))
104 tools.xz_compress(os.path.join(tempdir, "output.tar"), path)
105 shutil.rmtree(tempdir)
106
107 # Sign the result
108 gpg.sign_file(conf, "image-signing", path)
109
110 # Generate the metadata file
111 metadata = {}
112 metadata['generator'] = "delta"
113 metadata['source'] = {}
114 metadata['target'] = {}
115
116 if os.path.exists(source_path.replace(".tar.xz", ".json")):
117 with open(source_path.replace(".tar.xz", ".json"), "r") as fd:
118 metadata['source'] = json.loads(fd.read())
119
120 if os.path.exists(target_path.replace(".tar.xz", ".json")):
121 with open(target_path.replace(".tar.xz", ".json"), "r") as fd:
122 metadata['target'] = json.loads(fd.read())
123
124 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
125 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
126 indent=4, separators=(',', ': ')))
127 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
128
129 return path
130
131
132def generate_file(conf, generator, arguments, environment):
133 """
134 Dispatcher for the various generators and importers.
135 It calls the right generator and signs the generated file
136 before returning the path.
137 """
138
139 if generator == "version":
140 path = generate_file_version(conf, arguments, environment)
141 elif generator == "cdimage-device":
142 path = generate_file_cdimage_device(conf, arguments, environment)
143 elif generator == "cdimage-ubuntu":
144 path = generate_file_cdimage_ubuntu(conf, arguments, environment)
145 elif generator == "cdimage-custom":
146 path = generate_file_cdimage_custom(conf, arguments, environment)
147 elif generator == "http":
148 path = generate_file_http(conf, arguments, environment)
149 elif generator == "keyring":
150 path = generate_file_keyring(conf, arguments, environment)
151 elif generator == "system-image":
152 path = generate_file_system_image(conf, arguments, environment)
153 elif generator == "remote-system-image":
154 path = generate_file_remote_system_image(conf, arguments, environment)
155 else:
156 raise Exception("Invalid generator: %s" % generator)
157
158 return path
159
160
161def generate_file_cdimage_device(conf, arguments, environment):
162 """
163 Scan a cdimage tree for new device files.
164 """
165
166 # We need at least a path and a series
167 if len(arguments) < 2:
168 return None
169
170 # Read the arguments
171 cdimage_path = arguments[0]
172 series = arguments[1]
173
174 options = {}
175 if len(arguments) > 2:
176 options = unpack_arguments(arguments[2])
177
178 boot_arch = "armhf"
179 recovery_arch = "armel"
180 system_arch = "armel"
181 if environment['device_name'] in ("generic_x86", "generic_i386"):
182 boot_arch = "i386"
183 recovery_arch = "i386"
184 system_arch = "i386"
185 elif environment['device_name'] in ("generic_amd64",):
186 boot_arch = "amd64"
187 recovery_arch = "amd64"
188 system_arch = "amd64"
189
190 # Check that the directory exists
191 if not os.path.exists(cdimage_path):
192 return None
193
194 versions = sorted([version for version in os.listdir(cdimage_path)
195 if version not in ("pending", "current")],
196 reverse=True)
197
198 for version in versions:
199 # Skip directories without checksums
200 if not os.path.exists(os.path.join(cdimage_path, version,
201 "SHA256SUMS")):
202 continue
203
204 # Check for all the needed files
205 boot_path = os.path.join(cdimage_path, version,
206 "%s-preinstalled-boot-%s+%s.img" %
207 (series, boot_arch,
208 environment['device_name']))
209 if not os.path.exists(boot_path):
210 continue
211
212 recovery_path = os.path.join(cdimage_path, version,
213 "%s-preinstalled-recovery-%s+%s.img" %
214 (series, recovery_arch,
215 environment['device_name']))
216 if not os.path.exists(recovery_path):
217 continue
218
219 system_path = os.path.join(cdimage_path, version,
220 "%s-preinstalled-system-%s+%s.img" %
221 (series, system_arch,
222 environment['device_name']))
223 if not os.path.exists(system_path):
224 continue
225
226 # Check if we should only import tested images
227 if options.get("import", "any") == "good":
228 if not os.path.exists(os.path.join(cdimage_path, version,
229 ".marked_good")):
230 continue
231
232 # Set the version_detail string
233 version_detail = "device=%s" % version
234
235 # Extract the hashes
236 boot_hash = None
237 recovery_hash = None
238 system_hash = None
239 with open(os.path.join(cdimage_path, version,
240 "SHA256SUMS"), "r") as fd:
241 for line in fd:
242 line = line.strip()
243 if line.endswith(boot_path.split("/")[-1]):
244 boot_hash = line.split()[0]
245 elif line.endswith(recovery_path.split("/")[-1]):
246 recovery_hash = line.split()[0]
247 elif line.endswith(system_path.split("/")[-1]):
248 system_hash = line.split()[0]
249
250 if boot_hash and recovery_hash and system_hash:
251 break
252
253 if not boot_hash or not recovery_hash or not system_hash:
254 continue
255
256 hash_string = "%s/%s/%s" % (boot_hash, recovery_hash, system_hash)
257 global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
258
259 # Generate the path
260 path = os.path.join(conf.publish_path, "pool",
261 "device-%s.tar.xz" % global_hash)
262
263 # Return pre-existing entries
264 if os.path.exists(path):
265 # Get the real version number (in case it got copied)
266 if os.path.exists(path.replace(".tar.xz", ".json")):
267 with open(path.replace(".tar.xz", ".json"), "r") as fd:
268 metadata = json.loads(fd.read())
269
270 if "version_detail" in metadata:
271 version_detail = metadata['version_detail']
272
273 environment['version_detail'].append(version_detail)
274 return path
275
276 temp_dir = tempfile.mkdtemp()
277
278 # Generate a new tarball
279 target_tarball = tarfile.open(os.path.join(temp_dir, "target.tar"),
280 "w:")
281
282 # system image
283 # # convert to raw image
284 system_img = os.path.join(temp_dir, "system.img")
285 with open(os.path.devnull, "w") as devnull:
286 subprocess.call(["simg2img", system_path, system_img],
287 stdout=devnull)
288
289 # # shrink to minimal size
290 with open(os.path.devnull, "w") as devnull:
291 subprocess.call(["resize2fs", "-M", system_img],
292 stdout=devnull, stderr=devnull)
293
294 # # include in tarball
295 target_tarball.add(system_img,
296 arcname="system/var/lib/lxc/android/system.img",
297 filter=root_ownership)
298
299 # boot image
300 target_tarball.add(boot_path, arcname="partitions/boot.img",
301 filter=root_ownership)
302
303 # recovery image
304 target_tarball.add(recovery_path,
305 arcname="partitions/recovery.img",
306 filter=root_ownership)
307
308 target_tarball.close()
309
310 # Create the pool if it doesn't exist
311 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
312 os.makedirs(os.path.join(conf.publish_path, "pool"))
313
314 # Compress the target tarball and sign it
315 tools.xz_compress(os.path.join(temp_dir, "target.tar"), path)
316 gpg.sign_file(conf, "image-signing", path)
317
318 # Generate the metadata file
319 metadata = {}
320 metadata['generator'] = "cdimage-device"
321 metadata['version'] = version
322 metadata['version_detail'] = version_detail
323 metadata['series'] = series
324 metadata['device'] = environment['device_name']
325 metadata['boot_path'] = boot_path
326 metadata['boot_checksum'] = boot_hash
327 metadata['recovery_path'] = recovery_path
328 metadata['recovery_checksum'] = recovery_hash
329 metadata['system_path'] = system_path
330 metadata['system_checksum'] = system_hash
331
332 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
333 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
334 indent=4, separators=(',', ': ')))
335 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
336
337 # Cleanup
338 shutil.rmtree(temp_dir)
339
340 environment['version_detail'].append(version_detail)
341 return path
342
343 return None
344
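A standalone sketch of the pool-name derivation above: the three per-image checksums from SHA256SUMS are joined with "/" and hashed again, so a change to any of boot, recovery or system produces a new device tarball name (file names here are hypothetical):

    from hashlib import sha256

    def combined_device_hash(sums_path, boot_name, recovery_name, system_name):
        # Map file name -> checksum from a SHA256SUMS file.
        sums = {}
        with open(sums_path, "r") as fd:
            for line in fd:
                parts = line.split()
                if len(parts) == 2:
                    sums[parts[1].lstrip("*")] = parts[0]

        hash_string = "%s/%s/%s" % (sums[boot_name], sums[recovery_name],
                                    sums[system_name])
        return sha256(hash_string.encode("utf-8")).hexdigest()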
345
346def generate_file_cdimage_ubuntu(conf, arguments, environment):
347 """
348 Scan a cdimage tree for new ubuntu files.
349 """
350
351 # We need at least a path and a series
352 if len(arguments) < 2:
353 return None
354
355 # Read the arguments
356 cdimage_path = arguments[0]
357 series = arguments[1]
358
359 options = {}
360 if len(arguments) > 2:
361 options = unpack_arguments(arguments[2])
362
363 arch = "armhf"
364 if environment['device_name'] in ("generic_x86", "generic_i386"):
365 arch = "i386"
366 elif environment['device_name'] in ("generic_amd64",):
367 arch = "amd64"
368
369 # Check that the directory exists
370 if not os.path.exists(cdimage_path):
371 return None
372
373 versions = sorted([version for version in os.listdir(cdimage_path)
374 if version not in ("pending", "current")],
375 reverse=True)
376
377 for version in versions:
378 # Skip directories without checksums
379 if not os.path.exists(os.path.join(cdimage_path, version,
380 "SHA256SUMS")):
381 continue
382
383 # Check for the rootfs
384 rootfs_path = os.path.join(cdimage_path, version,
385 "%s-preinstalled-%s-%s.tar.gz" %
386 (series, options.get("product", "touch"),
387 arch))
388 if not os.path.exists(rootfs_path):
389 continue
390
391 # Check if we should only import tested images
392 if options.get("import", "any") == "good":
393 if not os.path.exists(os.path.join(cdimage_path, version,
394 ".marked_good")):
395 continue
396
397 # Set the version_detail string
398 version_detail = "ubuntu=%s" % version
399
400 # Extract the hash
401 rootfs_hash = None
402 with open(os.path.join(cdimage_path, version,
403 "SHA256SUMS"), "r") as fd:
404 for line in fd:
405 line = line.strip()
406 if line.endswith(rootfs_path.split("/")[-1]):
407 rootfs_hash = line.split()[0]
408 break
409
410 if not rootfs_hash:
411 continue
412
413 # Generate the path
414 path = os.path.join(conf.publish_path, "pool",
415 "ubuntu-%s.tar.xz" % rootfs_hash)
416
417 # Return pre-existing entries
418 if os.path.exists(path):
419 # Get the real version number (in case it got copied)
420 if os.path.exists(path.replace(".tar.xz", ".json")):
421 with open(path.replace(".tar.xz", ".json"), "r") as fd:
422 metadata = json.loads(fd.read())
423
424 if "version_detail" in metadata:
425 version_detail = metadata['version_detail']
426
427 environment['version_detail'].append(version_detail)
428 return path
429
430 temp_dir = tempfile.mkdtemp()
431
432 # Unpack the source tarball
433 tools.gzip_uncompress(rootfs_path, os.path.join(temp_dir,
434 "source.tar"))
435
436 # Generate a new shifted tarball
437 source_tarball = tarfile.open(os.path.join(temp_dir, "source.tar"),
438 "r:")
439 target_tarball = tarfile.open(os.path.join(temp_dir, "target.tar"),
440 "w:")
441
442 added = []
443 for entry in source_tarball:
444 # FIXME: Will need to be done on the real rootfs
445 # Skip some files
446 if entry.name in ("SWAP.swap", "etc/mtab"):
447 continue
448
449 fileptr = None
450 if entry.isfile():
451 try:
452 fileptr = source_tarball.extractfile(entry.name)
453 except KeyError: # pragma: no cover
454 pass
455
456 # Update hardlinks to point to the right target
457 if entry.islnk():
458 entry.linkname = "system/%s" % entry.linkname
459
460 entry.name = "system/%s" % entry.name
461 target_tarball.addfile(entry, fileobj=fileptr)
462 added.append(entry.name)
463
464 if options.get("product", "touch") == "touch":
465 # FIXME: Will need to be done on the real rootfs
466 # Add some symlinks and directories
467 # # /android
468 new_file = tarfile.TarInfo()
469 new_file.type = tarfile.DIRTYPE
470 new_file.name = "system/android"
471 new_file.mode = 0o755
472 new_file.mtime = int(time.strftime("%s", time.localtime()))
473 new_file.uname = "root"
474 new_file.gname = "root"
475 target_tarball.addfile(new_file)
476
477 # # Android partitions
478 for android_path in ("cache", "data", "factory", "firmware",
479 "persist", "system"):
480 new_file = tarfile.TarInfo()
481 new_file.type = tarfile.SYMTYPE
482 new_file.name = "system/%s" % android_path
483 new_file.linkname = "/android/%s" % android_path
484 new_file.mode = 0o755
485 new_file.mtime = int(time.strftime("%s", time.localtime()))
486 new_file.uname = "root"
487 new_file.gname = "root"
488 target_tarball.addfile(new_file)
489
490 # # /vendor
491 new_file = tarfile.TarInfo()
492 new_file.type = tarfile.SYMTYPE
493 new_file.name = "system/vendor"
494 new_file.linkname = "/android/system/vendor"
495 new_file.mode = 0o755
496 new_file.mtime = int(time.strftime("%s", time.localtime()))
497 new_file.uname = "root"
498 new_file.gname = "root"
499 target_tarball.addfile(new_file)
500
501 # # /userdata
502 new_file = tarfile.TarInfo()
503 new_file.type = tarfile.DIRTYPE
504 new_file.name = "system/userdata"
505 new_file.mode = 0o755
506 new_file.mtime = int(time.strftime("%s", time.localtime()))
507 new_file.uname = "root"
508 new_file.gname = "root"
509 target_tarball.addfile(new_file)
510
511 # # /etc/mtab
512 new_file = tarfile.TarInfo()
513 new_file.type = tarfile.SYMTYPE
514 new_file.name = "system/etc/mtab"
515 new_file.linkname = "/proc/mounts"
516 new_file.mode = 0o444
517 new_file.mtime = int(time.strftime("%s", time.localtime()))
518 new_file.uname = "root"
519 new_file.gname = "root"
520 target_tarball.addfile(new_file)
521
522 # # /lib/modules
523 new_file = tarfile.TarInfo()
524 new_file.type = tarfile.DIRTYPE
525 new_file.name = "system/lib/modules"
526 new_file.mode = 0o755
527 new_file.mtime = int(time.strftime("%s", time.localtime()))
528 new_file.uname = "root"
529 new_file.gname = "root"
530 target_tarball.addfile(new_file)
531
532 source_tarball.close()
533 target_tarball.close()
534
535 # Create the pool if it doesn't exist
536 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
537 os.makedirs(os.path.join(conf.publish_path, "pool"))
538
539 # Compress the target tarball and sign it
540 tools.xz_compress(os.path.join(temp_dir, "target.tar"), path)
541 gpg.sign_file(conf, "image-signing", path)
542
543 # Generate the metadata file
544 metadata = {}
545 metadata['generator'] = "cdimage-ubuntu"
546 metadata['version'] = version
547 metadata['version_detail'] = version_detail
548 metadata['series'] = series
549 metadata['rootfs_path'] = rootfs_path
550 metadata['rootfs_checksum'] = rootfs_hash
551
552 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
553 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
554 indent=4, separators=(',', ': ')))
555 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
556
557 # Cleanup
558 shutil.rmtree(temp_dir)
559
560 environment['version_detail'].append(version_detail)
561 return path
562
563 return None
564
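The per-entry rewrite above boils down to prefixing every member name with system/ while keeping hardlink targets consistent; a minimal standalone sketch, assuming source.tar and target.tar in the working directory:

    import tarfile

    with tarfile.open("source.tar", "r:") as source, \
            tarfile.open("target.tar", "w:") as target:
        for entry in source:
            fileptr = source.extractfile(entry) if entry.isfile() else None
            if entry.islnk():
                # Hardlink targets are archive-relative paths, so they
                # must be shifted along with the member names.
                entry.linkname = "system/%s" % entry.linkname
            entry.name = "system/%s" % entry.name
            target.addfile(entry, fileobj=fileptr)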
565
566def generate_file_cdimage_custom(conf, arguments, environment):
567 """
568 Scan a cdimage tree for new custom files.
569 """
570
571 # We need at least a path and a series
572 if len(arguments) < 2:
573 return None
574
575 # Read the arguments
576 cdimage_path = arguments[0]
577 series = arguments[1]
578
579 options = {}
580 if len(arguments) > 2:
581 options = unpack_arguments(arguments[2])
582
583 arch = "armhf"
584 if environment['device_name'] in ("generic_x86", "generic_i386"):
585 arch = "i386"
586 elif environment['device_name'] in ("generic_amd64",):
587 arch = "amd64"
588
589 # Check that the directory exists
590 if not os.path.exists(cdimage_path):
591 return None
592
593 versions = sorted([version for version in os.listdir(cdimage_path)
594 if version not in ("pending", "current")],
595 reverse=True)
596
597 for version in versions:
598 # Skip directories without checksums
599 if not os.path.exists(os.path.join(cdimage_path, version,
600 "SHA256SUMS")):
601 continue
602
603 # Check for the custom tarball
604 custom_path = os.path.join(cdimage_path, version,
605 "%s-preinstalled-%s-%s.custom.tar.gz" %
606 (series, options.get("product", "touch"),
607 arch))
608 if not os.path.exists(custom_path):
609 continue
610
611 # Check if we should only import tested images
612 if options.get("import", "any") == "good":
613 if not os.path.exists(os.path.join(cdimage_path, version,
614 ".marked_good")):
615 continue
616
617 # Set the version_detail string
618 version_detail = "custom=%s" % version
619
620 # Extract the hash
621 custom_hash = None
622 with open(os.path.join(cdimage_path, version,
623 "SHA256SUMS"), "r") as fd:
624 for line in fd:
625 line = line.strip()
626 if line.endswith(custom_path.split("/")[-1]):
627 custom_hash = line.split()[0]
628 break
629
630 if not custom_hash:
631 continue
632
633 # Generate the path
634 path = os.path.join(conf.publish_path, "pool",
635 "custom-%s.tar.xz" % custom_hash)
636
637 # Return pre-existing entries
638 if os.path.exists(path):
639 # Get the real version number (in case it got copied)
640 if os.path.exists(path.replace(".tar.xz", ".json")):
641 with open(path.replace(".tar.xz", ".json"), "r") as fd:
642 metadata = json.loads(fd.read())
643
644 if "version_detail" in metadata:
645 version_detail = metadata['version_detail']
646
647 environment['version_detail'].append(version_detail)
648 return path
649
650 temp_dir = tempfile.mkdtemp()
651
652 # Unpack the source tarball
653 tools.gzip_uncompress(custom_path, os.path.join(temp_dir,
654 "source.tar"))
655
656 # Create the pool if it doesn't exist
657 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
658 os.makedirs(os.path.join(conf.publish_path, "pool"))
659
660 # Compress the target tarball and sign it
661 tools.xz_compress(os.path.join(temp_dir, "source.tar"), path)
662 gpg.sign_file(conf, "image-signing", path)
663
664 # Generate the metadata file
665 metadata = {}
666 metadata['generator'] = "cdimage-custom"
667 metadata['version'] = version
668 metadata['version_detail'] = version_detail
669 metadata['series'] = series
670 metadata['custom_path'] = custom_path
671 metadata['custom_checksum'] = custom_hash
672
673 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
674 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
675 indent=4, separators=(',', ': ')))
676 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
677
678 # Cleanup
679 shutil.rmtree(temp_dir)
680
681 environment['version_detail'].append(version_detail)
682 return path
683
684 return None
685
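unpack_arguments() is defined earlier in generators.py and not part of this hunk; a functionally equivalent sketch (the real helper may differ in detail) of how the comma-separated option strings used by these generators, e.g. "product=touch,import=good", are interpreted:

    def unpack_options(argument):
        # Parse "key=value,key=value" into a dict; pairs without an
        # "=" are ignored.
        options = {}
        for pair in argument.split(","):
            if "=" not in pair:
                continue
            key, value = pair.split("=", 1)
            options[key] = value
        return options

    assert unpack_options("product=touch,import=good") == \
        {"product": "touch", "import": "good"}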
686
687def generate_file_http(conf, arguments, environment):
688 """
689 Grab, cache and return a file using http/https.
690 """
691
692 # We need at least a URL
693 if len(arguments) == 0:
694 return None
695
696 # Read the arguments
697 url = arguments[0]
698
699 options = {}
700 if len(arguments) > 1:
701 options = unpack_arguments(arguments[1])
702
703 path = None
704 version = None
705
706 if "http_%s" % url in CACHE:
707 version = CACHE['http_%s' % url]
708
709 # Get the version/build number
710 if "monitor" in options or version:
711 if not version:
712 # Grab the current version number
713 old_timeout = socket.getdefaulttimeout()
714 socket.setdefaulttimeout(5)
715 try:
716 version = urlopen(options['monitor']).read().strip()
717 except socket.timeout:
718 return None
719 except IOError:
720 return None
721 socket.setdefaulttimeout(old_timeout)
722
723 # Validate the version number
724 if not version or len(version.split("\n")) > 1:
725 return None
726
727 # Push the result in the cache
728 CACHE['http_%s' % url] = version
729
730 # Set version_detail
731 version_detail = "%s=%s" % (options.get("name", "http"), version)
732
733 # FIXME: can be dropped once all the non-hashed tarballs are gone
734 old_path = os.path.realpath(os.path.join(conf.publish_path, "pool",
735 "%s-%s.tar.xz" %
736 (options.get("name", "http"),
737 version)))
738 if os.path.exists(old_path):
739 # Get the real version number (in case it got copied)
740 if os.path.exists(old_path.replace(".tar.xz", ".json")):
741 with open(old_path.replace(".tar.xz", ".json"), "r") as fd:
742 metadata = json.loads(fd.read())
743
744 if "version_detail" in metadata:
745 version_detail = metadata['version_detail']
746
747 environment['version_detail'].append(version_detail)
748 return old_path
749
750 # Build the path, hashing together the URL and version
751 hash_string = "%s:%s" % (url, version)
752 global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
753 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
754 "%s-%s.tar.xz" %
755 (options.get("name", "http"),
756 global_hash)))
757
758 # Return pre-existing entries
759 if os.path.exists(path):
760 # Get the real version number (in case it got copied)
761 if os.path.exists(path.replace(".tar.xz", ".json")):
762 with open(path.replace(".tar.xz", ".json"), "r") as fd:
763 metadata = json.loads(fd.read())
764
765 if "version_detail" in metadata:
766 version_detail = metadata['version_detail']
767
768 environment['version_detail'].append(version_detail)
769 return path
770
771 # Grab the real thing
772 tempdir = tempfile.mkdtemp()
773 old_timeout = socket.getdefaulttimeout()
774 socket.setdefaulttimeout(5)
775 try:
776 urlretrieve(url, os.path.join(tempdir, "download"))
777 except socket.timeout:
778 shutil.rmtree(tempdir)
779 return None
780 except IOError:
781 shutil.rmtree(tempdir)
782 return None
783 socket.setdefaulttimeout(old_timeout)
784
785 # Hash it if we don't have a version number
786 if not version:
787 # Hash the file
788 with open(os.path.join(tempdir, "download"), "rb") as fd:
789 version = sha256(fd.read()).hexdigest()
790
791 # Set version_detail
792 version_detail = "%s=%s" % (options.get("name", "http"), version)
793
794 # Push the result in the cache
795 CACHE['http_%s' % url] = version
796
797 # Build the path
798 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
799 "%s-%s.tar.xz" %
800 (options.get("name", "http"),
801 version)))
802 # Return pre-existing entries
803 if os.path.exists(path):
804 # Get the real version number (in case it got copied)
805 if os.path.exists(path.replace(".tar.xz", ".json")):
806 with open(path.replace(".tar.xz", ".json"), "r") as fd:
807 metadata = json.loads(fd.read())
808
809 if "version_detail" in metadata:
810 version_detail = metadata['version_detail']
811
812 environment['version_detail'].append(version_detail)
813 shutil.rmtree(tempdir)
814 return path
815
816 # Create the pool if it doesn't exist
817 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
818 os.makedirs(os.path.join(conf.publish_path, "pool"))
819
820 # Move the file to the pool and sign it
821 shutil.move(os.path.join(tempdir, "download"), path)
822 gpg.sign_file(conf, "image-signing", path)
823
824 # Generate the metadata file
825 metadata = {}
826 metadata['generator'] = "http"
827 metadata['version'] = version
828 metadata['version_detail'] = version_detail
829 metadata['url'] = url
830
831 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
832 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
833 indent=4, separators=(',', ': ')))
834 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
835
836 # Cleanup
837 shutil.rmtree(tempdir)
838
839 environment['version_detail'].append(version_detail)
840 return path
841
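The hashed pool naming for http-fetched files (revision 244 in this branch) can be reproduced standalone; the URL and version below are illustrative:

    from hashlib import sha256

    def http_pool_name(url, version, name="http"):
        # Mixing the URL into the hash keeps two sources that publish
        # the same version string from colliding in the pool.
        global_hash = sha256(
            ("%s:%s" % (url, version)).encode("utf-8")).hexdigest()
        return "%s-%s.tar.xz" % (name, global_hash)

    http_pool_name("http://example.net/device.tar.xz", "20141010")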
842
843def generate_file_keyring(conf, arguments, environment):
844 """
845 Generate a keyring tarball or return a pre-existing one.
846 """
847
848 # Don't generate keyring tarballs when nothing changed
849 if len(environment['new_files']) == 0:
850 return None
851
852 # We need a keyring name
853 if len(arguments) == 0:
854 return None
855
856 # Read the arguments
857 keyring_name = arguments[0]
858 keyring_path = os.path.join(conf.gpg_keyring_path, keyring_name)
859
860 # Fail on missing keyring
861 if not os.path.exists("%s.tar.xz" % keyring_path) or \
862 not os.path.exists("%s.tar.xz.asc" % keyring_path):
863 return None
864
865 with open("%s.tar.xz" % keyring_path, "rb") as fd:
866 hash_tarball = sha256(fd.read()).hexdigest()
867
868 with open("%s.tar.xz.asc" % keyring_path, "rb") as fd:
869 hash_signature = sha256(fd.read()).hexdigest()
870
871 hash_string = "%s/%s" % (hash_tarball, hash_signature)
872 global_hash = sha256(hash_string.encode('utf-8')).hexdigest()
873
874 # Build the path
875 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
876 "keyring-%s.tar.xz" %
877 global_hash))
878
879 # Set the version_detail string
880 environment['version_detail'].append("keyring=%s" % keyring_name)
881
882 # Don't bother re-generating a file if it already exists
883 if os.path.exists(path):
884 return path
885
886 # Create temporary directory
887 tempdir = tempfile.mkdtemp()
888
889 # Generate the tarball
890 tarball = tarfile.open(os.path.join(tempdir, "output.tar"), "w:")
891 tarball.add("%s.tar.xz" % keyring_path,
892 arcname="/system/etc/system-image/archive-master.tar.xz",
893 filter=root_ownership)
894 tarball.add("%s.tar.xz.asc" % keyring_path,
895 arcname="/system/etc/system-image/archive-master.tar.xz.asc",
896 filter=root_ownership)
897 tarball.close()
898
899 # Create the pool if it doesn't exist
900 if not os.path.exists(os.path.join(conf.publish_path, "pool")):
901 os.makedirs(os.path.join(conf.publish_path, "pool"))
902
903 # Compress and sign it
904 tools.xz_compress(os.path.join(tempdir, "output.tar"), path)
905 gpg.sign_file(conf, "image-signing", path)
906
907 # Generate the metadata file
908 metadata = {}
909 metadata['generator'] = "keyring"
910 metadata['version'] = global_hash
911 metadata['version_detail'] = "keyring=%s" % keyring_name
912 metadata['path'] = keyring_path
913
914 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
915 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
916 indent=4, separators=(',', ': ')))
917 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
918
919 # Cleanup
920 shutil.rmtree(tempdir)
921
922 return path
923
924
925def generate_file_remote_system_image(conf, arguments, environment):
926 """
927 Import files from a remote system-image server.
928 """
929
930 # We need at least a base URL, a channel name and a file prefix
931 if len(arguments) < 3:
932 return None
933
934 # Read the arguments
935 base_url = arguments[0]
936 channel_name = arguments[1]
937 prefix = arguments[2]
938
939 options = {}
940 if len(arguments) > 3:
941 options = unpack_arguments(arguments[3])
942
943 device_name = environment['device_name']
944 if 'device' in options:
945 device_name = options['device']
946
947 # Fetch and validate the remote channels.json
948 old_timeout = socket.getdefaulttimeout()
949 socket.setdefaulttimeout(5)
950 try:
951 channel_json = json.loads(urlopen("%s/channels.json" %
952 base_url).read().decode().strip())
953 except socket.timeout:
954 return None
955 except IOError:
956 return None
957 socket.setdefaulttimeout(old_timeout)
958
959 if channel_name not in channel_json:
960 return None
961
962 if "devices" not in channel_json[channel_name]:
963 return None
964
965 if device_name not in channel_json[channel_name]['devices']:
966 return None
967
968 if "index" not in (channel_json[channel_name]['devices']
969 [device_name]):
970 return None
971
972 index_url = "%s/%s" % (base_url, channel_json[channel_name]['devices']
973 [device_name]['index'])
974
975 # Fetch and validate the remote index.json
976 old_timeout = socket.getdefaulttimeout()
977 socket.setdefaulttimeout(5)
978 try:
979 index_json = json.loads(urlopen(index_url).read().decode())
980 except socket.timeout:
981 return None
982 except IOError:
983 return None
984 socket.setdefaulttimeout(old_timeout)
985
986 # Grab the list of full images
987 full_images = sorted([image for image in index_json['images']
988 if image['type'] == "full"],
989 key=lambda image: image['version'])
990
991 # No images
992 if not full_images:
993 return None
994
995 # Found an image, so let's try to find a match
996 for file_entry in full_images[-1]['files']:
997 file_name = file_entry['path'].split("/")[-1]
998 file_prefix = file_name.rsplit("-", 1)[0]
999 if file_prefix == prefix:
1000 path = os.path.realpath("%s/%s" % (conf.publish_path,
1001 file_entry['path']))
1002 if os.path.exists(path):
1003 return path
1004
1005 # Create the target if needed
1006 if not os.path.exists(os.path.dirname(path)):
1007 os.makedirs(os.path.dirname(path))
1008
1009 # Grab the file
1010 file_url = "%s/%s" % (base_url, file_entry['path'])
1011 socket.setdefaulttimeout(5)
1012 try:
1013 urlretrieve(file_url, path)
1014 except socket.timeout:
1015 if os.path.exists(path):
1016 os.remove(path)
1017 return None
1018 except IOError:
1019 if os.path.exists(path):
1020 os.remove(path)
1021 return None
1022 socket.setdefaulttimeout(old_timeout)
1023
1024 if "keyring" in options:
1025 if not tools.repack_recovery_keyring(conf, path,
1026 options['keyring']):
1027 if os.path.exists(path):
1028 os.remove(path)
1029 return None
1030
1031 gpg.sign_file(conf, "image-signing", path)
1032
1033 # Attempt to grab an associated json
1034 socket.setdefaulttimeout(5)
1035 json_path = path.replace(".tar.xz", ".json")
1036 json_url = file_url.replace(".tar.xz", ".json")
1037 try:
1038 urlretrieve(json_url, json_path)
1039 except socket.timeout:
1040 if os.path.exists(json_path):
1041 os.remove(json_path)
1042 except IOError:
1043 if os.path.exists(json_path):
1044 os.remove(json_path)
1045 socket.setdefaulttimeout(old_timeout)
1046
1047 if os.path.exists(json_path):
1048 gpg.sign_file(conf, "image-signing", json_path)
1049 with open(json_path, "r") as fd:
1050 metadata = json.loads(fd.read())
1051
1052 if "version_detail" in metadata:
1053 environment['version_detail'].append(
1054 metadata['version_detail'])
1055
1056 return path
1057
1058 return None
1059
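Stripped of timeouts and error handling, the remote metadata walk above reduces to the following sketch; the Python 3 urllib import is shown here, while the module's own imports are expected to cover Python 2 as well:

    import json
    from urllib.request import urlopen  # Python 3 spelling

    def remote_index_url(base_url, channel, device):
        # channels.json maps channel name -> devices -> per-device index.
        channels = json.loads(
            urlopen("%s/channels.json" % base_url).read().decode())
        return "%s/%s" % (base_url,
                          channels[channel]['devices'][device]['index'])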
1060
1061def generate_file_system_image(conf, arguments, environment):
1062 """
1063 Copy a file from another channel.
1064 """
1065
1066 # We need at least a channel name and a file prefix
1067 if len(arguments) < 2:
1068 return None
1069
1070 # Read the arguments
1071 channel_name = arguments[0]
1072 prefix = arguments[1]
1073
1074 # Run some checks
1075 pub = tree.Tree(conf)
1076 if channel_name not in pub.list_channels():
1077 return None
1078
1079 if (environment['device_name'] not in
1080 pub.list_channels()[channel_name]['devices']):
1081 return None
1082
1083 # Try to find the file
1084 device = pub.get_device(channel_name, environment['device_name'])
1085
1086 full_images = sorted([image for image in device.list_images()
1087 if image['type'] == "full"],
1088 key=lambda image: image['version'])
1089
1090 # No images
1091 if not full_images:
1092 return None
1093
1094 # Found an image, so let's try to find a match
1095 for file_entry in full_images[-1]['files']:
1096 file_name = file_entry['path'].split("/")[-1]
1097 file_prefix = file_name.rsplit("-", 1)[0]
1098 if file_prefix == prefix:
1099 path = os.path.realpath("%s/%s" % (conf.publish_path,
1100 file_entry['path']))
1101
1102 if os.path.exists(path.replace(".tar.xz", ".json")):
1103 with open(path.replace(".tar.xz", ".json"), "r") as fd:
1104 metadata = json.loads(fd.read())
1105
1106 if "version_detail" in metadata:
1107 environment['version_detail'].append(
1108 metadata['version_detail'])
1109
1110 return path
1111
1112 return None
1113
1114
1115def generate_file_version(conf, arguments, environment):
1116 """
1117 Generate a version tarball or return a pre-existing one.
1118 """
1119
1120 # Don't generate version tarballs when nothing changed
1121 if len(environment['new_files']) == 0:
1122 return None
1123
1124 path = os.path.realpath(os.path.join(environment['device'].path,
1125 "version-%s.tar.xz" % environment['version']))
1126
1127 # Set the version_detail string
1128 environment['version_detail'].append("version=%s" % environment['version'])
1129
1130 # Don't bother re-generating a file if it already exists
1131 if os.path.exists(path):
1132 return path
1133
1134 # Generate version_detail
1135 version_detail = ",".join(environment['version_detail'])
1136
1137 # Create temporary directory
1138 tempdir = tempfile.mkdtemp()
1139
1140 # Generate the tarball
1141 tools.generate_version_tarball(
1142 conf, environment['channel_name'], environment['device_name'],
1143 str(environment['version']),
1144 os.path.join(tempdir, "version"), version_detail=version_detail)
1145
1146 # Create the device directory if it doesn't exist
1147 if not os.path.exists(os.path.join(environment['device'].path)):
1148 os.makedirs(os.path.join(environment['device'].path))
1149
1150 # Compress and sign it
1151 tools.xz_compress(os.path.join(tempdir, "version"), path)
1152 gpg.sign_file(conf, "image-signing", path)
1153
1154 # Generate the metadata file
1155 metadata = {}
1156 metadata['generator'] = "version"
1157 metadata['version'] = environment['version']
1158 metadata['version_detail'] = "version=%s" % environment['version']
1159 metadata['channel.ini'] = {}
1160 metadata['channel.ini']['channel'] = environment['channel_name']
1161 metadata['channel.ini']['device'] = environment['device_name']
1162 metadata['channel.ini']['version'] = str(environment['version'])
1163 metadata['channel.ini']['version_detail'] = version_detail
1164
1165 with open(path.replace(".tar.xz", ".json"), "w+") as fd:
1166 fd.write("%s\n" % json.dumps(metadata, sort_keys=True,
1167 indent=4, separators=(',', ': ')))
1168 gpg.sign_file(conf, "image-signing", path.replace(".tar.xz", ".json"))
1169
1170 # Cleanup
1171 shutil.rmtree(tempdir)
1172
1173 return path
01174
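Taken together, every generator above appends one "key=value" token to environment['version_detail'], and generate_file_version() joins them into the version_detail string published in channel.ini; with hypothetical build identifiers:

    detail = ["device=20141008.1", "ubuntu=20141010",
              "custom=20141010", "version=42"]
    assert ",".join(detail) == \
        "device=20141008.1,ubuntu=20141010,custom=20141010,version=42"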
=== added file 'lib/systemimage/gpg.py'
--- lib/systemimage/gpg.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/gpg.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,239 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import json
19import gpgme
20import os
21import tarfile
22
23from io import BytesIO
24
25
26def generate_signing_key(keyring_path, key_name, key_email, key_expiry):
27 """
28 Generate a new 2048-bit RSA signing key.
29 """
30
31 if not os.path.isdir(keyring_path):
32 raise Exception("Keyring path doesn't exist: %s" % keyring_path)
33
34 key_params = """<GnupgKeyParms format="internal">
35Key-Type: RSA
36Key-Length: 2048
37Key-Usage: sign
38Name-Real: %s
39Name-Email: %s
40Expire-Date: %s
41</GnupgKeyParms>
42""" % (key_name, key_email, key_expiry)
43
44 os.environ['GNUPGHOME'] = keyring_path
45
46 ctx = gpgme.Context()
47 result = ctx.genkey(key_params)
48 key = ctx.get_key(result.fpr, True)
49 [uid] = key.uids
50
51 return uid
52
53
54def sign_file(config, key, path, destination=None, detach=True, armor=True):
55 """
56 Sign a file and publish the signature.
57 The key parameter must be a valid key under config.gpg_key_path.
58 The path must be that of a valid file.
59 The destination defaults to <path>.asc (armored), <path>.sig
60 (detached, non-armored) or <path>.gpg (inline) otherwise.
61 The detach and armor parameters respectively control the use of
62 detached signatures and base64 armoring.
63 """
64
65 key_path = "%s/%s" % (config.gpg_key_path, key)
66
67 if not os.path.isdir(key_path):
68 raise IndexError("Invalid GPG key name '%s'." % key)
69
70 if not os.path.isfile(path):
71 raise Exception("Invalid path '%s'." % path)
72
73 if not destination:
74 if armor:
75 destination = "%s.asc" % path
76 elif detach:
77 destination = "%s.sig" % path
78 else:
79 destination = "%s.gpg" % path
80
81 if os.path.exists(destination):
82 raise Exception("destination already exists.")
83
84 os.environ['GNUPGHOME'] = key_path
85
86 # Create a GPG context, assuming no passphrase
87 ctx = gpgme.Context()
88 ctx.armor = armor
89 [key] = ctx.keylist()
90 ctx.signers = [key]
91
92 with open(path, "rb") as fd_in, open(destination, "wb+") as fd_out:
93 if detach:
94 retval = ctx.sign(fd_in, fd_out, gpgme.SIG_MODE_DETACH)
95 else:
96 retval = ctx.sign(fd_in, fd_out, gpgme.SIG_MODE_NORMAL)
97
98 return retval
99
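A short usage sketch for sign_file(); "config" is the loaded configuration, the key name matches a directory under config.gpg_key_path, and the pool path is hypothetical:

    # Detached, armored signature written next to the input file as
    # /srv/www/pool/custom-abc.tar.xz.asc
    sign_file(config, "image-signing", "/srv/www/pool/custom-abc.tar.xz")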
100
101class Keyring:
102 """
103 Represents a keyring; lets you list/add/remove keys and change
104 some of the keyring properties (type, expiration, target hardware).
105 """
106
107 keyring_name = None
108 keyring_type = None
109 keyring_expiry = None
110 keyring_model = None
111 keyring_path = None
112
113 def __init__(self, config, keyring_name):
114 keyring_path = "%s/%s" % (config.gpg_keyring_path, keyring_name)
115
116 if not os.path.isdir(keyring_path):
117 os.makedirs(keyring_path)
118
119 self.keyring_name = keyring_name
120 self.keyring_path = keyring_path
121
122 if os.path.exists("%s/keyring.json" % keyring_path):
123 with open("%s/keyring.json" % keyring_path, "r") as fd:
124 keyring_json = json.loads(fd.read())
125
126 self.keyring_type = keyring_json.get('type', None)
127 self.keyring_expiry = keyring_json.get('expiry', None)
128 self.keyring_model = keyring_json.get('model', None)
129 else:
130 open("%s/pubring.gpg" % keyring_path, "w+").close()
131
132 def generate_tarball(self, destination=None):
133 """
134 Generate a tarball of the keyring and its json metadata.
135 Returns the path to the tarball.
136 """
137
138 if not destination:
139 destination = "%s.tar" % self.keyring_path
140
141 if os.path.isfile(destination):
142 os.remove(destination)
143
144 tarball = tarfile.open(destination, "w:")
145 tarball.add("%s/keyring.json" % self.keyring_path,
146 arcname="keyring.json")
147 tarball.add("%s/pubring.gpg" % self.keyring_path,
148 arcname="keyring.gpg")
149 tarball.close()
150
151 return destination
152
153 def set_metadata(self, keyring_type, keyring_expiry=None,
154 keyring_model=None):
155 """
156 Generate a new keyring.json file.
157 """
158
159 keyring_json = {}
160 if keyring_type:
161 self.keyring_type = keyring_type
162 keyring_json['type'] = keyring_type
163
164 if keyring_expiry:
165 self.keyring_expiry = keyring_expiry
166 keyring_json['expiry'] = keyring_expiry
167
168 if keyring_model:
169 self.keyring_model = keyring_model
170 keyring_json['model'] = keyring_model
171
172 with open("%s/keyring.json" % self.keyring_path, "w+") as fd:
173 fd.write("%s\n" % json.dumps(keyring_json, sort_keys=True,
174 indent=4, separators=(',', ': ')))
175
176 def list_keys(self):
177 os.environ['GNUPGHOME'] = self.keyring_path
178
179 keys = []
180
181 ctx = gpgme.Context()
182 for key in ctx.keylist():
183 keys.append((key.subkeys[0].keyid, key.subkeys[0].length,
184 [uid.uid for uid in key.uids]))
185
186 return keys
187
188 def export_key(self, path, key, armor=True):
189 os.environ['GNUPGHOME'] = self.keyring_path
190
191 ctx = gpgme.Context()
192 ctx.armor = armor
193
194 gpg_key = ctx.get_key(key)
195
196 with open(path, "wb+") as fd:
197 for subkey in gpg_key.subkeys:
198 ctx.export(str(subkey.keyid), fd)
199
200 def import_key(self, path, armor=True):
201 os.environ['GNUPGHOME'] = self.keyring_path
202
203 ctx = gpgme.Context()
204 ctx.armor = armor
205
206 with open(path, "rb") as fd:
207 ctx.import_(fd)
208
209 def import_keys(self, path):
210 """
211 Import all the keys from the specified keyring.
212 """
213
214 os.environ['GNUPGHOME'] = path
215
216 ctx = gpgme.Context()
217
218 keys = []
219 for key in list(ctx.keylist()):
220 for subkey in key.subkeys:
221 content = BytesIO()
222 ctx.export(str(subkey.keyid), content)
223 keys.append(content)
224
225 os.environ['GNUPGHOME'] = self.keyring_path
226 ctx = gpgme.Context()
227
228 for key in keys:
229 key.seek(0)
230 ctx.import_(key)
231
232 def del_key(self, key):
233 os.environ['GNUPGHOME'] = self.keyring_path
234
235 ctx = gpgme.Context()
236
237 gpg_key = ctx.get_key(key)
238
239 ctx.delete(gpg_key)
0240
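A hypothetical end-to-end sketch of the Keyring class above; "conf" is an already-loaded configuration object, and the keyring name and type are illustrative only:

    import os
    from systemimage import gpg

    keyring = gpg.Keyring(conf, "image-signing")
    keyring.set_metadata("signing")  # writes keyring.json
    # Import public keys from an existing GPG home directory.
    keyring.import_keys(os.path.join(conf.gpg_key_path, "image-signing"))
    tarball = keyring.generate_tarball()  # <keyring_path>.tar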
=== added file 'lib/systemimage/tools.py'
--- lib/systemimage/tools.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/tools.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,367 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18from io import BytesIO
19
20import gzip
21import os
22import re
23import shutil
24import subprocess
25import tarfile
26import tempfile
27import time
28
29
30def expand_path(path, base="/"):
31 """
32 Takes a path and returns a tuple containing the absolute path
33 and a relative path (relative to base).
34 """
35
36 if path.startswith(base):
37 path = re.sub('^%s' % re.escape(base), "", path)
38
39 if path.startswith(os.sep):
40 relpath = path[1:]
41 else:
42 relpath = path
43
44 abspath = os.path.realpath(os.path.join(base, relpath))
45
46 return abspath, relpath
47
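Example behaviour, assuming no symlinks are involved (os.path.realpath would otherwise resolve them):

    expand_path("/srv/www/pool", base="/srv/www")  # ("/srv/www/pool", "pool")
    expand_path("pool", base="/srv/www")           # ("/srv/www/pool", "pool")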
48
49# Imported from cdimage.osextras
50def find_on_path(command):
51 """Is command on the executable search path?"""
52
53 if 'PATH' not in os.environ:
54 return False
55 path = os.environ['PATH']
56 for element in path.split(os.pathsep):
57 if not element:
58 continue
59 filename = os.path.join(element, command)
60 if os.path.isfile(filename) and os.access(filename, os.X_OK):
61 return True
62 return False
63
64
65def generate_version_tarball(config, channel, device, version, path,
66 build_path="system/etc/ubuntu-build",
67 channel_path="system/etc/system-image/"
68 "channel.ini",
69 version_detail=None,
70 channel_target=None):
71 """
72 Generates a tarball which contains two files
73 (build_path and channel_path).
74 The first contains the build id, the second a .ini config file.
75 The resulting tarball is written at the provided location (path).
76 """
77
78 tarball = tarfile.open(path, 'w:')
79
80 version_file = tarfile.TarInfo()
81 version_file.size = len(version) + 1
82 version_file.mtime = int(time.strftime("%s", time.localtime()))
83 version_file.name = build_path
84
85 # Append a line break
86 version += "\n"
87
88 tarball.addfile(version_file, BytesIO(version.encode('utf-8')))
89
90 http_port = config.public_http_port
91 https_port = config.public_https_port
92
93 if http_port == 0:
94 http_port = "disabled"
95
96 if https_port == 0:
97 https_port = "disabled"
98
99 channel = """[service]
100base: %s
101http_port: %s
102https_port: %s
103channel: %s
104device: %s
105build_number: %s
106""" % (config.public_fqdn, http_port, https_port,
107 channel, device, version.strip())
108
109 if channel_target:
110 channel += "channel_target: %s\n" % channel_target
111
112 if version_detail:
113 channel += "version_detail: %s\n" % version_detail
114
115 channel_file = tarfile.TarInfo()
116 channel_file.size = len(channel)
117 channel_file.mtime = int(time.strftime("%s", time.localtime()))
118 channel_file.name = channel_path
119
120 tarball.addfile(channel_file, BytesIO(channel.encode('utf-8')))
121
122 tarball.close()
123
124
125def gzip_compress(path, destination=None, level=9):
126 """
127 Compress a file (path) using gzip.
128 By default, creates a .gz version of the file in the same directory.
129 An alternate destination path may be provided.
130 The compression level is 9 by default but can be overridden.
131 """
132
133 if not destination:
134 destination = "%s.gz" % path
135
136 if os.path.exists(destination):
137 raise Exception("destination already exists.")
138
139 uncompressed = open(path, "rb")
140 compressed = gzip.open(destination, "wb+", level)
141 compressed.writelines(uncompressed)
142 compressed.close()
143 uncompressed.close()
144
145 return destination
146
147
148def gzip_uncompress(path, destination=None):
149 """
150 Uncompress a file (path) using gzip.
151 By default, uses the source path without the .gz suffix as the target.
152 An alternate destination path may be provided.
153 """
154
155 if not destination and path[-3:] != ".gz":
156 raise Exception("unspecified destination and path doesn't end"
157 " with .gz")
158
159 if not destination:
160 destination = path[:-3]
161
162 if os.path.exists(destination):
163 raise Exception("destination already exists.")
164
165 compressed = gzip.open(path, "rb")
166 uncompressed = open(destination, "wb+")
167 uncompressed.writelines(compressed)
168 uncompressed.close()
169 compressed.close()
170
171 return destination
172
173
174def xz_compress(path, destination=None, level=9):
175 """
176 Compress a file (path) using xz.
177 By default, creates a .xz version of the file in the same directory.
178 An alternate destination path may be provided.
179 The compression level is 9 by default but can be overridden.
180 """
181
182 # NOTE: Once we can drop support for < 3.3, the new lzma module can be used
183
184 if not destination:
185 destination = "%s.xz" % path
186
187 if os.path.exists(destination):
188 raise Exception("destination already exists.")
189
190 if find_on_path("pxz"):
191 xz_command = "pxz"
192 else:
193 xz_command = "xz"
194
195 with open(destination, "wb+") as fd:
196 retval = subprocess.call([xz_command, '-z', '-%s' % level, '-c', path],
197 stdout=fd)
198 return retval
199
200
201def xz_uncompress(path, destination=None):
202 """
203 Uncompress a file (path) using xz.
204 By default, uses the source path without the .xz suffix as the target.
205 An alternate destination path may be provided.
206 """
207
208 # NOTE: Once we can drop support for < 3.3, the new lzma module can be used
209
210 if not destination and path[-3:] != ".xz":
211 raise Exception("unspecified destination and path doesn't end"
212 " with .xz")
213
214 if not destination:
215 destination = path[:-3]
216
217 if os.path.exists(destination):
218 raise Exception("destination already exists.")
219
220 with open(destination, "wb+") as fd:
221 retval = subprocess.call(['xz', '-d', '-c', path],
222 stdout=fd)
223
224 return retval
225
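A round-trip sketch for the two xz helpers (paths hypothetical); both refuse to overwrite an existing destination and return the compressor's exit status, so 0 means success:

    assert xz_compress("image.tar") == 0                    # writes image.tar.xz
    assert xz_uncompress("image.tar.xz", "copy.tar") == 0   # writes copy.tar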
226
227def trigger_mirror(host, port, username, key, command):
228 return subprocess.call(['ssh',
229 '-i', key,
230 '-l', username,
231 '-p', str(port),
232 host,
233 command])
234
235
236def sync_mirrors(config):
237 for mirror in sorted(config.mirrors.values(),
238 key=lambda mirror: mirror.ssh_host):
239 trigger_mirror(mirror.ssh_host, mirror.ssh_port, mirror.ssh_user,
240 mirror.ssh_key, mirror.ssh_command)
241
242
243def repack_recovery_keyring(conf, path, keyring_name):
244 tempdir = tempfile.mkdtemp()
245
246 xz_uncompress(path, os.path.join(tempdir, "input.tar"))
247
248 input_tarball = tarfile.open(os.path.join(tempdir, "input.tar"), "r:")
249
250 # Make sure the partition is in there
251 if "partitions/recovery.img" not in input_tarball.getnames():
252 shutil.rmtree(tempdir)
253 return False
254
255 input_tarball.extract("partitions/recovery.img", tempdir)
256
257 # Extract the content of the .img
258 os.mkdir(os.path.join(tempdir, "img"))
259 old_pwd = os.getcwd()
260 os.chdir(os.path.join(tempdir, "img"))
261 cmd = ["abootimg",
262 "-x", os.path.join(tempdir, "partitions", "recovery.img")]
263
264 with open(os.path.devnull, "w") as devnull:
265 subprocess.call(cmd, stdout=devnull, stderr=devnull)
266
267 os.chdir(old_pwd)
268
269 # Extract the content of the initrd
270 os.mkdir(os.path.join(tempdir, "initrd"))
271 state_path = os.path.join(tempdir, "fakeroot_state")
272 old_pwd = os.getcwd()
273 os.chdir(os.path.join(tempdir, "initrd"))
274
275 gzip_uncompress(os.path.join(tempdir, "img", "initrd.img"),
276 os.path.join(tempdir, "img", "initrd"))
277
278 with open(os.path.join(tempdir, "img", "initrd"), "rb") as fd:
279 with open(os.path.devnull, "w") as devnull:
280 subprocess.call(['fakeroot', '-s', state_path, 'cpio', '-i'],
281 stdin=fd, stdout=devnull, stderr=devnull)
282
283 os.chdir(old_pwd)
284
285 # Swap the files
286 keyring_path = os.path.join(conf.gpg_keyring_path, "archive-master")
287
288 shutil.copy("%s.tar.xz" % keyring_path,
289 os.path.join(tempdir, "initrd", "etc", "system-image",
290 "archive-master.tar.xz"))
291
292 shutil.copy("%s.tar.xz.asc" % keyring_path,
293 os.path.join(tempdir, "initrd", "etc", "system-image",
294 "archive-master.tar.xz.asc"))
295
296 # Re-generate the initrd
297 old_pwd = os.getcwd()
298 os.chdir(os.path.join(tempdir, "initrd"))
299
300 find = subprocess.Popen(["find", "."], stdout=subprocess.PIPE)
301 with open(os.path.join(tempdir, "img", "initrd"), "w+") as fd:
302 with open(os.path.devnull, "w") as devnull:
303 subprocess.call(['fakeroot', '-i', state_path, 'cpio',
304 '-o', '--format=newc'],
305 stdin=find.stdout,
306 stdout=fd,
307 stderr=devnull)
308
309 os.chdir(old_pwd)
310
311 os.rename(os.path.join(tempdir, "img", "initrd.img"),
312 os.path.join(tempdir, "img", "initrd.img.bak"))
313 gzip_compress(os.path.join(tempdir, "img", "initrd"),
314 os.path.join(tempdir, "img", "initrd.img"))
315
316 # Rewrite bootimg.cfg
317 content = ""
318 with open(os.path.join(tempdir, "img", "bootimg.cfg"), "r") as source:
319 for line in source:
320 if line.startswith("bootsize"):
321 line = "bootsize=0x900000\n"
322 content += line
323
324 with open(os.path.join(tempdir, "img", "bootimg.cfg"), "w+") as dest:
325 dest.write(content)
326
327 # Update the partition image with the new bootimg.cfg
328 with open(os.path.devnull, "w") as devnull:
329 subprocess.call(['abootimg', '-u',
330 os.path.join(tempdir, "partitions", "recovery.img"),
331 "-f", os.path.join(tempdir, "img", "bootimg.cfg")],
332 stdout=devnull, stderr=devnull)
333
334 # Update the partition image with the new initrd
335 with open(os.path.devnull, "w") as devnull:
336 subprocess.call(['abootimg', '-u',
337 os.path.join(tempdir, "partitions", "recovery.img"),
338 "-r", os.path.join(tempdir, "img", "initrd.img")],
339 stdout=devnull, stderr=devnull)
340
341 # Generate a new tarball
342 output_tarball = tarfile.open(os.path.join(tempdir, "output.tar"), "w:")
343 for entry in input_tarball:
344 fileptr = None
345 if entry.isfile():
346 try:
347 if entry.name == "partitions/recovery.img":
348 with open(os.path.join(tempdir, "partitions",
349 "recovery.img"), "rb") as fd:
350 fileptr = BytesIO(fd.read())
351 entry.size = os.stat(
352 os.path.join(tempdir, "partitions",
353 "recovery.img")).st_size
354 else:
355 fileptr = input_tarball.extractfile(entry.name)
356 except KeyError: # pragma: no cover
357 pass
358
359 output_tarball.addfile(entry, fileobj=fileptr)
360 output_tarball.close()
361
362 os.remove(path)
363 xz_compress(os.path.join(tempdir, "output.tar"), path)
364
365 shutil.rmtree(tempdir)
366
367 return True
0368
=== added file 'lib/systemimage/tree.py'
--- lib/systemimage/tree.py 1970-01-01 00:00:00 +0000
+++ lib/systemimage/tree.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,999 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import copy
19import json
20import os
21import shutil
22import time
23
24from contextlib import contextmanager
25from hashlib import sha256
26from systemimage import gpg, tools
27
28
29# Context managers
30@contextmanager
31def channels_json(config, path, commit=False):
32 """
33 Context function (to be used with "with") that will open a
34 channels.json file, parse it, validate it and return the
35 decoded version.
36
37 If commit is True, the file will then be updated (or created) on
38 exit.
39 """
40
41 # If the file doesn't exist, just yield an empty dict
42 json_content = {}
43 if os.path.exists(path):
44 with open(path, "r") as fd:
45 content = fd.read()
46 if content:
47 json_content = json.loads(content)
48
49 # Validation
50 if not isinstance(json_content, dict):
51 raise TypeError("Invalid channels.json, not a dict.")
52
53 if commit:
54 orig_json_content = copy.deepcopy(json_content)
55
56 # Yield the decoded value and save on exit
57 try:
58 yield json_content
59 finally:
60 if commit and (orig_json_content != json_content or
61 not os.path.exists(path)):
62 new_path = "%s.new" % path
63 with open(new_path, "w+") as fd:
64 fd.write("%s\n" % json.dumps(json_content, sort_keys=True,
65 indent=4, separators=(',', ': ')))
66
67 # Move the signature
68 gpg.sign_file(config, "image-signing", new_path)
69 if os.path.exists("%s.asc" % path):
70 os.remove("%s.asc" % path)
71 os.rename("%s.asc" % new_path, "%s.asc" % path)
72
73 # Move the channels file into place
74 if os.path.exists(path):
75 os.remove(path)
76 os.rename(new_path, path)
77
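A usage sketch for this context manager; with commit=True the updated file is re-signed and atomically swapped into place on exit, and only when the content actually changed ("conf" and the path are hypothetical):

    with channels_json(conf, "/srv/www/channels.json", True) as channels:
        # Mutations made inside the block are persisted on exit.
        channels.setdefault("devel", {'devices': {}})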
78
79@contextmanager
80def index_json(config, path, commit=False):
81 """
82 Context function (to be used with "with") that will open an
83 index.json file, parse it, validate it and return the
84 decoded version.
85
86 If commit is True, the file will then be updated (or created) on
87 exit.
88 """
89
90 # If the file doesn't exist, just yield an empty dict
91 json_content = {}
92 json_content['global'] = {}
93 json_content['images'] = []
94
95 if os.path.exists(path):
96 with open(path, "r") as fd:
97 content = fd.read()
98 if content:
99 json_content = json.loads(content)
100
101 # Validation
102 if not isinstance(json_content, dict):
103 raise TypeError("Invalid index.json, not a dict.")
104
105 if commit:
106 orig_json_content = copy.deepcopy(json_content)
107
108 # Yield the decoded value and save on exit
109 try:
110 yield json_content
111 finally:
112 # Strip attributes that only apply to the latest image
113 versions = sorted({image['version']
114 for image in json_content['images']})
115 if versions:
116 last_version = versions[-1]
117
118 # Remove phased-percentage from any old image
119 for image in json_content['images']:
120 if image['version'] != last_version and \
121 "phased-percentage" in image:
122 image.pop("phased-percentage")
123
124 # Save to disk
125 if commit and (orig_json_content != json_content or
126 not os.path.exists(path)):
127 json_content['global']['generated_at'] = time.strftime(
128 "%a %b %d %H:%M:%S UTC %Y", time.gmtime())
129
130 new_path = "%s.new" % path
131 with open(new_path, "w+") as fd:
132 fd.write("%s\n" % json.dumps(json_content, sort_keys=True,
133 indent=4, separators=(',', ': ')))
134
135 # Move the signature
136 gpg.sign_file(config, "image-signing", new_path)
137 if os.path.exists("%s.asc" % path):
138 os.remove("%s.asc" % path)
139 os.rename("%s.asc" % new_path, "%s.asc" % path)
140
141 # Move the index
142 if os.path.exists(path):
143 os.remove(path)
144 os.rename(new_path, path)
145
146
147class Tree:
148 def __init__(self, config, path=None):
149 if not path:
150 path = config.publish_path
151
152 if not os.path.isdir(path):
153 raise Exception("Invalid path: %s" % path)
154
155 self.config = config
156 self.path = path
157 self.indexpath = os.path.join(path, "channels.json")
158
159 def __list_existing(self):
160 """
161 Returns a set of all files present in the tree and a set of
162 empty directories that can be removed.
163 """
164
165 existing_files = set()
166 empty_dirs = set()
167
168 for dirpath, dirnames, filenames in os.walk(self.path):
169 if dirpath == os.path.join(self.path, "gpg"):
170 continue
171
172 if not filenames and not dirnames:
173 empty_dirs.add(dirpath)
174
175 for entry in filenames:
176 existing_files.add(os.path.join(dirpath, entry))
177
178 return (existing_files, empty_dirs)
179
180 def __list_referenced(self):
181 """
182 Returns a set of all files that are referenced by the
183 various indexes and should be present in the tree.
184 """
185
186 listed_files = set()
187 listed_files.add(os.path.join(self.path, "channels.json"))
188 listed_files.add(os.path.join(self.path, "channels.json.asc"))
189
190 for channel, metadata in self.list_channels().items():
191 devices = metadata['devices']
192 for device in devices:
193 if 'keyring' in devices[device]:
194 listed_files.add(os.path.join(
195 self.path, devices[device]['keyring']['path'][1:]))
196 listed_files.add(os.path.join(
197 self.path,
198 devices[device]['keyring']['signature'][1:]))
199
200 device_entry = self.get_device(channel, device)
201
202 listed_files.add(os.path.join(device_entry.path, "index.json"))
203 listed_files.add(os.path.join(device_entry.path,
204 "index.json.asc"))
205
206 for image in device_entry.list_images():
207 for entry in image['files']:
208 listed_files.add(os.path.join(self.path,
209 entry['path'][1:]))
210 listed_files.add(os.path.join(self.path,
211 entry['signature'][1:]))
212
213 return listed_files
214
215 def change_channel_alias(self, channel_name, target_name):
216 """
217 Change the target of an alias.
218 """
219
220 with channels_json(self.config, self.indexpath) as channels:
221 if channel_name not in channels:
222 raise KeyError("Couldn't find channel: %s" % channel_name)
223
224 if "alias" not in channels[channel_name] or \
225 channels[channel_name]['alias'] == channel_name:
226 raise KeyError("Channel isn't an alias: %s" % channel_name)
227
228 if target_name not in channels:
229 raise KeyError("Couldn't find target channel: %s" %
230 target_name)
231
232 self.remove_channel(channel_name)
233 self.create_channel_alias(channel_name, target_name)
234
235 return True
236
237 def cleanup_tree(self):
238 """
239 Remove any orphaned file from the tree.
240 """
241
242 for entry in self.list_orphaned_files():
243 if os.path.isdir(entry):
244 os.rmdir(entry)
245 else:
246 os.remove(entry)
247
248 return True
249
250 def create_channel(self, channel_name):
251 """
252 Creates a new channel entry in the tree.
253 """
254
255 with channels_json(self.config, self.indexpath, True) as channels:
256 if channel_name in channels:
257 raise KeyError("Channel already exists: %s" % channel_name)
258
259 channels[channel_name] = {'devices': {}}
260
261 return True
262
263 def create_channel_alias(self, channel_name, target_name):
264 """
265 Creates a new channel as an alias for an existing one.
266 """
267
268 with channels_json(self.config, self.indexpath, True) as channels:
269 if channel_name in channels:
270 raise KeyError("Channel already exists: %s" % channel_name)
271
272 if target_name not in channels:
273 raise KeyError("Couldn't find target channel: %s" %
274 target_name)
275
276 channels[channel_name] = {'devices': {},
277 'alias': target_name}
278
279 return self.sync_alias(channel_name)
280
281 def create_channel_redirect(self, channel_name, target_name):
282 """
283 Creates a new channel redirect.
284 """
285
286 with channels_json(self.config, self.indexpath, True) as channels:
287 if channel_name in channels:
288 raise KeyError("Channel already exists: %s" % channel_name)
289
290 if target_name not in channels:
291 raise KeyError("Couldn't find target channel: %s" %
292 target_name)
293
294 channels[channel_name] = dict(channels[target_name])
295 channels[channel_name]['redirect'] = target_name
296
297 self.hide_channel(channel_name)
298
299 return True
300
301 def create_device(self, channel_name, device_name, keyring_path=None):
302 """
303 Creates a new device entry in the tree.
304 """
305
306 with channels_json(self.config, self.indexpath, True) as channels:
307 if channel_name not in channels:
308 raise KeyError("Couldn't find channel: %s" % channel_name)
309
310 if device_name in channels[channel_name]['devices']:
311 raise KeyError("Device already exists: %s" % device_name)
312
313 device_path = os.path.join(self.path, channel_name, device_name)
314 if not os.path.exists(device_path):
315 os.makedirs(device_path)
316
317 # Create an empty index if it doesn't exist; if it does,
318 # just validate it
319 with index_json(self.config, os.path.join(device_path,
320 "index.json"), True):
321 pass
322
323 device = {}
324 device['index'] = "/%s/%s/index.json" % (channel_name, device_name)
325
326 channels[channel_name]['devices'][device_name] = device
327
328 if keyring_path:
329 self.set_device_keyring(channel_name, device_name, keyring_path)
330
331 self.sync_aliases(channel_name)
332 self.sync_redirects(channel_name)
333
334 return True
335
336 def generate_index(self, magic=False):
337 """
338 Re-generate the channels.json file based on the current content of
339 the tree.
340
341 This function is only present for emergency purposes and will
342 completely rebuild the tree based on what's on the filesystem,
343 looking into some well-known locations to guess things like device
344 keyring paths.
345
346 Call this function with magic="I know what I'm doing" to actually
347 trigger it.
348 """
349
350 if magic != "I know what I'm doing":
351 raise Exception("Invalid magic value, please read the help.")
352
353 if os.path.exists(self.indexpath):
354 os.remove(self.indexpath)
355
356 for channel_name in [entry for entry in os.listdir(self.path)
357 if os.path.isdir(os.path.join(self.path,
358 entry))
359 and entry not in ('gpg',)]:
360 self.create_channel(channel_name)
361
362 for device_name in os.listdir(os.path.join(self.path,
363 channel_name)):
364
365 path = os.path.join(self.path, channel_name, device_name)
366 if not os.path.exists(os.path.join(path, "index.json")):
367 continue
368
369 keyring_path = os.path.join(path, "device.tar.xz")
370 if (os.path.exists(keyring_path)
371 and os.path.exists("%s.asc" % keyring_path)):
372 self.create_device(channel_name, device_name, keyring_path)
373 else:
374 self.create_device(channel_name, device_name)
375
376 return True
377
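The guard compares against the full sentence; anything else raises. A
hedged invocation sketch ("pub" again being an assumed tree instance):

    # Emergency rebuild of channels.json from the on-disk layout.
    pub.generate_index("I know what I'm doing")
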
378 def get_device(self, channel_name, device_name):
379 """
380 Returns a Device instance.
381 """
382
383 with channels_json(self.config, self.indexpath) as channels:
384 if channel_name not in channels:
385 raise KeyError("Couldn't find channel: %s" % channel_name)
386
387 if device_name not in channels[channel_name]['devices']:
388 raise KeyError("Couldn't find device: %s" % device_name)
389
390 device_path = os.path.dirname(channels[channel_name]['devices']
391 [device_name]['index'])
392
393 return Device(self.config, os.path.normpath("%s/%s" % (self.path,
394 device_path)))
395
396 def hide_channel(self, channel_name):
397 """
398 Hide a channel from the client's list.
399 """
400
401 with channels_json(self.config, self.indexpath, True) as channels:
402 if channel_name not in channels:
403 raise KeyError("Couldn't find channel: %s" % channel_name)
404
405 channels[channel_name]['hidden'] = True
406
407 return True
408
409 def list_channels(self):
410 """
411 Returns a dict of all existing channels and devices for each of
412 those.
413 This is simply a decoded version of channels.json
414 """
415
416 with channels_json(self.config, self.indexpath) as channels:
417 return channels
418
419 def list_missing_files(self):
420 """
421 Returns a list of absolute paths that should exist but aren't
422 present on the filesystem.
423 """
424
425 all_files, empty_dirs = self.__list_existing()
426 referenced_files = self.__list_referenced()
427
428 return sorted(referenced_files - all_files)
429
430 def list_orphaned_files(self):
431 """
432 Returns a list of absolute paths to files that are present in the
433 tree but aren't referenced anywhere.
434 """
435
436 orphaned_files = set()
437
438 all_files, empty_dirs = self.__list_existing()
439 referenced_files = self.__list_referenced()
440
441 orphaned_files.update(all_files - referenced_files)
442 orphaned_files.update(empty_dirs)
443
444 for entry in list(orphaned_files):
445 if entry.endswith(".json"):
446 tarname = entry.replace(".json", ".tar.xz")
447 if tarname in referenced_files:
448 orphaned_files.remove(entry)
449
450 if entry.endswith(".json.asc"):
451 tarname = entry.replace(".json.asc", ".tar.xz")
452 if tarname in referenced_files:
453 orphaned_files.remove(entry)
454
455 return sorted(orphaned_files)
456
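The trailing loop exempts metadata companions: version-N.json and
version-N.json.asc are kept whenever the matching version-N.tar.xz is
still referenced, even though no index lists the JSON files themselves.
A condensed model of that rule, with hypothetical paths:

    referenced = {"/devel/mako/version-256.tar.xz"}
    on_disk = {"/devel/mako/version-256.tar.xz",
               "/devel/mako/version-256.json",
               "/devel/mako/version-256.json.asc",
               "/devel/mako/version-255.tar.xz"}
    orphans = {f for f in on_disk - referenced
               if f.replace(".json.asc", ".tar.xz")
                   .replace(".json", ".tar.xz") not in referenced}
    print(orphans)  # {'/devel/mako/version-255.tar.xz'}
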
457 def publish_keyring(self, keyring_name):
458 """
459 Publish the keyring under gpg/
460 """
461
462 gpg_path = os.path.join(self.config.publish_path, "gpg")
463
464 if not os.path.exists(gpg_path):
465 os.mkdir(gpg_path)
466
467 keyring_path = os.path.join(self.config.gpg_keyring_path, keyring_name)
468
469 if not os.path.exists("%s.tar.xz" % keyring_path):
470 raise Exception("Missing keyring: %s.tar.xz" % keyring_path)
471
472 if not os.path.exists("%s.tar.xz.asc" % keyring_path):
473 raise Exception("Missing keyring signature: %s.tar.xz.asc" %
474 keyring_path)
475
476 shutil.copy("%s.tar.xz" % keyring_path, gpg_path)
477 shutil.copy("%s.tar.xz.asc" % keyring_path, gpg_path)
478
479 return True
480
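A hedged usage sketch; the keyring name mirrors those generated by
tests/generate-keys and is an assumption about the deployment:

    # Copies image-signing.tar.xz and its .asc signature from
    # gpg_keyring_path into <publish_path>/gpg/.
    pub.publish_keyring("image-signing")
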
481 def remove_channel(self, channel_name):
482 """
483 Remove a channel and everything it contains.
484 """
485
486 with channels_json(self.config, self.indexpath, True) as channels:
487 if channel_name not in channels:
488 raise KeyError("Couldn't find channel: %s" % channel_name)
489
490 channel_path = os.path.join(self.path, channel_name)
491 if os.path.exists(channel_path) and \
492 "alias" not in channels[channel_name] and \
493 "redirect" not in channels[channel_name]:
494 shutil.rmtree(channel_path)
495 channels.pop(channel_name)
496
497 return True
498
499 def remove_device(self, channel_name, device_name):
500 """
501 Remove a device and everything it contains.
502 """
503
504 with channels_json(self.config, self.indexpath, True) as channels:
505 if channel_name not in channels:
506 raise KeyError("Couldn't find channel: %s" % channel_name)
507
508 if device_name not in channels[channel_name]['devices']:
509 raise KeyError("Couldn't find device: %s" % device_name)
510
511 device_path = os.path.join(self.path, channel_name, device_name)
512 if os.path.exists(device_path):
513 shutil.rmtree(device_path)
514 channels[channel_name]['devices'].pop(device_name)
515
516 self.sync_aliases(channel_name)
517 self.sync_redirects(channel_name)
518
519 return True
520
521 def rename_channel(self, old_name, new_name):
522 """
523 Rename a channel.
524 """
525
526 with channels_json(self.config, self.indexpath, True) as channels:
527 if old_name not in channels:
528 raise KeyError("Couldn't find channel: %s" % old_name)
529
530 if new_name in channels:
531 raise KeyError("Channel already exists: %s" % new_name)
532
533 old_channel_path = os.path.join(self.path, old_name)
534 new_channel_path = os.path.join(self.path, new_name)
535 if "redirect" not in channels[old_name]:
536 if os.path.exists(new_channel_path):
537 raise Exception("Channel path already exists: %s" %
538 new_channel_path)
539
540 if not os.path.exists(os.path.dirname(new_channel_path)):
541 os.makedirs(os.path.dirname(new_channel_path))
542 if os.path.exists(old_channel_path):
543 os.rename(old_channel_path, new_channel_path)
544
545 channels[new_name] = dict(channels[old_name])
546
547 if "redirect" not in channels[new_name]:
548 for device_name in channels[new_name]['devices']:
549 index_path = "/%s/%s/index.json" % (new_name, device_name)
550 channels[new_name]['devices'][device_name]['index'] = \
551 index_path
552
553 with index_json(self.config, "%s/%s" %
554 (self.path, index_path), True) as index:
555 for image in index['images']:
556 for entry in image['files']:
557 entry['path'] = entry['path'] \
558 .replace("/%s/" % old_name,
559 "/%s/" % new_name)
560 entry['signature'] = entry['signature'] \
561 .replace("/%s/" % old_name,
562 "/%s/" % new_name)
563
564 channels.pop(old_name)
565
566 return True
567
568 def show_channel(self, channel_name):
569 """
570 Show a previously hidden channel in the client's list.
571 """
572
573 with channels_json(self.config, self.indexpath, True) as channels:
574 if channel_name not in channels:
575 raise KeyError("Couldn't find channel: %s" % channel_name)
576
577 if "hidden" in channels[channel_name]:
578 channels[channel_name].pop("hidden")
579
580 return True
581
582 def set_device_keyring(self, channel_name, device_name, path):
583 """
584 Update the keyring entry for the given channel and device.
585 Passing None as the path will unset any existing value.
586 """
587
588 with channels_json(self.config, self.indexpath, True) as channels:
589 if channel_name not in channels:
590 raise KeyError("Couldn't find channel: %s" % channel_name)
591
592 if device_name not in channels[channel_name]['devices']:
593 raise KeyError("Couldn't find device: %s" % device_name)
594
595 abspath, relpath = tools.expand_path(path, self.path)
596
597 if not os.path.exists(abspath):
598 raise Exception("Specified GPG keyring doesn't exist: %s" %
599 abspath)
600
601 if not os.path.exists("%s.asc" % abspath):
602 raise Exception("The GPG keyring signature doesn't exist: "
603 "%s.asc" % abspath)
604
605 keyring = {}
606 keyring['path'] = "/%s" % "/".join(relpath.split(os.sep))
607 keyring['signature'] = "/%s.asc" % "/".join(relpath.split(os.sep))
608
609 channels[channel_name]['devices'][device_name]['keyring'] = keyring
610
611 return True
612
613 def sync_alias(self, channel_name):
614 """
615 Update a channel with data from its parent.
616 """
617
618 with channels_json(self.config, self.indexpath) as channels:
619 if channel_name not in channels:
620 raise KeyError("Couldn't find channel: %s" % channel_name)
621
622 if "alias" not in channels[channel_name] or \
623 channels[channel_name]['alias'] == channel_name:
624 raise TypeError("Not a channel alias")
625
626 target_name = channels[channel_name]['alias']
627
628 if target_name not in channels:
629 raise KeyError("Couldn't find target channel: %s" %
630 target_name)
631
632 # Start by looking for added/removed devices
633 devices = set(channels[channel_name]['devices'].keys())
634 target_devices = set(channels[target_name]['devices'].keys())
635
636 # # Remove any removed device
637 for device in devices - target_devices:
638 self.remove_device(channel_name, device)
639
640 # # Add any missing device
641 for device in target_devices - devices:
642 self.create_device(channel_name, device)
643
644 # Iterate through all the devices to import builds
645 for device_name in target_devices:
646 device = self.get_device(channel_name, device_name)
647 target_device = self.get_device(target_name, device_name)
648
649 # Extract all the current builds
650 device_images = {(image['version'], image.get('base', None),
651 image['type'])
652 for image in device.list_images()}
653
654 target_images = {(image['version'], image.get('base', None),
655 image['type'])
656 for image in target_device.list_images()}
657
658 # Remove any removed image
659 for image in device_images - target_images:
660 device.remove_image(image[2], image[0], base=image[1])
661
662 # Create the path if it doesn't exist
663 if not os.path.exists(device.path):
664 os.makedirs(device.path)
665
666 # Add any missing image
667 with index_json(self.config, device.indexpath, True) as index:
668 for image in sorted(target_images - device_images):
669 orig = [entry for entry in target_device.list_images()
670 if entry['type'] == image[2] and
671 entry['version'] == image[0] and
672 entry.get('base', None) == image[1]]
673
674 entry = copy.deepcopy(orig[0])
675
676 # Remove the current version tarball
677 version_detail = None
678 version_index = len(entry['files'])
679 for fentry in entry['files']:
680 if fentry['path'].endswith("version-%s.tar.xz" %
681 entry['version']):
682
683 version_path = "%s/%s" % (
684 self.config.publish_path, fentry['path'])
685
686 if os.path.exists(
687 version_path.replace(".tar.xz",
688 ".json")):
689 with open(
690 version_path.replace(
691 ".tar.xz", ".json")) as fd:
692 metadata = json.loads(fd.read())
693 if "channel.ini" in metadata:
694 version_detail = \
695 metadata['channel.ini'].get(
696 "version_detail", None)
697
698 version_index = fentry['order']
699 entry['files'].remove(fentry)
700 break
701
702 # Generate a new one
703 path = os.path.join(device.path,
704 "version-%s.tar.xz" %
705 entry['version'])
706 abspath, relpath = tools.expand_path(path,
707 device.pub_path)
708 if not os.path.exists(abspath):
709 tools.generate_version_tarball(
710 self.config, channel_name, device_name,
711 str(entry['version']),
712 abspath.replace(".xz", ""),
713 version_detail=version_detail,
714 channel_target=target_name)
715 tools.xz_compress(abspath.replace(".xz", ""))
716 os.remove(abspath.replace(".xz", ""))
717 gpg.sign_file(self.config, "image-signing",
718 abspath)
719
720 with open(abspath, "rb") as fd:
721 checksum = sha256(fd.read()).hexdigest()
722
723 # Generate the new file entry
724 version = {}
725 version['order'] = version_index
726 version['path'] = "/%s" % "/".join(
727 relpath.split(os.sep))
728 version['signature'] = "/%s.asc" % "/".join(
729 relpath.split(os.sep))
730 version['checksum'] = checksum
731 version['size'] = int(os.stat(abspath).st_size)
732
733 # And add it
734 entry['files'].append(version)
735 index['images'].append(entry)
736
737 # Sync phased-percentage
738 versions = sorted({entry[0] for entry in target_images})
739 if versions:
740 device.set_phased_percentage(
741 versions[-1],
742 target_device.get_phased_percentage(versions[-1]))
743
744 return True
745
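For readers tracking the set arithmetic above: every image is keyed by
the tuple (version, base, type), so a full image and a delta that share
a version number are tracked independently. A condensed example of the
two differences being computed:

    # Hypothetical image sets for one device.
    device_images = {(255, None, "full"), (255, 254, "delta")}
    target_images = {(255, None, "full"), (256, None, "full"),
                     (256, 255, "delta")}
    print(device_images - target_images)  # to remove: (255, 254, "delta")
    print(target_images - device_images)  # to import: both 256 images
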
746 def sync_aliases(self, channel_name):
747 """
748 Update any channel that's an alias of the current one.
749 """
750
751 with channels_json(self.config, self.indexpath) as channels:
752 if channel_name not in channels:
753 raise KeyError("Couldn't find channel: %s" % channel_name)
754
755 alias_channels = [name
756 for name, channel
757 in self.list_channels().items()
758 if channel.get("alias", None) == channel_name
759 and name != channel_name]
760
761 for alias_name in alias_channels:
762 self.sync_alias(alias_name)
763
764 return True
765
766 def sync_redirects(self, channel_name):
767 """
768 Update any channel that's a redirect of the current one.
769 """
770
771 with channels_json(self.config, self.indexpath) as channels:
772 if channel_name not in channels:
773 raise KeyError("Couldn't find channel: %s" % channel_name)
774
775 redirect_channels = [name
776 for name, channel
777 in self.list_channels().items()
778 if channel.get("redirect", None) == channel_name]
779
780 for redirect_name in redirect_channels:
781 self.remove_channel(redirect_name)
782 self.create_channel_redirect(redirect_name, channel_name)
783
784 return True
785
786
787class Device:
788 def __init__(self, config, path):
789 self.config = config
790 self.pub_path = self.config.publish_path
791 self.path = path
792 self.indexpath = os.path.join(path, "index.json")
793
794 def create_image(self, entry_type, version, description, paths,
795 base=None, bootme=False, minversion=None):
796 """
797 Add a new image to the index.
798 """
799
800 if len(paths) == 0:
801 raise Exception("No file passed for this image.")
802
803 files = []
804 count = 0
805
806 with index_json(self.config, self.indexpath, True) as index:
807 for path in paths:
808 abspath, relpath = tools.expand_path(path, self.pub_path)
809
810 if not os.path.exists(abspath):
811 raise Exception("Specified file doesn't exist: %s"
812 % abspath)
813
814 if not os.path.exists("%s.asc" % abspath):
815 raise Exception("The GPG file signature doesn't exist: "
816 "%s.asc" % abspath)
817
818 with open(abspath, "rb") as fd:
819 checksum = sha256(fd.read()).hexdigest()
820
821 files.append({'order': count,
822 'path': "/%s" % "/".join(relpath.split(os.sep)),
823 'checksum': checksum,
824 'signature': "/%s.asc" % "/".join(
825 relpath.split(os.sep)),
826 'size': int(os.stat(abspath).st_size)})
827
828 count += 1
829
830 image = {}
831
832 if entry_type == "delta":
833 if not base:
834 raise KeyError("Missing base version for delta image.")
835 image['base'] = int(base)
836 elif base:
837 raise KeyError("Base version set for full image.")
838
839 if bootme:
840 image['bootme'] = bootme
841
842 if minversion:
843 if entry_type == "delta":
844 raise KeyError("Minimum version set for delta image.")
845 image['minversion'] = minversion
846
847 image['description'] = description
848 image['files'] = files
849 image['type'] = entry_type
850 image['version'] = version
851 index['images'].append(image)
852
853 return True
854
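A hedged sketch of publishing a build through this API; the device and
file paths are invented, and every path must already exist alongside a
detached .asc signature:

    # dev is an assumed Device instance, e.g. from get_device().
    dev.create_image("full", 256, "Nightly build",
                     ["pool/ubuntu-256.tar.xz",
                      "devel/mako/version-256.tar.xz"])
    # A delta must name its base; passing one with a full image raises.
    dev.create_image("delta", 256, "255 to 256",
                     ["pool/ubuntu-255-256.delta.tar.xz"], base=255)
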
855 def expire_images(self, max_images):
856 """
857 Expire images keeping the last <max_images> full images and
858 their deltas. Also remove any delta that has an expired image
859 as its base.
860 """
861
862 full_images = sorted([image for image in self.list_images()
863 if image['type'] == "full"],
864 key=lambda image: image['version'])
865
866 to_remove = len(full_images) - max_images
867 if to_remove <= 0:
868 return True
869
870 full_remove = full_images[:to_remove]
871 remove_version = [image['version'] for image in full_remove]
872
873 for image in self.list_images():
874 if image['type'] == "full":
875 if image['version'] in remove_version:
876 self.remove_image(image['type'], image['version'])
877 else:
878 if (image['version'] in remove_version
879 or image['base'] in remove_version):
880 self.remove_image(image['type'], image['version'],
881 image['base'])
882
883 return True
884
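A worked example of the retention rule, assuming a device currently
holding full images 254, 255 and 256 plus a 255-to-256 delta:

    # Keep the two newest full images and their deltas.
    dev.expire_images(2)
    # -> full 254 is removed; the 255-to-256 delta survives, while any
    #    delta based on 254 would have been removed with it.
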
885 def get_image(self, entry_type, version, base=None):
886 """
887 Look for an image and return a dict representation of it.
888 """
889
890 if entry_type not in ("full", "delta"):
891 raise ValueError("Invalid image type: %s" % entry_type)
892
893 if entry_type == "delta" and not base:
894 raise ValueError("Missing base version for delta image.")
895
896 with index_json(self.config, self.indexpath) as index:
897 match = []
898 for image in index['images']:
899 if (image['type'] == entry_type and image['version'] == version
900 and (image['type'] == "full" or
901 image['base'] == base)):
902 match.append(image)
903
904 if len(match) != 1:
905 raise IndexError("Couldn't find a match.")
906
907 return match[0]
908
909 def get_phased_percentage(self, version):
910 """
911 Returns the phasing percentage for a given version.
912 """
913
914 for entry in self.list_images():
915 if entry['version'] == version:
916 if "phased-percentage" in entry:
917 return entry['phased-percentage']
918 else:
919 return 100
920 else:
921 raise IndexError("Invalid version number: %s" % version)
922
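For context, the percentage is consumed on the client side: each device
derives a stable random value and installs the update only if that value
falls under the published percentage. A rough sketch of that logic,
assuming a per-device number in [0, 100):

    def should_phase_in(image, machine_value):
        # machine_value: stable per-device float in [0, 100), e.g.
        # derived from a hashed machine id (an assumption, not code
        # from this branch).
        return machine_value < image.get('phased-percentage', 100)
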
923 def list_images(self):
924 """
925 Returns a list of all existing images, each image is a dict.
926 This is simply a decoded version of the image array in index.json
927 """
928
929 with index_json(self.config, self.indexpath) as index:
930 return index['images']
931
932 def remove_image(self, entry_type, version, base=None):
933 """
934 Remove an image.
935 """
936
937 image = self.get_image(entry_type, version, base)
938 with index_json(self.config, self.indexpath, True) as index:
939 index['images'].remove(image)
940
941 return True
942
943 def set_description(self, entry_type, version, description,
944 translations={}, base=None):
945 """
946 Set or update an image description.
947 """
948
949 if translations and not isinstance(translations, dict):
950 raise TypeError("translations must be a dict.")
951
952 image = self.get_image(entry_type, version, base)
953
954 with index_json(self.config, self.indexpath, True) as index:
955 for entry in index['images']:
956 if entry != image:
957 continue
958
959 entry['description'] = description
960 for langid, value in translations.items():
961 entry['description_%s' % langid] = value
962
963 break
964
965 return True
966
967 def set_phased_percentage(self, version, percentage):
968 """
969 Set the phasing percentage on an image version.
970 """
971
972 if not isinstance(percentage, int):
973 raise TypeError("percentage must be an integer.")
974
975 if percentage < 0 or percentage > 100:
976 raise ValueError("percentage must be >= 0 and <= 100.")
977
978 with index_json(self.config, self.indexpath, True) as index:
979 versions = sorted({entry['version'] for entry in index['images']})
980
981 last_version = None
982 if versions:
983 last_version = versions[-1]
984
985 if version not in versions:
986 raise IndexError("Version doesn't exist: %s" % version)
987
988 if version != last_version:
989 raise Exception("Phased percentage can only be set on the "
990 "latest image")
991
992 for entry in index['images']:
993 if entry['version'] == version:
994 if percentage == 100 and "phased-percentage" in entry:
995 entry.pop("phased-percentage")
996 elif percentage != 100:
997 entry['phased-percentage'] = percentage
998
999 return True
01000
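Rounding off the tree.py API, a hedged end-to-end sketch (channel,
device and version names are invented; Tree as the entry point is an
assumption based on the class layout above):

    from systemimage import config, tree

    conf = config.Config("etc/config")   # assumed config location
    pub = tree.Tree(conf)                # assumed entry point
    pub.create_channel("devel")
    pub.create_device("devel", "mako")
    dev = pub.get_device("devel", "mako")
    # ... publish images via dev.create_image(), then phase the latest:
    dev.set_phased_percentage(256, 50)
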
=== added directory 'secret'
=== added directory 'secret/gpg'
=== added directory 'secret/gpg/keyrings'
=== added directory 'secret/gpg/keys'
=== added directory 'secret/ssh'
=== added directory 'state'
=== added directory 'tests'
=== added file 'tests/generate-keys'
--- tests/generate-keys 1970-01-01 00:00:00 +0000
+++ tests/generate-keys 2014-10-10 11:11:17 +0000
@@ -0,0 +1,52 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19import os
20import shutil
21
22import sys
23sys.path.insert(0, 'lib')
24
25from systemimage import gpg
26
27target_dir = "tests/keys/"
28if not os.path.exists(target_dir):
29 raise Exception("Missing tests/keys directory")
30
31keys = (("archive-master", "[TESTING] Ubuntu Archive Master Signing Key",
32 "ftpmaster@ubuntu.com", 0),
33 ("image-master", "[TESTING] Ubuntu System Image Master Signing Key",
34 "system-image@ubuntu.com", 0),
35 ("image-signing", "[TESTING] Ubuntu System Image Signing Key (YYYY)",
36 "system-image@ubuntu.com", "2y"),
37 ("device-signing", "[TESTING] Random OEM Signing Key (YYYY)",
38 "system-image@ubuntu.com", "2y"))
39
40for key_name, key_description, key_email, key_expiry in keys:
41 key_dir = "%s/%s/" % (target_dir, key_name)
42 if os.path.exists(key_dir):
43 shutil.rmtree(key_dir)
44 os.makedirs(key_dir)
45
46 uid = gpg.generate_signing_key(key_dir, key_description, key_email,
47 key_expiry)
48
49 print("%s <%s>" % (uid.name, uid.email))
50
51# All done, mark key generation as complete
52open("tests/keys/generated", "w+").close()
053
=== added directory 'tests/keys'
=== added file 'tests/run'
--- tests/run 1970-01-01 00:00:00 +0000
+++ tests/run 2014-10-10 11:11:17 +0000
@@ -0,0 +1,60 @@
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3
4# Copyright (C) 2013 Canonical Ltd.
5# Author: Stéphane Graber <stgraber@ubuntu.com>
6
7# This program is free software: you can redistribute it and/or modify
8# it under the terms of the GNU General Public License as published by
9# the Free Software Foundation; version 3 of the License.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License
17# along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19# Dependencies:
20# - python2 (>= 2.7): python-gpgme, python-coverage
21# - python3 (>= 3.2): python3-gpgme
22
23import glob
24import os
25import re
26import shutil
27import sys
28import unittest
29
30coverage = True
31try:
32 from coverage import coverage
33 cov = coverage()
34 cov.start()
35except ImportError:
36 print("No coverage report, make sure python-coverage is installed")
37 coverage = False
38
39sys.path.insert(0, 'lib')
40
41if len(sys.argv) > 1:
42 test_filter = sys.argv[1]
43else:
44 test_filter = ''
45
46tests = [t[:-3] for t in os.listdir('tests')
47 if t.startswith('test_') and t.endswith('.py') and
48 re.search(test_filter, t)]
49tests.sort()
50suite = unittest.TestLoader().loadTestsFromNames(tests)
51res = unittest.TextTestRunner(verbosity=2).run(suite)
52
53if coverage:
54 if os.path.exists('tests/coverage'):
55 shutil.rmtree('tests/coverage')
56 cov.stop()
57 cov.html_report(include=glob.glob("lib/systemimage/*.py"),
58 directory='tests/coverage')
59 print("")
60 cov.report(include=glob.glob("lib/systemimage/*.py"))
061
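For reference, the runner treats an optional first argument as a regular
expression matched against the test file names: "python tests/run" runs
everything under tests/, while "python tests/run gpg" narrows the suite
to test_gpg.py. When python-coverage is available, the HTML report is
written to tests/coverage.
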
=== added file 'tests/test_config.py'
--- tests/test_config.py 1970-01-01 00:00:00 +0000
+++ tests/test_config.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,281 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import os
19import shutil
20import tempfile
21import unittest
22
23from systemimage import config
24from systemimage import tools
25
26try:
27 from unittest import mock
28except ImportError:
29 import mock
30
31
32class ConfigTests(unittest.TestCase):
33 def setUp(self):
34 temp_directory = tempfile.mkdtemp()
35 self.temp_directory = temp_directory
36
37 def tearDown(self):
38 shutil.rmtree(self.temp_directory)
39
40 @mock.patch("subprocess.call")
41 def test_config(self, mock_call):
42 # Good complete config
43 config_path = os.path.join(self.temp_directory, "config")
44 key_path = os.path.join(self.temp_directory, "key")
45
46 with open(config_path, "w+") as fd:
47 fd.write("""[global]
48base_path = %s
49mirrors = a, b
50
51[mirror_default]
52ssh_user = user
53ssh_key = key
54ssh_port = 22
55ssh_command = command
56
57[mirror_a]
58ssh_host = hosta
59
60[mirror_b]
61ssh_host = hostb
62""" % self.temp_directory)
63
64 conf = config.Config(config_path)
65
66 # Test ssh sync
67 tools.sync_mirrors(conf)
68 expected_calls = [((['ssh', '-i', key_path, '-l', 'user',
69 '-p', '22', 'hosta', 'command'],), {}),
70 ((['ssh', '-i', key_path, '-l', 'user',
71 '-p', '22', 'hostb', 'command'],), {})]
72 self.assertEquals(mock_call.call_args_list, expected_calls)
73
74 # Invalid config
75 invalid_config_path = os.path.join(self.temp_directory,
76 "invalid_config")
77 with open(invalid_config_path, "w+") as fd:
78 fd.write("""invalid""")
79
80 self.assertEquals(config.parse_config(invalid_config_path), {})
81
82 self.assertRaises(
83 Exception, config.Config, os.path.join(self.temp_directory,
84 "invalid"))
85
86 # Test loading config from default location
87 config_file = os.path.join(os.path.dirname(config.__file__),
88 "../../etc/config")
89
90 old_pwd = os.getcwd()
91 os.chdir(self.temp_directory)
92 if not os.path.exists(config_file):
93 self.assertRaises(Exception, config.Config)
94 else:
95 self.assertTrue(config.Config())
96 os.chdir(old_pwd)
97
98 # Empty config
99 empty_config_path = os.path.join(self.temp_directory,
100 "empty_config")
101 with open(empty_config_path, "w+") as fd:
102 fd.write("")
103
104 conf = config.Config(empty_config_path)
105 self.assertEquals(conf.base_path, os.getcwd())
106
107 # Single mirror config
108 single_mirror_config_path = os.path.join(self.temp_directory,
109 "single_mirror_config")
110 with open(single_mirror_config_path, "w+") as fd:
111 fd.write("""[global]
112mirrors = a
113
114[mirror_default]
115ssh_user = user
116ssh_key = key
117ssh_port = 22
118ssh_command = command
119
120[mirror_a]
121ssh_host = host
122""")
123
124 conf = config.Config(single_mirror_config_path)
125 self.assertEquals(conf.mirrors['a'].ssh_command, "command")
126
127 # Missing mirror_default
128 missing_default_config_path = os.path.join(self.temp_directory,
129 "missing_default_config")
130 with open(missing_default_config_path, "w+") as fd:
131 fd.write("""[global]
132mirrors = a
133
134[mirror_a]
135ssh_host = host
136""")
137
138 self.assertRaises(KeyError, config.Config, missing_default_config_path)
139
140 # Missing mirror key
141 missing_key_config_path = os.path.join(self.temp_directory,
142 "missing_key_config")
143 with open(missing_key_config_path, "w+") as fd:
144 fd.write("""[global]
145mirrors = a
146
147[mirror_default]
148ssh_user = user
149ssh_port = 22
150ssh_command = command
151
152[mirror_a]
153ssh_host = host
154""")
155
156 self.assertRaises(KeyError, config.Config, missing_key_config_path)
157
158 # Missing mirror
159 missing_mirror_config_path = os.path.join(self.temp_directory,
160 "missing_mirror_config")
161 with open(missing_mirror_config_path, "w+") as fd:
162 fd.write("""[global]
163mirrors = a
164
165[mirror_default]
166ssh_user = user
167ssh_port = 22
168ssh_command = command
169ssh_key = key
170""")
171
172 self.assertRaises(KeyError, config.Config, missing_mirror_config_path)
173
174 # Missing ssh_host
175 missing_host_config_path = os.path.join(self.temp_directory,
176 "missing_host_config")
177 with open(missing_host_config_path, "w+") as fd:
178 fd.write("""[global]
179mirrors = a
180
181[mirror_default]
182ssh_user = user
183ssh_port = 22
184ssh_command = command
185ssh_key = key
186
187[mirror_a]
188ssh_user = other-user
189""")
190
191 self.assertRaises(KeyError, config.Config, missing_host_config_path)
192
193 # Test with env path
194 test_path = os.path.join(self.temp_directory, "a", "b")
195 os.makedirs(os.path.join(test_path, "etc"))
196 with open(os.path.join(test_path, "etc", "config"), "w+") as fd:
197 fd.write("[global]\nbase_path = a/b/c")
198 os.environ['SYSTEM_IMAGE_ROOT'] = test_path
199 test_config = config.Config()
200 self.assertEquals(test_config.base_path, "a/b/c")
201
202 # Test the channels config
203 # # Multiple channels
204 channel_config_path = os.path.join(self.temp_directory,
205 "channel_config")
206 with open(channel_config_path, "w+") as fd:
207 fd.write("""[global]
208channels = a, b
209
210[channel_a]
211type = manual
212fullcount = 10
213
214[channel_b]
215type = auto
216versionbase = 5
217deltabase = a, b
218files = a, b
219file_a = test;arg1;arg2
220file_b = test;arg3;arg4
221""")
222
223 conf = config.Config(channel_config_path)
224 self.assertEquals(
225 conf.channels['b'].files,
226 [{'name': 'a', 'generator': 'test',
227 'arguments': ['arg1', 'arg2']},
228 {'name': 'b', 'generator': 'test',
229 'arguments': ['arg3', 'arg4']}])
230
231 self.assertEquals(conf.channels['a'].fullcount, 10)
232 self.assertEquals(conf.channels['a'].versionbase, 1)
233 self.assertEquals(conf.channels['a'].deltabase, ['a'])
234
235 self.assertEquals(conf.channels['b'].fullcount, 0)
236 self.assertEquals(conf.channels['b'].versionbase, 5)
237 self.assertEquals(conf.channels['b'].deltabase, ["a", "b"])
238
239 # # Single channel
240 single_channel_config_path = os.path.join(self.temp_directory,
241 "single_channel_config")
242 with open(single_channel_config_path, "w+") as fd:
243 fd.write("""[global]
244channels = a
245
246[channel_a]
247deltabase = a
248versionbase = 1
249files = a
250file_a = test;arg1;arg2
251""")
252
253 conf = config.Config(single_channel_config_path)
254 self.assertEquals(
255 conf.channels['a'].files,
256 [{'name': 'a', 'generator': 'test',
257 'arguments': ['arg1', 'arg2']}])
258
259 # # Invalid channel
260 invalid_channel_config_path = os.path.join(self.temp_directory,
261 "invalid_channel_config")
262 with open(invalid_channel_config_path, "w+") as fd:
263 fd.write("""[global]
264channels = a
265""")
266
267 self.assertRaises(KeyError, config.Config, invalid_channel_config_path)
268
269 # # Invalid file
270 invalid_file_channel_config_path = os.path.join(
271 self.temp_directory, "invalid_file_channel_config")
272 with open(invalid_file_channel_config_path, "w+") as fd:
273 fd.write("""[global]
274channels = a
275
276[channel_a]
277files = a
278""")
279
280 self.assertRaises(KeyError, config.Config,
281 invalid_file_channel_config_path)
0282
=== added file 'tests/test_diff.py'
--- tests/test_diff.py 1970-01-01 00:00:00 +0000
+++ tests/test_diff.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,265 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
5
6# This program is free software: you can redistribute it and/or modify
7# it under the terms of the GNU General Public License as published by
8# the Free Software Foundation; version 3 of the License.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18import shutil
19import sys
20import tarfile
21import tempfile
22import unittest
23
24from io import BytesIO, StringIO
25from systemimage.diff import ImageDiff, compare_files
26
27
28class DiffTests(unittest.TestCase):
29 def setUp(self):
30 temp_directory = tempfile.mkdtemp()
31
32 source_tarball_path = "%s/source.tar" % temp_directory
33 target_tarball_path = "%s/target.tar" % temp_directory
34
35 source_tarball = tarfile.open(source_tarball_path, "w")
36 target_tarball = tarfile.open(target_tarball_path, "w")
37
38 # Standard file
39 a = tarfile.TarInfo()
40 a.name = "a"
41 a.size = 4
42
43 # Standard file
44 b = tarfile.TarInfo()
45 b.name = "b"
46 b.size = 4
47
48 # Standard directory
49 c_dir = tarfile.TarInfo()
50 c_dir.name = "c"
51 c_dir.type = tarfile.DIRTYPE
52 c_dir.mode = 0o755
53
54 # Standard file
55 c = tarfile.TarInfo()
56 c.name = "c/c"
57 c.size = 4
58
59 # Standard file
60 d_source = tarfile.TarInfo()
61 d_source.name = "c/d"
62 d_source.size = 8
63 d_source.mtime = 1000
64
65 # Standard file
66 d_target = tarfile.TarInfo()
67 d_target.name = "c/d"
68 d_target.size = 8
69 d_target.mtime = 1234
70
71 # Symlink
72 e = tarfile.TarInfo()
73 e.name = "e"
74 e.type = tarfile.SYMTYPE
75 e.linkname = "a"
76
77 # Hard link
78 f = tarfile.TarInfo()
79 f.name = "f"
80 f.type = tarfile.LNKTYPE
81 f.linkname = "a"
82
83 # Standard file
84 g_source = tarfile.TarInfo()
85 g_source.name = "c/g"
86 g_source.size = 4
87 g_source.mtime = 1000
88
89 # Standard file
90 g_target = tarfile.TarInfo()
91 g_target.name = "c/g"
92 g_target.size = 4
93 g_target.mtime = 1001
94
95 # Hard link
96 h_source = tarfile.TarInfo()
97 h_source.name = "c/h"
98 h_source.type = tarfile.LNKTYPE
99 h_source.linkname = "d"
100 h_source.mtime = 1000
101
102 # Hard link
103 h_target = tarfile.TarInfo()
104 h_target.name = "c/h"
105 h_target.type = tarfile.LNKTYPE
106 h_target.linkname = "d"
107 h_target.mtime = 1001
108
109 # Hard link
110 i = tarfile.TarInfo()
111 i.name = "c/a_i"
112 i.type = tarfile.LNKTYPE
113 i.linkname = "c"
114
115 # Dangling symlink
116 j = tarfile.TarInfo()
117 j.name = "c/j"
118 j.type = tarfile.SYMTYPE
119 j.linkname = "j_non-existent"
120
121 # Standard directory
122 k_dir = tarfile.TarInfo()
123 k_dir.name = "dir"
124 k_dir.type = tarfile.DIRTYPE
125 k_dir.mode = 0o755
126
127 # Dangling symlink
128 l = tarfile.TarInfo()
129 l.name = "dir"
130 l.type = tarfile.SYMTYPE
131 l.linkname = "l_non-existent"
132
133 # Standard file
134 m_source = tarfile.TarInfo()
135 m_source.name = "m"
136 m_source.size = 4
137
138 # Hard link
139 m_target = tarfile.TarInfo()
140 m_target.name = "m"
141 m_target.type = tarfile.LNKTYPE
142 m_target.linkname = "n"
143
144 # Hard link
145 n_source = tarfile.TarInfo()
146 n_source.name = "n"
147 n_source.type = tarfile.LNKTYPE
148 n_source.linkname = "m"
149
150 # Standard file
151 n_target = tarfile.TarInfo()
152 n_target.name = "n"
153 n_target.size = 4
154
155 # Hard link
156 o_source = tarfile.TarInfo()
157 o_source.name = "system/o.1"
158 o_source.type = tarfile.LNKTYPE
159 o_source.linkname = "system/o"
160
161 # Standard file
162 o_target = tarfile.TarInfo()
163 o_target.name = "system/o"
164 o_target.size = 4
165
166 source_tarball.addfile(a, BytesIO(b"test"))
167 source_tarball.addfile(a, BytesIO(b"test"))
168 source_tarball.addfile(a, BytesIO(b"test"))
169 source_tarball.addfile(b, BytesIO(b"test"))
170 source_tarball.addfile(c_dir)
171 source_tarball.addfile(d_source, BytesIO(b"test-abc"))
172 source_tarball.addfile(g_source, BytesIO(b"test"))
173 source_tarball.addfile(h_source, BytesIO(b"test"))
174 source_tarball.addfile(k_dir)
175 source_tarball.addfile(m_source, BytesIO(b"test"))
176 source_tarball.addfile(n_source)
177
178 target_tarball.addfile(a, BytesIO(b"test"))
179 target_tarball.addfile(c_dir)
180 target_tarball.addfile(c, BytesIO(b"test"))
181 target_tarball.addfile(d_target, BytesIO(b"test-def"))
182 target_tarball.addfile(e)
183 target_tarball.addfile(f)
184 target_tarball.addfile(g_target, BytesIO(b"test"))
185 target_tarball.addfile(h_target, BytesIO(b"test"))
186 target_tarball.addfile(i)
187 target_tarball.addfile(j)
188 target_tarball.addfile(l)
189 target_tarball.addfile(n_target, BytesIO(b"test"))
190 target_tarball.addfile(m_target)
191 target_tarball.addfile(o_source)
192 target_tarball.addfile(o_target)
193
194 source_tarball.close()
195 target_tarball.close()
196
197 self.imagediff = ImageDiff(source_tarball_path, target_tarball_path)
198 self.source_tarball_path = source_tarball_path
199 self.target_tarball_path = target_tarball_path
200 self.temp_directory = temp_directory
201
202 def tearDown(self):
203 shutil.rmtree(self.temp_directory)
204
205 def test_content(self):
206 content_set, content_dict = self.imagediff.scan_content("source")
207 self.assertEquals(sorted(content_dict.keys()),
208 ['a', 'b', 'c', 'c/d', 'c/g', 'c/h', 'dir', 'm',
209 'n'])
210
211 content_set, content_dict = self.imagediff.scan_content("target")
212 self.assertEquals(sorted(content_dict.keys()),
213 ['a', 'c', 'c/a_i', 'c/c', 'c/d', 'c/g', 'c/h',
214 'c/j', 'dir', 'e', 'f', 'm', 'n', 'system/o',
215 'system/o.1'])
216
217 def test_content_invalid_image(self):
218 self.assertRaises(KeyError, self.imagediff.scan_content, "invalid")
219
220 def test_compare_files(self):
221 self.assertEquals(compare_files(None, None), True)
222 self.assertEquals(compare_files(None, BytesIO(b"abc")), False)
223
224 def test_compare_image(self):
225 diff_set = self.imagediff.compare_images()
226 self.assertTrue(("c/a_i", "add") in diff_set)
227
228 def test_print_changes(self):
229 # Redirect stdout
230 old_stdout = sys.stdout
231
232 # FIXME: Would be best to have something that works with both versions
233 if sys.version[0] == "3":
234 sys.stdout = StringIO()
235 else:
236 sys.stdout = BytesIO()
237
238 self.imagediff.print_changes()
239
240 # Unredirect stdout
241 output = sys.stdout.getvalue()
242 sys.stdout = old_stdout
243
244 self.assertEquals(output, """ - b (del)
245 - c/a_i (add)
246 - c/c (add)
247 - c/d (mod)
248 - c/j (add)
249 - dir (mod)
250 - e (add)
251 - f (add)
252 - system/o (add)
253 - system/o.1 (add)
254""")
255
256 def test_generate_tarball(self):
257 output_tarball = "%s/output.tar" % self.temp_directory
258
259 self.imagediff.generate_diff_tarball(output_tarball)
260 tarball = tarfile.open(output_tarball, "r")
261
262 files_list = [entry.name for entry in tarball]
263 self.assertEquals(files_list, ['removed', 'c/c', 'c/a_i', 'c/d', 'c/j',
264 'dir', 'e', 'f', 'system/o',
265 'system/o.1'])
0266
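Condensing what the tests above exercise, the diff API comes down to
three calls; a minimal sketch with invented tarball paths:

    from systemimage.diff import ImageDiff

    diff = ImageDiff("full-255.tar", "full-256.tar")
    diff.print_changes()                      # "- path (add|del|mod)" lines
    diff.generate_diff_tarball("delta.tar")   # 'removed' list + new content
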
=== added file 'tests/test_generators.py'
--- tests/test_generators.py 1970-01-01 00:00:00 +0000
+++ tests/test_generators.py 2014-10-10 11:11:17 +0000
@@ -0,0 +1,1039 @@
1# -*- coding: utf-8 -*-
2
3# Copyright (C) 2013 Canonical Ltd.
4# Author: Stéphane Graber <stgraber@ubuntu.com>
The diff has been truncated for viewing.
