Merge lp:~apw/launchpad/signing-add-sha256-checksums into lp:launchpad

Proposed by Andy Whitcroft
Status: Merged
Merged at revision: 18097
Proposed branch: lp:~apw/launchpad/signing-add-sha256-checksums
Merge into: lp:launchpad
Diff against target: 354 lines (+215/-2)
5 files modified
lib/lp/archivepublisher/publishing.py (+61/-0)
lib/lp/archivepublisher/signing.py (+8/-0)
lib/lp/archivepublisher/tests/test_publisher.py (+106/-1)
lib/lp/archivepublisher/tests/test_signing.py (+30/-0)
lib/lp/archivepublisher/utils.py (+10/-1)
To merge this branch: bzr merge lp:~apw/launchpad/signing-add-sha256-checksums
Reviewer Review Type Date Requested Status
Colin Watson (community) Approve
Review via email: mp+295615@code.launchpad.net

Commit message

Add Signing custom upload (raw-signing/raw-uefi) result checksumming. This is the first step in providing a trust chain for the signing custom uploads (Bug #1285919).

Once the Signing custom upload is unpacked and processed we make a pass over the results, producing a SHA256 checksum for each file. These are accumulated in a SHA256SUMS file which is added to the custom upload result directory.

Description of the change

Add Signing custom upload (raw-signing/raw-uefi) result checksumming. This is the first step in providing a trust chain for the signing custom uploads (Bug #1285919).

Once the Signing custom upload is unpacked and processed we make a pass over the results, producing a SHA256 checksum for each file. These are accumulated in a SHA256SUMS file which is added to the custom upload result directory.

NOTE: this branch also adds a previously missing options test, which we rely on when testing checksumming.

To post a comment you must log in.
Revision history for this message
Andy Whitcroft (apw) wrote :

Updated this branch to take into account the recent changes to restore the raw-uefi custom upload.

Revision history for this message
Colin Watson (cjwatson) :
review: Needs Fixing
Revision history for this message
Andy Whitcroft (apw) wrote :

OK — I created a new DirectoryHash object which checksums files offered to it, and used it to generate the checksum files. The other smaller nits are also applied where they still exist.

Revision history for this message
Colin Watson (cjwatson) wrote :

I much prefer the DirectoryHash abstraction, thanks. Here's a boring review full of mostly style nits, after which this should be good to land.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'lib/lp/archivepublisher/publishing.py'
--- lib/lp/archivepublisher/publishing.py 2016-04-29 12:57:58 +0000
+++ lib/lp/archivepublisher/publishing.py 2016-06-06 17:15:44 +0000
@@ -261,8 +261,12 @@
261 "subdirectories.")261 "subdirectories.")
262 lfc_name = Attribute(262 lfc_name = Attribute(
263 "LibraryFileContent attribute name corresponding to this algorithm.")263 "LibraryFileContent attribute name corresponding to this algorithm.")
264 dh_name = Attribute(
265 "Filename for use when checksumming directories with this algorithm.")
264 write_by_hash = Attribute(266 write_by_hash = Attribute(
265 "Whether to write by-hash subdirectories for this algorithm.")267 "Whether to write by-hash subdirectories for this algorithm.")
268 write_directory_hash = Attribute(
269 "Whether to write *SUM files for this algorithm for directories.")
266270
267271
268@implementer(IArchiveHash)272@implementer(IArchiveHash)
@@ -271,7 +275,9 @@
271 deb822_name = "md5sum"275 deb822_name = "md5sum"
272 apt_name = "MD5Sum"276 apt_name = "MD5Sum"
273 lfc_name = "md5"277 lfc_name = "md5"
278 dh_name = "MD5SUMS"
274 write_by_hash = False279 write_by_hash = False
280 write_directory_hash = False
275281
276282
277@implementer(IArchiveHash)283@implementer(IArchiveHash)
@@ -280,7 +286,9 @@
280 deb822_name = "sha1"286 deb822_name = "sha1"
281 apt_name = "SHA1"287 apt_name = "SHA1"
282 lfc_name = "sha1"288 lfc_name = "sha1"
289 dh_name = "SHA1SUMS"
283 write_by_hash = False290 write_by_hash = False
291 write_directory_hash = False
284292
285293
286@implementer(IArchiveHash)294@implementer(IArchiveHash)
@@ -289,7 +297,9 @@
289 deb822_name = "sha256"297 deb822_name = "sha256"
290 apt_name = "SHA256"298 apt_name = "SHA256"
291 lfc_name = "sha256"299 lfc_name = "sha256"
300 dh_name = "SHA256SUMS"
292 write_by_hash = True301 write_by_hash = True
302 write_directory_hash = True
293303
294304
295archive_hashes = [305archive_hashes = [
@@ -1462,3 +1472,54 @@
1462 count += 11472 count += 1
1463 self.archive.name = new_name1473 self.archive.name = new_name
1464 self.log.info("Renamed deleted archive '%s'.", self.archive.reference)1474 self.log.info("Renamed deleted archive '%s'.", self.archive.reference)
1475
1476
1477class DirectoryHash:
1478 """Represents a directory hierarchy for hashing."""
1479
1480 def __init__(self, root, tmpdir, log):
1481 self.root = root
1482 self.tmpdir = tmpdir
1483 self.log = log
1484 self.checksum_hash = []
1485
1486 for usable in self._usable_archive_hashes:
1487 checksum_file = os.path.join(self.root, usable.dh_name)
1488 self.checksum_hash.append(
1489 (RepositoryIndexFile(checksum_file, self.tmpdir), usable))
1490
1491 def __enter__(self):
1492 return self
1493
1494 def __exit__(self, type, value, traceback):
1495 self.close()
1496
1497 @property
1498 def _usable_archive_hashes(self):
1499 for archive_hash in archive_hashes:
1500 if archive_hash.write_directory_hash:
1501 yield archive_hash
1502
1503 def add(self, path):
1504 """Add a path to be checksummed."""
1505 hashes = [
1506 (checksum_file, archive_hash.hash_factory())
1507 for (checksum_file, archive_hash) in self.checksum_hash]
1508 with open(path, 'rb') as in_file:
1509 for chunk in iter(lambda: in_file.read(256 * 1024), ""):
1510 for (checksum_file, hashobj) in hashes:
1511 hashobj.update(chunk)
1512
1513 for (checksum_file, hashobj) in hashes:
1514 checksum_file.write("%s *%s\n" %
1515 (hashobj.hexdigest(), path[len(self.root) + 1:]))
1516
1517 def add_dir(self, path):
1518 """Recursively add a directory path to be checksummed."""
1519 for dirpath, dirnames, filenames in os.walk(path):
1520 for filename in filenames:
1521 self.add(os.path.join(dirpath, filename))
1522
1523 def close(self):
1524 for (checksum_file, archive_hash) in self.checksum_hash:
1525 checksum_file.close()
14651526
=== modified file 'lib/lp/archivepublisher/signing.py'
--- lib/lp/archivepublisher/signing.py 2016-05-31 12:40:38 +0000
+++ lib/lp/archivepublisher/signing.py 2016-06-06 17:15:44 +0000
@@ -26,6 +26,7 @@
26import textwrap26import textwrap
2727
28from lp.archivepublisher.customupload import CustomUpload28from lp.archivepublisher.customupload import CustomUpload
29from lp.archivepublisher.utils import RepositoryIndexFile
29from lp.services.osutils import remove_if_exists30from lp.services.osutils import remove_if_exists
30from lp.soyuz.interfaces.queue import CustomUploadError31from lp.soyuz.interfaces.queue import CustomUploadError
3132
@@ -291,6 +292,9 @@
291292
292 No actual extraction is required.293 No actual extraction is required.
293 """294 """
295 # Avoid circular import.
296 from lp.archivepublisher.publishing import DirectoryHash
297
294 super(SigningUpload, self).extract()298 super(SigningUpload, self).extract()
295 self.setSigningOptions()299 self.setSigningOptions()
296 filehandlers = list(self.findSigningHandlers())300 filehandlers = list(self.findSigningHandlers())
@@ -303,6 +307,10 @@
303 if 'tarball' in self.signing_options:307 if 'tarball' in self.signing_options:
304 self.convertToTarball()308 self.convertToTarball()
305309
310 versiondir = os.path.join(self.tmpdir, self.version)
311 with DirectoryHash(versiondir, self.tmpdir, self.logger) as hasher:
312 hasher.add_dir(versiondir)
313
306 def shouldInstall(self, filename):314 def shouldInstall(self, filename):
307 return filename.startswith("%s/" % self.version)315 return filename.startswith("%s/" % self.version)
308316
309317
=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
--- lib/lp/archivepublisher/tests/test_publisher.py 2016-05-14 10:17:36 +0000
+++ lib/lp/archivepublisher/tests/test_publisher.py 2016-06-06 17:15:44 +0000
@@ -37,6 +37,7 @@
37 Is,37 Is,
38 LessThan,38 LessThan,
39 Matcher,39 Matcher,
40 MatchesDict,
40 MatchesListwise,41 MatchesListwise,
41 MatchesSetwise,42 MatchesSetwise,
42 MatchesStructure,43 MatchesStructure,
@@ -56,6 +57,7 @@
56from lp.archivepublisher.publishing import (57from lp.archivepublisher.publishing import (
57 ByHash,58 ByHash,
58 ByHashes,59 ByHashes,
60 DirectoryHash,
59 getPublisher,61 getPublisher,
60 I18nIndex,62 I18nIndex,
61 Publisher,63 Publisher,
@@ -92,7 +94,10 @@
92from lp.soyuz.interfaces.archive import IArchiveSet94from lp.soyuz.interfaces.archive import IArchiveSet
93from lp.soyuz.interfaces.archivefile import IArchiveFileSet95from lp.soyuz.interfaces.archivefile import IArchiveFileSet
94from lp.soyuz.tests.test_publishing import TestNativePublishingBase96from lp.soyuz.tests.test_publishing import TestNativePublishingBase
95from lp.testing import TestCaseWithFactory97from lp.testing import (
98 TestCase,
99 TestCaseWithFactory,
100 )
96from lp.testing.fakemethod import FakeMethod101from lp.testing.fakemethod import FakeMethod
97from lp.testing.gpgkeys import gpgkeysdir102from lp.testing.gpgkeys import gpgkeysdir
98from lp.testing.keyserver import KeyServerTac103from lp.testing.keyserver import KeyServerTac
@@ -3160,3 +3165,103 @@
31603165
3161 partner = self.factory.makeArchive(purpose=ArchivePurpose.PARTNER)3166 partner = self.factory.makeArchive(purpose=ArchivePurpose.PARTNER)
3162 self.assertEqual([], self.makePublisher(partner).subcomponents)3167 self.assertEqual([], self.makePublisher(partner).subcomponents)
3168
3169
3170class TestDirectoryHash(TestCase):
3171 """Unit tests for DirectoryHash object."""
3172
3173 def createTestFile(self, path, content):
3174 with open(path, "w") as tfd:
3175 tfd.write(content)
3176 return hashlib.sha256(content).hexdigest()
3177
3178 @property
3179 def all_hash_files(self):
3180 return ['MD5SUMS', 'SHA1SUMS', 'SHA256SUMS']
3181
3182 @property
3183 def expected_hash_files(self):
3184 return ['SHA256SUMS']
3185
3186 def fetchSums(self, rootdir):
3187 result = {}
3188 for dh_file in self.all_hash_files:
3189 checksum_file = os.path.join(rootdir, dh_file)
3190 if os.path.exists(checksum_file):
3191 with open(checksum_file, "r") as sfd:
3192 for line in sfd:
3193 file_list = result.setdefault(dh_file, [])
3194 file_list.append(line.strip().split(' '))
3195 return result
3196
3197 def test_checksum_files_created(self):
3198 tmpdir = unicode(self.makeTemporaryDirectory())
3199 rootdir = unicode(self.makeTemporaryDirectory())
3200
3201 for dh_file in self.all_hash_files:
3202 checksum_file = os.path.join(rootdir, dh_file)
3203 self.assertFalse(os.path.exists(checksum_file))
3204
3205 with DirectoryHash(rootdir, tmpdir, None) as dh:
3206 pass
3207
3208 for dh_file in self.all_hash_files:
3209 checksum_file = os.path.join(rootdir, dh_file)
3210 if dh_file in self.expected_hash_files:
3211 self.assertTrue(os.path.exists(checksum_file))
3212 else:
3213 self.assertFalse(os.path.exists(checksum_file))
3214
3215 def test_basic_file_add(self):
3216 tmpdir = unicode(self.makeTemporaryDirectory())
3217 rootdir = unicode(self.makeTemporaryDirectory())
3218 test1_file = os.path.join(rootdir, "test1")
3219 test1_hash = self.createTestFile(test1_file, "test1")
3220
3221 test2_file = os.path.join(rootdir, "test2")
3222 test2_hash = self.createTestFile(test2_file, "test2")
3223
3224 os.mkdir(os.path.join(rootdir, "subdir1"))
3225
3226 test3_file = os.path.join(rootdir, "subdir1", "test3")
3227 test3_hash = self.createTestFile(test3_file, "test3")
3228
3229 with DirectoryHash(rootdir, tmpdir, None) as dh:
3230 dh.add(test1_file)
3231 dh.add(test2_file)
3232 dh.add(test3_file)
3233
3234 expected = {
3235 'SHA256SUMS': MatchesSetwise(
3236 Equals([test1_hash, "*test1"]),
3237 Equals([test2_hash, "*test2"]),
3238 Equals([test3_hash, "*subdir1/test3"]),
3239 ),
3240 }
3241 self.assertThat(self.fetchSums(rootdir), MatchesDict(expected))
3242
3243 def test_basic_directory_add(self):
3244 tmpdir = unicode(self.makeTemporaryDirectory())
3245 rootdir = unicode(self.makeTemporaryDirectory())
3246 test1_file = os.path.join(rootdir, "test1")
3247 test1_hash = self.createTestFile(test1_file, "test1 dir")
3248
3249 test2_file = os.path.join(rootdir, "test2")
3250 test2_hash = self.createTestFile(test2_file, "test2 dir")
3251
3252 os.mkdir(os.path.join(rootdir, "subdir1"))
3253
3254 test3_file = os.path.join(rootdir, "subdir1", "test3")
3255 test3_hash = self.createTestFile(test3_file, "test3 dir")
3256
3257 with DirectoryHash(rootdir, tmpdir, None) as dh:
3258 dh.add_dir(rootdir)
3259
3260 expected = {
3261 'SHA256SUMS': MatchesSetwise(
3262 Equals([test1_hash, "*test1"]),
3263 Equals([test2_hash, "*test2"]),
3264 Equals([test3_hash, "*subdir1/test3"]),
3265 ),
3266 }
3267 self.assertThat(self.fetchSums(rootdir), MatchesDict(expected))
31633268
=== modified file 'lib/lp/archivepublisher/tests/test_signing.py'
--- lib/lp/archivepublisher/tests/test_signing.py 2016-05-31 12:23:15 +0000
+++ lib/lp/archivepublisher/tests/test_signing.py 2016-06-06 17:15:44 +0000
@@ -256,6 +256,23 @@
256 self.assertContentEqual(['first', 'second'],256 self.assertContentEqual(['first', 'second'],
257 upload.signing_options.keys())257 upload.signing_options.keys())
258258
259 def test_options_none(self):
260 # Specifying no options should leave us with an open tree.
261 self.setUpUefiKeys()
262 self.setUpKmodKeys()
263 self.openArchive("test", "1.0", "amd64")
264 self.archive.add_file("1.0/empty.efi", "")
265 self.archive.add_file("1.0/empty.ko", "")
266 self.process_emulate()
267 self.assertTrue(os.path.exists(os.path.join(
268 self.getSignedPath("test", "amd64"), "1.0", "empty.efi")))
269 self.assertTrue(os.path.exists(os.path.join(
270 self.getSignedPath("test", "amd64"), "1.0", "empty.efi.signed")))
271 self.assertTrue(os.path.exists(os.path.join(
272 self.getSignedPath("test", "amd64"), "1.0", "empty.ko")))
273 self.assertTrue(os.path.exists(os.path.join(
274 self.getSignedPath("test", "amd64"), "1.0", "empty.ko.sig")))
275
259 def test_options_tarball(self):276 def test_options_tarball(self):
260 # Specifying the "tarball" option should create an tarball in277 # Specifying the "tarball" option should create an tarball in
261 # the tmpdir.278 # the tmpdir.
@@ -602,6 +619,19 @@
602 self.assertEqual(stat.S_IMODE(os.stat(self.kmod_pem).st_mode), 0o600)619 self.assertEqual(stat.S_IMODE(os.stat(self.kmod_pem).st_mode), 0o600)
603 self.assertEqual(stat.S_IMODE(os.stat(self.kmod_x509).st_mode), 0o644)620 self.assertEqual(stat.S_IMODE(os.stat(self.kmod_x509).st_mode), 0o644)
604621
622 def test_checksumming_tree(self):
623 # Specifying no options should leave us with an open tree,
624 # confirm it is checksummed.
625 self.setUpUefiKeys()
626 self.setUpKmodKeys()
627 self.openArchive("test", "1.0", "amd64")
628 self.archive.add_file("1.0/empty.efi", "")
629 self.archive.add_file("1.0/empty.ko", "")
630 self.process_emulate()
631 sha256file = os.path.join(self.getSignedPath("test", "amd64"),
632 "1.0", "SHA256SUMS")
633 self.assertTrue(os.path.exists(sha256file))
634
605635
606class TestUefi(TestSigningHelpers):636class TestUefi(TestSigningHelpers):
607637
608638
=== modified file 'lib/lp/archivepublisher/utils.py'
--- lib/lp/archivepublisher/utils.py 2016-02-05 20:28:29 +0000
+++ lib/lp/archivepublisher/utils.py 2016-06-06 17:15:44 +0000
@@ -113,7 +113,7 @@
113 (plain, gzip, bzip2, and xz) transparently and atomically.113 (plain, gzip, bzip2, and xz) transparently and atomically.
114 """114 """
115115
116 def __init__(self, path, temp_root, compressors):116 def __init__(self, path, temp_root, compressors=None):
117 """Store repositories destinations and filename.117 """Store repositories destinations and filename.
118118
119 The given 'temp_root' needs to exist; on the other hand, the119 The given 'temp_root' needs to exist; on the other hand, the
@@ -123,6 +123,9 @@
123 Additionally creates the needed temporary files in the given123 Additionally creates the needed temporary files in the given
124 'temp_root'.124 'temp_root'.
125 """125 """
126 if compressors is None:
127 compressors = [IndexCompressionType.UNCOMPRESSED]
128
126 self.root, filename = os.path.split(path)129 self.root, filename = os.path.split(path)
127 assert os.path.exists(temp_root), 'Temporary root does not exist.'130 assert os.path.exists(temp_root), 'Temporary root does not exist.'
128131
@@ -135,6 +138,12 @@
135 self.old_index_files.append(138 self.old_index_files.append(
136 cls(temp_root, filename, auto_open=False))139 cls(temp_root, filename, auto_open=False))
137140
141 def __enter__(self):
142 return self
143
144 def __exit__(self, type, value, traceback):
145 self.close()
146
138 def write(self, content):147 def write(self, content):
139 """Write contents to all target medias."""148 """Write contents to all target medias."""
140 for index_file in self.index_files:149 for index_file in self.index_files: