=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg	2010-09-16 00:33:37 +0000
+++ database/schema/security.cfg	2010-09-16 09:04:13 +0000
@@ -1130,9 +1130,12 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
-public.buildqueue = SELECT, INSERT, UPDATE
-public.job = SELECT, INSERT, UPDATE
-public.buildpackagejob = SELECT, INSERT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
+public.buildqueue = SELECT, INSERT, UPDATE, DELETE
+public.job = SELECT, INSERT, UPDATE, DELETE
+public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE
+public.builder = SELECT
 
 # Thusly the librarian
 public.libraryfilecontent = SELECT, INSERT
=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py	2010-09-15 19:38:48 +0000
+++ lib/lp/archiveuploader/nascentupload.py	2010-09-16 09:05:43 +0000
@@ -498,10 +498,13 @@
         if self.binaryful:
             return
 
-        # Set up some convenient shortcut variables.
-
-        uploader = self.policy.getUploader(self.changes, build)
-        archive = self.policy.archive
+        # The build can have an explicit uploader, which may be different
+        # from the changes file signer (i.e. in the case of daily source
+        # package builds).
+        if build is not None:
+            uploader = build.getUploader(self.changes)
+        else:
+            uploader = self.changes.signer
 
         # If we have no signer, there's no ACL we can apply.
         if uploader is None:
=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py	2010-09-16 09:05:48 +0000
@@ -42,7 +42,7 @@
             requester=self.recipe.owner)
         Store.of(self.build).flush()
 
-        self.options.context = 'recipe'
+        self.options.context = 'buildd'
 
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py	2010-09-16 09:05:56 +0000
@@ -1884,7 +1884,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")
 
-    def testNoFiles(self):
+    def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.
 
@@ -1908,6 +1908,8 @@
 
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1928,7 +1930,7 @@
         self.assertTrue('DEBUG: Moving upload directory '
             in log_contents)
 
-    def testSuccess(self):
+    def testBinaryPackageBuilds(self):
         # Properly uploaded binaries should result in the
         # build status changing to FULLYBUILT.
         # Upload a source package
@@ -1949,6 +1951,8 @@
 
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         upload_dir = self.queueUpload("bar_1.0-1_binary",
@@ -1970,6 +1974,77 @@
             'INFO: Committing the transaction and any mails associated with '
             'this upload.' in log_lines)
 
+    def testSourcePackageRecipeBuild(self):
+        # Properly uploaded source packages should result in the
+        # build status changing to FULLYBUILT.
+
+        # Upload a source package
+        archive = self.factory.makeArchive()
+        archive.require_virtualized = False
+        build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+            distroseries=self.breezy, archive=archive, requester=archive.owner)
+        self.assertEquals(archive.owner, build.requester)
+        bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+        # Commit so the build cookie has the right ids.
+        self.layer.txn.commit()
+        leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+        relative_path = "~%s/%s/%s/%s" % (
+            archive.owner.name, archive.name, self.breezy.distribution.name,
+            self.breezy.name)
+        upload_dir = self.queueUpload(
+            "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path)
+        self.options.context = 'buildd'
+        self.options.builds = True
+        build.jobStarted()
+        # Commit so date_started is recorded and doesn't cause constraint
+        # violations later.
+        build.status = BuildStatus.UPLOADING
+        self.layer.txn.commit()
+        self.uploadprocessor.processBuildUpload(
+            self.incoming_folder, leaf_name)
+        self.layer.txn.commit()
+
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status,
+            build.upload_log.read())
+        self.assertEquals(None, build.builder)
+        self.assertIsNot(None, build.date_finished)
+        self.assertIsNot(None, build.duration)
+        log_contents = build.upload_log.read()
+        log_lines = log_contents.splitlines()
+        self.assertTrue(
+            'INFO: Processing upload bar_1.0-1_source.changes' in log_lines)
+        self.assertTrue(
+            'INFO: Committing the transaction and any mails associated with '
+            'this upload.' in log_lines)
+
+    def testSourcePackageRecipeBuild_fail(self):
+        # A source package recipe build will fail if no files are present.
+
+        # Upload a source package
+        build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
+            distroseries=self.breezy)
+        removeSecurityProxy(build).package_build.archive = self.ubuntu.main_archive
+        bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
+        # Commit so the build cookie has the right ids.
+        self.layer.txn.commit()
+        leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
+        os.mkdir(os.path.join(self.incoming_folder, leaf_name))
+        self.options.context = 'buildd'
+        self.options.builds = True
+        build.jobStarted()
+        # Commit so date_started is recorded and doesn't cause constraint
+        # violations later.
+        self.layer.txn.commit()
+        build.status = BuildStatus.UPLOADING
+        self.uploadprocessor.processBuildUpload(
+            self.incoming_folder, leaf_name)
+        self.layer.txn.commit()
+
+        self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
+        self.assertEquals(None, build.builder)
+        self.assertIsNot(None, build.date_finished)
+        self.assertIsNot(None, build.duration)
+
 
 class ParseBuildUploadLeafNameTests(TestCase):
     """Tests for parse_build_upload_leaf_name."""
=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
--- lib/lp/archiveuploader/uploadpolicy.py	2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/uploadpolicy.py	2010-09-16 09:06:21 +0000
@@ -11,7 +11,6 @@
     "BuildDaemonUploadPolicy",
     "findPolicyByName",
     "IArchiveUploadPolicy",
-    "SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME",
     "UploadPolicyError",
     ]
 
@@ -34,8 +33,6 @@
 
 from lazr.enum import EnumeratedType, Item
 
-# Defined here so that uploadpolicy.py doesn't depend on lp.code.
-SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME = 'recipe'
 
 # Number of seconds in an hour (used later)
 HOURS = 3600
 
@@ -128,13 +125,8 @@
             raise AssertionError(
                 "Upload is not sourceful, binaryful or mixed.")
 
-    def getUploader(self, changes, build):
-        """Get the person who is doing the uploading."""
-        return changes.signer
-
     def setOptions(self, options):
         """Store the options for later."""
-        self.options = options
         # Extract and locate the distribution though...
         self.distro = getUtility(IDistributionSet)[options.distro]
         if options.distroseries is not None:
@@ -324,7 +316,6 @@
     """The build daemon upload policy is invoked by the slave scanner."""
 
     name = 'buildd'
-    accepted_type = ArchiveUploadType.BINARY_ONLY
 
     def __init__(self):
         super(BuildDaemonUploadPolicy, self).__init__()
@@ -333,19 +324,28 @@
         self.unsigned_dsc_ok = True
 
     def setOptions(self, options):
-        AbstractUploadPolicy.setOptions(self, options)
-        self.builds = True
+        """Store the options for later."""
+        super(BuildDaemonUploadPolicy, self).setOptions(options)
+        options.builds = True
 
     def policySpecificChecks(self, upload):
         """The buildd policy should enforce that the buildid matches."""
         # XXX: dsilvers 2005-10-14 bug=3135:
         # Implement this to check the buildid etc.
-        pass
 
     def rejectPPAUploads(self, upload):
         """Buildd policy allows PPA upload."""
         return False
 
+    def validateUploadType(self, upload):
+        if upload.sourceful and upload.binaryful:
+            if self.accepted_type != ArchiveUploadType.MIXED_ONLY:
+                upload.reject(
+                    "Source/binary (i.e. mixed) uploads are not allowed.")
+        elif not upload.sourceful and not upload.binaryful:
+            raise AssertionError(
+                "Upload is not sourceful, binaryful or mixed.")
+
 
 class SyncUploadPolicy(AbstractUploadPolicy):
     """This policy is invoked when processing sync uploads."""
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py	2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py	2010-09-16 09:06:37 +0000
@@ -71,7 +71,6 @@
     )
 from lp.archiveuploader.uploadpolicy import (
     BuildDaemonUploadPolicy,
-    SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
     UploadPolicyError,
     )
 from lp.buildmaster.enums import (
@@ -207,6 +206,7 @@
         The name of the leaf is the build id of the build. Build uploads
         always contain a single package per leaf.
         """
+        upload_path = os.path.join(fsroot, upload)
         try:
             job_id = parse_build_upload_leaf_name(upload)
         except ValueError:
@@ -220,15 +220,15 @@
                 "Unable to find package build job with id %d. Skipping."
                 % job_id)
             return
+        logger = BufferLogger()
         build = buildfarm_job.getSpecificJob()
         if build.status != BuildStatus.UPLOADING:
             self.log.warn(
-                "Expected build status to be 'UPLOADING', was %s. Skipping.",
-                build.status.name)
+                "Expected build status to be 'UPLOADING', was %s. "
" + "Moving to failed.", build.status.name) + self.moveProcessedUpload(upload_path, "failed", logger) return self.log.debug("Build %s found" % build.id) - logger = BufferLogger() - upload_path = os.path.join(fsroot, upload) try: [changes_file] = self.locateChangesFiles(upload_path) logger.debug("Considering changefile %s" % changes_file) @@ -251,11 +251,11 @@ UploadStatusEnum.REJECTED: "rejected", UploadStatusEnum.ACCEPTED: "accepted"}[result] self.moveProcessedUpload(upload_path, destination, logger) + build.date_finished = datetime.datetime.now(pytz.UTC) if not (result == UploadStatusEnum.ACCEPTED and build.verifySuccessfulUpload() and build.status == BuildStatus.FULLYBUILT): build.status = BuildStatus.FAILEDTOUPLOAD - build.date_finished = datetime.datetime.now(pytz.UTC) build.notify(extra_info="Uploading build %s failed." % upload) build.storeUploadLog(logger.buffer.getvalue()) @@ -451,10 +451,8 @@ # Reject source upload to buildd upload paths. first_path = relative_path.split(os.path.sep)[0] - is_not_buildd_nor_recipe_policy = policy.name not in [ - SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME, - BuildDaemonUploadPolicy.name] - if first_path.isdigit() and is_not_buildd_nor_recipe_policy: + if (first_path.isdigit() and + policy.name != BuildDaemonUploadPolicy.name): error_message = ( "Invalid upload path (%s) for this policy (%s)" % (relative_path, policy.name)) === modified file 'lib/lp/buildmaster/interfaces/packagebuild.py' --- lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-06 20:29:00 +0000 +++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-16 09:04:13 +0000 @@ -71,10 +71,6 @@ title=_('Build farm job'), schema=IBuildFarmJob, required=True, readonly=True, description=_('The base build farm job.')) - policy_name = TextLine( - title=_("Policy name"), required=True, - description=_("The upload policy to use for handling these builds.")) - current_component = Attribute( 'Component where the source related to this build was last ' 'published.') @@ -149,6 +145,14 @@ created in a suspended state. """ + def getUploader(changes): + """Return the person responsible for the upload. + + This is used to when checking permissions. + + :param changes: Changes file from the upload. + """ + class IPackageBuildSource(Interface): """A utility of this interface used to create _things_.""" === modified file 'lib/lp/buildmaster/model/packagebuild.py' --- lib/lp/buildmaster/model/packagebuild.py 2010-09-16 00:33:37 +0000 +++ lib/lp/buildmaster/model/packagebuild.py 2010-09-16 09:04:13 +0000 @@ -94,8 +94,6 @@ build_farm_job_id = Int(name='build_farm_job', allow_none=False) build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id') - policy_name = 'buildd' - # The following two properties are part of the IPackageBuild # interface, but need to be provided by derived classes. distribution = None @@ -239,6 +237,10 @@ """See `IPackageBuild`.""" raise NotImplementedError + def getUploader(self, changes): + """See `IPackageBuild`.""" + raise NotImplementedError + class PackageBuildDerived: """Setup the delegation for package build. @@ -360,6 +362,9 @@ # Release the builder for another job. self.buildqueue_record.builder.cleanSlave() + # Remove BuildQueue record. + self.buildqueue_record.destroySelf() + def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger): """Handle a package that had failed to build. 
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py	2010-09-16 09:06:43 +0000
@@ -105,7 +105,6 @@
 
     def test_default_values(self):
         # PackageBuild has a number of default values.
-        self.failUnlessEqual('buildd', self.package_build.policy_name)
         self.failUnlessEqual(
             'multiverse', self.package_build.current_component.name)
         self.failUnlessEqual(None, self.package_build.distribution)
@@ -322,11 +321,14 @@
     def test_handleStatus_OK_absolute_filepath(self):
         # A filemap that tries to write to files outside of
         # the upload directory will result in a failed upload.
+        builder = self.build.builder
         self.build.handleStatus('OK', None, {
             'filemap': {'/tmp/myfile.py': 'test_file_hash'},
         })
         self.assertEqual(BuildStatus.FAILEDTOUPLOAD, self.build.status)
         self.assertResultCount(0, "failed")
+        self.assertIs(None, self.build.buildqueue_record)
+        self.assertIs(None, builder.currentjob)
 
     def test_handleStatus_OK_relative_filepath(self):
         # A filemap that tries to write to files outside of
=== modified file 'lib/lp/code/configure.zcml'
--- lib/lp/code/configure.zcml	2010-09-13 04:56:29 +0000
+++ lib/lp/code/configure.zcml	2010-09-16 09:04:13 +0000
@@ -923,7 +923,7 @@
-
+

-    >>> build.status.title
-    'Uploading build'
-    >>> bqItem10.destroySelf()
-
 === Successfully collected and uploaded (FULLYBUILT) ===
 
 Build item 6 has binary packages available in the sample data, letting us test
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py	2010-09-16 09:04:13 +0000
@@ -760,6 +760,10 @@
         # package build, then don't hit the db.
         return self
 
+    def getUploader(self, changes):
+        """See `IBinaryPackageBuild`."""
+        return changes.signer
+
 
 class BinaryPackageBuildSet:
     implements(IBinaryPackageBuildSet)
=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py	2010-09-16 00:33:37 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py	2010-09-16 09:04:13 +0000
@@ -150,6 +150,15 @@
         self.assertStatementCount(
             0, self.build.getSpecificJob)
 
+    def test_getUploader(self):
+        # For ACL purposes the uploader is the changes file signer.
+
+        class MockChanges:
+            signer = "Somebody