Merge lp:~jelmer/launchpad/no-more-buildid into lp:launchpad
- no-more-buildid
- Merge into devel
Proposed by Jelmer Vernooij on 2010-09-15
| Status: | Merged |
|---|---|
| Approved by: | Robert Collins on 2010-09-15 |
| Approved revision: | no longer in the source branch. |
| Merged at revision: | 11579 |
| Proposed branch: | lp:~jelmer/launchpad/no-more-buildid |
| Merge into: | lp:launchpad |
| Prerequisite: | lp:~jelmer/launchpad/archiveuploader-build-handling |
| Diff against target: | 763 lines (+101/-156), 17 files modified |
| To merge this branch: | bzr merge lp:~jelmer/launchpad/no-more-buildid |
| Related bugs: | |

Files modified:

- lib/lp/archiveuploader/dscfile.py (+0/-9)
- lib/lp/archiveuploader/nascentupload.py (+11/-10)
- lib/lp/archiveuploader/nascentuploadfile.py (+13/-22)
- lib/lp/archiveuploader/tests/__init__.py (+5/-7)
- lib/lp/archiveuploader/tests/nascentupload.txt (+4/-5)
- lib/lp/archiveuploader/tests/test_buildduploads.py (+7/-8)
- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py (+11/-12)
- lib/lp/archiveuploader/tests/test_recipeuploads.py (+7/-11)
- lib/lp/archiveuploader/tests/test_uploadprocessor.py (+24/-21)
- lib/lp/archiveuploader/tests/uploadpolicy.txt (+1/-8)
- lib/lp/archiveuploader/uploadpolicy.py (+2/-5)
- lib/lp/archiveuploader/uploadprocessor.py (+1/-1)
- lib/lp/code/model/sourcepackagerecipebuild.py (+1/-3)
- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt (+2/-3)
- lib/lp/soyuz/doc/distroseriesqueue-translations.txt (+3/-5)
- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt (+3/-20)
- lib/lp/soyuz/scripts/soyuz_process_upload.py (+6/-6)

| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Robert Collins (community) | | | Approve on 2010-09-15 |
| Launchpad code reviewers | code | 2010-09-15 | Pending |
Commit Message
Remove --buildid argument from archiveuploader.
Description of the Change
This removes the --buildid argument from archiveuploader (the approved branch lp:~jelmer/launchpad/506526-remove-popen removes its last remaining use) and instead has callers pass build objects down explicitly.
tests: ./bin/test lp.archiveuploader
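The shape of the change is easiest to see next to the old option-driven lookup. Below is a minimal, self-contained sketch (hypothetical names, not Launchpad's actual classes) contrasting the removed --buildid dereference with the object-passing convention the diff adopts, e.g. `processChangesFile(..., build=build)` in the updated tests.

```python
class Build:
    """Toy stand-in for a build record (hypothetical, for illustration only)."""

    def __init__(self, build_id, pocket):
        self.id = build_id
        self.pocket = pocket


BUILDS = {1: Build(1, "RELEASE")}


def process_changes_old(changes_file, options):
    # Old style (removed by this branch): dereference an id taken from
    # the --buildid option; every layer had to repeat this lookup.
    build_id = getattr(options, "buildid", None)
    if build_id is None:
        return None
    return BUILDS.get(build_id)


def process_changes_new(changes_file, build=None):
    # New style: the caller resolves the build once and passes the
    # object down, so no global option or id lookup is needed.
    return build


if __name__ == "__main__":
    build = BUILDS[1]
    assert process_changes_new("bar_1.0-1_source.changes", build=build) is build
```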
Preview Diff
| 1 | === modified file 'lib/lp/archiveuploader/dscfile.py' |
| 2 | --- lib/lp/archiveuploader/dscfile.py 2010-09-15 19:44:30 +0000 |
| 3 | +++ lib/lp/archiveuploader/dscfile.py 2010-09-15 19:44:32 +0000 |
| 4 | @@ -630,15 +630,6 @@ |
| 5 | cleanup_unpacked_dir(unpacked_dir) |
| 6 | self.logger.debug("Done") |
| 7 | |
| 8 | - def findBuild(self): |
| 9 | - """Find and return the SourcePackageRecipeBuild, if one is specified. |
| 10 | - """ |
| 11 | - build_id = getattr(self.policy.options, 'buildid', None) |
| 12 | - if build_id is None: |
| 13 | - return None |
| 14 | - |
| 15 | - return getUtility(ISourcePackageRecipeBuildSource).getById(build_id) |
| 16 | - |
| 17 | def storeInDatabase(self, build): |
| 18 | """Store DSC information as a SourcePackageRelease record. |
| 19 | |
| 20 | |
| 21 | === modified file 'lib/lp/archiveuploader/nascentupload.py' |
| 22 | --- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:44:30 +0000 |
| 23 | +++ lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:44:32 +0000 |
| 24 | @@ -500,7 +500,7 @@ |
| 25 | |
| 26 | # Set up some convenient shortcut variables. |
| 27 | |
| 28 | - uploader = self.policy.getUploader(self.changes) |
| 29 | + uploader = self.policy.getUploader(self.changes, build) |
| 30 | archive = self.policy.archive |
| 31 | |
| 32 | # If we have no signer, there's no ACL we can apply. |
| 33 | @@ -940,7 +940,6 @@ |
| 34 | sourcepackagerelease = None |
| 35 | if self.sourceful: |
| 36 | assert self.changes.dsc, "Sourceful upload lacks DSC." |
| 37 | - build = self.changes.dsc.findBuild() |
| 38 | if build is not None: |
| 39 | self.changes.dsc.checkBuild(build) |
| 40 | sourcepackagerelease = self.changes.dsc.storeInDatabase(build) |
| 41 | @@ -983,19 +982,21 @@ |
| 42 | sourcepackagerelease = ( |
| 43 | binary_package_file.findSourcePackageRelease()) |
| 44 | |
| 45 | + # Find the build for this particular binary package file. |
| 46 | if build is None: |
| 47 | - build = binary_package_file.findBuild( |
| 48 | + bpf_build = binary_package_file.findBuild( |
| 49 | sourcepackagerelease) |
| 50 | - if build.source_package_release != sourcepackagerelease: |
| 51 | + else: |
| 52 | + bpf_build = build |
| 53 | + if bpf_build.source_package_release != sourcepackagerelease: |
| 54 | raise AssertionError( |
| 55 | "Attempt to upload binaries specifying build %s, " |
| 56 | - "where they don't fit." % build.id) |
| 57 | - binary_package_file.checkBuild(build) |
| 58 | - assert self.queue_root.pocket == build.pocket, ( |
| 59 | + "where they don't fit." % bpf_build.id) |
| 60 | + binary_package_file.checkBuild(bpf_build) |
| 61 | + assert self.queue_root.pocket == bpf_build.pocket, ( |
| 62 | "Binary was not build for the claimed pocket.") |
| 63 | - binary_package_file.storeInDatabase(build) |
| 64 | - processed_builds.append(build) |
| 65 | - build = None |
| 66 | + binary_package_file.storeInDatabase(bpf_build) |
| 67 | + processed_builds.append(bpf_build) |
| 68 | |
| 69 | # Store the related builds after verifying they were built |
| 70 | # from the same source. |
| 71 | |
| 72 | === modified file 'lib/lp/archiveuploader/nascentuploadfile.py' |
| 73 | --- lib/lp/archiveuploader/nascentuploadfile.py 2010-09-15 19:44:30 +0000 |
| 74 | +++ lib/lp/archiveuploader/nascentuploadfile.py 2010-09-15 19:44:32 +0000 |
| 75 | @@ -53,7 +53,6 @@ |
| 76 | PackageUploadCustomFormat, |
| 77 | PackageUploadStatus, |
| 78 | ) |
| 79 | -from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet |
| 80 | from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet |
| 81 | from lp.soyuz.interfaces.component import IComponentSet |
| 82 | from lp.soyuz.interfaces.section import ISectionSet |
| 83 | @@ -860,30 +859,22 @@ |
| 84 | - Create a new build in FULLYBUILT status. |
| 85 | |
| 86 | """ |
| 87 | - build_id = getattr(self.policy.options, 'buildid', None) |
| 88 | dar = self.policy.distroseries[self.archtag] |
| 89 | |
| 90 | - if build_id is None: |
| 91 | - # Check if there's a suitable existing build. |
| 92 | - build = sourcepackagerelease.getBuildByArch( |
| 93 | - dar, self.policy.archive) |
| 94 | - if build is not None: |
| 95 | - build.status = BuildStatus.FULLYBUILT |
| 96 | - self.logger.debug("Updating build for %s: %s" % ( |
| 97 | - dar.architecturetag, build.id)) |
| 98 | - else: |
| 99 | - # No luck. Make one. |
| 100 | - # Usually happen for security binary uploads. |
| 101 | - build = sourcepackagerelease.createBuild( |
| 102 | - dar, self.policy.pocket, self.policy.archive, |
| 103 | - status=BuildStatus.FULLYBUILT) |
| 104 | - self.logger.debug("Build %s created" % build.id) |
| 105 | + # Check if there's a suitable existing build. |
| 106 | + build = sourcepackagerelease.getBuildByArch( |
| 107 | + dar, self.policy.archive) |
| 108 | + if build is not None: |
| 109 | + build.status = BuildStatus.FULLYBUILT |
| 110 | + self.logger.debug("Updating build for %s: %s" % ( |
| 111 | + dar.architecturetag, build.id)) |
| 112 | else: |
| 113 | - build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id) |
| 114 | - self.logger.debug("Build %s found" % build.id) |
| 115 | - # Ensure gathered binary is related to a FULLYBUILT build |
| 116 | - # record. It will be check in slave-scanner procedure to |
| 117 | - # certify that the build was processed correctly. |
| 118 | + # No luck. Make one. |
| 119 | + # Usually happen for security binary uploads. |
| 120 | + build = sourcepackagerelease.createBuild( |
| 121 | + dar, self.policy.pocket, self.policy.archive, |
| 122 | + status=BuildStatus.FULLYBUILT) |
| 123 | + self.logger.debug("Build %s created" % build.id) |
| 124 | return build |
| 125 | |
| 126 | def checkBuild(self, build): |
| 127 | |
| 128 | === modified file 'lib/lp/archiveuploader/tests/__init__.py' |
| 129 | --- lib/lp/archiveuploader/tests/__init__.py 2010-08-26 20:08:43 +0000 |
| 130 | +++ lib/lp/archiveuploader/tests/__init__.py 2010-09-15 19:44:32 +0000 |
| 131 | @@ -64,17 +64,15 @@ |
| 132 | class MockUploadOptions: |
| 133 | """Mock upload policy options helper""" |
| 134 | |
| 135 | - def __init__(self, distro='ubuntutest', distroseries=None, buildid=None): |
| 136 | + def __init__(self, distro='ubuntutest', distroseries=None): |
| 137 | self.distro = distro |
| 138 | self.distroseries = distroseries |
| 139 | - self.buildid = buildid |
| 140 | - |
| 141 | - |
| 142 | -def getPolicy(name='anything', distro='ubuntu', distroseries=None, |
| 143 | - buildid=None): |
| 144 | + |
| 145 | + |
| 146 | +def getPolicy(name='anything', distro='ubuntu', distroseries=None): |
| 147 | """Build and return an Upload Policy for the given context.""" |
| 148 | policy = findPolicyByName(name) |
| 149 | - options = MockUploadOptions(distro, distroseries, buildid) |
| 150 | + options = MockUploadOptions(distro, distroseries) |
| 151 | policy.setOptions(options) |
| 152 | return policy |
| 153 | |
| 154 | |
| 155 | === modified file 'lib/lp/archiveuploader/tests/nascentupload.txt' |
| 156 | --- lib/lp/archiveuploader/tests/nascentupload.txt 2010-08-26 15:28:34 +0000 |
| 157 | +++ lib/lp/archiveuploader/tests/nascentupload.txt 2010-09-15 19:44:32 +0000 |
| 158 | @@ -27,7 +27,7 @@ |
| 159 | ... datadir, getPolicy, mock_logger, mock_logger_quiet) |
| 160 | |
| 161 | >>> buildd_policy = getPolicy( |
| 162 | - ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1) |
| 163 | + ... name='buildd', distro='ubuntu', distroseries='hoary') |
| 164 | |
| 165 | >>> sync_policy = getPolicy( |
| 166 | ... name='sync', distro='ubuntu', distroseries='hoary') |
| 167 | @@ -216,7 +216,7 @@ |
| 168 | # Use the buildd policy as it accepts unsigned changes files and binary |
| 169 | # uploads. |
| 170 | >>> modified_buildd_policy = getPolicy( |
| 171 | - ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1) |
| 172 | + ... name='buildd', distro='ubuntu', distroseries='hoary') |
| 173 | |
| 174 | >>> ed_mismatched_upload = NascentUpload.from_changesfile_path( |
| 175 | ... datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"), |
| 176 | @@ -640,13 +640,12 @@ |
| 177 | the 'buildd' upload policy and the build record id. |
| 178 | |
| 179 | >>> buildd_policy = getPolicy( |
| 180 | - ... name='buildd', distro='ubuntu', distroseries='hoary', |
| 181 | - ... buildid=multibar_build.id) |
| 182 | + ... name='buildd', distro='ubuntu', distroseries='hoary') |
| 183 | |
| 184 | >>> multibar_bin_upload = NascentUpload.from_changesfile_path( |
| 185 | ... datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'), |
| 186 | ... buildd_policy, mock_logger_quiet) |
| 187 | - >>> multibar_bin_upload.process() |
| 188 | + >>> multibar_bin_upload.process(build=multibar_build) |
| 189 | >>> success = multibar_bin_upload.do_accept() |
| 190 | |
| 191 | Now that we have successfully processed the binaries coming from a |
| 192 | |
| 193 | === modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py' |
| 194 | --- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-08-26 15:28:34 +0000 |
| 195 | +++ lib/lp/archiveuploader/tests/test_buildduploads.py 2010-09-15 19:44:32 +0000 |
| 196 | @@ -112,7 +112,7 @@ |
| 197 | # Store source queue item for future use. |
| 198 | self.source_queue = queue_item |
| 199 | |
| 200 | - def _uploadBinary(self, archtag): |
| 201 | + def _uploadBinary(self, archtag, build): |
| 202 | """Upload the base binary. |
| 203 | |
| 204 | Ensure it got processed and has a respective queue record. |
| 205 | @@ -121,7 +121,7 @@ |
| 206 | self._prepareUpload(self.binary_dir) |
| 207 | self.uploadprocessor.processChangesFile( |
| 208 | os.path.join(self.queue_folder, "incoming", self.binary_dir), |
| 209 | - self.getBinaryChangesfileFor(archtag)) |
| 210 | + self.getBinaryChangesfileFor(archtag), build=build) |
| 211 | queue_item = self.uploadprocessor.last_processed_upload.queue_root |
| 212 | self.assertTrue( |
| 213 | queue_item is not None, |
| 214 | @@ -205,10 +205,9 @@ |
| 215 | pubrec.datepublished = UTC_NOW |
| 216 | queue_item.setDone() |
| 217 | |
| 218 | - def _setupUploadProcessorForBuild(self, build_candidate): |
| 219 | + def _setupUploadProcessorForBuild(self): |
| 220 | """Setup an UploadProcessor instance for a given buildd context.""" |
| 221 | self.options.context = self.policy |
| 222 | - self.options.buildid = str(build_candidate.id) |
| 223 | self.uploadprocessor = self.getUploadProcessor( |
| 224 | self.layer.txn) |
| 225 | |
| 226 | @@ -223,8 +222,8 @@ |
| 227 | """ |
| 228 | # Upload i386 binary. |
| 229 | build_candidate = self._createBuild('i386') |
| 230 | - self._setupUploadProcessorForBuild(build_candidate) |
| 231 | - build_used = self._uploadBinary('i386') |
| 232 | + self._setupUploadProcessorForBuild() |
| 233 | + build_used = self._uploadBinary('i386', build_candidate) |
| 234 | |
| 235 | self.assertEqual(build_used.id, build_candidate.id) |
| 236 | self.assertBuildsCreated(1) |
| 237 | @@ -239,8 +238,8 @@ |
| 238 | |
| 239 | # Upload powerpc binary |
| 240 | build_candidate = self._createBuild('powerpc') |
| 241 | - self._setupUploadProcessorForBuild(build_candidate) |
| 242 | - build_used = self._uploadBinary('powerpc') |
| 243 | + self._setupUploadProcessorForBuild() |
| 244 | + build_used = self._uploadBinary('powerpc', build_candidate) |
| 245 | |
| 246 | self.assertEqual(build_used.id, build_candidate.id) |
| 247 | self.assertBuildsCreated(2) |
| 248 | |
| 249 | === modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py' |
| 250 | --- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-31 11:11:09 +0000 |
| 251 | +++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-09-15 19:44:32 +0000 |
| 252 | @@ -355,10 +355,10 @@ |
| 253 | builds = self.name16.archive.getBuildRecords(name="bar") |
| 254 | [build] = builds |
| 255 | self.options.context = 'buildd' |
| 256 | - self.options.buildid = build.id |
| 257 | upload_dir = self.queueUpload( |
| 258 | "bar_1.0-1_binary_universe", "~name16/ubuntu") |
| 259 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 260 | + self.processUpload( |
| 261 | + self.uploadprocessor, upload_dir, build=build) |
| 262 | |
| 263 | # No mails are sent for successful binary uploads. |
| 264 | self.assertEqual(len(stub.test_emails), 0, |
| 265 | @@ -405,9 +405,9 @@ |
| 266 | |
| 267 | # Binary upload to the just-created build record. |
| 268 | self.options.context = 'buildd' |
| 269 | - self.options.buildid = build.id |
| 270 | upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu") |
| 271 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 272 | + self.processUpload( |
| 273 | + self.uploadprocessor, upload_dir, build=build) |
| 274 | |
| 275 | # The binary upload was accepted and it's waiting in the queue. |
| 276 | queue_items = self.breezy.getQueueItems( |
| 277 | @@ -459,9 +459,9 @@ |
| 278 | |
| 279 | # Binary upload to the just-created build record. |
| 280 | self.options.context = 'buildd' |
| 281 | - self.options.buildid = build_bar_i386.id |
| 282 | upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu") |
| 283 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 284 | + self.processUpload( |
| 285 | + self.uploadprocessor, upload_dir, build=build_bar_i386) |
| 286 | |
| 287 | # The binary upload was accepted and it's waiting in the queue. |
| 288 | queue_items = self.breezy.getQueueItems( |
| 289 | @@ -760,9 +760,9 @@ |
| 290 | builds = self.name16.archive.getBuildRecords(name='bar') |
| 291 | [build] = builds |
| 292 | self.options.context = 'buildd' |
| 293 | - self.options.buildid = build.id |
| 294 | upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu") |
| 295 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 296 | + self.processUpload( |
| 297 | + self.uploadprocessor, upload_dir, build=build) |
| 298 | |
| 299 | # The binary upload was accepted and it's waiting in the queue. |
| 300 | queue_items = self.breezy.getQueueItems( |
| 301 | @@ -804,10 +804,9 @@ |
| 302 | # Binary uploads should exhibit the same behaviour: |
| 303 | [build] = self.name16.archive.getBuildRecords(name="bar") |
| 304 | self.options.context = 'buildd' |
| 305 | - self.options.buildid = build.id |
| 306 | upload_dir = self.queueUpload( |
| 307 | "bar_1.0-1_contrib_binary", "~name16/ubuntu") |
| 308 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 309 | + self.processUpload(self.uploadprocessor, upload_dir, build=build) |
| 310 | queue_items = self.breezy.getQueueItems( |
| 311 | status=PackageUploadStatus.ACCEPTED, name="bar", |
| 312 | version="1.0-1", exact_match=True, archive=self.name16.archive) |
| 313 | @@ -1306,14 +1305,14 @@ |
| 314 | builds = self.name16.archive.getBuildRecords(name='bar') |
| 315 | [build] = builds |
| 316 | self.options.context = 'buildd' |
| 317 | - self.options.buildid = build.id |
| 318 | |
| 319 | # Stuff 1024 MiB in name16 PPA, so anything will be above the |
| 320 | # default quota limit, 1024 MiB. |
| 321 | self._fillArchive(self.name16.archive, 1024 * (2 ** 20)) |
| 322 | |
| 323 | upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu") |
| 324 | - self.processUpload(self.uploadprocessor, upload_dir) |
| 325 | + self.processUpload( |
| 326 | + self.uploadprocessor, upload_dir, build=build) |
| 327 | |
| 328 | # The binary upload was accepted, and it's waiting in the queue. |
| 329 | queue_items = self.breezy.getQueueItems( |
| 330 | |
| 331 | === modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py' |
| 332 | --- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-08-27 11:19:54 +0000 |
| 333 | +++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-15 19:44:32 +0000 |
| 334 | @@ -10,6 +10,9 @@ |
| 335 | from storm.store import Store |
| 336 | from zope.component import getUtility |
| 337 | |
| 338 | +from lp.archiveuploader.uploadprocessor import ( |
| 339 | + UploadStatusEnum, |
| 340 | + ) |
| 341 | from lp.archiveuploader.tests.test_uploadprocessor import ( |
| 342 | TestUploadProcessorBase, |
| 343 | ) |
| 344 | @@ -17,7 +20,6 @@ |
| 345 | from lp.code.interfaces.sourcepackagerecipebuild import ( |
| 346 | ISourcePackageRecipeBuildSource, |
| 347 | ) |
| 348 | -from lp.soyuz.enums import PackageUploadStatus |
| 349 | |
| 350 | |
| 351 | class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase): |
| 352 | @@ -41,7 +43,6 @@ |
| 353 | |
| 354 | Store.of(self.build).flush() |
| 355 | self.options.context = 'recipe' |
| 356 | - self.options.buildid = self.build.id |
| 357 | |
| 358 | self.uploadprocessor = self.getUploadProcessor( |
| 359 | self.layer.txn) |
| 360 | @@ -54,19 +55,14 @@ |
| 361 | self.assertIs(None, self.build.source_package_release) |
| 362 | self.assertEqual(False, self.build.verifySuccessfulUpload()) |
| 363 | self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id) |
| 364 | - self.uploadprocessor.processChangesFile( |
| 365 | + result = self.uploadprocessor.processChangesFile( |
| 366 | os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'), |
| 367 | - '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id) |
| 368 | + '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id, |
| 369 | + build=self.build) |
| 370 | self.layer.txn.commit() |
| 371 | |
| 372 | - queue_item = self.uploadprocessor.last_processed_upload.queue_root |
| 373 | - self.assertTrue( |
| 374 | - queue_item is not None, |
| 375 | + self.assertEquals(UploadStatusEnum.ACCEPTED, result, |
| 376 | "Source upload failed\nGot: %s" % "\n".join(self.log.lines)) |
| 377 | |
| 378 | - self.assertEqual(PackageUploadStatus.DONE, queue_item.status) |
| 379 | - spr = queue_item.sources[0].sourcepackagerelease |
| 380 | - self.assertEqual(self.build, spr.source_package_recipe_build) |
| 381 | - self.assertEqual(spr, self.build.source_package_release) |
| 382 | self.assertEqual(BuildStatus.FULLYBUILT, self.build.status) |
| 383 | self.assertEqual(True, self.build.verifySuccessfulUpload()) |
| 384 | |
| 385 | === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py' |
| 386 | --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-02 16:28:50 +0000 |
| 387 | +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-15 19:44:32 +0000 |
| 388 | @@ -149,7 +149,7 @@ |
| 389 | |
| 390 | self.options = MockOptions() |
| 391 | self.options.base_fsroot = self.queue_folder |
| 392 | - self.options.builds = True |
| 393 | + self.options.builds = False |
| 394 | self.options.leafname = None |
| 395 | self.options.distro = "ubuntu" |
| 396 | self.options.distroseries = None |
| 397 | @@ -168,9 +168,13 @@ |
| 398 | super(TestUploadProcessorBase, self).tearDown() |
| 399 | |
| 400 | def getUploadProcessor(self, txn): |
| 401 | - def getPolicy(distro): |
| 402 | + def getPolicy(distro, build): |
| 403 | self.options.distro = distro.name |
| 404 | policy = findPolicyByName(self.options.context) |
| 405 | + if self.options.builds: |
| 406 | + policy.distroseries = build.distro_series |
| 407 | + policy.pocket = build.pocket |
| 408 | + policy.archive = build.archive |
| 409 | policy.setOptions(self.options) |
| 410 | return policy |
| 411 | return UploadProcessor( |
| 412 | @@ -284,7 +288,7 @@ |
| 413 | shutil.copytree(upload_dir, target_path) |
| 414 | return os.path.join(self.incoming_folder, queue_entry) |
| 415 | |
| 416 | - def processUpload(self, processor, upload_dir): |
| 417 | + def processUpload(self, processor, upload_dir, build=None): |
| 418 | """Process an upload queue entry directory. |
| 419 | |
| 420 | There is some duplication here with logic in UploadProcessor, |
| 421 | @@ -294,7 +298,8 @@ |
| 422 | results = [] |
| 423 | changes_files = processor.locateChangesFiles(upload_dir) |
| 424 | for changes_file in changes_files: |
| 425 | - result = processor.processChangesFile(upload_dir, changes_file) |
| 426 | + result = processor.processChangesFile( |
| 427 | + upload_dir, changes_file, build=build) |
| 428 | results.append(result) |
| 429 | return results |
| 430 | |
| 431 | @@ -689,10 +694,10 @@ |
| 432 | # Upload and accept a binary for the primary archive source. |
| 433 | shutil.rmtree(upload_dir) |
| 434 | self.options.context = 'buildd' |
| 435 | - self.options.buildid = bar_original_build.id |
| 436 | self.layer.txn.commit() |
| 437 | upload_dir = self.queueUpload("bar_1.0-1_binary") |
| 438 | - self.processUpload(uploadprocessor, upload_dir) |
| 439 | + self.processUpload(uploadprocessor, upload_dir, |
| 440 | + build=bar_original_build) |
| 441 | self.assertEqual( |
| 442 | uploadprocessor.last_processed_upload.is_rejected, False) |
| 443 | bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False) |
| 444 | @@ -720,10 +725,10 @@ |
| 445 | |
| 446 | shutil.rmtree(upload_dir) |
| 447 | self.options.context = 'buildd' |
| 448 | - self.options.buildid = bar_copied_build.id |
| 449 | upload_dir = self.queueUpload( |
| 450 | "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id) |
| 451 | - self.processUpload(uploadprocessor, upload_dir) |
| 452 | + self.processUpload(uploadprocessor, upload_dir, |
| 453 | + build=bar_copied_build) |
| 454 | |
| 455 | # Make sure the upload succeeded. |
| 456 | self.assertEqual( |
| 457 | @@ -792,9 +797,9 @@ |
| 458 | [bar_original_build] = bar_source_pub.createMissingBuilds() |
| 459 | |
| 460 | self.options.context = 'buildd' |
| 461 | - self.options.buildid = bar_original_build.id |
| 462 | upload_dir = self.queueUpload("bar_1.0-1_binary") |
| 463 | - self.processUpload(uploadprocessor, upload_dir) |
| 464 | + self.processUpload( |
| 465 | + uploadprocessor, upload_dir, build=bar_original_build) |
| 466 | [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False) |
| 467 | |
| 468 | # Prepare ubuntu/breezy-autotest to build sources in i386. |
| 469 | @@ -814,10 +819,10 @@ |
| 470 | # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt |
| 471 | # in breezy-autotest context. |
| 472 | shutil.rmtree(upload_dir) |
| 473 | - self.options.buildid = bar_copied_build.id |
| 474 | self.options.distroseries = breezy_autotest.name |
| 475 | upload_dir = self.queueUpload("bar_1.0-1_binary") |
| 476 | - self.processUpload(uploadprocessor, upload_dir) |
| 477 | + self.processUpload(uploadprocessor, upload_dir, |
| 478 | + build=bar_copied_build) |
| 479 | [duplicated_binary_upload] = breezy_autotest.getQueueItems( |
| 480 | status=PackageUploadStatus.NEW, name='bar', |
| 481 | version='1.0-1', exact_match=True) |
| 482 | @@ -855,9 +860,9 @@ |
| 483 | [bar_original_build] = bar_source_pub.getBuilds() |
| 484 | |
| 485 | self.options.context = 'buildd' |
| 486 | - self.options.buildid = bar_original_build.id |
| 487 | upload_dir = self.queueUpload("bar_1.0-2_binary") |
| 488 | - self.processUpload(uploadprocessor, upload_dir) |
| 489 | + self.processUpload(uploadprocessor, upload_dir, |
| 490 | + build=bar_original_build) |
| 491 | [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False) |
| 492 | |
| 493 | # Create a COPY archive for building in non-virtual builds. |
| 494 | @@ -874,10 +879,10 @@ |
| 495 | [bar_copied_build] = bar_copied_source.createMissingBuilds() |
| 496 | |
| 497 | shutil.rmtree(upload_dir) |
| 498 | - self.options.buildid = bar_copied_build.id |
| 499 | upload_dir = self.queueUpload( |
| 500 | "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id) |
| 501 | - self.processUpload(uploadprocessor, upload_dir) |
| 502 | + self.processUpload(uploadprocessor, upload_dir, |
| 503 | + build=bar_copied_build) |
| 504 | |
| 505 | # The binary just uploaded is accepted because it's destined for a |
| 506 | # copy archive and the PRIMARY and the COPY archives are isolated |
| 507 | @@ -1030,9 +1035,9 @@ |
| 508 | self.breezy['i386'], PackagePublishingPocket.RELEASE, |
| 509 | self.ubuntu.main_archive) |
| 510 | self.layer.txn.commit() |
| 511 | - self.options.buildid = foocomm_build.id |
| 512 | upload_dir = self.queueUpload("foocomm_1.0-1_binary") |
| 513 | - self.processUpload(uploadprocessor, upload_dir) |
| 514 | + self.processUpload( |
| 515 | + uploadprocessor, upload_dir, build=foocomm_build) |
| 516 | |
| 517 | contents = [ |
| 518 | "Subject: foocomm_1.0-1_i386.changes rejected", |
| 519 | @@ -1040,10 +1045,8 @@ |
| 520 | "where they don't fit."] |
| 521 | self.assertEmail(contents) |
| 522 | |
| 523 | - # Reset upload queue directory for a new upload and the |
| 524 | - # uploadprocessor buildid option. |
| 525 | + # Reset upload queue directory for a new upload. |
| 526 | shutil.rmtree(upload_dir) |
| 527 | - self.options.buildid = None |
| 528 | |
| 529 | # Now upload a binary package of 'foocomm', letting a new build record |
| 530 | # with appropriate data be created by the uploadprocessor. |
| 531 | |
| 532 | === modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt' |
| 533 | --- lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-08-18 14:03:15 +0000 |
| 534 | +++ lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-09-15 19:44:32 +0000 |
| 535 | @@ -53,23 +53,16 @@ |
| 536 | ... distro = 'ubuntu' |
| 537 | ... distroseries = None |
| 538 | >>> class MockOptions(MockAbstractOptions): |
| 539 | - ... buildid = 1 |
| 540 | + ... builds = True |
| 541 | |
| 542 | >>> ab_opts = MockAbstractOptions() |
| 543 | >>> bd_opts = MockOptions() |
| 544 | |
| 545 | >>> insecure_policy.setOptions(ab_opts) |
| 546 | - >>> insecure_policy.options is ab_opts |
| 547 | - True |
| 548 | >>> insecure_policy.distro.name |
| 549 | u'ubuntu' |
| 550 | >>> buildd_policy.setOptions(ab_opts) |
| 551 | - Traceback (most recent call last): |
| 552 | - ... |
| 553 | - UploadPolicyError: BuildID required for buildd context |
| 554 | >>> buildd_policy.setOptions(bd_opts) |
| 555 | - >>> buildd_policy.options is bd_opts |
| 556 | - True |
| 557 | >>> buildd_policy.distro.name |
| 558 | u'ubuntu' |
| 559 | |
| 560 | |
| 561 | === modified file 'lib/lp/archiveuploader/uploadpolicy.py' |
| 562 | --- lib/lp/archiveuploader/uploadpolicy.py 2010-08-25 13:04:14 +0000 |
| 563 | +++ lib/lp/archiveuploader/uploadpolicy.py 2010-09-15 19:44:32 +0000 |
| 564 | @@ -128,7 +128,7 @@ |
| 565 | raise AssertionError( |
| 566 | "Upload is not sourceful, binaryful or mixed.") |
| 567 | |
| 568 | - def getUploader(self, changes): |
| 569 | + def getUploader(self, changes, build): |
| 570 | """Get the person who is doing the uploading.""" |
| 571 | return changes.signer |
| 572 | |
| 573 | @@ -334,10 +334,7 @@ |
| 574 | |
| 575 | def setOptions(self, options): |
| 576 | AbstractUploadPolicy.setOptions(self, options) |
| 577 | - # We require a buildid to be provided |
| 578 | - if (getattr(options, 'buildid', None) is None and |
| 579 | - not getattr(options, 'builds', False)): |
| 580 | - raise UploadPolicyError("BuildID required for buildd context") |
| 581 | + self.builds = True |
| 582 | |
| 583 | def policySpecificChecks(self, upload): |
| 584 | """The buildd policy should enforce that the buildid matches.""" |
| 585 | |
| 586 | === modified file 'lib/lp/archiveuploader/uploadprocessor.py' |
| 587 | --- lib/lp/archiveuploader/uploadprocessor.py 2010-09-15 19:44:30 +0000 |
| 588 | +++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-15 19:44:32 +0000 |
| 589 | @@ -417,7 +417,7 @@ |
| 590 | "https://help.launchpad.net/Packaging/PPA#Uploading " |
| 591 | "and update your configuration."))) |
| 592 | logger.debug("Finding fresh policy") |
| 593 | - policy = self._getPolicyForDistro(distribution) |
| 594 | + policy = self._getPolicyForDistro(distribution, build) |
| 595 | policy.archive = archive |
| 596 | |
| 597 | # DistroSeries overriding respect the following precedence: |
| 598 | |
| 599 | === modified file 'lib/lp/code/model/sourcepackagerecipebuild.py' |
| 600 | --- lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-09 17:02:33 +0000 |
| 601 | +++ lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-15 19:44:32 +0000 |
| 602 | @@ -83,10 +83,8 @@ |
| 603 | name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME |
| 604 | accepted_type = ArchiveUploadType.SOURCE_ONLY |
| 605 | |
| 606 | - def getUploader(self, changes): |
| 607 | + def getUploader(self, changes, sprb): |
| 608 | """Return the person doing the upload.""" |
| 609 | - build_id = int(getattr(self.options, 'buildid')) |
| 610 | - sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id) |
| 611 | return sprb.requester |
| 612 | |
| 613 | |
| 614 | |
| 615 | === modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt' |
| 616 | --- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-08-04 00:16:44 +0000 |
| 617 | +++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-09-15 19:44:32 +0000 |
| 618 | @@ -162,8 +162,7 @@ |
| 619 | >>> buildd_policy = getPolicy( |
| 620 | ... name='buildd', |
| 621 | ... distro=failedtoupload_candidate.distribution.name, |
| 622 | - ... distroseries=failedtoupload_candidate.distro_series.name, |
| 623 | - ... buildid=failedtoupload_candidate.id) |
| 624 | + ... distroseries=failedtoupload_candidate.distro_series.name) |
| 625 | |
| 626 | >>> cdrkit_bin_upload = NascentUpload.from_changesfile_path( |
| 627 | ... datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'), |
| 628 | @@ -171,7 +170,7 @@ |
| 629 | >>> cdrkit_bin_upload.process() |
| 630 | >>> cdrkit_bin_upload.is_rejected |
| 631 | False |
| 632 | - >>> success = cdrkit_bin_upload.do_accept() |
| 633 | + >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate) |
| 634 | >>> print cdrkit_bin_upload.queue_root.status.name |
| 635 | NEW |
| 636 | |
| 637 | |
| 638 | === modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt' |
| 639 | --- lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-08-24 15:29:01 +0000 |
| 640 | +++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-09-15 19:44:32 +0000 |
| 641 | @@ -74,15 +74,14 @@ |
| 642 | ... dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive) |
| 643 | |
| 644 | >>> buildd_policy = getPolicy( |
| 645 | - ... name='buildd', distro='ubuntu', distroseries='dapper', |
| 646 | - ... buildid=build.id) |
| 647 | + ... name='buildd', distro='ubuntu', distroseries='dapper') |
| 648 | |
| 649 | >>> pmount_upload = NascentUpload.from_changesfile_path( |
| 650 | ... datadir('pmount_0.9.7-2ubuntu2_amd64.changes'), |
| 651 | ... buildd_policy, mock_logger) |
| 652 | DEBUG: Changes file can be unsigned. |
| 653 | |
| 654 | - >>> pmount_upload.process() |
| 655 | + >>> pmount_upload.process(build=build) |
| 656 | DEBUG: Beginning processing. |
| 657 | DEBUG: Verifying the changes file. |
| 658 | DEBUG: Verifying files in upload. |
| 659 | @@ -105,9 +104,8 @@ |
| 660 | >>> print len(dapper_pmount.getLatestTranslationsUploads()) |
| 661 | 0 |
| 662 | |
| 663 | - >>> success = pmount_upload.do_accept() |
| 664 | + >>> success = pmount_upload.do_accept(build=build) |
| 665 | DEBUG: Creating queue entry |
| 666 | - DEBUG: Build ... found |
| 667 | ... |
| 668 | |
| 669 | # And all things worked. |
| 670 | |
| 671 | === modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt' |
| 672 | --- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-08-30 02:07:38 +0000 |
| 673 | +++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-09-15 19:44:32 +0000 |
| 674 | @@ -119,21 +119,17 @@ |
| 675 | >>> from lp.soyuz.scripts.soyuz_process_upload import ( |
| 676 | ... ProcessUpload) |
| 677 | >>> from canonical.testing import LaunchpadZopelessLayer |
| 678 | - >>> def process_uploads(upload_policy, build_id, series, loglevel): |
| 679 | + >>> def process_uploads(upload_policy, series, loglevel): |
| 680 | ... """Simulate process-upload.py script run. |
| 681 | ... |
| 682 | ... :param upload_policy: context in which to consider the upload |
| 683 | ... (equivalent to script's --context option). |
| 684 | - ... :param build_id: build to which to attach this upload. |
| 685 | - ... (equivalent to script's --buildid option). |
| 686 | ... :param series: distro series to give back from. |
| 687 | ... (equivalent to script's --series option). |
| 688 | ... :param loglevel: logging level (as defined in logging module). Any |
| 689 | ... log messages below this level will be suppressed. |
| 690 | ... """ |
| 691 | ... args = [temp_dir, "-C", upload_policy] |
| 692 | - ... if build_id is not None: |
| 693 | - ... args.extend(["-b", build_id]) |
| 694 | ... if series is not None: |
| 695 | ... args.extend(["-s", series]) |
| 696 | ... # Run script under 'uploader' DB user. The dbuser argument to the |
| 697 | @@ -230,11 +226,11 @@ |
| 698 | >>> from lp.services.mail import stub |
| 699 | |
| 700 | >>> def simulate_upload( |
| 701 | - ... leafname, is_new=False, upload_policy='anything', build_id=None, |
| 702 | + ... leafname, is_new=False, upload_policy='anything', |
| 703 | ... series=None, distro="ubuntutest", loglevel=logging.WARN): |
| 704 | ... """Process upload(s). Options are as for process_uploads().""" |
| 705 | ... punt_upload_into_queue(leafname, distro=distro) |
| 706 | - ... process_uploads(upload_policy, build_id, series, loglevel) |
| 707 | + ... process_uploads(upload_policy, series, loglevel) |
| 708 | ... # We seem to be leaving a lock file behind here for some reason. |
| 709 | ... # Naturally it doesn't count as an unprocessed incoming file, which |
| 710 | ... # is what we're really looking for. |
| 711 | @@ -289,19 +285,6 @@ |
| 712 | |
| 713 | >>> simulate_upload('bar_1.0-2') |
| 714 | |
| 715 | -Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id. |
| 716 | - |
| 717 | - >>> simulate_upload( |
| 718 | - ... 'bar_1.0-2_binary', upload_policy="buildd", build_id="2", |
| 719 | - ... loglevel=logging.ERROR) |
| 720 | - log> Exception while accepting: |
| 721 | - Attempt to upload binaries specifying build 2, where they don't fit. |
| 722 | - ... |
| 723 | - Rejected uploads: ['bar_1.0-2_binary'] |
| 724 | - |
| 725 | -Try it again without the bogus build id. This succeeds without |
| 726 | -complaints. |
| 727 | - |
| 728 | >>> simulate_upload('bar_1.0-2_binary') |
| 729 | |
| 730 | Check the rejection of a malicious version of bar package which refers |
| 731 | |
| 732 | === modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py' |
| 733 | --- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-20 20:31:18 +0000 |
| 734 | +++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-09-15 19:44:32 +0000 |
| 735 | @@ -61,11 +61,6 @@ |
| 736 | help="Distro series to give back from.") |
| 737 | |
| 738 | self.parser.add_option( |
| 739 | - "-b", "--buildid", action="store", type="int", dest="buildid", |
| 740 | - metavar="BUILD", |
| 741 | - help="The build ID to which to attach this upload.") |
| 742 | - |
| 743 | - self.parser.add_option( |
| 744 | "-a", "--announce", action="store", dest="announcelist", |
| 745 | metavar="ANNOUNCELIST", help="Override the announcement list") |
| 746 | |
| 747 | @@ -82,10 +77,15 @@ |
| 748 | "%s is not a directory" % self.options.base_fsroot) |
| 749 | |
| 750 | self.logger.debug("Initialising connection.") |
| 751 | - def getPolicy(distro): |
| 752 | + def getPolicy(distro, build): |
| 753 | self.options.distro = distro.name |
| 754 | policy = findPolicyByName(self.options.context) |
| 755 | policy.setOptions(self.options) |
| 756 | + if self.options.builds: |
| 757 | + assert build, "--builds specified but no build" |
| 758 | + policy.distroseries = build.distro_series |
| 759 | + policy.pocket = build.pocket |
| 760 | + policy.archive = build.archive |
| 761 | return policy |
| 762 | processor = UploadProcessor(self.options.base_fsroot, |
| 763 | self.options.dryrun, self.options.nomails, self.options.builds, |
