Merge lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge into lp:launchpad/db-devel
Proposed by Michael Nelson on 2010-05-21
| Status: | Merged |
|---|---|
| Approved by: | Michael Nelson on 2010-05-21 |
| Approved revision: | no longer in the source branch |
| Merged at revision: | 9405 |
| Proposed branch: | lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge |
| Merge into: | lp:launchpad/db-devel |
| Prerequisite: | lp:~michael.nelson/launchpad/567922-build-gen-checkpoint |
| Diff against target: | 1064 lines (+168/-141), 20 files modified |
| To merge this branch: | bzr merge lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge |
| Related bugs: | |

Files modified:

- lib/lp/buildmaster/configure.zcml (+1/-1)
- lib/lp/buildmaster/interfaces/packagebuild.py (+1/-1)
- lib/lp/buildmaster/model/buildbase.py (+8/-7)
- lib/lp/buildmaster/model/buildfarmjob.py (+1/-1)
- lib/lp/buildmaster/model/buildfarmjobbehavior.py (+1/-2)
- lib/lp/buildmaster/model/packagebuild.py (+3/-4)
- lib/lp/buildmaster/tests/test_buildbase.py (+1/-1)
- lib/lp/buildmaster/tests/test_buildqueue.py (+22/-12)
- lib/lp/buildmaster/tests/test_manager.py (+4/-4)
- lib/lp/soyuz/browser/tests/archive-views.txt (+6/-6)
- lib/lp/soyuz/doc/build-estimated-dispatch-time.txt (+4/-4)
- lib/lp/soyuz/doc/buildd-scoring.txt (+4/-2)
- lib/lp/soyuz/doc/buildd-slavescanner.txt (+62/-62)
- lib/lp/soyuz/model/binarypackagebuild.py (+16/-11)
- lib/lp/soyuz/model/binarypackagebuildbehavior.py (+2/-1)
- lib/lp/soyuz/scripts/buildd.py (+4/-4)
- lib/lp/soyuz/scripts/packagecopier.py (+4/-4)
- lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py (+13/-3)
- lib/lp/soyuz/scripts/tests/test_copypackage.py (+10/-10)
- lib/lp/soyuz/scripts/tests/test_populatearchive.py (+1/-1)

| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Abel Deuring (community) | code | 2010-05-21 | Approve on 2010-05-21 |

Review via email:
Commit Message
Description of the Change
This branch is part of a pipeline for:
https:/
https:/
Overview
========
After getting most of the unit tests running, I re-merged a fresh db-devel into my pipeline, resolved the conflicts, fixed the obvious errors, and sent it off to ec2test. This branch starts to fix the new failures.
To test
=======
make schema
bin/test -vv -t test_buildfarmjob -t test_buildqueue -t test_manager -t test_buildd_
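
For orientation before reading the diff: most of the mechanical changes below follow two patterns. First, the old build attribute names are replaced by the new model's names (buildstate → status, buildlog → log, sourcepackagerelease → source_package_release, distroarchseries → distro_arch_series). Second, tests that touch non-interface attributes now unwrap the security proxy first. The sketch below only illustrates those patterns and is not code from the branch; the check_build helper and its assertions are hypothetical.

```python
from zope.security.proxy import removeSecurityProxy

from lp.buildmaster.interfaces.buildbase import BuildStatus


def check_build(build):
    """Hypothetical helper illustrating the renamed build attributes."""
    # New-style names: build.status (was buildstate),
    # build.source_package_release (was sourcepackagerelease),
    # build.distro_arch_series (was distroarchseries).
    assert build.status == BuildStatus.NEEDSBUILD
    name = build.source_package_release.name
    arch_tag = build.distro_arch_series.architecturetag

    # Attributes not exported through the interface are only reachable
    # in tests after removing the security proxy.
    naked_bq = removeSecurityProxy(build.buildqueue_record)
    naked_bq.virtualized = True

    return name, arch_tag
```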
Review: Approve (code)
Preview Diff
| 1 | === modified file 'lib/lp/buildmaster/configure.zcml' |
| 2 | --- lib/lp/buildmaster/configure.zcml 2010-05-27 13:26:43 +0000 |
| 3 | +++ lib/lp/buildmaster/configure.zcml 2010-05-27 13:26:53 +0000 |
| 4 | @@ -87,7 +87,7 @@ |
| 5 | <!-- Grant permissions to write on BuildQueue and Build. --> |
| 6 | <require |
| 7 | permission="zope.Public" |
| 8 | - set_attributes="lastscore builder logtail date_started"/> |
| 9 | + set_attributes="lastscore estimated_duration builder logtail date_started"/> |
| 10 | </class> |
| 11 | |
| 12 | |
| 13 | |
| 14 | === modified file 'lib/lp/buildmaster/interfaces/packagebuild.py' |
| 15 | --- lib/lp/buildmaster/interfaces/packagebuild.py 2010-05-27 13:26:43 +0000 |
| 16 | +++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-05-27 13:26:53 +0000 |
| 17 | @@ -82,7 +82,7 @@ |
| 18 | title=_("Distribution series"), required=True, |
| 19 | description=_("Shortcut for its distribution series."))) |
| 20 | |
| 21 | - def getUploaderCommand(distro_series, upload_leaf, uploader_logfilename): |
| 22 | + def getUploaderCommand(package_build, upload_leaf, uploader_logfilename): |
| 23 | """Get the command to run as the uploader. |
| 24 | |
| 25 | :return: A list of command line arguments, beginning with the |
| 26 | |
| 27 | === modified file 'lib/lp/buildmaster/model/buildbase.py' |
| 28 | --- lib/lp/buildmaster/model/buildbase.py 2010-05-27 13:26:43 +0000 |
| 29 | +++ lib/lp/buildmaster/model/buildbase.py 2010-05-27 13:26:53 +0000 |
| 30 | @@ -63,7 +63,8 @@ |
| 31 | """Return the directory that things will be stored in.""" |
| 32 | return os.path.join(config.builddmaster.root, 'incoming', upload_leaf) |
| 33 | |
| 34 | - def getUploaderCommand(self, upload_leaf, uploader_logfilename): |
| 35 | + @staticmethod |
| 36 | + def getUploaderCommand(build, upload_leaf, uploader_logfilename): |
| 37 | """See `IBuildBase`.""" |
| 38 | root = os.path.abspath(config.builddmaster.root) |
| 39 | uploader_command = list(config.builddmaster.uploader.split()) |
| 40 | @@ -71,12 +72,12 @@ |
| 41 | # add extra arguments for processing a binary upload |
| 42 | extra_args = [ |
| 43 | "--log-file", "%s" % uploader_logfilename, |
| 44 | - "-d", "%s" % self.distribution.name, |
| 45 | - "-s", "%s" % (self.distro_series.name + |
| 46 | - pocketsuffix[self.pocket]), |
| 47 | - "-b", "%s" % self.id, |
| 48 | + "-d", "%s" % build.distribution.name, |
| 49 | + "-s", "%s" % (build.distro_series.name + |
| 50 | + pocketsuffix[build.pocket]), |
| 51 | + "-b", "%s" % build.id, |
| 52 | "-J", "%s" % upload_leaf, |
| 53 | - '--context=%s' % self.policy_name, |
| 54 | + '--context=%s' % build.policy_name, |
| 55 | "%s" % root, |
| 56 | ] |
| 57 | |
| 58 | @@ -204,7 +205,7 @@ |
| 59 | uploader_logfilename = os.path.join( |
| 60 | upload_dir, UPLOAD_LOG_FILENAME) |
| 61 | uploader_command = build.getUploaderCommand( |
| 62 | - upload_leaf, uploader_logfilename) |
| 63 | + build, upload_leaf, uploader_logfilename) |
| 64 | logger.debug("Saving uploader log at '%s'" % uploader_logfilename) |
| 65 | |
| 66 | logger.info("Invoking uploader on %s" % root) |
| 67 | |
| 68 | === modified file 'lib/lp/buildmaster/model/buildfarmjob.py' |
| 69 | --- lib/lp/buildmaster/model/buildfarmjob.py 2010-05-27 13:26:43 +0000 |
| 70 | +++ lib/lp/buildmaster/model/buildfarmjob.py 2010-05-27 13:26:53 +0000 |
| 71 | @@ -62,7 +62,7 @@ |
| 72 | """See `IBuildFarmJobOld`.""" |
| 73 | raise NotImplementedError |
| 74 | |
| 75 | - def getByJob(self): |
| 76 | + def getByJob(self, job): |
| 77 | """See `IBuildFarmJobOld`.""" |
| 78 | raise NotImplementedError |
| 79 | |
| 80 | |
| 81 | === modified file 'lib/lp/buildmaster/model/buildfarmjobbehavior.py' |
| 82 | --- lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-05-27 13:26:43 +0000 |
| 83 | +++ lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-05-27 13:26:53 +0000 |
| 84 | @@ -184,8 +184,7 @@ |
| 85 | # XXX: dsilvers 2005-03-02: Confirm the builder has the right build? |
| 86 | |
| 87 | build = queueItem.specific_job.build |
| 88 | - build.handleStatus( |
| 89 | - build, build_status, librarian, slave_status) |
| 90 | + build.handleStatus(build_status, librarian, slave_status) |
| 91 | |
| 92 | |
| 93 | class IdleBuildBehavior(BuildFarmJobBehaviorBase): |
| 94 | |
| 95 | === modified file 'lib/lp/buildmaster/model/packagebuild.py' |
| 96 | --- lib/lp/buildmaster/model/packagebuild.py 2010-05-27 13:26:43 +0000 |
| 97 | +++ lib/lp/buildmaster/model/packagebuild.py 2010-05-27 13:26:53 +0000 |
| 98 | @@ -122,11 +122,10 @@ |
| 99 | return BuildBase.getUploadDir(upload_leaf) |
| 100 | |
| 101 | @staticmethod |
| 102 | - def getUploaderCommand(package_build, distro_series, upload_leaf, |
| 103 | - upload_logfilename): |
| 104 | + def getUploaderCommand(package_build, upload_leaf, upload_logfilename): |
| 105 | """See `IPackageBuild`.""" |
| 106 | return BuildBase.getUploaderCommand( |
| 107 | - package_build, distro_series, upload_leaf, upload_logfilename) |
| 108 | + package_build, upload_leaf, upload_logfilename) |
| 109 | |
| 110 | @staticmethod |
| 111 | def getLogFromSlave(package_build): |
| 112 | @@ -182,7 +181,7 @@ |
| 113 | |
| 114 | def queueBuild(self, suspended=False): |
| 115 | """See `IPackageBuild`.""" |
| 116 | - return BuildBase.queueBuild(self, suspended=False) |
| 117 | + return BuildBase.queueBuild(self, suspended=suspended) |
| 118 | |
| 119 | def handleStatus(self, status, librarian, slave_status): |
| 120 | """See `IPackageBuild`.""" |
| 121 | |
| 122 | === modified file 'lib/lp/buildmaster/tests/test_buildbase.py' |
| 123 | --- lib/lp/buildmaster/tests/test_buildbase.py 2010-05-27 13:26:43 +0000 |
| 124 | +++ lib/lp/buildmaster/tests/test_buildbase.py 2010-05-27 13:26:53 +0000 |
| 125 | @@ -115,7 +115,7 @@ |
| 126 | os.path.abspath(config.builddmaster.root), |
| 127 | ]) |
| 128 | uploader_command = build_base.getUploaderCommand( |
| 129 | - upload_leaf, log_file) |
| 130 | + build_base, upload_leaf, log_file) |
| 131 | self.assertEqual(config_args, uploader_command) |
| 132 | |
| 133 | |
| 134 | |
| 135 | === modified file 'lib/lp/buildmaster/tests/test_buildqueue.py' |
| 136 | --- lib/lp/buildmaster/tests/test_buildqueue.py 2010-04-21 11:51:26 +0000 |
| 137 | +++ lib/lp/buildmaster/tests/test_buildqueue.py 2010-05-27 13:26:53 +0000 |
| 138 | @@ -11,6 +11,7 @@ |
| 139 | from zope import component |
| 140 | from zope.component import getGlobalSiteManager, getUtility |
| 141 | from zope.interface.verify import verifyObject |
| 142 | +from zope.security.proxy import removeSecurityProxy |
| 143 | |
| 144 | from canonical.launchpad.webapp.interfaces import ( |
| 145 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) |
| 146 | @@ -45,7 +46,7 @@ |
| 147 | for build in test.builds: |
| 148 | bq = build.buildqueue_record |
| 149 | source = None |
| 150 | - for attr in ('sourcepackagerelease', 'sourcepackagename'): |
| 151 | + for attr in ('source_package_release', 'sourcepackagename'): |
| 152 | source = getattr(build, attr, None) |
| 153 | if source is not None: |
| 154 | break |
| 155 | @@ -105,7 +106,7 @@ |
| 156 | # Monkey-patch BuildQueueSet._now() so it returns a constant time stamp |
| 157 | # that's not too far in the future. This avoids spurious test failures. |
| 158 | monkey_patch_the_now_property(bq) |
| 159 | - delay = bq._estimateTimeToNextBuilder() |
| 160 | + delay = removeSecurityProxy(bq)._estimateTimeToNextBuilder() |
| 161 | test.assertTrue( |
| 162 | delay <= min_time, |
| 163 | "Wrong min time to next available builder (%s > %s)" |
| 164 | @@ -122,7 +123,8 @@ |
| 165 | def check_delay_for_job(test, the_job, delay): |
| 166 | # Obtain the builder statistics pertaining to this job. |
| 167 | builder_data = get_builder_data() |
| 168 | - estimated_delay = the_job._estimateJobDelay(builder_data) |
| 169 | + estimated_delay = removeSecurityProxy(the_job)._estimateJobDelay( |
| 170 | + builder_data) |
| 171 | test.assertEqual(delay, estimated_delay) |
| 172 | |
| 173 | |
| 174 | @@ -144,12 +146,13 @@ |
| 175 | This avoids spurious test failures. |
| 176 | """ |
| 177 | # Use the date/time the job started if available. |
| 178 | + naked_buildqueue = removeSecurityProxy(buildqueue) |
| 179 | if buildqueue.job.date_started: |
| 180 | time_stamp = buildqueue.job.date_started |
| 181 | else: |
| 182 | - time_stamp = buildqueue._now() |
| 183 | + time_stamp = naked_buildqueue._now() |
| 184 | |
| 185 | - buildqueue._now = FakeMethod(result=time_stamp) |
| 186 | + naked_buildqueue._now = FakeMethod(result=time_stamp) |
| 187 | return time_stamp |
| 188 | |
| 189 | |
| 190 | @@ -475,7 +478,10 @@ |
| 191 | # The build in question is an x86/native one. |
| 192 | self.assertEqual(self.x86_proc.id, build.processor.id) |
| 193 | self.assertEqual(False, build.is_virtualized) |
| 194 | - bq = build.buildqueue_record |
| 195 | + |
| 196 | + # To test this non-interface method, we need to remove the |
| 197 | + # security proxy. |
| 198 | + bq = removeSecurityProxy(build.buildqueue_record) |
| 199 | builder_stats = get_builder_data() |
| 200 | # We have 4 x86 native builders. |
| 201 | self.assertEqual( |
| 202 | @@ -511,7 +517,7 @@ |
| 203 | # will be free again. |
| 204 | build, bq = find_job(self, 'flex') |
| 205 | bq.reset() |
| 206 | - free_count = bq._getFreeBuildersCount( |
| 207 | + free_count = removeSecurityProxy(bq)._getFreeBuildersCount( |
| 208 | build.processor, build.is_virtualized) |
| 209 | self.assertEqual(1, free_count) |
| 210 | |
| 211 | @@ -1095,7 +1101,7 @@ |
| 212 | for build in self.builds: |
| 213 | bq = build.buildqueue_record |
| 214 | if bq.processor == self.hppa_proc: |
| 215 | - bq.virtualized = True |
| 216 | + removeSecurityProxy(bq).virtualized = True |
| 217 | job = self.factory.makeSourcePackageRecipeBuildJob( |
| 218 | virtualized=True, estimated_duration=332, |
| 219 | sourcename='xxr-openssh-client', score=1050) |
| 220 | @@ -1111,7 +1117,8 @@ |
| 221 | |
| 222 | flex_build, flex_job = find_job(self, 'flex', 'hppa') |
| 223 | # The head job platform is the one of job #21 (xxr-openssh-client). |
| 224 | - self.assertEquals((None, True), flex_job._getHeadJobPlatform()) |
| 225 | + self.assertEquals( |
| 226 | + (None, True), removeSecurityProxy(flex_job)._getHeadJobPlatform()) |
| 227 | # The delay will be 900 (= 15*60) + 332 seconds |
| 228 | check_delay_for_job(self, flex_job, 1232) |
| 229 | |
| 230 | @@ -1133,7 +1140,8 @@ |
| 231 | # 20, xx-recipe-zsh, p: None, v:False e:0:03:42 *** s: 1053 |
| 232 | |
| 233 | # The newly added 'xxr-gwibber' job is the new head job now. |
| 234 | - self.assertEquals((None, None), flex_job._getHeadJobPlatform()) |
| 235 | + self.assertEquals( |
| 236 | + (None, None), removeSecurityProxy(flex_job)._getHeadJobPlatform()) |
| 237 | # The newly added 'xxr-gwibber' job now weighs in as well and the |
| 238 | # delay is 900 (= 15*60) + (332+111)/2 seconds |
| 239 | check_delay_for_job(self, flex_job, 1121) |
| 240 | @@ -1142,7 +1150,9 @@ |
| 241 | # 'xxr-openssh-client' jobs since the 'virtualized' values do not |
| 242 | # match. |
| 243 | flex_build, flex_job = find_job(self, 'flex', '386') |
| 244 | - self.assertEquals((None, False), flex_job._getHeadJobPlatform()) |
| 245 | + self.assertEquals( |
| 246 | + (None, False), |
| 247 | + removeSecurityProxy(flex_job)._getHeadJobPlatform()) |
| 248 | # delay is 960 (= 16*60) + 222 seconds |
| 249 | check_delay_for_job(self, flex_job, 1182) |
| 250 | |
| 251 | @@ -1222,7 +1232,7 @@ |
| 252 | for build in self.builds: |
| 253 | bq = build.buildqueue_record |
| 254 | if bq.processor == self.x86_proc: |
| 255 | - bq.virtualized = True |
| 256 | + removeSecurityProxy(bq).virtualized = True |
| 257 | |
| 258 | def test_pending_jobs_only(self): |
| 259 | # Let's see the assertion fail for a job that's not pending any more. |
| 260 | |
| 261 | === modified file 'lib/lp/buildmaster/tests/test_manager.py' |
| 262 | --- lib/lp/buildmaster/tests/test_manager.py 2010-04-23 13:42:50 +0000 |
| 263 | +++ lib/lp/buildmaster/tests/test_manager.py 2010-05-27 13:26:53 +0000 |
| 264 | @@ -586,7 +586,7 @@ |
| 265 | self.assertTrue(job.date_started is not None) |
| 266 | self.assertEqual(job.job.status, JobStatus.RUNNING) |
| 267 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
| 268 | - self.assertEqual(build.buildstate, BuildStatus.BUILDING) |
| 269 | + self.assertEqual(build.status, BuildStatus.BUILDING) |
| 270 | self.assertEqual(job.logtail, logtail) |
| 271 | |
| 272 | def _getManager(self): |
| 273 | @@ -710,7 +710,7 @@ |
| 274 | self.assertTrue(job.builder is None) |
| 275 | self.assertTrue(job.date_started is None) |
| 276 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
| 277 | - self.assertEqual(build.buildstate, BuildStatus.NEEDSBUILD) |
| 278 | + self.assertEqual(build.status, BuildStatus.NEEDSBUILD) |
| 279 | |
| 280 | def testScanRescuesJobFromBrokenBuilder(self): |
| 281 | # The job assigned to a broken builder is rescued. |
| 282 | @@ -798,7 +798,7 @@ |
| 283 | 'i386 build of mozilla-firefox 0.9 in ubuntu hoary RELEASE', |
| 284 | build.title) |
| 285 | |
| 286 | - self.assertEqual('BUILDING', build.buildstate.name) |
| 287 | + self.assertEqual('BUILDING', build.status.name) |
| 288 | self.assertNotEqual(None, job.builder) |
| 289 | self.assertNotEqual(None, job.date_started) |
| 290 | self.assertNotEqual(None, job.logtail) |
| 291 | @@ -811,7 +811,7 @@ |
| 292 | """Re-fetch the `IBuildQueue` record and check if it's clean.""" |
| 293 | job = getUtility(IBuildQueueSet).get(job_id) |
| 294 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
| 295 | - self.assertEqual('NEEDSBUILD', build.buildstate.name) |
| 296 | + self.assertEqual('NEEDSBUILD', build.status.name) |
| 297 | self.assertEqual(None, job.builder) |
| 298 | self.assertEqual(None, job.date_started) |
| 299 | self.assertEqual(None, job.logtail) |
| 300 | |
| 301 | === modified file 'lib/lp/soyuz/browser/tests/archive-views.txt' |
| 302 | --- lib/lp/soyuz/browser/tests/archive-views.txt 2010-04-29 09:53:56 +0000 |
| 303 | +++ lib/lp/soyuz/browser/tests/archive-views.txt 2010-05-27 13:26:53 +0000 |
| 304 | @@ -361,7 +361,7 @@ |
| 305 | >>> warty_hppa = getUtility(IDistributionSet)['ubuntu']['warty']['hppa'] |
| 306 | >>> source = view.filtered_sources[0] |
| 307 | >>> ignore = source.sourcepackagerelease.createBuild( |
| 308 | - ... distroarchseries=warty_hppa, archive=view.context, |
| 309 | + ... distro_arch_series=warty_hppa, archive=view.context, |
| 310 | ... pocket=source.pocket) |
| 311 | >>> builds = getUtility(IBinaryPackageBuildSet).getBuildsForArchive( |
| 312 | ... view.context) |
| 313 | @@ -374,9 +374,9 @@ |
| 314 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE |
| 315 | |
| 316 | >>> login('foo.bar@canonical.com') |
| 317 | - >>> builds[0].buildstate = BuildStatus.NEEDSBUILD |
| 318 | - >>> builds[1].buildstate = BuildStatus.BUILDING |
| 319 | - >>> builds[2].buildstate = BuildStatus.BUILDING |
| 320 | + >>> builds[0].status = BuildStatus.NEEDSBUILD |
| 321 | + >>> builds[1].status = BuildStatus.BUILDING |
| 322 | + >>> builds[2].status = BuildStatus.BUILDING |
| 323 | >>> login(ANONYMOUS) |
| 324 | |
| 325 | >>> view.num_pkgs_building |
| 326 | @@ -386,7 +386,7 @@ |
| 327 | of packages that are currently building. |
| 328 | |
| 329 | >>> login('foo.bar@canonical.com') |
| 330 | - >>> builds[4].buildstate = BuildStatus.NEEDSBUILD |
| 331 | + >>> builds[4].status = BuildStatus.NEEDSBUILD |
| 332 | >>> login(ANONYMOUS) |
| 333 | >>> view.num_pkgs_building |
| 334 | {'building': 2, 'waiting': 1, 'total': 3} |
| 335 | @@ -395,7 +395,7 @@ |
| 336 | to be building: |
| 337 | |
| 338 | >>> login('foo.bar@canonical.com') |
| 339 | - >>> builds[4].buildstate = BuildStatus.BUILDING |
| 340 | + >>> builds[4].status = BuildStatus.BUILDING |
| 341 | >>> login(ANONYMOUS) |
| 342 | >>> view.num_pkgs_building |
| 343 | {'building': 3, 'waiting': 0, 'total': 3} |
| 344 | |
| 345 | === modified file 'lib/lp/soyuz/doc/build-estimated-dispatch-time.txt' |
| 346 | --- lib/lp/soyuz/doc/build-estimated-dispatch-time.txt 2010-04-09 15:46:09 +0000 |
| 347 | +++ lib/lp/soyuz/doc/build-estimated-dispatch-time.txt 2010-05-27 13:26:53 +0000 |
| 348 | @@ -50,7 +50,7 @@ |
| 349 | ... hoary.main_archive) |
| 350 | >>> alsa_bqueue = alsa_build.queueBuild() |
| 351 | >>> alsa_bqueue.lastscore = 500 |
| 352 | - >>> alsa_build.buildstate = BuildStatus.NEEDSBUILD |
| 353 | + >>> alsa_build.status = BuildStatus.NEEDSBUILD |
| 354 | |
| 355 | Access the currently building job via the builder. |
| 356 | |
| 357 | @@ -64,7 +64,7 @@ |
| 358 | |
| 359 | Make sure the job at hand is currently being built. |
| 360 | |
| 361 | - >>> cur_build.buildstate == BuildStatus.BUILDING |
| 362 | + >>> cur_build.status == BuildStatus.BUILDING |
| 363 | True |
| 364 | |
| 365 | The start time estimation mechanism for a pending job N depends on |
| 366 | @@ -124,7 +124,7 @@ |
| 367 | >>> pmount_bqueue.lastscore = 66 |
| 368 | >>> removeSecurityProxy(pmount_bqueue).estimated_duration = ( |
| 369 | ... timedelta(minutes=12)) |
| 370 | - >>> pmount_build.buildstate = BuildStatus.NEEDSBUILD |
| 371 | + >>> pmount_build.status = BuildStatus.NEEDSBUILD |
| 372 | |
| 373 | Followed by another build for the 'iceweasel' source package that is added |
| 374 | to mark's PPA. |
| 375 | @@ -142,7 +142,7 @@ |
| 376 | >>> removeSecurityProxy(iceweasel_bqueue).estimated_duration = ( |
| 377 | ... timedelta(minutes=48)) |
| 378 | >>> iceweasel_bqueue.lastscore = 666 |
| 379 | - >>> iceweasel_build.buildstate = BuildStatus.NEEDSBUILD |
| 380 | + >>> iceweasel_build.status = BuildStatus.NEEDSBUILD |
| 381 | |
| 382 | Since the 'iceweasel' build has a higher score (666) than the 'pmount' |
| 383 | build (66) its estimated dispatch time is essentially "now". |
| 384 | |
| 385 | === modified file 'lib/lp/soyuz/doc/buildd-scoring.txt' |
| 386 | --- lib/lp/soyuz/doc/buildd-scoring.txt 2010-04-09 15:46:09 +0000 |
| 387 | +++ lib/lp/soyuz/doc/buildd-scoring.txt 2010-05-27 13:26:53 +0000 |
| 388 | @@ -65,8 +65,10 @@ |
| 389 | ... hoary386, pub.pocket, pub.archive) |
| 390 | ... |
| 391 | ... build_queue = build.queueBuild() |
| 392 | - ... build_queue.job.date_created = date_created |
| 393 | - ... build_queue.manual = manual |
| 394 | + ... from zope.security.proxy import removeSecurityProxy |
| 395 | + ... naked_build_queue = removeSecurityProxy(build_queue) |
| 396 | + ... naked_build_queue.job.date_created = date_created |
| 397 | + ... naked_build_queue.manual = manual |
| 398 | ... |
| 399 | ... return build_queue |
| 400 | |
| 401 | |
| 402 | === modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt' |
| 403 | --- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-05-27 13:26:43 +0000 |
| 404 | +++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-05-27 13:26:53 +0000 |
| 405 | @@ -90,8 +90,7 @@ |
| 406 | |
| 407 | >>> default_start = datetime.datetime(2005, 1, 1, 8, 0, 0, tzinfo=UTC) |
| 408 | >>> def setupBuildQueue(build_queue, builder): |
| 409 | - ... build_queue.builder = builder |
| 410 | - ... build_queue.setDateStarted(default_start) |
| 411 | + ... build_queue.markAsBuilding(builder) |
| 412 | |
| 413 | Remove any previous buildmaster ROOT directory, to avoid any garbage |
| 414 | lock conflict (it would be recreated automatically if necessary) |
| 415 | @@ -105,7 +104,7 @@ |
| 416 | Let's check the procedures to verify/collect running build process: |
| 417 | |
| 418 | WAITING - PACKAGEFAIL -> Package has failed to build, notice from |
| 419 | - builder is stored, but Build.buildstate is mark as 'Failed to Build': |
| 420 | + builder is stored, but Build.status is mark as 'Failed to Build': |
| 421 | |
| 422 | Get a builder from the sample data: |
| 423 | |
| 424 | @@ -139,19 +138,19 @@ |
| 425 | >>> a_builder.updateBuild(bqItem3) |
| 426 | >>> build.builder is not None |
| 427 | True |
| 428 | - >>> build.datebuilt is not None |
| 429 | - True |
| 430 | - >>> build.buildduration is not None |
| 431 | - True |
| 432 | - >>> build.buildlog is not None |
| 433 | + >>> build.date_finished is not None |
| 434 | + True |
| 435 | + >>> build.duration is not None |
| 436 | + True |
| 437 | + >>> build.log is not None |
| 438 | True |
| 439 | >>> check_mail_sent(last_stub_mail_count) |
| 440 | True |
| 441 | - >>> build.buildstate.title |
| 442 | + >>> build.status.title |
| 443 | 'Failed to build' |
| 444 | |
| 445 | WAITING - DEPWAIT -> a required dependency is missing, again notice |
| 446 | -from builder, but Build.buildstate has the right state: |
| 447 | +from builder, but Build.status has the right state: |
| 448 | |
| 449 | >>> bqItem4 = a_build.queueBuild() |
| 450 | >>> setupBuildQueue(bqItem4, a_builder) |
| 451 | @@ -170,17 +169,17 @@ |
| 452 | CRITICAL:slave-scanner:***** bob is MANUALDEPWAIT ***** |
| 453 | >>> build.builder is not None |
| 454 | True |
| 455 | - >>> build.datebuilt is not None |
| 456 | - True |
| 457 | - >>> build.buildduration is not None |
| 458 | - True |
| 459 | - >>> build.buildlog is not None |
| 460 | + >>> build.date_finished is not None |
| 461 | + True |
| 462 | + >>> build.duration is not None |
| 463 | + True |
| 464 | + >>> build.log is not None |
| 465 | True |
| 466 | >>> check_mail_sent(last_stub_mail_count) |
| 467 | False |
| 468 | >>> build.dependencies |
| 469 | u'baz (>= 1.0.1)' |
| 470 | - >>> build.buildstate.title |
| 471 | + >>> build.status.title |
| 472 | 'Dependency wait' |
| 473 | |
| 474 | WAITING - CHROOTFAIL -> the Chroot for this distroseries is damage, nor |
| 475 | @@ -199,15 +198,15 @@ |
| 476 | CRITICAL:slave-scanner:***** bob is CHROOTWAIT ***** |
| 477 | >>> build.builder is not None |
| 478 | True |
| 479 | - >>> build.datebuilt is not None |
| 480 | - True |
| 481 | - >>> build.buildduration is not None |
| 482 | - True |
| 483 | - >>> build.buildlog is not None |
| 484 | + >>> build.date_finished is not None |
| 485 | + True |
| 486 | + >>> build.duration is not None |
| 487 | + True |
| 488 | + >>> build.log is not None |
| 489 | True |
| 490 | >>> check_mail_sent(last_stub_mail_count) |
| 491 | True |
| 492 | - >>> build.buildstate.title |
| 493 | + >>> build.status.title |
| 494 | 'Chroot problem' |
| 495 | |
| 496 | WAITING - BUILDERFAIL -> builder has failed by internal error, job is available for next build round: |
| 497 | @@ -234,7 +233,7 @@ |
| 498 | >>> check_mail_sent(last_stub_mail_count) |
| 499 | False |
| 500 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem6) |
| 501 | - >>> print build.buildstate.title |
| 502 | + >>> print build.status.title |
| 503 | Needs building |
| 504 | >>> job = bqItem6.specific_job.job |
| 505 | >>> print job.status.title |
| 506 | @@ -286,7 +285,7 @@ |
| 507 | >>> a_builder.updateBuild(bqItem8) |
| 508 | >>> bqItem8.builder is None |
| 509 | True |
| 510 | - >>> print bqItem8.specific_job.build.buildstate.name |
| 511 | + >>> print bqItem8.specific_job.build.status.name |
| 512 | NEEDSBUILD |
| 513 | |
| 514 | Cleanup in preparation for the next test: |
| 515 | @@ -347,15 +346,15 @@ |
| 516 | WARNING:slave-scanner:Build ... upload failed. |
| 517 | >>> build.builder is not None |
| 518 | True |
| 519 | - >>> build.datebuilt is not None |
| 520 | - True |
| 521 | - >>> build.buildduration is not None |
| 522 | - True |
| 523 | - >>> build.buildlog is not None |
| 524 | + >>> build.date_finished is not None |
| 525 | + True |
| 526 | + >>> build.duration is not None |
| 527 | + True |
| 528 | + >>> build.log is not None |
| 529 | True |
| 530 | >>> check_mail_sent(last_stub_mail_count) |
| 531 | True |
| 532 | - >>> build.buildstate.title |
| 533 | + >>> build.status.title |
| 534 | 'Failed to upload' |
| 535 | |
| 536 | Let's check the emails generated by this 'failure' |
| 537 | @@ -380,7 +379,7 @@ |
| 538 | ... |
| 539 | X-Launchpad-Build-State: FAILEDTOUPLOAD |
| 540 | ... |
| 541 | - * Build Log: http://.../...i386.mozilla-firefox_0.9_NEEDSBUILD.txt.gz |
| 542 | + * Build Log: http://.../...i386.mozilla-firefox_0.9_BUILDING.txt.gz |
| 543 | ... |
| 544 | Upload log: |
| 545 | INFO Creating lockfile:... |
| 546 | @@ -396,7 +395,7 @@ |
| 547 | >>> build.upload_log is not None |
| 548 | True |
| 549 | |
| 550 | -What we can clearly notice is that the buildlog is still containing |
| 551 | +What we can clearly notice is that the log is still containing |
| 552 | the old build state (BUILDING) in its name. This is a minor problem |
| 553 | that can be sorted by modifying the execution order of procedures |
| 554 | inside Buildergroup.buildStatus_OK method. |
| 555 | @@ -431,7 +430,7 @@ |
| 556 | the build record to FULLYBUILT, as the process-upload would do: |
| 557 | |
| 558 | >>> from lp.buildmaster.interfaces.buildbase import BuildStatus |
| 559 | - >>> build.buildstate = BuildStatus.FULLYBUILT |
| 560 | + >>> build.status = BuildStatus.FULLYBUILT |
| 561 | |
| 562 | Now the updateBuild should recognize this build record as a |
| 563 | Successfully built and uploaded procedure, not sending any |
| 564 | @@ -440,13 +439,13 @@ |
| 565 | >>> a_builder.updateBuild(bqItem10) |
| 566 | >>> build.builder is not None |
| 567 | True |
| 568 | - >>> build.datebuilt is not None |
| 569 | - True |
| 570 | - >>> build.buildduration is not None |
| 571 | - True |
| 572 | - >>> build.buildlog is not None |
| 573 | - True |
| 574 | - >>> build.buildstate.title |
| 575 | + >>> build.date_finished is not None |
| 576 | + True |
| 577 | + >>> build.duration is not None |
| 578 | + True |
| 579 | + >>> build.log is not None |
| 580 | + True |
| 581 | + >>> build.status.title |
| 582 | 'Successfully built' |
| 583 | >>> check_mail_sent(last_stub_mail_count) |
| 584 | False |
| 585 | @@ -484,7 +483,7 @@ |
| 586 | >>> check_mail_sent(last_stub_mail_count) |
| 587 | False |
| 588 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem11) |
| 589 | - >>> print build.buildstate.title |
| 590 | + >>> print build.status.title |
| 591 | Needs building |
| 592 | >>> job = bqItem11.specific_job.job |
| 593 | >>> print job.status.title |
| 594 | @@ -517,23 +516,24 @@ |
| 595 | |
| 596 | >>> bqItem12.builder = None |
| 597 | |
| 598 | -The buildlog is collected and compressed locally using gzip algorithm, |
| 599 | +The log is collected and compressed locally using gzip algorithm, |
| 600 | let's see how this method works: |
| 601 | |
| 602 | >>> bqItem10 = getUtility(IBinaryPackageBuildSet).getByBuildID( |
| 603 | ... 6).queueBuild() |
| 604 | >>> setupBuildQueue(bqItem10, a_builder) |
| 605 | + >>> build = bqItem10.specific_job.build |
| 606 | + >>> build.status = BuildStatus.FULLYBUILT |
| 607 | >>> bqItem10.builder.setSlaveForTesting(WaitingSlave('BuildStatus.OK')) |
| 608 | |
| 609 | -Before collecting and processing the buildlog we will store the files |
| 610 | +Before collecting and processing the log we will store the files |
| 611 | already created in /tmp so we can verify later that this mechanism is |
| 612 | not leaving any temporary file behind. See bug #172798. |
| 613 | |
| 614 | >>> old_tmps = os.listdir('/tmp') |
| 615 | |
| 616 | -Collect and process the buildlog. |
| 617 | +Collect and process the log. |
| 618 | |
| 619 | - >>> build = bqItem10.specific_job.build |
| 620 | >>> logfile_alias = build.getLogFromSlave(build) |
| 621 | |
| 622 | Audit the /tmp for lost temporary files, there should not be any new |
| 623 | @@ -543,7 +543,7 @@ |
| 624 | >>> sorted(os.listdir('/tmp')) == sorted(old_tmps) |
| 625 | True |
| 626 | |
| 627 | -The buildlog was compressed and directly transferred to Librarian. |
| 628 | +The log was compressed and directly transferred to Librarian. |
| 629 | |
| 630 | >>> from canonical.launchpad.interfaces import ILibraryFileAliasSet |
| 631 | >>> logfile = getUtility(ILibraryFileAliasSet)[logfile_alias] |
| 632 | @@ -558,7 +558,7 @@ |
| 633 | |
| 634 | >>> commit() |
| 635 | |
| 636 | -Check if the buildlog content is correct and accessible via the |
| 637 | +Check if the log content is correct and accessible via the |
| 638 | library file directly and via Librarian http front-end. |
| 639 | |
| 640 | Since LibrarianFileAlias does not implement required attributes for |
| 641 | @@ -592,7 +592,7 @@ |
| 642 | |
| 643 | >>> os.remove(fname) |
| 644 | |
| 645 | -The Librarian serves buildlog files with 'gzip' content-encoding and |
| 646 | +The Librarian serves log files with 'gzip' content-encoding and |
| 647 | 'text/plain' content-type. This combination instructs the browser to |
| 648 | decompress the file and display it inline, which makes it easier for |
| 649 | users to view it. |
| 650 | @@ -700,7 +700,7 @@ |
| 651 | >>> current_job = a_builder.currentjob |
| 652 | >>> resurrect_build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
| 653 | ... current_job) |
| 654 | - >>> resurrect_build.buildstate = BuildStatus.NEEDSBUILD |
| 655 | + >>> resurrect_build.status = BuildStatus.NEEDSBUILD |
| 656 | >>> syncUpdate(resurrect_build) |
| 657 | >>> current_job.builder = None |
| 658 | >>> current_job.setDateStarted(None) |
| 659 | @@ -713,7 +713,7 @@ |
| 660 | >>> old_candidate = removeSecurityProxy(a_builder)._findBuildCandidate() |
| 661 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
| 662 | ... old_candidate) |
| 663 | - >>> print build.buildstate.name |
| 664 | + >>> print build.status.name |
| 665 | NEEDSBUILD |
| 666 | |
| 667 | The 'candidate' is constant until we dispatch it. |
| 668 | @@ -747,7 +747,7 @@ |
| 669 | >>> from canonical.launchpad.interfaces import PackagePublishingStatus |
| 670 | >>> from canonical.testing.layers import LaunchpadZopelessLayer |
| 671 | |
| 672 | - >>> spr = build.sourcepackagerelease |
| 673 | + >>> spr = build.source_package_release |
| 674 | >>> pub = removeSecurityProxy(build).current_source_publication |
| 675 | >>> commit() |
| 676 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') |
| 677 | @@ -764,7 +764,7 @@ |
| 678 | Because the 'previous' candidate was marked as superseded, so it's not |
| 679 | part of the candidates list anymore. |
| 680 | |
| 681 | - >>> print build.buildstate.name |
| 682 | + >>> print build.status.name |
| 683 | SUPERSEDED |
| 684 | |
| 685 | If the candidate is for a private build whose source has not been |
| 686 | @@ -825,7 +825,7 @@ |
| 687 | |
| 688 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
| 689 | ... current_job) |
| 690 | - >>> print build.buildstate.name |
| 691 | + >>> print build.status.name |
| 692 | NEEDSBUILD |
| 693 | |
| 694 | >>> another_candidate = removeSecurityProxy( |
| 695 | @@ -833,7 +833,7 @@ |
| 696 | >>> print another_candidate |
| 697 | None |
| 698 | |
| 699 | - >>> print build.buildstate.name |
| 700 | + >>> print build.status.name |
| 701 | SUPERSEDED |
| 702 | |
| 703 | We'll reset the archive back to non-private for further tests: |
| 704 | @@ -1060,7 +1060,7 @@ |
| 705 | >>> login('foo.bar@canonical.com') |
| 706 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
| 707 | ... candidate) |
| 708 | - >>> for build_file in build.sourcepackagerelease.files: |
| 709 | + >>> for build_file in build.source_package_release.files: |
| 710 | ... removeSecurityProxy(build_file).libraryfile.restricted = True |
| 711 | >>> private_ppa = factory.makeArchive( |
| 712 | ... owner=cprov_archive.owner, name='pppa', private=True, |
| 713 | @@ -1121,7 +1121,7 @@ |
| 714 | We will create an ancestry in the primary archive target to the 'main' |
| 715 | component and this time the dispatching will follow that component. |
| 716 | |
| 717 | - >>> sourcename = build.sourcepackagerelease.name |
| 718 | + >>> sourcename = build.source_package_release.name |
| 719 | |
| 720 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') |
| 721 | >>> login('foo.bar@canonical.com') |
| 722 | @@ -1147,7 +1147,7 @@ |
| 723 | |
| 724 | >>> candidate.destroySelf() |
| 725 | |
| 726 | -Since this is a build in a private archive, the buildlog was uploaded to |
| 727 | +Since this is a build in a private archive, the log was uploaded to |
| 728 | the restricted librarian. |
| 729 | |
| 730 | >>> candidate = a_build.queueBuild() |
| 731 | @@ -1161,7 +1161,7 @@ |
| 732 | >>> build.archive.private |
| 733 | True |
| 734 | |
| 735 | - >>> lfa = build.buildlog |
| 736 | + >>> lfa = build.log |
| 737 | >>> lfa.restricted |
| 738 | True |
| 739 | >>> print lfa.filename |
| 740 | @@ -1177,7 +1177,7 @@ |
| 741 | ... |
| 742 | DownloadFailed: Alias ... cannot be downloaded from this client. |
| 743 | |
| 744 | -Accessing the buildlog via the restricted librarian will work as expected. |
| 745 | +Accessing the log via the restricted librarian will work as expected. |
| 746 | |
| 747 | >>> import urlparse |
| 748 | >>> from canonical.librarian.interfaces import IRestrictedLibrarianClient |
| 749 | @@ -1199,7 +1199,7 @@ |
| 750 | |
| 751 | >>> removeSecurityProxy(build).archive = cprov_archive |
| 752 | >>> cprov_archive.require_virtualized = True |
| 753 | - >>> for build_file in a_build.sourcepackagerelease.files: |
| 754 | + >>> for build_file in a_build.source_package_release.files: |
| 755 | ... removeSecurityProxy(build_file).libraryfile.restricted = False |
| 756 | >>> mark_archive = getUtility(IPersonSet).getByName('mark').archive |
| 757 | |
| 758 | @@ -1289,7 +1289,7 @@ |
| 759 | >>> hoary_evo = hoary.getSourcePackage( |
| 760 | ... 'evolution').currentrelease.sourcepackagerelease |
| 761 | >>> updates_build = hoary_evo.createBuild( |
| 762 | - ... distroarchseries=hoary_i386, |
| 763 | + ... distro_arch_series=hoary_i386, |
| 764 | ... pocket=PackagePublishingPocket.UPDATES, |
| 765 | ... processor=hoary_i386.default_processor, |
| 766 | ... archive=hoary_i386.main_archive) |
| 767 | @@ -1387,7 +1387,7 @@ |
| 768 | >>> a_builder.currentjob.destroySelf() |
| 769 | |
| 770 | >>> bqItem3 = a_build.queueBuild() |
| 771 | - >>> removeSecurityProxy(build).buildstate = ( |
| 772 | + >>> removeSecurityProxy(build).status = ( |
| 773 | ... BuildStatus.NEEDSBUILD) |
| 774 | >>> removeSecurityProxy(build).pocket = ( |
| 775 | ... PackagePublishingPocket.SECURITY) |
| 776 | |
| 777 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' |
| 778 | --- lib/lp/soyuz/model/binarypackagebuild.py 2010-05-27 13:26:43 +0000 |
| 779 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2010-05-27 13:26:53 +0000 |
| 780 | @@ -20,7 +20,6 @@ |
| 781 | Desc, In, Join, LeftJoin) |
| 782 | from storm.store import Store |
| 783 | from sqlobject import SQLObjectNotFound |
| 784 | -from sqlobject.sqlbuilder import AND, IN |
| 785 | |
| 786 | from canonical.config import config |
| 787 | from canonical.database.sqlbase import quote_like, SQLBase, sqlvalues |
| 788 | @@ -629,7 +628,7 @@ |
| 789 | else: |
| 790 | # completed states (success and failure) |
| 791 | buildduration = DurationFormatterAPI( |
| 792 | - self.date_finished - self.date_started).approximateduration() |
| 793 | + self.duration).approximateduration() |
| 794 | buildlog_url = self.log_url |
| 795 | builder_url = canonical_url(self.builder) |
| 796 | |
| 797 | @@ -739,11 +738,13 @@ |
| 798 | return None |
| 799 | |
| 800 | archseries_ids = [d.id for d in archseries] |
| 801 | - |
| 802 | - return BinaryPackageBuild.select( |
| 803 | - AND(BinaryPackageBuild.q.buildstate==BuildStatus.NEEDSBUILD, |
| 804 | - IN(BinaryPackageBuild.q.distroarchseriesID, archseries_ids)) |
| 805 | - ) |
| 806 | + store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 807 | + return store.find( |
| 808 | + BinaryPackageBuild, |
| 809 | + In(BinaryPackageBuild.distro_arch_series_id, archseries_ids), |
| 810 | + BinaryPackageBuild.package_build == PackageBuild.id, |
| 811 | + PackageBuild.build_farm_job == BuildFarmJob.id, |
| 812 | + BuildFarmJob.status == BuildStatus.NEEDSBUILD) |
| 813 | |
| 814 | def handleOptionalParamsForBuildQueries( |
| 815 | self, queries, tables, status=None, name=None, pocket=None, |
| 816 | @@ -1140,17 +1141,21 @@ |
| 817 | arch_ids = [d.id for d in archseries] |
| 818 | |
| 819 | query = """ |
| 820 | - Build.distroarchseries IN %s AND |
| 821 | - Build.buildstate = %s AND |
| 822 | + BinaryPackageBuild.distro_arch_series IN %s AND |
| 823 | + BinaryPackageBuild.package_build = PackageBuild.id AND |
| 824 | + PackageBuild.build_farm_job = BuildFarmJob.id AND |
| 825 | + BuildFarmJob.status = %s AND |
| 826 | BuildQueue.job_type = %s AND |
| 827 | BuildQueue.job = BuildPackageJob.job AND |
| 828 | - BuildPackageJob.build = build.id AND |
| 829 | + BuildPackageJob.build = BinaryPackageBuild.id AND |
| 830 | BuildQueue.builder IS NULL |
| 831 | """ % sqlvalues( |
| 832 | arch_ids, BuildStatus.NEEDSBUILD, BuildFarmJobType.PACKAGEBUILD) |
| 833 | |
| 834 | candidates = BuildQueue.select( |
| 835 | - query, clauseTables=['Build', 'BuildPackageJob'], |
| 836 | + query, clauseTables=[ |
| 837 | + 'BinaryPackageBuild', 'PackageBuild', 'BuildFarmJob', |
| 838 | + 'BuildPackageJob'], |
| 839 | orderBy=['-BuildQueue.lastscore']) |
| 840 | |
| 841 | return candidates |
| 842 | |
| 843 | === modified file 'lib/lp/soyuz/model/binarypackagebuildbehavior.py' |
| 844 | --- lib/lp/soyuz/model/binarypackagebuildbehavior.py 2010-05-27 13:26:43 +0000 |
| 845 | +++ lib/lp/soyuz/model/binarypackagebuildbehavior.py 2010-05-27 13:26:53 +0000 |
| 846 | @@ -122,7 +122,8 @@ |
| 847 | |
| 848 | # This should already have been checked earlier, but just check again |
| 849 | # here in case of programmer errors. |
| 850 | - reason = build.archive.checkUploadToPocket(build.distroseries, |
| 851 | + reason = build.archive.checkUploadToPocket( |
| 852 | + build.distro_series, |
| 853 | build.pocket) |
| 854 | assert reason is None, ( |
| 855 | "%s (%s) can not be built for pocket %s: invalid pocket due " |
| 856 | |
| 857 | === modified file 'lib/lp/soyuz/scripts/buildd.py' |
| 858 | --- lib/lp/soyuz/scripts/buildd.py 2010-04-09 15:46:09 +0000 |
| 859 | +++ lib/lp/soyuz/scripts/buildd.py 2010-05-27 13:26:53 +0000 |
| 860 | @@ -169,9 +169,9 @@ |
| 861 | |
| 862 | for build in builds: |
| 863 | if not build.buildqueue_record: |
| 864 | - name = build.sourcepackagerelease.name |
| 865 | - version = build.sourcepackagerelease.version |
| 866 | - tag = build.distroarchseries.architecturetag |
| 867 | + name = build.source_package_release.name |
| 868 | + version = build.source_package_release.version |
| 869 | + tag = build.distro_arch_series.architecturetag |
| 870 | self.logger.debug( |
| 871 | "Creating buildqueue record for %s (%s) on %s" |
| 872 | % (name, version, tag)) |
| 873 | @@ -195,7 +195,7 @@ |
| 874 | for job in candidates: |
| 875 | uptodate_build = getUtility( |
| 876 | IBinaryPackageBuildSet).getByQueueEntry(job) |
| 877 | - if uptodate_build.buildstate != BuildStatus.NEEDSBUILD: |
| 878 | + if uptodate_build.status != BuildStatus.NEEDSBUILD: |
| 879 | continue |
| 880 | job.score() |
| 881 | |
| 882 | |
| 883 | === modified file 'lib/lp/soyuz/scripts/packagecopier.py' |
| 884 | --- lib/lp/soyuz/scripts/packagecopier.py 2010-05-15 17:43:59 +0000 |
| 885 | +++ lib/lp/soyuz/scripts/packagecopier.py 2010-05-27 13:26:53 +0000 |
| 886 | @@ -110,7 +110,7 @@ |
| 887 | package_upload = build.package_upload |
| 888 | package_files.append((package_upload, 'changesfile')) |
| 889 | # Re-upload the buildlog file as necessary. |
| 890 | - package_files.append((build, 'buildlog')) |
| 891 | + package_files.append((build, 'log')) |
| 892 | elif IPackageUploadCustom.providedBy(pub_record): |
| 893 | # Re-upload the custom files included |
| 894 | package_files.append((pub_record, 'libraryfilealias')) |
| 895 | @@ -341,7 +341,7 @@ |
| 896 | if not copied_binaries.issuperset(published_binaries): |
| 897 | raise CannotCopy( |
| 898 | "binaries conflicting with the existing ones") |
| 899 | - self._checkConflictingFiles(source) |
| 900 | + self._checkConflictingFiles(source) |
| 901 | |
| 902 | def _checkConflictingFiles(self, source): |
| 903 | # If both the source and destination archive are the same, we don't |
| 904 | @@ -350,7 +350,7 @@ |
| 905 | if source.archive.id == self.archive.id: |
| 906 | return None |
| 907 | source_files = [ |
| 908 | - sprf.libraryfile.filename for sprf in |
| 909 | + sprf.libraryfile.filename for sprf in |
| 910 | source.sourcepackagerelease.files] |
| 911 | destination_sha1s = self.archive.getFilesAndSha1s(source_files) |
| 912 | for lf in source.sourcepackagerelease.files: |
| 913 | @@ -624,7 +624,7 @@ |
| 914 | # If binaries are included in the copy we include binary custom files. |
| 915 | if include_binaries: |
| 916 | for build in source.getBuilds(): |
| 917 | - if build.buildstate != BuildStatus.FULLYBUILT: |
| 918 | + if build.status != BuildStatus.FULLYBUILT: |
| 919 | continue |
| 920 | delayed_copy.addBuild(build) |
| 921 | original_build_upload = build.package_upload |
| 922 | |
| 923 | === modified file 'lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py' |
| 924 | --- lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py 2010-04-12 08:29:02 +0000 |
| 925 | +++ lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py 2010-05-27 13:26:53 +0000 |
| 926 | @@ -15,9 +15,13 @@ |
| 927 | |
| 928 | from canonical.config import config |
| 929 | from canonical.launchpad.scripts.logger import QuietFakeLogger |
| 930 | +from canonical.launchpad.webapp.interfaces import ( |
| 931 | + IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) |
| 932 | from canonical.testing import ( |
| 933 | DatabaseLayer, LaunchpadLayer, LaunchpadZopelessLayer) |
| 934 | from lp.buildmaster.interfaces.buildbase import BuildStatus |
| 935 | +from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
| 936 | +from lp.buildmaster.model.packagebuild import PackageBuild |
| 937 | from lp.registry.interfaces.distribution import IDistributionSet |
| 938 | from lp.services.scripts.base import LaunchpadScriptFailure |
| 939 | from lp.soyuz.interfaces.component import IComponentSet |
| 940 | @@ -228,7 +232,13 @@ |
| 941 | self.number_of_pending_builds = self.getPendingBuilds().count() |
| 942 | |
| 943 | def getPendingBuilds(self): |
| 944 | - return BinaryPackageBuild.selectBy(buildstate=BuildStatus.NEEDSBUILD) |
| 945 | + store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 946 | + pending_builds = store.find( |
| 947 | + BinaryPackageBuild, |
| 948 | + BinaryPackageBuild.package_build == PackageBuild.id, |
| 949 | + PackageBuild.build_farm_job == BuildFarmJob.id, |
| 950 | + BuildFarmJob.status == BuildStatus.NEEDSBUILD) |
| 951 | + return pending_builds |
| 952 | |
| 953 | def getRetryDepwait(self, distribution=None): |
| 954 | test_args = ['-n'] |
| 955 | @@ -278,7 +288,7 @@ |
| 956 | |
| 957 | # Make it dependend on the only binary that can be satisfied in |
| 958 | # the sampledata. |
| 959 | - depwait_build.dependencies = 'pmount' |
| 960 | + depwait_build.dependencies = u'pmount' |
| 961 | |
| 962 | self.layer.commit() |
| 963 | |
| 964 | @@ -291,7 +301,7 @@ |
| 965 | self.assertEqual( |
| 966 | self.number_of_pending_builds + 1, |
| 967 | self.getPendingBuilds().count()) |
| 968 | - self.assertEqual(depwait_build.buildstate.name, 'NEEDSBUILD') |
| 969 | + self.assertEqual(depwait_build.status.name, 'NEEDSBUILD') |
| 970 | self.assertEqual(depwait_build.buildqueue_record.lastscore, 1755) |
| 971 | |
| 972 | |
| 973 | |
| 974 | === modified file 'lib/lp/soyuz/scripts/tests/test_copypackage.py' |
| 975 | --- lib/lp/soyuz/scripts/tests/test_copypackage.py 2010-05-15 17:43:59 +0000 |
| 976 | +++ lib/lp/soyuz/scripts/tests/test_copypackage.py 2010-05-27 13:26:53 +0000 |
| 977 | @@ -305,8 +305,8 @@ |
| 978 | 'Privacy mismatch on %s' % build.upload_changesfile.filename) |
| 979 | n_files += 1 |
| 980 | self.assertEquals( |
| 981 | - build.buildlog.restricted, restricted, |
| 982 | - 'Privacy mismatch on %s' % build.buildlog.filename) |
| 983 | + build.log.restricted, restricted, |
| 984 | + 'Privacy mismatch on %s' % build.log.filename) |
| 985 | n_files += 1 |
| 986 | self.assertEquals( |
| 987 | n_files, expected_n_files, |
| 988 | @@ -322,7 +322,7 @@ |
| 989 | # update_files_privacy() called on a private binary |
| 990 | # publication that was copied to a public location correctly |
| 991 | # makes all its related files (deb file, upload changesfile |
| 992 | - # and buildlog) public. |
| 993 | + # and log) public. |
| 994 | |
| 995 | # Create a new private PPA and a private source publication. |
| 996 | private_source = self.makeSource(private=True) |
| 997 | @@ -491,7 +491,7 @@ |
| 998 | |
| 999 | def test_cannot_copy_binaries_from_FTBFS(self): |
| 1000 | [build] = self.source.createMissingBuilds() |
| 1001 | - build.buildstate = BuildStatus.FAILEDTOBUILD |
| 1002 | + build.status = BuildStatus.FAILEDTOBUILD |
| 1003 | self.assertCannotCopyBinaries( |
| 1004 | 'source has no binaries to be copied') |
| 1005 | |
| 1006 | @@ -501,7 +501,7 @@ |
| 1007 | # retried anytime, but they will fail-to-upload if a copy |
| 1008 | # has built successfully. |
| 1009 | [build] = self.source.createMissingBuilds() |
| 1010 | - build.buildstate = BuildStatus.FAILEDTOBUILD |
| 1011 | + build.status = BuildStatus.FAILEDTOBUILD |
| 1012 | self.assertCanCopySourceOnly() |
| 1013 | |
| 1014 | def test_cannot_copy_binaries_from_binaries_pending_publication(self): |
| 1015 | @@ -1156,7 +1156,7 @@ |
| 1016 | changes_file_name = '%s_%s_%s.changes' % ( |
| 1017 | lazy_bin.name, lazy_bin.version, build_i386.arch_tag) |
| 1018 | package_upload = self.test_publisher.addPackageUpload( |
| 1019 | - ppa, build_i386.distroarchseries.distroseries, |
| 1020 | + ppa, build_i386.distro_arch_series.distroseries, |
| 1021 | build_i386.pocket, changes_file_content='anything', |
| 1022 | changes_file_name=changes_file_name) |
| 1023 | package_upload.addBuild(build_i386) |
| 1024 | @@ -1862,8 +1862,8 @@ |
| 1025 | status=PackagePublishingStatus.PUBLISHED) |
| 1026 | |
| 1027 | # The i386 build is completed and the hppa one pending. |
| 1028 | - self.assertEqual(build_hppa.buildstate, BuildStatus.NEEDSBUILD) |
| 1029 | - self.assertEqual(build_i386.buildstate, BuildStatus.FULLYBUILT) |
| 1030 | + self.assertEqual(build_hppa.status, BuildStatus.NEEDSBUILD) |
| 1031 | + self.assertEqual(build_i386.status, BuildStatus.FULLYBUILT) |
| 1032 | |
| 1033 | # Commit to ensure librarian files are written. |
| 1034 | self.layer.txn.commit() |
| 1035 | @@ -2249,7 +2249,7 @@ |
| 1036 | 'foo_source.buildlog', restricted=True) |
| 1037 | |
| 1038 | for build in ppa_source.getBuilds(): |
| 1039 | - build.buildlog = fake_buildlog |
| 1040 | + build.log = fake_buildlog |
| 1041 | |
| 1042 | # Create ancestry environment in the primary archive, so we can |
| 1043 | # test unembargoed overrides. |
| 1044 | @@ -2312,7 +2312,7 @@ |
| 1045 | # Check build's upload changesfile |
| 1046 | self.assertFalse(build.upload_changesfile.restricted) |
| 1047 | # Check build's buildlog. |
| 1048 | - self.assertFalse(build.buildlog.restricted) |
| 1049 | + self.assertFalse(build.log.restricted) |
| 1050 | # Check that the pocket is -security as specified in the |
| 1051 | # script parameters. |
| 1052 | self.assertEqual( |
| 1053 | |
| 1054 | === modified file 'lib/lp/soyuz/scripts/tests/test_populatearchive.py' |
| 1055 | --- lib/lp/soyuz/scripts/tests/test_populatearchive.py 2010-04-09 15:46:09 +0000 |
| 1056 | +++ lib/lp/soyuz/scripts/tests/test_populatearchive.py 2010-05-27 13:26:53 +0000 |
| 1057 | @@ -655,7 +655,7 @@ |
| 1058 | def build_in_wrong_state(build): |
| 1059 | """True if the given build is not (pending and suspended).""" |
| 1060 | return not ( |
| 1061 | - build.buildstate == BuildStatus.NEEDSBUILD and |
| 1062 | + build.status == BuildStatus.NEEDSBUILD and |
| 1063 | build.buildqueue_record.job.status == JobStatus.SUSPENDED) |
| 1064 | hoary = getUtility(IDistributionSet)['ubuntu']['hoary'] |
| 1065 |
