Merge lp:~michael.nelson/launchpad/567922-binarypackagebuild-new-table-4 into lp:launchpad/db-devel
- 567922-binarypackagebuild-new-table-4
- Merge into db-devel
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Edwin Grubbs | ||||
Approved revision: | no longer in the source branch. | ||||
Merged at revision: | 9405 | ||||
Proposed branch: | lp:~michael.nelson/launchpad/567922-binarypackagebuild-new-table-4 | ||||
Merge into: | lp:launchpad/db-devel | ||||
Prerequisite: | lp:~michael.nelson/launchpad/567922-binarypackagebuild-new-table-3 | ||||
Diff against target: |
785 lines (+163/-116) (has conflicts) 21 files modified
lib/lp/archiveuploader/nascentupload.py (+2/-2) lib/lp/archiveuploader/nascentuploadfile.py (+4/-4) lib/lp/buildmaster/browser/configure.zcml (+2/-1) lib/lp/buildmaster/browser/packagebuild.py (+0/-42) lib/lp/buildmaster/interfaces/buildfarmjob.py (+6/-1) lib/lp/buildmaster/model/buildfarmjob.py (+7/-0) lib/lp/buildmaster/tests/test_buildfarmjob.py (+24/-0) lib/lp/soyuz/adapters/archivedependencies.py (+2/-2) lib/lp/soyuz/doc/archive-dependencies.txt (+9/-8) lib/lp/soyuz/doc/archive-files.txt (+1/-1) lib/lp/soyuz/doc/archive.txt (+2/-2) lib/lp/soyuz/doc/build-failedtoupload-workflow.txt (+4/-4) lib/lp/soyuz/model/archive.py (+9/-6) lib/lp/soyuz/model/binarypackagebuild.py (+33/-20) lib/lp/soyuz/model/binarypackagerelease.py (+3/-3) lib/lp/soyuz/model/distributionsourcepackagerelease.py (+10/-6) lib/lp/soyuz/model/publishing.py (+6/-5) lib/lp/soyuz/model/queue.py (+2/-2) lib/lp/soyuz/model/sourcepackagerelease.py (+7/-5) lib/lp/soyuz/tests/test_binarypackagebuild.py (+28/-0) lib/lp/soyuz/tests/test_packageupload.py (+2/-2) Text conflict in lib/lp/buildmaster/interfaces/buildbase.py Text conflict in lib/lp/buildmaster/model/buildbase.py Text conflict in lib/lp/buildmaster/tests/test_buildbase.py Text conflict in lib/lp/code/model/sourcepackagerecipebuild.py |
||||
To merge this branch: | bzr merge lp:~michael.nelson/launchpad/567922-binarypackagebuild-new-table-4 | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Edwin Grubbs (community) | code | Approve | |
Review via email: mp+25138@code.launchpad.net |
Commit message
Description of the change
This branch is part of a pipeline for
https:/
https:/
**Note**: If it's possible, please ignore the conflicts with db-devel - it's due to a reversion of some work that was in db-devel and that I'd already pumped through the pipeline, and I'm waiting for that work to land again on db-devel before re-merging and pumping.
The actual diff of this branch from the previous is:
http://
Overview
========
This branch continues the work to switch our BinaryPackageBuild class to the new binarypackagebuild table (using the delegated PackageBuild/BuildFarmJob classes).
It (finally) gets all the soyuz unit tests passing.
Details
=======
This branch just gets the remaining soyuz unit tests passing and starts on the soyuz doctests.
This branch is dependent on the pending schema patch in a previous branch.
To test
=======
First update the test db schema (required as the db patch still needs to be updated to remove the old build table):
psql launchpad_
bin/py database/
And then:
bin/test -vv -t test_packageupload -t doc/archive-
The next branch will continue getting the soyuz doctests passing with the new model.
Preview Diff
1 | === modified file 'lib/lp/archiveuploader/nascentupload.py' | |||
2 | --- lib/lp/archiveuploader/nascentupload.py 2010-02-26 16:52:46 +0000 | |||
3 | +++ lib/lp/archiveuploader/nascentupload.py 2010-05-12 11:01:39 +0000 | |||
4 | @@ -780,7 +780,7 @@ | |||
5 | 780 | # fine. | 780 | # fine. |
6 | 781 | ancestry = self.getBinaryAncestry( | 781 | ancestry = self.getBinaryAncestry( |
7 | 782 | uploaded_file, try_other_archs=False) | 782 | uploaded_file, try_other_archs=False) |
9 | 783 | if (ancestry is not None and | 783 | if (ancestry is not None and |
10 | 784 | not self.policy.archive.is_copy): | 784 | not self.policy.archive.is_copy): |
11 | 785 | # Ignore version checks for copy archives | 785 | # Ignore version checks for copy archives |
12 | 786 | # because the ancestry comes from the primary | 786 | # because the ancestry comes from the primary |
13 | @@ -962,7 +962,7 @@ | |||
14 | 962 | for build in self.queue_root.builds] | 962 | for build in self.queue_root.builds] |
15 | 963 | if considered_build.id in attached_builds: | 963 | if considered_build.id in attached_builds: |
16 | 964 | continue | 964 | continue |
18 | 965 | assert (considered_build.sourcepackagerelease.id == | 965 | assert (considered_build.source_package_release.id == |
19 | 966 | sourcepackagerelease.id), ( | 966 | sourcepackagerelease.id), ( |
20 | 967 | "Upload contains binaries of different sources.") | 967 | "Upload contains binaries of different sources.") |
21 | 968 | self.queue_root.addBuild(considered_build) | 968 | self.queue_root.addBuild(considered_build) |
22 | 969 | 969 | ||
23 | === modified file 'lib/lp/archiveuploader/nascentuploadfile.py' | |||
24 | --- lib/lp/archiveuploader/nascentuploadfile.py 2010-04-09 15:46:09 +0000 | |||
25 | +++ lib/lp/archiveuploader/nascentuploadfile.py 2010-05-12 11:01:39 +0000 | |||
26 | @@ -806,7 +806,7 @@ | |||
27 | 806 | build = sourcepackagerelease.getBuildByArch( | 806 | build = sourcepackagerelease.getBuildByArch( |
28 | 807 | dar, self.policy.archive) | 807 | dar, self.policy.archive) |
29 | 808 | if build is not None: | 808 | if build is not None: |
31 | 809 | build.buildstate = BuildStatus.FULLYBUILT | 809 | build.status = BuildStatus.FULLYBUILT |
32 | 810 | self.logger.debug("Updating build for %s: %s" % ( | 810 | self.logger.debug("Updating build for %s: %s" % ( |
33 | 811 | dar.architecturetag, build.id)) | 811 | dar.architecturetag, build.id)) |
34 | 812 | else: | 812 | else: |
35 | @@ -822,7 +822,7 @@ | |||
36 | 822 | # Ensure gathered binary is related to a FULLYBUILT build | 822 | # Ensure gathered binary is related to a FULLYBUILT build |
37 | 823 | # record. It will be check in slave-scanner procedure to | 823 | # record. It will be check in slave-scanner procedure to |
38 | 824 | # certify that the build was processed correctly. | 824 | # certify that the build was processed correctly. |
40 | 825 | build.buildstate = BuildStatus.FULLYBUILT | 825 | build.status = BuildStatus.FULLYBUILT |
41 | 826 | # Also purge any previous failed upload_log stored, so its | 826 | # Also purge any previous failed upload_log stored, so its |
42 | 827 | # content can be garbage-collected since it's not useful | 827 | # content can be garbage-collected since it's not useful |
43 | 828 | # anymore. | 828 | # anymore. |
44 | @@ -831,9 +831,9 @@ | |||
45 | 831 | # Sanity check; raise an error if the build we've been | 831 | # Sanity check; raise an error if the build we've been |
46 | 832 | # told to link to makes no sense (ie. is not for the right | 832 | # told to link to makes no sense (ie. is not for the right |
47 | 833 | # source package). | 833 | # source package). |
49 | 834 | if (build.sourcepackagerelease != sourcepackagerelease or | 834 | if (build.source_package_release != sourcepackagerelease or |
50 | 835 | build.pocket != self.policy.pocket or | 835 | build.pocket != self.policy.pocket or |
52 | 836 | build.distroarchseries != dar or | 836 | build.distro_arch_series != dar or |
53 | 837 | build.archive != self.policy.archive): | 837 | build.archive != self.policy.archive): |
54 | 838 | raise UploadError( | 838 | raise UploadError( |
55 | 839 | "Attempt to upload binaries specifying " | 839 | "Attempt to upload binaries specifying " |
56 | 840 | 840 | ||
57 | === modified file 'lib/lp/buildmaster/browser/configure.zcml' | |||
58 | --- lib/lp/buildmaster/browser/configure.zcml 2010-05-12 11:01:28 +0000 | |||
59 | +++ lib/lp/buildmaster/browser/configure.zcml 2010-05-12 11:01:39 +0000 | |||
60 | @@ -10,5 +10,6 @@ | |||
61 | 10 | i18n_domain="launchpad"> | 10 | i18n_domain="launchpad"> |
62 | 11 | <browser:url | 11 | <browser:url |
63 | 12 | for="lp.buildmaster.interfaces.packagebuild.IPackageBuild" | 12 | for="lp.buildmaster.interfaces.packagebuild.IPackageBuild" |
65 | 13 | urldata="lp.buildmaster.browser.packagebuild.PackageBuildUrl"/> | 13 | path_expression="string:+build/${build_farm_job/id}" |
66 | 14 | attribute_to_parent="archive"/> | ||
67 | 14 | </configure> | 15 | </configure> |
68 | 15 | 16 | ||
69 | === removed file 'lib/lp/buildmaster/browser/packagebuild.py' | |||
70 | --- lib/lp/buildmaster/browser/packagebuild.py 2010-05-12 11:01:28 +0000 | |||
71 | +++ lib/lp/buildmaster/browser/packagebuild.py 1970-01-01 00:00:00 +0000 | |||
72 | @@ -1,42 +0,0 @@ | |||
73 | 1 | # Copyright 2010 Canonical Ltd. This software is licensed under the | ||
74 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
75 | 3 | |||
76 | 4 | """URLs for PackageBuild classes.""" | ||
77 | 5 | |||
78 | 6 | from zope.interface import implements | ||
79 | 7 | |||
80 | 8 | from canonical.launchpad.webapp.interfaces import ICanonicalUrlData | ||
81 | 9 | |||
82 | 10 | |||
83 | 11 | class PackageBuildUrl: | ||
84 | 12 | """Dynamic URL declaration for IPackageBuild classes. | ||
85 | 13 | |||
86 | 14 | When dealing with distribution builds we want to present them | ||
87 | 15 | under IDistributionSourcePackageRelease url: | ||
88 | 16 | |||
89 | 17 | /ubuntu/+source/foo/1.0/+build/1234 | ||
90 | 18 | |||
91 | 19 | On the other hand, PPA builds will be presented under the PPA page: | ||
92 | 20 | |||
93 | 21 | /~cprov/+archive/+build/1235 | ||
94 | 22 | |||
95 | 23 | Copy archives will be presented under the archives page: | ||
96 | 24 | /ubuntu/+archive/my-special-archive/+build/1234 | ||
97 | 25 | """ | ||
98 | 26 | implements(ICanonicalUrlData) | ||
99 | 27 | rootsite = None | ||
100 | 28 | |||
101 | 29 | def __init__(self, context): | ||
102 | 30 | self.context = context | ||
103 | 31 | |||
104 | 32 | @property | ||
105 | 33 | def inside(self): | ||
106 | 34 | if self.context.archive.is_ppa or self.context.archive.is_copy: | ||
107 | 35 | return self.context.archive | ||
108 | 36 | else: | ||
109 | 37 | return self.context.distributionsourcepackagerelease | ||
110 | 38 | |||
111 | 39 | @property | ||
112 | 40 | def path(self): | ||
113 | 41 | return u"+build/%d" % self.context.build_farm_job.id | ||
114 | 42 | |||
115 | 43 | 0 | ||
116 | === modified file 'lib/lp/buildmaster/interfaces/buildfarmjob.py' | |||
117 | --- lib/lp/buildmaster/interfaces/buildfarmjob.py 2010-05-12 11:01:28 +0000 | |||
118 | +++ lib/lp/buildmaster/interfaces/buildfarmjob.py 2010-05-12 11:01:39 +0000 | |||
119 | @@ -15,7 +15,7 @@ | |||
120 | 15 | ] | 15 | ] |
121 | 16 | 16 | ||
122 | 17 | from zope.interface import Interface, Attribute | 17 | from zope.interface import Interface, Attribute |
124 | 18 | from zope.schema import Bool, Choice, Datetime, TextLine | 18 | from zope.schema import Bool, Choice, Datetime, TextLine, Timedelta |
125 | 19 | from lazr.enum import DBEnumeratedType, DBItem | 19 | from lazr.enum import DBEnumeratedType, DBItem |
126 | 20 | from lazr.restful.declarations import exported | 20 | from lazr.restful.declarations import exported |
127 | 21 | from lazr.restful.fields import Reference | 21 | from lazr.restful.fields import Reference |
128 | @@ -194,6 +194,11 @@ | |||
129 | 194 | "The timestamp when the build farm job was finished.")), | 194 | "The timestamp when the build farm job was finished.")), |
130 | 195 | ("1.0", dict(exported=True, exported_as="datebuilt"))) | 195 | ("1.0", dict(exported=True, exported_as="datebuilt"))) |
131 | 196 | 196 | ||
132 | 197 | duration = Timedelta( | ||
133 | 198 | title=_("Duration"), required=False, | ||
134 | 199 | description=_("Duration interval, calculated when the " | ||
135 | 200 | "result gets collected.")) | ||
136 | 201 | |||
137 | 197 | date_first_dispatched = exported( | 202 | date_first_dispatched = exported( |
138 | 198 | Datetime( | 203 | Datetime( |
139 | 199 | title=_("Date finished"), required=False, readonly=True, | 204 | title=_("Date finished"), required=False, readonly=True, |
140 | 200 | 205 | ||
141 | === modified file 'lib/lp/buildmaster/model/buildfarmjob.py' | |||
142 | --- lib/lp/buildmaster/model/buildfarmjob.py 2010-05-12 11:01:28 +0000 | |||
143 | +++ lib/lp/buildmaster/model/buildfarmjob.py 2010-05-12 11:01:39 +0000 | |||
144 | @@ -220,6 +220,13 @@ | |||
145 | 220 | """See `IBuildFarmJob`.""" | 220 | """See `IBuildFarmJob`.""" |
146 | 221 | return self.job_type.title | 221 | return self.job_type.title |
147 | 222 | 222 | ||
148 | 223 | @property | ||
149 | 224 | def duration(self): | ||
150 | 225 | """See `IBuildFarmJob`.""" | ||
151 | 226 | if self.date_started is None or self.date_finished is None: | ||
152 | 227 | return None | ||
153 | 228 | return self.date_finished - self.date_started | ||
154 | 229 | |||
155 | 223 | def makeJob(self): | 230 | def makeJob(self): |
156 | 224 | """See `IBuildFarmJob`.""" | 231 | """See `IBuildFarmJob`.""" |
157 | 225 | raise NotImplementedError | 232 | raise NotImplementedError |
158 | 226 | 233 | ||
159 | === modified file 'lib/lp/buildmaster/tests/test_buildfarmjob.py' | |||
160 | --- lib/lp/buildmaster/tests/test_buildfarmjob.py 2010-05-12 11:01:28 +0000 | |||
161 | +++ lib/lp/buildmaster/tests/test_buildfarmjob.py 2010-05-12 11:01:39 +0000 | |||
162 | @@ -5,10 +5,13 @@ | |||
163 | 5 | 5 | ||
164 | 6 | __metaclass__ = type | 6 | __metaclass__ = type |
165 | 7 | 7 | ||
166 | 8 | from datetime import datetime, timedelta | ||
167 | 9 | import pytz | ||
168 | 8 | import unittest | 10 | import unittest |
169 | 9 | 11 | ||
170 | 10 | from storm.store import Store | 12 | from storm.store import Store |
171 | 11 | from zope.component import getUtility | 13 | from zope.component import getUtility |
172 | 14 | from zope.security.proxy import removeSecurityProxy | ||
173 | 12 | 15 | ||
174 | 13 | from canonical.database.sqlbase import flush_database_updates | 16 | from canonical.database.sqlbase import flush_database_updates |
175 | 14 | from canonical.testing.layers import DatabaseFunctionalLayer | 17 | from canonical.testing.layers import DatabaseFunctionalLayer |
176 | @@ -112,6 +115,27 @@ | |||
177 | 112 | self.build_farm_job.job_type.title, | 115 | self.build_farm_job.job_type.title, |
178 | 113 | self.build_farm_job.title) | 116 | self.build_farm_job.title) |
179 | 114 | 117 | ||
180 | 118 | def test_duration_none(self): | ||
181 | 119 | # If either start or finished is none, the duration will be | ||
182 | 120 | # none. | ||
183 | 121 | self.build_farm_job.jobStarted() | ||
184 | 122 | self.failUnlessEqual(None, self.build_farm_job.duration) | ||
185 | 123 | |||
186 | 124 | self.build_farm_job.jobAborted() | ||
187 | 125 | removeSecurityProxy(self.build_farm_job).date_finished = ( | ||
188 | 126 | datetime.now(pytz.UTC)) | ||
189 | 127 | self.failUnlessEqual(None, self.build_farm_job.duration) | ||
190 | 128 | |||
191 | 129 | def test_duration_set(self): | ||
192 | 130 | # If both start and finished are defined, the duration will be | ||
193 | 131 | # returned. | ||
194 | 132 | now = datetime.now(pytz.UTC) | ||
195 | 133 | duration = timedelta(1) | ||
196 | 134 | naked_bfj = removeSecurityProxy(self.build_farm_job) | ||
197 | 135 | naked_bfj.date_started = now | ||
198 | 136 | naked_bfj.date_finished = now + duration | ||
199 | 137 | self.failUnlessEqual(duration, self.build_farm_job.duration) | ||
200 | 138 | |||
201 | 115 | 139 | ||
202 | 116 | def test_suite(): | 140 | def test_suite(): |
203 | 117 | return unittest.TestLoader().loadTestsFromName(__name__) | 141 | return unittest.TestLoader().loadTestsFromName(__name__) |
204 | 118 | 142 | ||
205 | === modified file 'lib/lp/soyuz/adapters/archivedependencies.py' | |||
206 | --- lib/lp/soyuz/adapters/archivedependencies.py 2010-02-26 13:42:51 +0000 | |||
207 | +++ lib/lp/soyuz/adapters/archivedependencies.py 2010-05-12 11:01:39 +0000 | |||
208 | @@ -168,7 +168,7 @@ | |||
209 | 168 | 168 | ||
210 | 169 | # Consider user-selected archive dependencies. | 169 | # Consider user-selected archive dependencies. |
211 | 170 | primary_component = get_primary_current_component( | 170 | primary_component = get_primary_current_component( |
213 | 171 | build.archive, build.distroseries, sourcepackagename) | 171 | build.archive, build.distro_series, sourcepackagename) |
214 | 172 | for archive_dependency in build.archive.dependencies: | 172 | for archive_dependency in build.archive.dependencies: |
215 | 173 | # When the dependency component is undefined, we should use | 173 | # When the dependency component is undefined, we should use |
216 | 174 | # the component where the source is published in the primary | 174 | # the component where the source is published in the primary |
217 | @@ -272,7 +272,7 @@ | |||
218 | 272 | primary_dependencies = [] | 272 | primary_dependencies = [] |
219 | 273 | for pocket in primary_pockets: | 273 | for pocket in primary_pockets: |
220 | 274 | primary_dependencies.append( | 274 | primary_dependencies.append( |
222 | 275 | (build.distroseries.distribution.main_archive, pocket, | 275 | (build.distro_series.distribution.main_archive, pocket, |
223 | 276 | primary_components)) | 276 | primary_components)) |
224 | 277 | 277 | ||
225 | 278 | return primary_dependencies | 278 | return primary_dependencies |
226 | 279 | 279 | ||
227 | === modified file 'lib/lp/soyuz/doc/archive-dependencies.txt' | |||
228 | --- lib/lp/soyuz/doc/archive-dependencies.txt 2010-03-11 01:39:25 +0000 | |||
229 | +++ lib/lp/soyuz/doc/archive-dependencies.txt 2010-05-12 11:01:39 +0000 | |||
230 | @@ -205,8 +205,9 @@ | |||
231 | 205 | ... get_sources_list_for_building) | 205 | ... get_sources_list_for_building) |
232 | 206 | 206 | ||
233 | 207 | >>> def print_building_sources_list(candidate): | 207 | >>> def print_building_sources_list(candidate): |
236 | 208 | ... sources_list = get_sources_list_for_building(candidate, | 208 | ... sources_list = get_sources_list_for_building( |
237 | 209 | ... candidate.distroarchseries, candidate.sourcepackagerelease.name) | 209 | ... candidate, candidate.distro_arch_series, |
238 | 210 | ... candidate.source_package_release.name) | ||
239 | 210 | ... for line in sources_list: | 211 | ... for line in sources_list: |
240 | 211 | ... print line | 212 | ... print line |
241 | 212 | 213 | ||
242 | @@ -219,7 +220,7 @@ | |||
243 | 219 | ... PackagePublishingStatus) | 220 | ... PackagePublishingStatus) |
244 | 220 | 221 | ||
245 | 221 | >>> cprov.archive.getAllPublishedBinaries( | 222 | >>> cprov.archive.getAllPublishedBinaries( |
247 | 222 | ... distroarchseries=a_build.distroarchseries, | 223 | ... distroarchseries=a_build.distro_arch_series, |
248 | 223 | ... status=PackagePublishingStatus.PUBLISHED).count() | 224 | ... status=PackagePublishingStatus.PUBLISHED).count() |
249 | 224 | 0 | 225 | 0 |
250 | 225 | 226 | ||
251 | @@ -323,9 +324,9 @@ | |||
252 | 323 | deb http://ppa.launchpad.dev/cprov/ppa/ubuntu hoary main | 324 | deb http://ppa.launchpad.dev/cprov/ppa/ubuntu hoary main |
253 | 324 | deb http://ftpmaster.internal/ubuntu hoary | 325 | deb http://ftpmaster.internal/ubuntu hoary |
254 | 325 | main restricted universe multiverse | 326 | main restricted universe multiverse |
256 | 326 | deb http://ftpmaster.internal/ubuntu hoary-security | 327 | deb http://ftpmaster.internal/ubuntu hoary-security |
257 | 327 | main restricted universe multiverse | 328 | main restricted universe multiverse |
259 | 328 | deb http://ftpmaster.internal/ubuntu hoary-updates | 329 | deb http://ftpmaster.internal/ubuntu hoary-updates |
260 | 329 | main restricted universe multiverse | 330 | main restricted universe multiverse |
261 | 330 | 331 | ||
262 | 331 | However, in order to avoid the problem going forward (and to allow the PPA | 332 | However, in order to avoid the problem going forward (and to allow the PPA |
263 | @@ -363,9 +364,9 @@ | |||
264 | 363 | deb http://ppa.launchpad.dev/cprov/ppa/ubuntu hoary main | 364 | deb http://ppa.launchpad.dev/cprov/ppa/ubuntu hoary main |
265 | 364 | deb http://ftpmaster.internal/ubuntu hoary | 365 | deb http://ftpmaster.internal/ubuntu hoary |
266 | 365 | main restricted universe multiverse | 366 | main restricted universe multiverse |
268 | 366 | deb http://ftpmaster.internal/ubuntu hoary-security | 367 | deb http://ftpmaster.internal/ubuntu hoary-security |
269 | 367 | main restricted universe multiverse | 368 | main restricted universe multiverse |
271 | 368 | deb http://ftpmaster.internal/ubuntu hoary-updates | 369 | deb http://ftpmaster.internal/ubuntu hoary-updates |
272 | 369 | main restricted universe multiverse | 370 | main restricted universe multiverse |
273 | 370 | 371 | ||
274 | 371 | However, in order to avoid the problem going forward (and to allow the PPA | 372 | However, in order to avoid the problem going forward (and to allow the PPA |
275 | @@ -434,7 +435,7 @@ | |||
276 | 434 | ... get_primary_current_component) | 435 | ... get_primary_current_component) |
277 | 435 | 436 | ||
278 | 436 | >>> print get_primary_current_component(a_build.archive, | 437 | >>> print get_primary_current_component(a_build.archive, |
280 | 437 | ... a_build.distroseries, a_build.sourcepackagerelease.name) | 438 | ... a_build.distro_series, a_build.source_package_release.name) |
281 | 438 | universe | 439 | universe |
282 | 439 | 440 | ||
283 | 440 | >>> print_building_sources_list(a_build) | 441 | >>> print_building_sources_list(a_build) |
284 | 441 | 442 | ||
285 | === modified file 'lib/lp/soyuz/doc/archive-files.txt' | |||
286 | --- lib/lp/soyuz/doc/archive-files.txt 2009-08-28 07:34:44 +0000 | |||
287 | +++ lib/lp/soyuz/doc/archive-files.txt 2010-05-12 11:01:39 +0000 | |||
288 | @@ -191,7 +191,7 @@ | |||
289 | 191 | ... 'buildlog_ubuntu-breezy-autotest-i386.' | 191 | ... 'buildlog_ubuntu-breezy-autotest-i386.' |
290 | 192 | ... 'test-pkg_1.0_FULLYBUILT.txt.gz') | 192 | ... 'test-pkg_1.0_FULLYBUILT.txt.gz') |
291 | 193 | >>> buildlog = test_publisher.addMockFile(buildlog_name) | 193 | >>> buildlog = test_publisher.addMockFile(buildlog_name) |
293 | 194 | >>> build.buildlog = buildlog | 194 | >>> build.log = buildlog |
294 | 195 | 195 | ||
295 | 196 | >>> buildlog == build.getFileByName(buildlog_name) | 196 | >>> buildlog == build.getFileByName(buildlog_name) |
296 | 197 | True | 197 | True |
297 | 198 | 198 | ||
298 | === modified file 'lib/lp/soyuz/doc/archive.txt' | |||
299 | --- lib/lp/soyuz/doc/archive.txt 2010-05-06 10:05:49 +0000 | |||
300 | +++ lib/lp/soyuz/doc/archive.txt 2010-05-12 11:01:39 +0000 | |||
301 | @@ -1047,13 +1047,13 @@ | |||
302 | 1047 | >>> cd_lookup = cprov_archive.getBuildRecords(name='cd') | 1047 | >>> cd_lookup = cprov_archive.getBuildRecords(name='cd') |
303 | 1048 | >>> cd_lookup.count() | 1048 | >>> cd_lookup.count() |
304 | 1049 | 1 | 1049 | 1 |
306 | 1050 | >>> cd_lookup[0].sourcepackagerelease.name | 1050 | >>> cd_lookup[0].source_package_release.name |
307 | 1051 | u'cdrkit' | 1051 | u'cdrkit' |
308 | 1052 | 1052 | ||
309 | 1053 | >>> ice_lookup = cprov_archive.getBuildRecords(name='ice') | 1053 | >>> ice_lookup = cprov_archive.getBuildRecords(name='ice') |
310 | 1054 | >>> ice_lookup.count() | 1054 | >>> ice_lookup.count() |
311 | 1055 | 1 | 1055 | 1 |
313 | 1056 | >>> ice_lookup[0].sourcepackagerelease.name | 1056 | >>> ice_lookup[0].source_package_release.name |
314 | 1057 | u'iceweasel' | 1057 | u'iceweasel' |
315 | 1058 | 1058 | ||
316 | 1059 | >>> cprov_archive.getBuildRecords(name='foo').count() | 1059 | >>> cprov_archive.getBuildRecords(name='foo').count() |
317 | 1060 | 1060 | ||
318 | === modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt' | |||
319 | --- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-04-14 17:34:35 +0000 | |||
320 | +++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-05-12 11:01:39 +0000 | |||
321 | @@ -28,7 +28,7 @@ | |||
322 | 28 | >>> print failedtoupload_candidate.title | 28 | >>> print failedtoupload_candidate.title |
323 | 29 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | 29 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE |
324 | 30 | 30 | ||
326 | 31 | >>> print failedtoupload_candidate.buildstate.name | 31 | >>> print failedtoupload_candidate.status.name |
327 | 32 | FAILEDTOUPLOAD | 32 | FAILEDTOUPLOAD |
328 | 33 | 33 | ||
329 | 34 | >>> print failedtoupload_candidate.upload_log.filename | 34 | >>> print failedtoupload_candidate.upload_log.filename |
330 | @@ -110,7 +110,7 @@ | |||
331 | 110 | Let's emulate the procedure of rescuing an FAILEDTOUPLOAD build. | 110 | Let's emulate the procedure of rescuing an FAILEDTOUPLOAD build. |
332 | 111 | A FAILEDTOUPLOAD build obviously has no binaries: | 111 | A FAILEDTOUPLOAD build obviously has no binaries: |
333 | 112 | 112 | ||
335 | 113 | >>> print failedtoupload_candidate.buildstate.name | 113 | >>> print failedtoupload_candidate.status.name |
336 | 114 | FAILEDTOUPLOAD | 114 | FAILEDTOUPLOAD |
337 | 115 | 115 | ||
338 | 116 | >>> failedtoupload_candidate.binarypackages.count() | 116 | >>> failedtoupload_candidate.binarypackages.count() |
339 | @@ -162,7 +162,7 @@ | |||
340 | 162 | >>> buildd_policy = getPolicy( | 162 | >>> buildd_policy = getPolicy( |
341 | 163 | ... name='buildd', | 163 | ... name='buildd', |
342 | 164 | ... distro=failedtoupload_candidate.distribution.name, | 164 | ... distro=failedtoupload_candidate.distribution.name, |
344 | 165 | ... distroseries=failedtoupload_candidate.distroseries.name, | 165 | ... distroseries=failedtoupload_candidate.distro_series.name, |
345 | 166 | ... buildid=failedtoupload_candidate.id) | 166 | ... buildid=failedtoupload_candidate.id) |
346 | 167 | 167 | ||
347 | 168 | >>> cdrkit_bin_upload = NascentUpload( | 168 | >>> cdrkit_bin_upload = NascentUpload( |
348 | @@ -180,7 +180,7 @@ | |||
349 | 180 | previously stored upload_log is dereferenced (they are both updated | 180 | previously stored upload_log is dereferenced (they are both updated |
350 | 181 | during the upload processing time): | 181 | during the upload processing time): |
351 | 182 | 182 | ||
353 | 183 | >>> print failedtoupload_candidate.buildstate.name | 183 | >>> print failedtoupload_candidate.status.name |
354 | 184 | FULLYBUILT | 184 | FULLYBUILT |
355 | 185 | 185 | ||
356 | 186 | >>> print failedtoupload_candidate.upload_log | 186 | >>> print failedtoupload_candidate.upload_log |
357 | 187 | 187 | ||
358 | === modified file 'lib/lp/soyuz/model/archive.py' | |||
359 | --- lib/lp/soyuz/model/archive.py 2010-05-12 11:01:28 +0000 | |||
360 | +++ lib/lp/soyuz/model/archive.py 2010-05-12 11:01:39 +0000 | |||
361 | @@ -882,18 +882,21 @@ | |||
362 | 882 | extra_exprs = [] | 882 | extra_exprs = [] |
363 | 883 | if not include_needsbuild: | 883 | if not include_needsbuild: |
364 | 884 | extra_exprs.append( | 884 | extra_exprs.append( |
366 | 885 | BinaryPackageBuild.buildstate != BuildStatus.NEEDSBUILD) | 885 | BuildFarmJob.status != BuildStatus.NEEDSBUILD) |
367 | 886 | 886 | ||
368 | 887 | find_spec = ( | 887 | find_spec = ( |
370 | 888 | BinaryPackageBuild.buildstate, | 888 | BuildFarmJob.status, |
371 | 889 | Count(BinaryPackageBuild.id) | 889 | Count(BinaryPackageBuild.id) |
372 | 890 | ) | 890 | ) |
374 | 891 | result = store.using(BinaryPackageBuild).find( | 891 | result = store.using( |
375 | 892 | BinaryPackageBuild, PackageBuild, BuildFarmJob).find( | ||
376 | 892 | find_spec, | 893 | find_spec, |
378 | 893 | BinaryPackageBuild.archive == self, | 894 | BinaryPackageBuild.package_build == PackageBuild.id, |
379 | 895 | PackageBuild.archive == self, | ||
380 | 896 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
381 | 894 | *extra_exprs | 897 | *extra_exprs |
384 | 895 | ).group_by(BinaryPackageBuild.buildstate).order_by( | 898 | ).group_by(BuildFarmJob.status).order_by( |
385 | 896 | BinaryPackageBuild.buildstate) | 899 | BuildFarmJob.status) |
386 | 897 | 900 | ||
387 | 898 | # Create a map for each count summary to a number of buildstates: | 901 | # Create a map for each count summary to a number of buildstates: |
388 | 899 | count_map = { | 902 | count_map = { |
389 | 900 | 903 | ||
390 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' | |||
391 | --- lib/lp/soyuz/model/binarypackagebuild.py 2010-05-12 11:01:28 +0000 | |||
392 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2010-05-12 11:01:39 +0000 | |||
393 | @@ -24,6 +24,8 @@ | |||
394 | 24 | 24 | ||
395 | 25 | from canonical.config import config | 25 | from canonical.config import config |
396 | 26 | from canonical.database.sqlbase import quote_like, SQLBase, sqlvalues | 26 | from canonical.database.sqlbase import quote_like, SQLBase, sqlvalues |
397 | 27 | from canonical.launchpad.browser.librarian import ( | ||
398 | 28 | ProxiedLibraryFileAlias) | ||
399 | 27 | from canonical.launchpad.components.decoratedresultset import ( | 29 | from canonical.launchpad.components.decoratedresultset import ( |
400 | 28 | DecoratedResultSet) | 30 | DecoratedResultSet) |
401 | 29 | from canonical.launchpad.database.librarian import ( | 31 | from canonical.launchpad.database.librarian import ( |
402 | @@ -209,6 +211,17 @@ | |||
403 | 209 | return self.distro_arch_series.architecturetag | 211 | return self.distro_arch_series.architecturetag |
404 | 210 | 212 | ||
405 | 211 | @property | 213 | @property |
406 | 214 | def log_url(self): | ||
407 | 215 | """See `IBuildFarmJob`. | ||
408 | 216 | |||
409 | 217 | Overridden here for the case of builds for distro archives, | ||
410 | 218 | currently only supported for binary package builds. | ||
411 | 219 | """ | ||
412 | 220 | if self.log is None: | ||
413 | 221 | return None | ||
414 | 222 | return ProxiedLibraryFileAlias(self.log, self).http_url | ||
415 | 223 | |||
416 | 224 | @property | ||
417 | 212 | def distributionsourcepackagerelease(self): | 225 | def distributionsourcepackagerelease(self): |
418 | 213 | """See `IBuild`.""" | 226 | """See `IBuild`.""" |
419 | 214 | from lp.soyuz.model.distributionsourcepackagerelease \ | 227 | from lp.soyuz.model.distributionsourcepackagerelease \ |
420 | @@ -270,7 +283,7 @@ | |||
421 | 270 | def retry(self): | 283 | def retry(self): |
422 | 271 | """See `IBuild`.""" | 284 | """See `IBuild`.""" |
423 | 272 | assert self.can_be_retried, "Build %s cannot be retried" % self.id | 285 | assert self.can_be_retried, "Build %s cannot be retried" % self.id |
425 | 273 | self.buildstate = BuildStatus.NEEDSBUILD | 286 | self.status = BuildStatus.NEEDSBUILD |
426 | 274 | self.datebuilt = None | 287 | self.datebuilt = None |
427 | 275 | self.buildduration = None | 288 | self.buildduration = None |
428 | 276 | self.builder = None | 289 | self.builder = None |
429 | @@ -522,15 +535,15 @@ | |||
430 | 522 | config.builddmaster.default_sender_address) | 535 | config.builddmaster.default_sender_address) |
431 | 523 | 536 | ||
432 | 524 | extra_headers = { | 537 | extra_headers = { |
434 | 525 | 'X-Launchpad-Build-State': self.buildstate.name, | 538 | 'X-Launchpad-Build-State': self.status.name, |
435 | 526 | 'X-Launchpad-Build-Component' : self.current_component.name, | 539 | 'X-Launchpad-Build-Component' : self.current_component.name, |
437 | 527 | 'X-Launchpad-Build-Arch' : self.distroarchseries.architecturetag, | 540 | 'X-Launchpad-Build-Arch' : self.distro_arch_series.architecturetag, |
438 | 528 | } | 541 | } |
439 | 529 | 542 | ||
440 | 530 | # XXX cprov 2006-10-27: Temporary extra debug info about the | 543 | # XXX cprov 2006-10-27: Temporary extra debug info about the |
441 | 531 | # SPR.creator in context, to be used during the service quarantine, | 544 | # SPR.creator in context, to be used during the service quarantine, |
442 | 532 | # notify_owner will be disabled to avoid *spamming* Debian people. | 545 | # notify_owner will be disabled to avoid *spamming* Debian people. |
444 | 533 | creator = self.sourcepackagerelease.creator | 546 | creator = self.source_package_release.creator |
445 | 534 | extra_headers['X-Creator-Recipient'] = ",".join( | 547 | extra_headers['X-Creator-Recipient'] = ",".join( |
446 | 535 | get_contact_email_addresses(creator)) | 548 | get_contact_email_addresses(creator)) |
447 | 536 | 549 | ||
448 | @@ -545,7 +558,7 @@ | |||
449 | 545 | # * the package build (failure) occurred in the original | 558 | # * the package build (failure) occurred in the original |
450 | 546 | # archive. | 559 | # archive. |
451 | 547 | package_was_not_copied = ( | 560 | package_was_not_copied = ( |
453 | 548 | self.archive == self.sourcepackagerelease.upload_archive) | 561 | self.archive == self.source_package_release.upload_archive) |
454 | 549 | 562 | ||
455 | 550 | if package_was_not_copied and config.builddmaster.notify_owner: | 563 | if package_was_not_copied and config.builddmaster.notify_owner: |
456 | 551 | if (self.archive.is_ppa and creator.inTeam(self.archive.owner) | 564 | if (self.archive.is_ppa and creator.inTeam(self.archive.owner) |
457 | @@ -557,7 +570,7 @@ | |||
458 | 557 | # Non-PPA notifications inform the creator regardless. | 570 | # Non-PPA notifications inform the creator regardless. |
459 | 558 | recipients = recipients.union( | 571 | recipients = recipients.union( |
460 | 559 | get_contact_email_addresses(creator)) | 572 | get_contact_email_addresses(creator)) |
462 | 560 | dsc_key = self.sourcepackagerelease.dscsigningkey | 573 | dsc_key = self.source_package_release.dscsigningkey |
463 | 561 | if dsc_key: | 574 | if dsc_key: |
464 | 562 | recipients = recipients.union( | 575 | recipients = recipients.union( |
465 | 563 | get_contact_email_addresses(dsc_key.owner)) | 576 | get_contact_email_addresses(dsc_key.owner)) |
466 | @@ -596,13 +609,13 @@ | |||
467 | 596 | # with the state in the build workflow, maybe by having an | 609 | # with the state in the build workflow, maybe by having an |
468 | 597 | # IBuild.statusReport property, which could also be used in the | 610 | # IBuild.statusReport property, which could also be used in the |
469 | 598 | # respective page template. | 611 | # respective page template. |
471 | 599 | if self.buildstate in [ | 612 | if self.status in [ |
472 | 600 | BuildStatus.NEEDSBUILD, BuildStatus.SUPERSEDED]: | 613 | BuildStatus.NEEDSBUILD, BuildStatus.SUPERSEDED]: |
473 | 601 | # untouched builds | 614 | # untouched builds |
474 | 602 | buildduration = 'not available' | 615 | buildduration = 'not available' |
475 | 603 | buildlog_url = 'not available' | 616 | buildlog_url = 'not available' |
476 | 604 | builder_url = 'not available' | 617 | builder_url = 'not available' |
478 | 605 | elif self.buildstate == BuildStatus.BUILDING: | 618 | elif self.status == BuildStatus.BUILDING: |
479 | 606 | # build in process | 619 | # build in process |
480 | 607 | buildduration = 'not finished' | 620 | buildduration = 'not finished' |
481 | 608 | buildlog_url = 'see builder page' | 621 | buildlog_url = 'see builder page' |
482 | @@ -610,11 +623,11 @@ | |||
483 | 610 | else: | 623 | else: |
484 | 611 | # completed states (success and failure) | 624 | # completed states (success and failure) |
485 | 612 | buildduration = DurationFormatterAPI( | 625 | buildduration = DurationFormatterAPI( |
488 | 613 | self.buildduration).approximateduration() | 626 | self.date_finished - self.date_started).approximateduration() |
489 | 614 | buildlog_url = self.build_log_url | 627 | buildlog_url = self.log_url |
490 | 615 | builder_url = canonical_url(self.builder) | 628 | builder_url = canonical_url(self.builder) |
491 | 616 | 629 | ||
493 | 617 | if self.buildstate == BuildStatus.FAILEDTOUPLOAD: | 630 | if self.status == BuildStatus.FAILEDTOUPLOAD: |
494 | 618 | assert extra_info is not None, ( | 631 | assert extra_info is not None, ( |
495 | 619 | 'Extra information is required for FAILEDTOUPLOAD ' | 632 | 'Extra information is required for FAILEDTOUPLOAD ' |
496 | 620 | 'notifications.') | 633 | 'notifications.') |
497 | @@ -624,10 +637,10 @@ | |||
498 | 624 | 637 | ||
499 | 625 | template = get_email_template('build-notification.txt') | 638 | template = get_email_template('build-notification.txt') |
500 | 626 | replacements = { | 639 | replacements = { |
505 | 627 | 'source_name': self.sourcepackagerelease.name, | 640 | 'source_name': self.source_package_release.name, |
506 | 628 | 'source_version': self.sourcepackagerelease.version, | 641 | 'source_version': self.source_package_release.version, |
507 | 629 | 'architecturetag': self.distroarchseries.architecturetag, | 642 | 'architecturetag': self.distro_arch_series.architecturetag, |
508 | 630 | 'build_state': self.buildstate.title, | 643 | 'build_state': self.status.title, |
509 | 631 | 'build_duration': buildduration, | 644 | 'build_duration': buildduration, |
510 | 632 | 'buildlog_url': buildlog_url, | 645 | 'buildlog_url': buildlog_url, |
511 | 633 | 'builder_url': builder_url, | 646 | 'builder_url': builder_url, |
512 | @@ -661,7 +674,7 @@ | |||
513 | 661 | if filename.endswith('.changes'): | 674 | if filename.endswith('.changes'): |
514 | 662 | file_object = self.upload_changesfile | 675 | file_object = self.upload_changesfile |
515 | 663 | elif filename.endswith('.txt.gz'): | 676 | elif filename.endswith('.txt.gz'): |
517 | 664 | file_object = self.buildlog | 677 | file_object = self.log |
518 | 665 | elif filename.endswith('_log.txt'): | 678 | elif filename.endswith('_log.txt'): |
519 | 666 | file_object = self.upload_log | 679 | file_object = self.upload_log |
520 | 667 | elif filename.endswith('deb'): | 680 | elif filename.endswith('deb'): |
521 | @@ -868,7 +881,7 @@ | |||
522 | 868 | # and share it with ISourcePackage.getBuildRecords() | 881 | # and share it with ISourcePackage.getBuildRecords() |
523 | 869 | 882 | ||
524 | 870 | # exclude gina-generated and security (dak-made) builds | 883 | # exclude gina-generated and security (dak-made) builds |
526 | 871 | # buildstate == FULLYBUILT && datebuilt == null | 884 | # status == FULLYBUILT && datebuilt == null |
527 | 872 | if status == BuildStatus.FULLYBUILT: | 885 | if status == BuildStatus.FULLYBUILT: |
528 | 873 | condition_clauses.append("BuildFarmJob.date_finished IS NOT NULL") | 886 | condition_clauses.append("BuildFarmJob.date_finished IS NOT NULL") |
529 | 874 | else: | 887 | else: |
530 | @@ -927,8 +940,8 @@ | |||
531 | 927 | 940 | ||
532 | 928 | # Get the MANUALDEPWAIT records for all archives. | 941 | # Get the MANUALDEPWAIT records for all archives. |
533 | 929 | candidates = BinaryPackageBuild.selectBy( | 942 | candidates = BinaryPackageBuild.selectBy( |
536 | 930 | buildstate=BuildStatus.MANUALDEPWAIT, | 943 | status=BuildStatus.MANUALDEPWAIT, |
537 | 931 | distroarchseries=distroarchseries) | 944 | distro_arch_series=distroarchseries) |
538 | 932 | 945 | ||
539 | 933 | candidates_count = candidates.count() | 946 | candidates_count = candidates.count() |
540 | 934 | if candidates_count == 0: | 947 | if candidates_count == 0: |
541 | @@ -978,7 +991,7 @@ | |||
542 | 978 | wanted = [] | 991 | wanted = [] |
543 | 979 | for state in states: | 992 | for state in states: |
544 | 980 | candidates = [build for build in builds | 993 | candidates = [build for build in builds |
546 | 981 | if build.buildstate == state] | 994 | if build.status == state] |
547 | 982 | wanted.extend(candidates) | 995 | wanted.extend(candidates) |
548 | 983 | return wanted | 996 | return wanted |
549 | 984 | 997 | ||
550 | 985 | 998 | ||
551 | === modified file 'lib/lp/soyuz/model/binarypackagerelease.py' | |||
552 | --- lib/lp/soyuz/model/binarypackagerelease.py 2010-04-12 11:37:48 +0000 | |||
553 | +++ lib/lp/soyuz/model/binarypackagerelease.py 2010-05-12 11:01:39 +0000 | |||
554 | @@ -84,17 +84,17 @@ | |||
555 | 84 | import DistributionSourcePackageRelease | 84 | import DistributionSourcePackageRelease |
556 | 85 | return DistributionSourcePackageRelease( | 85 | return DistributionSourcePackageRelease( |
557 | 86 | distribution=self.build.distribution, | 86 | distribution=self.build.distribution, |
559 | 87 | sourcepackagerelease=self.build.sourcepackagerelease) | 87 | sourcepackagerelease=self.build.source_package_release) |
560 | 88 | 88 | ||
561 | 89 | @property | 89 | @property |
562 | 90 | def sourcepackagename(self): | 90 | def sourcepackagename(self): |
563 | 91 | """See `IBinaryPackageRelease`.""" | 91 | """See `IBinaryPackageRelease`.""" |
565 | 92 | return self.build.sourcepackagerelease.sourcepackagename.name | 92 | return self.build.source_package_release.sourcepackagename.name |
566 | 93 | 93 | ||
567 | 94 | @property | 94 | @property |
568 | 95 | def is_new(self): | 95 | def is_new(self): |
569 | 96 | """See `IBinaryPackageRelease`.""" | 96 | """See `IBinaryPackageRelease`.""" |
571 | 97 | distroarchseries = self.build.distroarchseries | 97 | distroarchseries = self.build.distro_arch_series |
572 | 98 | distroarchseries_binary_package = distroarchseries.getBinaryPackage( | 98 | distroarchseries_binary_package = distroarchseries.getBinaryPackage( |
573 | 99 | self.binarypackagename) | 99 | self.binarypackagename) |
574 | 100 | return distroarchseries_binary_package.currentrelease is None | 100 | return distroarchseries_binary_package.currentrelease is None |
575 | 101 | 101 | ||
576 | === modified file 'lib/lp/soyuz/model/distributionsourcepackagerelease.py' | |||
577 | --- lib/lp/soyuz/model/distributionsourcepackagerelease.py 2010-04-12 08:29:02 +0000 | |||
578 | +++ lib/lp/soyuz/model/distributionsourcepackagerelease.py 2010-05-12 11:01:39 +0000 | |||
579 | @@ -16,11 +16,13 @@ | |||
580 | 16 | 16 | ||
581 | 17 | from storm.expr import Desc | 17 | from storm.expr import Desc |
582 | 18 | 18 | ||
583 | 19 | from canonical.database.sqlbase import sqlvalues | ||
584 | 20 | |||
585 | 21 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
586 | 22 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
587 | 19 | from lp.soyuz.interfaces.distributionsourcepackagerelease import ( | 23 | from lp.soyuz.interfaces.distributionsourcepackagerelease import ( |
588 | 20 | IDistributionSourcePackageRelease) | 24 | IDistributionSourcePackageRelease) |
589 | 21 | from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease | 25 | from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease |
590 | 22 | from canonical.database.sqlbase import sqlvalues | ||
591 | 23 | |||
592 | 24 | from lp.soyuz.model.archive import Archive | 26 | from lp.soyuz.model.archive import Archive |
593 | 25 | from lp.soyuz.model.binarypackagename import BinaryPackageName | 27 | from lp.soyuz.model.binarypackagename import BinaryPackageName |
594 | 26 | from lp.soyuz.model.binarypackagerelease import ( | 28 | from lp.soyuz.model.binarypackagerelease import ( |
595 | @@ -100,11 +102,13 @@ | |||
596 | 100 | # distribution that were built for a PPA but have been published | 102 | # distribution that were built for a PPA but have been published |
597 | 101 | # in a main archive. | 103 | # in a main archive. |
598 | 102 | builds_for_distro_exprs = ( | 104 | builds_for_distro_exprs = ( |
600 | 103 | (BinaryPackageBuild.sourcepackagerelease == | 105 | (BinaryPackageBuild.source_package_release == |
601 | 104 | self.sourcepackagerelease), | 106 | self.sourcepackagerelease), |
603 | 105 | BinaryPackageBuild.distroarchseries == DistroArchSeries.id, | 107 | BinaryPackageBuild.distro_arch_series == DistroArchSeries.id, |
604 | 106 | DistroArchSeries.distroseries == DistroSeries.id, | 108 | DistroArchSeries.distroseries == DistroSeries.id, |
605 | 107 | DistroSeries.distribution == self.distribution, | 109 | DistroSeries.distribution == self.distribution, |
606 | 110 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
607 | 111 | PackageBuild.build_farm_job == BuildFarmJob.id | ||
608 | 108 | ) | 112 | ) |
609 | 109 | 113 | ||
610 | 110 | # First, get all the builds built in a main archive (this will | 114 | # First, get all the builds built in a main archive (this will |
611 | @@ -112,7 +116,7 @@ | |||
612 | 112 | builds_built_in_main_archives = store.find( | 116 | builds_built_in_main_archives = store.find( |
613 | 113 | BinaryPackageBuild, | 117 | BinaryPackageBuild, |
614 | 114 | builds_for_distro_exprs, | 118 | builds_for_distro_exprs, |
616 | 115 | BinaryPackageBuild.archive == Archive.id, | 119 | PackageBuild.archive == Archive.id, |
617 | 116 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)) | 120 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)) |
618 | 117 | 121 | ||
619 | 118 | # Next get all the builds that have a binary published in the | 122 | # Next get all the builds that have a binary published in the |
620 | @@ -132,7 +136,7 @@ | |||
621 | 132 | return builds_built_in_main_archives.union( | 136 | return builds_built_in_main_archives.union( |
622 | 133 | builds_published_in_main_archives).order_by( | 137 | builds_published_in_main_archives).order_by( |
623 | 134 | Desc( | 138 | Desc( |
625 | 135 | BinaryPackageBuild.datecreated), Desc(BinaryPackageBuild.id)) | 139 | BuildFarmJob.date_created), Desc(BinaryPackageBuild.id)) |
626 | 136 | 140 | ||
627 | 137 | @property | 141 | @property |
628 | 138 | def binary_package_names(self): | 142 | def binary_package_names(self): |
629 | 139 | 143 | ||
630 | === modified file 'lib/lp/soyuz/model/publishing.py' | |||
631 | --- lib/lp/soyuz/model/publishing.py 2010-05-12 11:01:28 +0000 | |||
632 | +++ lib/lp/soyuz/model/publishing.py 2010-05-12 11:01:39 +0000 | |||
633 | @@ -439,15 +439,16 @@ | |||
634 | 439 | BinaryPackageRelease.id AND | 439 | BinaryPackageRelease.id AND |
635 | 440 | BinaryPackagePublishingHistory.distroarchseries= | 440 | BinaryPackagePublishingHistory.distroarchseries= |
636 | 441 | DistroArchSeries.id AND | 441 | DistroArchSeries.id AND |
639 | 442 | BinaryPackageRelease.build=Build.id AND | 442 | BinaryPackageRelease.build=BinaryPackageBuild.id AND |
640 | 443 | Build.sourcepackagerelease=%s AND | 443 | BinaryPackageBuild.source_package_release=%s AND |
641 | 444 | DistroArchSeries.distroseries=%s AND | 444 | DistroArchSeries.distroseries=%s AND |
642 | 445 | BinaryPackagePublishingHistory.archive=%s AND | 445 | BinaryPackagePublishingHistory.archive=%s AND |
643 | 446 | BinaryPackagePublishingHistory.pocket=%s | 446 | BinaryPackagePublishingHistory.pocket=%s |
644 | 447 | """ % sqlvalues(self.sourcepackagerelease, self.distroseries, | 447 | """ % sqlvalues(self.sourcepackagerelease, self.distroseries, |
645 | 448 | self.archive, self.pocket) | 448 | self.archive, self.pocket) |
646 | 449 | 449 | ||
648 | 450 | clauseTables = ['Build', 'BinaryPackageRelease', 'DistroArchSeries'] | 450 | clauseTables = [ |
649 | 451 | 'BinaryPackageBuild', 'BinaryPackageRelease', 'DistroArchSeries'] | ||
650 | 451 | orderBy = ['-BinaryPackagePublishingHistory.id'] | 452 | orderBy = ['-BinaryPackagePublishingHistory.id'] |
651 | 452 | preJoins = ['binarypackagerelease'] | 453 | preJoins = ['binarypackagerelease'] |
652 | 453 | 454 | ||
653 | @@ -565,7 +566,7 @@ | |||
654 | 565 | # Check DistroArchSeries database IDs because the object belongs | 566 | # Check DistroArchSeries database IDs because the object belongs |
655 | 566 | # to different transactions (architecture_available is cached). | 567 | # to different transactions (architecture_available is cached). |
656 | 567 | if (build_candidate is not None and | 568 | if (build_candidate is not None and |
658 | 568 | (build_candidate.distroarchseries.id == arch.id or | 569 | (build_candidate.distro_arch_series.id == arch.id or |
659 | 569 | build_candidate.buildstate == BuildStatus.FULLYBUILT)): | 570 | build_candidate.buildstate == BuildStatus.FULLYBUILT)): |
660 | 570 | return None | 571 | return None |
661 | 571 | 572 | ||
662 | @@ -869,7 +870,7 @@ | |||
663 | 869 | def buildIndexStanzaFields(self): | 870 | def buildIndexStanzaFields(self): |
664 | 870 | """See `IPublishing`.""" | 871 | """See `IPublishing`.""" |
665 | 871 | bpr = self.binarypackagerelease | 872 | bpr = self.binarypackagerelease |
667 | 872 | spr = bpr.build.sourcepackagerelease | 873 | spr = bpr.build.source_package_release |
668 | 873 | 874 | ||
669 | 874 | # binaries have only one file, the DEB | 875 | # binaries have only one file, the DEB |
670 | 875 | bin_file = bpr.files[0] | 876 | bin_file = bpr.files[0] |
671 | 876 | 877 | ||
672 | === modified file 'lib/lp/soyuz/model/queue.py' | |||
673 | --- lib/lp/soyuz/model/queue.py 2010-04-22 16:39:05 +0000 | |||
674 | +++ lib/lp/soyuz/model/queue.py 2010-05-12 11:01:39 +0000 | |||
675 | @@ -509,7 +509,7 @@ | |||
676 | 509 | for queue_source in self.sources: | 509 | for queue_source in self.sources: |
677 | 510 | names.append(queue_source.sourcepackagerelease.name) | 510 | names.append(queue_source.sourcepackagerelease.name) |
678 | 511 | for queue_build in self.builds: | 511 | for queue_build in self.builds: |
680 | 512 | names.append(queue_build.build.sourcepackagerelease.name) | 512 | names.append(queue_build.build.source_package_release.name) |
681 | 513 | for queue_custom in self.customfiles: | 513 | for queue_custom in self.customfiles: |
682 | 514 | names.append(queue_custom.libraryfilealias.filename) | 514 | names.append(queue_custom.libraryfilealias.filename) |
683 | 515 | # Make sure the list items have a whitespace separator so | 515 | # Make sure the list items have a whitespace separator so |
684 | @@ -1413,7 +1413,7 @@ | |||
685 | 1413 | def publish(self, logger=None): | 1413 | def publish(self, logger=None): |
686 | 1414 | """See `IPackageUploadBuild`.""" | 1414 | """See `IPackageUploadBuild`.""" |
687 | 1415 | # Determine the build's architecturetag | 1415 | # Determine the build's architecturetag |
689 | 1416 | build_archtag = self.build.distroarchseries.architecturetag | 1416 | build_archtag = self.build.distro_arch_series.architecturetag |
690 | 1417 | # Determine the target arch series. | 1417 | # Determine the target arch series. |
691 | 1418 | # This will raise NotFoundError if anything odd happens. | 1418 | # This will raise NotFoundError if anything odd happens. |
692 | 1419 | target_dar = self.packageupload.distroseries[build_archtag] | 1419 | target_dar = self.packageupload.distroseries[build_archtag] |
693 | 1420 | 1420 | ||
694 | === modified file 'lib/lp/soyuz/model/sourcepackagerelease.py' | |||
695 | --- lib/lp/soyuz/model/sourcepackagerelease.py 2010-05-12 11:01:28 +0000 | |||
696 | +++ lib/lp/soyuz/model/sourcepackagerelease.py 2010-05-12 11:01:39 +0000 | |||
697 | @@ -146,12 +146,14 @@ | |||
698 | 146 | # a build may well have a different archive to the corresponding | 146 | # a build may well have a different archive to the corresponding |
699 | 147 | # sourcepackagerelease. | 147 | # sourcepackagerelease. |
700 | 148 | return BinaryPackageBuild.select(""" | 148 | return BinaryPackageBuild.select(""" |
703 | 149 | sourcepackagerelease = %s AND | 149 | source_package_release = %s AND |
704 | 150 | archive.id = build.archive AND | 150 | package_build = packagebuild.id AND |
705 | 151 | archive.id = packagebuild.archive AND | ||
706 | 152 | packagebuild.build_farm_job = buildfarmjob.id AND | ||
707 | 151 | archive.purpose IN %s | 153 | archive.purpose IN %s |
708 | 152 | """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES), | 154 | """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES), |
711 | 153 | orderBy=['-datecreated', 'id'], | 155 | orderBy=['-buildfarmjob.date_created', 'id'], |
712 | 154 | clauseTables=['Archive']) | 156 | clauseTables=['Archive', 'PackageBuild', 'BuildFarmJob']) |
713 | 155 | 157 | ||
714 | 156 | @property | 158 | @property |
715 | 157 | def age(self): | 159 | def age(self): |
716 | @@ -173,7 +175,7 @@ | |||
717 | 173 | @property | 175 | @property |
718 | 174 | def needs_building(self): | 176 | def needs_building(self): |
719 | 175 | for build in self._cached_builds: | 177 | for build in self._cached_builds: |
721 | 176 | if build.buildstate in [BuildStatus.NEEDSBUILD, | 178 | if build.status in [BuildStatus.NEEDSBUILD, |
722 | 177 | BuildStatus.MANUALDEPWAIT, | 179 | BuildStatus.MANUALDEPWAIT, |
723 | 178 | BuildStatus.CHROOTWAIT]: | 180 | BuildStatus.CHROOTWAIT]: |
724 | 179 | return True | 181 | return True |
725 | 180 | 182 | ||
726 | === modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py' | |||
727 | --- lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-05-12 11:01:28 +0000 | |||
728 | +++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-05-12 11:01:39 +0000 | |||
729 | @@ -60,6 +60,34 @@ | |||
730 | 60 | self.failIfEqual(None, bq.processor) | 60 | self.failIfEqual(None, bq.processor) |
731 | 61 | self.failUnless(bq, self.build.buildqueue_record) | 61 | self.failUnless(bq, self.build.buildqueue_record) |
732 | 62 | 62 | ||
733 | 63 | def addFakeBuildLog(self): | ||
734 | 64 | lfa = self.factory.makeLibraryFileAlias('mybuildlog.txt') | ||
735 | 65 | removeSecurityProxy(self.build).log = lfa | ||
736 | 66 | |||
737 | 67 | def test_log_url(self): | ||
738 | 68 | # The log URL for a binary package build will use | ||
739 | 69 | # the distribution source package release when the context | ||
740 | 70 | # is not a PPA or a copy archive. | ||
741 | 71 | self.addFakeBuildLog() | ||
742 | 72 | self.failUnlessEqual( | ||
743 | 73 | 'http://launchpad.dev/ubuntutest/+source/' | ||
744 | 74 | 'gedit/666/+build/%d/+files/mybuildlog.txt' % ( | ||
745 | 75 | self.build.package_build.build_farm_job.id), | ||
746 | 76 | self.build.log_url) | ||
747 | 77 | |||
748 | 78 | def test_log_url_ppa(self): | ||
749 | 79 | # On the other hand, ppa or copy builds will have a url in the | ||
750 | 80 | # context of the archive. | ||
751 | 81 | self.addFakeBuildLog() | ||
752 | 82 | ppa_owner = self.factory.makePerson(name="joe") | ||
753 | 83 | removeSecurityProxy(self.build).archive = self.factory.makeArchive( | ||
754 | 84 | owner=ppa_owner, name="myppa") | ||
755 | 85 | self.failUnlessEqual( | ||
756 | 86 | 'http://launchpad.dev/~joe/' | ||
757 | 87 | '+archive/myppa/+build/%d/+files/mybuildlog.txt' % ( | ||
758 | 88 | self.build.build_farm_job.id), | ||
759 | 89 | self.build.log_url) | ||
760 | 90 | |||
761 | 63 | 91 | ||
762 | 64 | class TestBuildUpdateDependencies(TestCaseWithFactory): | 92 | class TestBuildUpdateDependencies(TestCaseWithFactory): |
763 | 65 | 93 | ||
764 | 66 | 94 | ||
765 | === modified file 'lib/lp/soyuz/tests/test_packageupload.py' | |||
766 | --- lib/lp/soyuz/tests/test_packageupload.py 2010-03-24 11:58:07 +0000 | |||
767 | +++ lib/lp/soyuz/tests/test_packageupload.py 2010-05-12 11:01:39 +0000 | |||
768 | @@ -273,7 +273,7 @@ | |||
769 | 273 | 'main/dist-upgrader-all') | 273 | 'main/dist-upgrader-all') |
770 | 274 | self.assertEquals( | 274 | self.assertEquals( |
771 | 275 | ['20060302.0120', 'current'], sorted(os.listdir(custom_path))) | 275 | ['20060302.0120', 'current'], sorted(os.listdir(custom_path))) |
773 | 276 | 276 | ||
774 | 277 | # The custom files were also copied to the public librarian | 277 | # The custom files were also copied to the public librarian |
775 | 278 | for customfile in delayed_copy.customfiles: | 278 | for customfile in delayed_copy.customfiles: |
776 | 279 | self.assertFalse(customfile.libraryfilealias.restricted) | 279 | self.assertFalse(customfile.libraryfilealias.restricted) |
777 | @@ -300,7 +300,7 @@ | |||
778 | 300 | [pub_record] = pub_records | 300 | [pub_record] = pub_records |
779 | 301 | [build] = pub_record.getBuilds() | 301 | [build] = pub_record.getBuilds() |
780 | 302 | self.assertEquals( | 302 | self.assertEquals( |
782 | 303 | BuildStatus.NEEDSBUILD, build.buildstate) | 303 | BuildStatus.NEEDSBUILD, build.status) |
783 | 304 | 304 | ||
784 | 305 | def test_realiseUpload_for_overridden_component_archive(self): | 305 | def test_realiseUpload_for_overridden_component_archive(self): |
785 | 306 | # If the component of an upload is overridden to 'Partner' for | 306 | # If the component of an upload is overridden to 'Partner' for |
Looks good.