Merge lp:~stevenk/launchpad/bpb-currentcomponent-assertion-part-3 into lp:launchpad
- bpb-currentcomponent-assertion-part-3
- Merge into devel
Status: | Merged |
---|---|
Approved by: | Graham Binns |
Approved revision: | no longer in the source branch. |
Merged at revision: | 12253 |
Proposed branch: | lp:~stevenk/launchpad/bpb-currentcomponent-assertion-part-3 |
Merge into: | lp:launchpad |
Prerequisite: | lp:~stevenk/launchpad/bpb-currentcomponent-assertion-part-2 |
Diff against target: |
736 lines (+187/-455) 3 files modified
lib/lp/soyuz/doc/binarypackagebuild.txt (+0/-443) lib/lp/soyuz/tests/test_binarypackagebuild.py (+3/-3) lib/lp/soyuz/tests/test_hasbuildrecords.py (+184/-9) |
To merge this branch: | bzr merge lp:~stevenk/launchpad/bpb-currentcomponent-assertion-part-3 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Graham Binns (community) | code | Approve | |
Review via email: mp+46209@code.launchpad.net |
Commit message
Description of the change
This branch moves more of lib/lp/soyuz/doc/binarypackagebuild.txt into unit tests in lib/lp/soyuz/tests/test_hasbuildrecords.py.
Graham Binns (gmb) wrote : | # |
Oops. Missed part of the diff.
Graham Binns (gmb) wrote : | # |
This method is crying out for some comments explaining what it's actually doing and why. I mean, I get that it's setting things up for later tests, but it took me a couple of goes to understand what each bit does. If you can add comments to explain it a bit better, that'd be great.
538 + def setUp(self):
539 + super(TestDistributionHasBuildRecords, self).setUp()
540 + self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
541 + self.pf_one = self.factory.makeProcessorFamily()
542 + pf_proc_1 = self.pf_one.addProcessor(
543 + self.factory.getUniqueString(), '', '')
544 + self.pf_two = self.factory.makeProcessorFamily()
545 + pf_proc_2 = self.pf_two.addProcessor(
546 + self.factory.getUniqueString(), '', '')
547 + self.distroseries = self.factory.makeDistroSeries()
548 + self.distribution = self.distroseries.distribution
549 + self.das_one = self.factory.makeDistroArchSeries(
550 + distroseries=self.distroseries, processorfamily=self.pf_one,
551 + supports_virtualized=True)
552 + self.das_two = self.factory.makeDistroArchSeries(
553 + distroseries=self.distroseries, processorfamily=self.pf_two,
554 + supports_virtualized=True)
555 + self.archive = self.factory.makeArchive(
556 + distribution=self.distroseries.distribution,
557 + purpose=ArchivePurpose.PRIMARY)
558 + self.arch_ids = [arch.id for arch in self.distroseries.architectures]
559 + with person_logged_in(self.admin):
560 + self.publisher = SoyuzTestPublisher()
561 + self.publisher.prepareBreezyAutotest()
562 + self.distroseries.nominatedarchindep = self.das_one
563 + self.publisher.addFakeChroots(distroseries=self.distroseries)
564 + self.builder_one = self.factory.makeBuilder(processor=pf_proc_1)
565 + self.builder_two = self.factory.makeBuilder(processor=pf_proc_2)
566 + self.builds = []
567 + self.createBuilds()
Also, bigjool's concerns on IRC need to be dealt with.
Graham Binns (gmb) wrote : | # |
Happy joy happy happy joy.
Preview Diff
1 | === modified file 'lib/lp/soyuz/doc/binarypackagebuild.txt' | |||
2 | --- lib/lp/soyuz/doc/binarypackagebuild.txt 2011-01-20 21:02:32 +0000 | |||
3 | +++ lib/lp/soyuz/doc/binarypackagebuild.txt 2011-01-20 21:02:59 +0000 | |||
4 | @@ -1,446 +1,3 @@ | |||
5 | 1 | == The BuildSet Class == | ||
6 | 2 | |||
7 | 3 | The BuildSet class gives us some useful ways to consider the | ||
8 | 4 | collection of builds. | ||
9 | 5 | |||
10 | 6 | >>> bs = getUtility(IBinaryPackageBuildSet) | ||
11 | 7 | |||
12 | 8 | We can find builds given a source package release and architecture tag. | ||
13 | 9 | |||
14 | 10 | >>> i386_builds = bs.getBuildBySRAndArchtag(20, 'i386') | ||
15 | 11 | >>> i386_builds.count() | ||
16 | 12 | 4 | ||
17 | 13 | |||
18 | 14 | IHasBuildRecords uses a base method provided by IBinaryPackageBuildSet, | ||
19 | 15 | getBuildsByArchIds(): | ||
20 | 16 | |||
21 | 17 | It receives list of architectures IDs: | ||
22 | 18 | |||
23 | 19 | >>> hoary = ubuntu.getSeries('hoary') | ||
24 | 20 | >>> arch_ids = [arch.id for arch in hoary.architectures] | ||
25 | 21 | >>> bs.getBuildsByArchIds(ubuntu, arch_ids).count() | ||
26 | 22 | 5 | ||
27 | 23 | |||
28 | 24 | It still working for empty list or None: | ||
29 | 25 | |||
30 | 26 | >>> bs.getBuildsByArchIds(ubuntu, []).count() | ||
31 | 27 | 0 | ||
32 | 28 | |||
33 | 29 | >>> bs.getBuildsByArchIds(ubuntu, None).count() | ||
34 | 30 | 0 | ||
35 | 31 | |||
36 | 32 | Using build status, only the successfully built ones: | ||
37 | 33 | |||
38 | 34 | >>> bs.getBuildsByArchIds(ubuntu, arch_ids, | ||
39 | 35 | ... status=BuildStatus.FULLYBUILT).count() | ||
40 | 36 | 2 | ||
41 | 37 | |||
42 | 38 | Check the result content: | ||
43 | 39 | |||
44 | 40 | >>> [b.title for b in bs.getBuildsByArchIds(ubuntu, arch_ids, | ||
45 | 41 | ... status=BuildStatus.FULLYBUILT)] | ||
46 | 42 | [u'i386 build of pmount 0.1-1 in ubuntu hoary RELEASE', u'hppa build | ||
47 | 43 | of pmount 0.1-1 in ubuntu hoary RELEASE'] | ||
48 | 44 | |||
49 | 45 | Using optional 'name' filter (matching with SQL LIKE %||filter||%) | ||
50 | 46 | |||
51 | 47 | >>> bs.getBuildsByArchIds(ubuntu, arch_ids, | ||
52 | 48 | ... status=BuildStatus.FULLYBUILT, | ||
53 | 49 | ... name='pmo').count() | ||
54 | 50 | 2 | ||
55 | 51 | |||
56 | 52 | Checking optional 'pocket' restriction: | ||
57 | 53 | |||
58 | 54 | >>> from lp.registry.interfaces.pocket import PackagePublishingPocket | ||
59 | 55 | >>> bs.getBuildsByArchIds(ubuntu, arch_ids, | ||
60 | 56 | ... pocket=PackagePublishingPocket.UPDATES).count() | ||
61 | 57 | 0 | ||
62 | 58 | |||
63 | 59 | >>> bs.getBuildsByArchIds(ubuntu, arch_ids, | ||
64 | 60 | ... pocket=PackagePublishingPocket.RELEASE).count() | ||
65 | 61 | 5 | ||
66 | 62 | |||
67 | 63 | getBuildsByArchIds will also return builds for archives other than the | ||
68 | 64 | primary archive. | ||
69 | 65 | |||
70 | 66 | >>> breezy = ubuntu.getSeries('breezy-autotest') | ||
71 | 67 | >>> arch_ids = [arch.id for arch in breezy.architectures] | ||
72 | 68 | >>> [(build.archive.purpose.name, build.title) for build in | ||
73 | 69 | ... bs.getBuildsByArchIds(ubuntu, arch_ids, name='commercialpackage')] | ||
74 | 70 | [('PARTNER', u'i386 build of commercialpackage 1.0-1 in ubuntu breezy-autotest RELEASE')] | ||
75 | 71 | |||
76 | 72 | `IBinaryPackageBuildSet` also provides getStatusSummaryForBuilds which | ||
77 | 73 | summarizes the build status of a set of builds: | ||
78 | 74 | |||
79 | 75 | First we'll define a helper to print the build summary: | ||
80 | 76 | |||
81 | 77 | >>> def print_build_summary(summary): | ||
82 | 78 | ... print "%s\n%s\nRelevant builds:\n%s" % ( | ||
83 | 79 | ... summary['status'].title, | ||
84 | 80 | ... summary['status'].description, | ||
85 | 81 | ... "\n".join( | ||
86 | 82 | ... " - %s" % build.title for build in summary['builds']) | ||
87 | 83 | ... ) | ||
88 | 84 | |||
89 | 85 | >>> build_summary = bs.getStatusSummaryForBuilds(i386_builds) | ||
90 | 86 | >>> print_build_summary(build_summary) | ||
91 | 87 | NEEDSBUILD | ||
92 | 88 | There are some builds waiting to be built. | ||
93 | 89 | Relevant builds: | ||
94 | 90 | - i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
95 | 91 | |||
96 | 92 | The build set class furthermore provides a mechanism to load build-related | ||
97 | 93 | data from the database for a given set of builds: | ||
98 | 94 | |||
99 | 95 | >>> from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild | ||
100 | 96 | >>> from storm.expr import In | ||
101 | 97 | >>> from canonical.launchpad.webapp.interfaces import ( | ||
102 | 98 | ... IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) | ||
103 | 99 | >>> store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | ||
104 | 100 | >>> results = list(store.find( | ||
105 | 101 | ... BinaryPackageBuild, In(BinaryPackageBuild.id, (2,6,7,8)))) | ||
106 | 102 | >>> rset = removeSecurityProxy(bs)._prefetchBuildData(results) | ||
107 | 103 | >>> def filename_or_none(item): | ||
108 | 104 | ... if item is not None: | ||
109 | 105 | ... return '%s' % item.filename | ||
110 | 106 | ... else: | ||
111 | 107 | ... return 'n/a' | ||
112 | 108 | >>> def id_or_none(item): | ||
113 | 109 | ... if item is not None: | ||
114 | 110 | ... return '%d' % item.id | ||
115 | 111 | ... else: | ||
116 | 112 | ... return 'n/a' | ||
117 | 113 | >>> def sort_result_key(row): | ||
118 | 114 | ... return row[0].id | ||
119 | 115 | >>> for row in sorted(rset, key=sort_result_key): | ||
120 | 116 | ... (sourcepackagerelease, buildlog, | ||
121 | 117 | ... sourcepackagename, buildlog_content, builder, | ||
122 | 118 | ... package_build, build_farm_job) = row | ||
123 | 119 | ... print( | ||
124 | 120 | ... 'builder: %s, spr: %s, log: %s' % | ||
125 | 121 | ... (id_or_none(builder), | ||
126 | 122 | ... sourcepackagerelease.title, filename_or_none(buildlog))) | ||
127 | 123 | builder: 1, spr: mozilla-firefox - 0.9, log: netapplet-1.0.0.tar.gz | ||
128 | 124 | builder: n/a, spr: mozilla-firefox - 0.9, log: n/a | ||
129 | 125 | builder: 1, spr: pmount - 0.1-1, log: netapplet-1.0.0.tar.gz | ||
130 | 126 | builder: 1, spr: foobar - 1.0, log: netapplet-1.0.0.tar.gz | ||
131 | 127 | |||
132 | 128 | |||
133 | 129 | == IHadBuildRecords.getBuildRecords() Implementations == | ||
134 | 130 | |||
135 | 131 | XXX: Michael Nelson 20090701 bug=394276 | ||
136 | 132 | The documentation for IHasBuildRecords is now in | ||
137 | 133 | lp/soyuz/doc/hasbuildrecords.txt. The following implementation tests should | ||
138 | 134 | be converted to unit-tests in lib/soyuz/tests/test_hasbuildrecords.py. | ||
139 | 135 | |||
140 | 136 | We can find recent and pending builds for a given distrarchoseries. | ||
141 | 137 | |||
142 | 138 | >>> hoaryi386 = hoary['i386'] | ||
143 | 139 | >>> hoaryi386.title | ||
144 | 140 | u'The Hoary Hedgehog Release for i386 (x86)' | ||
145 | 141 | |||
146 | 142 | Exercises IHasBuildRecords abilities for distroarchseriess | ||
147 | 143 | |||
148 | 144 | >>> hoaryi386.getBuildRecords().count() | ||
149 | 145 | 4 | ||
150 | 146 | |||
151 | 147 | >>> hoaryi386.getBuildRecords(build_state=BuildStatus.FULLYBUILT).count() | ||
152 | 148 | 1 | ||
153 | 149 | |||
154 | 150 | >>> hoaryi386.getBuildRecords(name='pm').count() | ||
155 | 151 | 1 | ||
156 | 152 | |||
157 | 153 | >>> hoaryi386.getBuildRecords( | ||
158 | 154 | ... pocket=PackagePublishingPocket.RELEASE).count() | ||
159 | 155 | 4 | ||
160 | 156 | |||
161 | 157 | >>> hoaryi386.getBuildRecords( | ||
162 | 158 | ... pocket=PackagePublishingPocket.UPDATES).count() | ||
163 | 159 | 0 | ||
164 | 160 | |||
165 | 161 | |||
166 | 162 | For SourcePackages, getBuildRecords() returns all build records | ||
167 | 163 | published in its context (distroseries and distribution main | ||
168 | 164 | archives), independent of their corresponding source publishing | ||
169 | 165 | status. | ||
170 | 166 | |||
171 | 167 | >>> firefox = warty.getSourcePackage('mozilla-firefox') | ||
172 | 168 | |||
173 | 169 | >>> firefox.getBuildRecords().count() | ||
174 | 170 | 8 | ||
175 | 171 | |||
176 | 172 | >>> firefox.getBuildRecords( | ||
177 | 173 | ... build_state=BuildStatus.FULLYBUILT).count() | ||
178 | 174 | 6 | ||
179 | 175 | |||
180 | 176 | >>> firefox.getBuildRecords( | ||
181 | 177 | ... pocket=PackagePublishingPocket.RELEASE).count() | ||
182 | 178 | 8 | ||
183 | 179 | |||
184 | 180 | >>> firefox.getBuildRecords( | ||
185 | 181 | ... pocket=PackagePublishingPocket.UPDATES).count() | ||
186 | 182 | 0 | ||
187 | 183 | |||
188 | 184 | As mentioned above, SourcePackage.getBuildRecords() will return builds | ||
189 | 185 | for packages that are no longer published. At first, there are no | ||
190 | 186 | traces of the 'old-source' sourcepackage in ubuntutest/breezy-autotest | ||
191 | 187 | |||
192 | 188 | >>> ubuntutest = getUtility(IDistributionSet).getByName('ubuntutest') | ||
193 | 189 | >>> breezy_autotest = ubuntutest.getSeries('breezy-autotest') | ||
194 | 190 | >>> print breezy_autotest.getSourcePackage('old-source') | ||
195 | 191 | None | ||
196 | 192 | |||
197 | 193 | Once the SourcePackage exists and has builds, they will be returned by | ||
198 | 194 | getBuildRecords() ordered by descending creation date. | ||
199 | 195 | |||
200 | 196 | # Create a DELETED and a SUPERSEDED source publication in | ||
201 | 197 | # ubuntutest/breezy-autotest. | ||
202 | 198 | >>> from lp.soyuz.enums import PackagePublishingStatus | ||
203 | 199 | >>> login('foo.bar@canonical.com') | ||
204 | 200 | >>> old_source_pub = test_publisher.getPubSource( | ||
205 | 201 | ... sourcename='old-source', version='1.0', | ||
206 | 202 | ... status=PackagePublishingStatus.SUPERSEDED) | ||
207 | 203 | >>> [superseded_build] = old_source_pub.createMissingBuilds() | ||
208 | 204 | >>> deleted_source_pub = test_publisher.getPubSource( | ||
209 | 205 | ... sourcename='old-source', version='1.1', | ||
210 | 206 | ... status=PackagePublishingStatus.DELETED) | ||
211 | 207 | >>> [deleted_build] = deleted_source_pub.createMissingBuilds() | ||
212 | 208 | >>> login(ANONYMOUS) | ||
213 | 209 | |||
214 | 210 | >>> old_source_sp = breezy_autotest.getSourcePackage('old-source') | ||
215 | 211 | >>> old_source_builds = old_source_sp.getBuildRecords() | ||
216 | 212 | >>> [deleted_build, superseded_build] == list(old_source_builds) | ||
217 | 213 | True | ||
218 | 214 | |||
219 | 215 | >>> deleted_build.date_created > superseded_build.date_created | ||
220 | 216 | True | ||
221 | 217 | |||
222 | 218 | Builds records for the exactly the same `SourcePackageRelease`s may | ||
223 | 219 | exist in a rebuild archive context, but they do not 'leak' to the | ||
224 | 220 | domain of SourcePackage. | ||
225 | 221 | |||
226 | 222 | # Create a rebuild archive, copy the 'old-source' source | ||
227 | 223 | # publications to it and create builds in the rebuild archive | ||
228 | 224 | # context. | ||
229 | 225 | >>> from lp.soyuz.enums import ArchivePurpose | ||
230 | 226 | >>> login('foo.bar@canonical.com') | ||
231 | 227 | >>> rebuild_archive = factory.makeArchive( | ||
232 | 228 | ... ubuntutest, ubuntutest.owner, 'test-rebuild', | ||
233 | 229 | ... ArchivePurpose.COPY) | ||
234 | 230 | >>> rebuild_old_pub = old_source_pub.copyTo( | ||
235 | 231 | ... breezy_autotest, PackagePublishingPocket.RELEASE, | ||
236 | 232 | ... rebuild_archive) | ||
237 | 233 | >>> [rebuild_old_build] = rebuild_old_pub.createMissingBuilds() | ||
238 | 234 | >>> rebuild_deleted_pub = deleted_source_pub.copyTo( | ||
239 | 235 | ... breezy_autotest, PackagePublishingPocket.RELEASE, | ||
240 | 236 | ... rebuild_archive) | ||
241 | 237 | >>> [rebuild_deleted_build] = rebuild_deleted_pub.createMissingBuilds() | ||
242 | 238 | >>> login(ANONYMOUS) | ||
243 | 239 | |||
244 | 240 | >>> rebuild_builds = rebuild_archive.getBuildRecords() | ||
245 | 241 | >>> [rebuild_deleted_build, rebuild_old_build] == list(rebuild_builds) | ||
246 | 242 | True | ||
247 | 243 | |||
248 | 244 | >>> old_source_sp.getBuildRecords().count() | ||
249 | 245 | 2 | ||
250 | 246 | |||
251 | 247 | For a given distribution as well: | ||
252 | 248 | |||
253 | 249 | >>> ubuntu.getBuildRecords().count() | ||
254 | 250 | 17 | ||
255 | 251 | |||
256 | 252 | >>> builds = ubuntu.getBuildRecords(build_state=BuildStatus.FULLYBUILT) | ||
257 | 253 | >>> for build in builds: | ||
258 | 254 | ... print build.date_finished, build.id, build.status.value | ||
259 | 255 | 2007-08-10 00:00:14+00:00 30 1 | ||
260 | 256 | 2007-08-09 23:59:59+00:00 29 1 | ||
261 | 257 | 2005-03-25 00:00:03+00:00 7 1 | ||
262 | 258 | 2005-03-25 00:00:02+00:00 16 1 | ||
263 | 259 | 2005-03-25 00:00:01+00:00 19 1 | ||
264 | 260 | 2004-09-27 11:57:14+00:00 2 1 | ||
265 | 261 | 2004-09-27 11:57:13+00:00 18 1 | ||
266 | 262 | |||
267 | 263 | Retrieve the current PENDING builds | ||
268 | 264 | |||
269 | 265 | >>> builds = ubuntu.getBuildRecords(build_state=BuildStatus.NEEDSBUILD) | ||
270 | 266 | >>> builds.count() | ||
271 | 267 | 2 | ||
272 | 268 | |||
273 | 269 | Note, by ordering the build by BuildQueue.lastscore, it already notice | ||
274 | 270 | the existence of a new pending build, since retry already creates a | ||
275 | 271 | new BuildQueue record: | ||
276 | 272 | |||
277 | 273 | >>> builds = ubuntu.getBuildRecords(build_state=BuildStatus.NEEDSBUILD) | ||
278 | 274 | >>> builds.count() | ||
279 | 275 | 2 | ||
280 | 276 | |||
281 | 277 | Note that they are ordered by DESC lastscore, as expected: | ||
282 | 278 | |||
283 | 279 | >>> for b in builds: | ||
284 | 280 | ... b.id, b.status.value, b.buildqueue_record.lastscore | ||
285 | 281 | (9, 0, 2505) | ||
286 | 282 | (11, 0, 10) | ||
287 | 283 | |||
288 | 284 | Define a helper function to print out build details. | ||
289 | 285 | |||
290 | 286 | >>> def print_build_details(builds): | ||
291 | 287 | ... for build in builds: | ||
292 | 288 | ... if build.archive.owner: | ||
293 | 289 | ... print "%s: %s" % (build.archive.owner.name, build.title) | ||
294 | 290 | ... else: | ||
295 | 291 | ... print "main: %s" % (build.title) | ||
296 | 292 | |||
297 | 293 | Using the optional name argument to filter build results: | ||
298 | 294 | |||
299 | 295 | >>> builds = ubuntu.getBuildRecords(name='pm') | ||
300 | 296 | >>> builds.count() | ||
301 | 297 | 4 | ||
302 | 298 | >>> print_build_details(builds) | ||
303 | 299 | ubuntu-team: i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
304 | 300 | ubuntu-team: i386 build of pmount 0.1-1 in ubuntu breezy-autotest RELEASE | ||
305 | 301 | ubuntu-team: hppa build of pmount 0.1-1 in ubuntu hoary RELEASE | ||
306 | 302 | ubuntu-team: i386 build of pmount 0.1-1 in ubuntu hoary RELEASE | ||
307 | 303 | |||
308 | 304 | or using optional pocket argument: | ||
309 | 305 | |||
310 | 306 | >>> from lp.registry.interfaces.pocket import PackagePublishingPocket | ||
311 | 307 | |||
312 | 308 | >>> ubuntu.getBuildRecords( | ||
313 | 309 | ... build_state=BuildStatus.NEEDSBUILD, | ||
314 | 310 | ... pocket=PackagePublishingPocket.RELEASE).count() | ||
315 | 311 | 2 | ||
316 | 312 | |||
317 | 313 | >>> ubuntu.getBuildRecords( | ||
318 | 314 | ... build_state=BuildStatus.NEEDSBUILD, | ||
319 | 315 | ... pocket=PackagePublishingPocket.SECURITY).count() | ||
320 | 316 | 0 | ||
321 | 317 | |||
322 | 318 | IHasBuildRecords is implemented by Builder. It can filter on build state | ||
323 | 319 | and name. A user can also be passed for security checks on private builds; | ||
324 | 320 | if user is not passed then the query runs anonymously which means private | ||
325 | 321 | builds are excluded from anything returned. | ||
326 | 322 | |||
327 | 323 | Log in as admin to avoid security on IBinaryPackageBuild for the moment. | ||
328 | 324 | |||
329 | 325 | >>> login('foo.bar@canonical.com') | ||
330 | 326 | |||
331 | 327 | Let's create a private PPA for cprov (and hence its builds become private): | ||
332 | 328 | |||
333 | 329 | >>> from lp.registry.interfaces.person import IPersonSet | ||
334 | 330 | >>> cprov = removeSecurityProxy(getUtility(IPersonSet).getByName('cprov')) | ||
335 | 331 | >>> cprov_private_ppa = factory.makeArchive( | ||
336 | 332 | ... owner=cprov, private=True, name='p3a', | ||
337 | 333 | ... distribution=cprov.archive.distribution) | ||
338 | 334 | >>> from lp.buildmaster.interfaces.builder import IBuilderSet | ||
339 | 335 | >>> bob = getUtility(IBuilderSet)['bob'] | ||
340 | 336 | >>> binaries = test_publisher.getPubBinaries( | ||
341 | 337 | ... archive=cprov_private_ppa, builder=bob, | ||
342 | 338 | ... binaryname='privacycheck-bin') | ||
343 | 339 | >>> flush_database_updates() | ||
344 | 340 | |||
345 | 341 | The default set of builds with no user specified excludes private builds: | ||
346 | 342 | |||
347 | 343 | >>> bob_builds = bob.getBuildRecords() | ||
348 | 344 | >>> print_build_details(bob_builds) | ||
349 | 345 | ubuntu-team: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
350 | 346 | cprov: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
351 | 347 | cprov: i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
352 | 348 | cprov: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
353 | 349 | no-priv: i386 build of cdrkit 1.0 in ubuntu warty RELEASE | ||
354 | 350 | ubuntu-team: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
355 | 351 | ... | ||
356 | 352 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
357 | 353 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu breezy-autotest | ||
358 | 354 | RELEASE | ||
359 | 355 | |||
360 | 356 | >>> bob_builds.count() | ||
361 | 357 | 16 | ||
362 | 358 | |||
363 | 359 | If we include an admin user, we can see all the builds. Here, we get | ||
364 | 360 | an additional private build for cprov: | ||
365 | 361 | |||
366 | 362 | >>> from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities | ||
367 | 363 | >>> admin = getUtility(ILaunchpadCelebrities).admin | ||
368 | 364 | >>> bob_builds = bob.getBuildRecords(user=admin) | ||
369 | 365 | >>> print_build_details(bob_builds) | ||
370 | 366 | cprov: i386 build of privacycheck 666 in ubuntutest breezy-autotest... | ||
371 | 367 | ubuntu-team: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
372 | 368 | cprov: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
373 | 369 | cprov: i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
374 | 370 | cprov: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
375 | 371 | no-priv: i386 build of cdrkit 1.0 in ubuntu warty RELEASE | ||
376 | 372 | ubuntu-team: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
377 | 373 | ... | ||
378 | 374 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
379 | 375 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu breezy-autotest | ||
380 | 376 | RELEASE | ||
381 | 377 | |||
382 | 378 | >>> bob_builds.count() | ||
383 | 379 | 17 | ||
384 | 380 | |||
385 | 381 | Cprov can also see his own builds of course: | ||
386 | 382 | |||
387 | 383 | >>> bob_builds = bob.getBuildRecords(user=cprov) | ||
388 | 384 | >>> print_build_details(bob_builds) | ||
389 | 385 | cprov: i386 build of privacycheck 666 in ubuntutest breezy-autotest... | ||
390 | 386 | ubuntu-team: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
391 | 387 | cprov: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
392 | 388 | cprov: i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
393 | 389 | cprov: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
394 | 390 | no-priv: i386 build of cdrkit 1.0 in ubuntu warty RELEASE | ||
395 | 391 | ubuntu-team: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
396 | 392 | ... | ||
397 | 393 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
398 | 394 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu breezy-autotest | ||
399 | 395 | RELEASE | ||
400 | 396 | |||
401 | 397 | >>> bob_builds.count() | ||
402 | 398 | 17 | ||
403 | 399 | |||
404 | 400 | Buildd admins specifically are not allowed to see private builds, which will | ||
405 | 401 | be filtered from the list returned: | ||
406 | 402 | |||
407 | 403 | >>> buildd_admin = factory.makePerson() | ||
408 | 404 | >>> buildd_admins = getUtility( | ||
409 | 405 | ... IPersonSet).getByName('launchpad-buildd-admins') | ||
410 | 406 | >>> ignored = buildd_admins.addMember(buildd_admin, buildd_admin) | ||
411 | 407 | >>> bob_builds = bob.getBuildRecords(user=buildd_admin) | ||
412 | 408 | >>> print_build_details(bob_builds) | ||
413 | 409 | ubuntu-team: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
414 | 410 | cprov: hppa build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
415 | 411 | cprov: i386 build of pmount 0.1-1 in ubuntu warty RELEASE | ||
416 | 412 | cprov: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
417 | 413 | no-priv: i386 build of cdrkit 1.0 in ubuntu warty RELEASE | ||
418 | 414 | ubuntu-team: i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | ||
419 | 415 | ... | ||
420 | 416 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE | ||
421 | 417 | ubuntu-team: i386 build of mozilla-firefox 0.9 in ubuntu breezy-autotest | ||
422 | 418 | RELEASE | ||
423 | 419 | |||
424 | 420 | >>> bob_builds.count() | ||
425 | 421 | 16 | ||
426 | 422 | |||
427 | 423 | You can filter on build state: | ||
428 | 424 | |||
429 | 425 | >>> bob_failed_builds = bob.getBuildRecords( | ||
430 | 426 | ... build_state=BuildStatus.FAILEDTOBUILD, user=admin) | ||
431 | 427 | >>> bob_failed_builds.count() | ||
432 | 428 | 3 | ||
433 | 429 | |||
434 | 430 | You can filter on package name: | ||
435 | 431 | |||
436 | 432 | >>> bob_pmount_builds = bob.getBuildRecords(name='pmount', user=admin) | ||
437 | 433 | >>> bob_pmount_builds.count() | ||
438 | 434 | 4 | ||
439 | 435 | |||
440 | 436 | You can filter on build state and package name: | ||
441 | 437 | |||
442 | 438 | >>> bob_pmount_ok_builds = bob.getBuildRecords( | ||
443 | 439 | ... build_state=BuildStatus.FULLYBUILT, name='pmount', user=admin) | ||
444 | 440 | >>> bob_pmount_ok_builds.count() | ||
445 | 441 | 4 | ||
446 | 442 | |||
447 | 443 | |||
448 | 444 | == AssertionErrors in IBinaryPackageBuild == | 1 | == AssertionErrors in IBinaryPackageBuild == |
449 | 445 | 2 | ||
450 | 446 | Build records inserted by gina don't provide calculated_buildstart | 3 | Build records inserted by gina don't provide calculated_buildstart |
451 | 447 | 4 | ||
452 | === modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py' | |||
453 | --- lib/lp/soyuz/tests/test_binarypackagebuild.py 2011-01-14 10:06:08 +0000 | |||
454 | +++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2011-01-20 21:02:59 +0000 | |||
455 | @@ -49,9 +49,9 @@ | |||
456 | 49 | super(TestBinaryPackageBuild, self).setUp() | 49 | super(TestBinaryPackageBuild, self).setUp() |
457 | 50 | publisher = SoyuzTestPublisher() | 50 | publisher = SoyuzTestPublisher() |
458 | 51 | publisher.prepareBreezyAutotest() | 51 | publisher.prepareBreezyAutotest() |
462 | 52 | gedit_spr = publisher.getPubSource( | 52 | gedit_spph = publisher.getPubSource( |
463 | 53 | spr_only=True, sourcename="gedit", | 53 | sourcename="gedit", status=PackagePublishingStatus.PUBLISHED) |
464 | 54 | status=PackagePublishingStatus.PUBLISHED) | 54 | gedit_spr = gedit_spph.sourcepackagerelease |
465 | 55 | self.build = gedit_spr.createBuild( | 55 | self.build = gedit_spr.createBuild( |
466 | 56 | distro_arch_series=publisher.distroseries['i386'], | 56 | distro_arch_series=publisher.distroseries['i386'], |
467 | 57 | archive=gedit_spr.upload_archive, | 57 | archive=gedit_spr.upload_archive, |
468 | 58 | 58 | ||
469 | === modified file 'lib/lp/soyuz/tests/test_hasbuildrecords.py' | |||
470 | --- lib/lp/soyuz/tests/test_hasbuildrecords.py 2010-10-04 19:50:45 +0000 | |||
471 | +++ lib/lp/soyuz/tests/test_hasbuildrecords.py 2011-01-20 21:02:59 +0000 | |||
472 | @@ -1,20 +1,33 @@ | |||
474 | 1 | # Copyright 2009-2010 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the |
475 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
476 | 3 | 3 | ||
477 | 4 | """Test implementations of the IHasBuildRecords interface.""" | 4 | """Test implementations of the IHasBuildRecords interface.""" |
478 | 5 | 5 | ||
479 | 6 | from datetime import ( | ||
480 | 7 | datetime, | ||
481 | 8 | timedelta, | ||
482 | 9 | ) | ||
483 | 10 | import pytz | ||
484 | 6 | from zope.component import getUtility | 11 | from zope.component import getUtility |
485 | 7 | from zope.security.proxy import removeSecurityProxy | 12 | from zope.security.proxy import removeSecurityProxy |
486 | 8 | 13 | ||
487 | 9 | from canonical.testing.layers import LaunchpadZopelessLayer | 14 | from canonical.testing.layers import LaunchpadZopelessLayer |
489 | 10 | from lp.buildmaster.enums import BuildFarmJobType | 15 | from lp.buildmaster.enums import ( |
490 | 16 | BuildFarmJobType, | ||
491 | 17 | BuildStatus, | ||
492 | 18 | ) | ||
493 | 11 | from lp.buildmaster.interfaces.builder import IBuilderSet | 19 | from lp.buildmaster.interfaces.builder import IBuilderSet |
494 | 12 | from lp.buildmaster.interfaces.buildfarmjob import ( | 20 | from lp.buildmaster.interfaces.buildfarmjob import ( |
495 | 13 | IBuildFarmJob, | 21 | IBuildFarmJob, |
496 | 14 | ) | 22 | ) |
497 | 15 | from lp.buildmaster.interfaces.packagebuild import IPackageBuildSource | 23 | from lp.buildmaster.interfaces.packagebuild import IPackageBuildSource |
498 | 24 | from lp.registry.interfaces.person import IPersonSet | ||
499 | 16 | from lp.registry.interfaces.pocket import PackagePublishingPocket | 25 | from lp.registry.interfaces.pocket import PackagePublishingPocket |
500 | 17 | from lp.registry.model.sourcepackage import SourcePackage | 26 | from lp.registry.model.sourcepackage import SourcePackage |
501 | 27 | from lp.soyuz.enums import ( | ||
502 | 28 | ArchivePurpose, | ||
503 | 29 | PackagePublishingStatus, | ||
504 | 30 | ) | ||
505 | 18 | from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuild | 31 | from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuild |
506 | 19 | from lp.soyuz.interfaces.buildrecords import ( | 32 | from lp.soyuz.interfaces.buildrecords import ( |
507 | 20 | IHasBuildRecords, | 33 | IHasBuildRecords, |
508 | @@ -22,6 +35,12 @@ | |||
509 | 22 | ) | 35 | ) |
510 | 23 | from lp.soyuz.model.processor import ProcessorFamilySet | 36 | from lp.soyuz.model.processor import ProcessorFamilySet |
511 | 24 | from lp.soyuz.tests.test_binarypackagebuild import BaseTestCaseWithThreeBuilds | 37 | from lp.soyuz.tests.test_binarypackagebuild import BaseTestCaseWithThreeBuilds |
512 | 38 | from lp.soyuz.tests.test_publishing import SoyuzTestPublisher | ||
513 | 39 | from lp.testing import ( | ||
514 | 40 | person_logged_in, | ||
515 | 41 | TestCaseWithFactory, | ||
516 | 42 | ) | ||
517 | 43 | from lp.testing.sampledata import ADMIN_EMAIL | ||
518 | 25 | 44 | ||
519 | 26 | 45 | ||
520 | 27 | class TestHasBuildRecordsInterface(BaseTestCaseWithThreeBuilds): | 46 | class TestHasBuildRecordsInterface(BaseTestCaseWithThreeBuilds): |
521 | @@ -37,7 +56,6 @@ | |||
522 | 37 | def setUp(self): | 56 | def setUp(self): |
523 | 38 | """Use `SoyuzTestPublisher` to publish some sources in archives.""" | 57 | """Use `SoyuzTestPublisher` to publish some sources in archives.""" |
524 | 39 | super(TestHasBuildRecordsInterface, self).setUp() | 58 | super(TestHasBuildRecordsInterface, self).setUp() |
525 | 40 | |||
526 | 41 | self.context = self.publisher.distroseries.distribution | 59 | self.context = self.publisher.distroseries.distribution |
527 | 42 | 60 | ||
528 | 43 | def testProvidesHasBuildRecords(self): | 61 | def testProvidesHasBuildRecords(self): |
529 | @@ -64,6 +82,67 @@ | |||
530 | 64 | self.assertContentEqual(i386_builds, builds) | 82 | self.assertContentEqual(i386_builds, builds) |
531 | 65 | 83 | ||
532 | 66 | 84 | ||
533 | 85 | class TestDistributionHasBuildRecords(TestCaseWithFactory): | ||
534 | 86 | """Populate a distroseries with builds""" | ||
535 | 87 | |||
536 | 88 | layer = LaunchpadZopelessLayer | ||
537 | 89 | |||
538 | 90 | def setUp(self): | ||
539 | 91 | super(TestDistributionHasBuildRecords, self).setUp() | ||
540 | 92 | self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL) | ||
541 | 93 | # Create the machinery we need to create builds, such as | ||
542 | 94 | # DistroArchSeries and builders. | ||
543 | 95 | self.pf_one = self.factory.makeProcessorFamily() | ||
544 | 96 | pf_proc_1 = self.pf_one.addProcessor( | ||
545 | 97 | self.factory.getUniqueString(), '', '') | ||
546 | 98 | self.pf_two = self.factory.makeProcessorFamily() | ||
547 | 99 | pf_proc_2 = self.pf_two.addProcessor( | ||
548 | 100 | self.factory.getUniqueString(), '', '') | ||
549 | 101 | self.distroseries = self.factory.makeDistroSeries() | ||
550 | 102 | self.distribution = self.distroseries.distribution | ||
551 | 103 | self.das_one = self.factory.makeDistroArchSeries( | ||
552 | 104 | distroseries=self.distroseries, processorfamily=self.pf_one, | ||
553 | 105 | supports_virtualized=True) | ||
554 | 106 | self.das_two = self.factory.makeDistroArchSeries( | ||
555 | 107 | distroseries=self.distroseries, processorfamily=self.pf_two, | ||
556 | 108 | supports_virtualized=True) | ||
557 | 109 | self.archive = self.factory.makeArchive( | ||
558 | 110 | distribution=self.distroseries.distribution, | ||
559 | 111 | purpose=ArchivePurpose.PRIMARY) | ||
560 | 112 | self.arch_ids = [arch.id for arch in self.distroseries.architectures] | ||
561 | 113 | with person_logged_in(self.admin): | ||
562 | 114 | self.publisher = SoyuzTestPublisher() | ||
563 | 115 | self.publisher.prepareBreezyAutotest() | ||
564 | 116 | self.distroseries.nominatedarchindep = self.das_one | ||
565 | 117 | self.publisher.addFakeChroots(distroseries=self.distroseries) | ||
566 | 118 | self.builder_one = self.factory.makeBuilder(processor=pf_proc_1) | ||
567 | 119 | self.builder_two = self.factory.makeBuilder(processor=pf_proc_2) | ||
568 | 120 | self.builds = [] | ||
569 | 121 | self.createBuilds() | ||
570 | 122 | |||
571 | 123 | def createBuilds(self): | ||
572 | 124 | for i in range(5): | ||
573 | 125 | # Create some test builds. | ||
574 | 126 | spph = self.publisher.getPubSource( | ||
575 | 127 | sourcename=self.factory.getUniqueString(), | ||
576 | 128 | version="%s.%s" % (self.factory.getUniqueInteger(), i), | ||
577 | 129 | distroseries=self.distroseries, architecturehintlist='any') | ||
578 | 130 | builds = spph.createMissingBuilds() | ||
579 | 131 | for b in builds: | ||
580 | 132 | if i == 4: | ||
581 | 133 | b.status = BuildStatus.FAILEDTOBUILD | ||
582 | 134 | else: | ||
583 | 135 | b.status = BuildStatus.FULLYBUILT | ||
584 | 136 | b.buildqueue_record.destroySelf() | ||
585 | 137 | b.date_started = datetime.now(pytz.UTC) | ||
586 | 138 | b.date_finished = b.date_started + timedelta(minutes=5) | ||
587 | 139 | self.builds += builds | ||
588 | 140 | |||
589 | 141 | def test_get_build_records(self): | ||
590 | 142 | # A Distribution also implements IHasBuildRecords. | ||
591 | 143 | builds = self.distribution.getBuildRecords().count() | ||
592 | 144 | self.assertEquals(10, builds) | ||
593 | 145 | |||
594 | 67 | class TestDistroSeriesHasBuildRecords(TestHasBuildRecordsInterface): | 146 | class TestDistroSeriesHasBuildRecords(TestHasBuildRecordsInterface): |
595 | 68 | """Test the DistroSeries implementation of IHasBuildRecords.""" | 147 | """Test the DistroSeries implementation of IHasBuildRecords.""" |
596 | 69 | 148 | ||
597 | @@ -73,13 +152,30 @@ | |||
598 | 73 | self.context = self.publisher.distroseries | 152 | self.context = self.publisher.distroseries |
599 | 74 | 153 | ||
600 | 75 | 154 | ||
602 | 76 | class TestDistroArchSeriesHasBuildRecords(TestHasBuildRecordsInterface): | 155 | class TestDistroArchSeriesHasBuildRecords(TestDistributionHasBuildRecords): |
603 | 77 | """Test the DistroArchSeries implementation of IHasBuildRecords.""" | 156 | """Test the DistroArchSeries implementation of IHasBuildRecords.""" |
604 | 78 | 157 | ||
605 | 158 | layer = LaunchpadZopelessLayer | ||
606 | 159 | |||
607 | 79 | def setUp(self): | 160 | def setUp(self): |
608 | 80 | super(TestDistroArchSeriesHasBuildRecords, self).setUp() | 161 | super(TestDistroArchSeriesHasBuildRecords, self).setUp() |
609 | 81 | 162 | ||
611 | 82 | self.context = self.publisher.distroseries['i386'] | 163 | def test_distroarchseries(self): |
612 | 164 | # We can fetch build records from a DistroArchSeries. | ||
613 | 165 | builds = self.das_one.getBuildRecords().count() | ||
614 | 166 | self.assertEquals(5, builds) | ||
615 | 167 | builds = self.das_one.getBuildRecords( | ||
616 | 168 | build_state=BuildStatus.FULLYBUILT).count() | ||
617 | 169 | self.assertEquals(4, builds) | ||
618 | 170 | spn = self.builds[0].source_package_release.sourcepackagename.name | ||
619 | 171 | builds = self.das_one.getBuildRecords(name=spn).count() | ||
620 | 172 | self.assertEquals(1, builds) | ||
621 | 173 | builds = self.das_one.getBuildRecords( | ||
622 | 174 | pocket=PackagePublishingPocket.RELEASE).count() | ||
623 | 175 | self.assertEquals(5, builds) | ||
624 | 176 | builds = self.das_one.getBuildRecords( | ||
625 | 177 | pocket=PackagePublishingPocket.UPDATES).count() | ||
626 | 178 | self.assertEquals(0, builds) | ||
627 | 83 | 179 | ||
628 | 84 | 180 | ||
629 | 85 | class TestArchiveHasBuildRecords(TestHasBuildRecordsInterface): | 181 | class TestArchiveHasBuildRecords(TestHasBuildRecordsInterface): |
630 | @@ -144,7 +240,6 @@ | |||
631 | 144 | # can only test this by creating a lone IBuildFarmJob of a | 240 | # can only test this by creating a lone IBuildFarmJob of a |
632 | 145 | # different type. | 241 | # different type. |
633 | 146 | from lp.buildmaster.interfaces.buildfarmjob import IBuildFarmJobSource | 242 | from lp.buildmaster.interfaces.buildfarmjob import IBuildFarmJobSource |
634 | 147 | from lp.buildmaster.enums import BuildStatus | ||
635 | 148 | build_farm_job = getUtility(IBuildFarmJobSource).new( | 243 | build_farm_job = getUtility(IBuildFarmJobSource).new( |
636 | 149 | job_type=BuildFarmJobType.RECIPEBRANCHBUILD, virtualized=True, | 244 | job_type=BuildFarmJobType.RECIPEBRANCHBUILD, virtualized=True, |
637 | 150 | status=BuildStatus.BUILDING) | 245 | status=BuildStatus.BUILDING) |
638 | @@ -180,11 +275,9 @@ | |||
639 | 180 | 275 | ||
640 | 181 | def setUp(self): | 276 | def setUp(self): |
641 | 182 | super(TestSourcePackageHasBuildRecords, self).setUp() | 277 | super(TestSourcePackageHasBuildRecords, self).setUp() |
642 | 183 | |||
643 | 184 | gedit_name = self.builds[0].source_package_release.sourcepackagename | 278 | gedit_name = self.builds[0].source_package_release.sourcepackagename |
644 | 185 | self.context = SourcePackage( | 279 | self.context = SourcePackage( |
647 | 186 | gedit_name, | 280 | gedit_name, self.builds[0].distro_arch_series.distroseries) |
646 | 187 | self.builds[0].distro_arch_series.distroseries) | ||
648 | 188 | 281 | ||
649 | 189 | # Convert the other two builds to be builds of | 282 | # Convert the other two builds to be builds of |
650 | 190 | # gedit as well so that the one source package (gedit) will have | 283 | # gedit as well so that the one source package (gedit) will have |
651 | @@ -192,3 +285,85 @@ | |||
652 | 192 | for build in self.builds[1:3]: | 285 | for build in self.builds[1:3]: |
653 | 193 | spr = build.source_package_release | 286 | spr = build.source_package_release |
654 | 194 | removeSecurityProxy(spr).sourcepackagename = gedit_name | 287 | removeSecurityProxy(spr).sourcepackagename = gedit_name |
655 | 288 | |||
656 | 289 | # Set them as successfully built. | ||
657 | 290 | for build in self.builds: | ||
658 | 291 | build.status = BuildStatus.FULLYBUILT | ||
659 | 292 | build.buildqueue_record.destroySelf() | ||
660 | 293 | removeSecurityProxy(build).date_created = ( | ||
661 | 294 | self.factory.getUniqueDate()) | ||
662 | 295 | build.date_started = datetime.now(pytz.UTC) | ||
663 | 296 | build.date_finished = build.date_started + timedelta(minutes=5) | ||
664 | 297 | |||
665 | 298 | def test_get_build_records(self): | ||
666 | 299 | # We can fetch build records from a SourcePackage. | ||
667 | 300 | builds = self.context.getBuildRecords( | ||
668 | 301 | build_state=BuildStatus.FULLYBUILT).count() | ||
669 | 302 | self.assertEquals(3, builds) | ||
670 | 303 | builds = self.context.getBuildRecords( | ||
671 | 304 | pocket=PackagePublishingPocket.RELEASE).count() | ||
672 | 305 | self.assertEquals(3, builds) | ||
673 | 306 | builds = self.context.getBuildRecords( | ||
674 | 307 | pocket=PackagePublishingPocket.UPDATES).count() | ||
675 | 308 | self.assertEquals(0, builds) | ||
676 | 309 | |||
677 | 310 | def test_ordering_date(self): | ||
678 | 311 | # Build records returned are ordered by creation date. | ||
679 | 312 | builds = self.context.getBuildRecords( | ||
680 | 313 | build_state=BuildStatus.FULLYBUILT) | ||
681 | 314 | date_created = [build.date_created for build in builds] | ||
682 | 315 | self.assertTrue(date_created[0] > date_created[1] > date_created[2]) | ||
683 | 316 | |||
684 | 317 | def test_ordering_lastscore(self): | ||
685 | 318 | # PENDING build records returned are ordered by score. | ||
686 | 319 | spph = self.factory.makeSourcePackagePublishingHistory() | ||
687 | 320 | spr = spph.sourcepackagerelease | ||
688 | 321 | source_package = SourcePackage.new( | ||
689 | 322 | spph.sourcepackagerelease.sourcepackagename, spph.distroseries) | ||
690 | 323 | build1 = self.factory.makeBinaryPackageBuild( | ||
691 | 324 | source_package_release=spr) | ||
692 | 325 | build2 = self.factory.makeBinaryPackageBuild( | ||
693 | 326 | source_package_release=spr) | ||
694 | 327 | build1.queueBuild() | ||
695 | 328 | build2.queueBuild() | ||
696 | 329 | build1.buildqueue_record.lastscore = 10 | ||
697 | 330 | build2.buildqueue_record.lastscore = 1000 | ||
698 | 331 | builds = list(source_package.getBuildRecords()) | ||
699 | 332 | self.assertEquals([build2, build1], builds) | ||
700 | 333 | |||
701 | 334 | def test_copy_archive_without_leak(self): | ||
702 | 335 | # If source publications are copied to a .COPY archive, they don't | ||
703 | 336 | # "leak" into SourcePackage.getBuildRecords(). | ||
704 | 337 | admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL) | ||
705 | 338 | # Set up a distroseries and related bits, so we can create builds. | ||
706 | 339 | source_name = self.factory.getUniqueString() | ||
707 | 340 | spn = self.factory.makeSourcePackageName(name=source_name) | ||
708 | 341 | pf = self.factory.makeProcessorFamily() | ||
709 | 342 | pf_proc = pf.addProcessor(self.factory.getUniqueString(), '', '') | ||
710 | 343 | distroseries = self.factory.makeDistroSeries() | ||
711 | 344 | das = self.factory.makeDistroArchSeries( | ||
712 | 345 | distroseries=distroseries, processorfamily=pf, | ||
713 | 346 | supports_virtualized=True) | ||
714 | 347 | with person_logged_in(admin): | ||
715 | 348 | publisher = SoyuzTestPublisher() | ||
716 | 349 | publisher.prepareBreezyAutotest() | ||
717 | 350 | publisher.addFakeChroots(distroseries=distroseries) | ||
718 | 351 | distroseries.nominatedarchindep = das | ||
719 | 352 | builder = self.factory.makeBuilder(processor=pf_proc) | ||
720 | 353 | spph = self.factory.makeSourcePackagePublishingHistory( | ||
721 | 354 | sourcepackagename=spn, distroseries=distroseries) | ||
722 | 355 | spph.createMissingBuilds() | ||
723 | 356 | # Create a copy archive. | ||
724 | 357 | copy = self.factory.makeArchive( | ||
725 | 358 | purpose=ArchivePurpose.COPY, | ||
726 | 359 | distribution=distroseries.distribution) | ||
727 | 360 | # And copy the publication into it. | ||
728 | 361 | copy_spph = spph.copyTo( | ||
729 | 362 | distroseries, PackagePublishingPocket.RELEASE, copy) | ||
730 | 363 | [copy_build] = copy_spph.createMissingBuilds() | ||
731 | 364 | builds = copy.getBuildRecords() | ||
732 | 365 | self.assertEquals([copy_build], list(builds)) | ||
733 | 366 | source = SourcePackage(spn, spph.distroseries) | ||
734 | 367 | # SourcePackage.getBuildRecords() doesn't have two build records. | ||
735 | 368 | builds = source.getBuildRecords().count() | ||
736 | 369 | self.assertEquals(1, builds) |
StevenK: r=me; There are a few instances of "# Test that ..." that need to be turned into a statement of expected behaviour, but I know you'll take care of those.