Merge lp:~wgrant/launchpad/bug-728836 into lp:launchpad
- bug-728836
- Merge into devel
Proposed by
William Grant
Status: Merged
Approved by: Robert Collins
Approved revision: no longer in the source branch.
Merged at revision: 12537
Proposed branch: lp:~wgrant/launchpad/bug-728836
Merge into: lp:launchpad
Diff against target: |
574 lines (+286/-77) 9 files modified
lib/lp/registry/interfaces/distroseries.py (+1/-1) lib/lp/registry/model/distroseries.py (+2/-2) lib/lp/soyuz/interfaces/binarypackagerelease.py (+1/-0) lib/lp/soyuz/interfaces/publishing.py (+21/-4) lib/lp/soyuz/model/archive.py (+6/-1) lib/lp/soyuz/model/publishing.py (+139/-61) lib/lp/soyuz/model/queue.py (+3/-6) lib/lp/soyuz/tests/test_publishing.py (+111/-0) lib/lp/testing/factory.py (+2/-2) |
||||
To merge this branch: bzr merge lp:~wgrant/launchpad/bug-728836
Related bugs: (none listed)
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Robert Collins (community) | | | Approve |
Review via email: mp+52355@code.launchpad.net
Commit message
[r=lifeless][bug=728836] Optimise PublishingSet.
Description of the change
This branch improves the scalability of Archive publication.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/registry/interfaces/distroseries.py' |
2 | --- lib/lp/registry/interfaces/distroseries.py 2011-02-24 15:30:54 +0000 |
3 | +++ lib/lp/registry/interfaces/distroseries.py 2011-03-07 07:05:26 +0000 |
4 | @@ -767,7 +767,7 @@ |
5 | """ |
6 | |
7 | def newArch(architecturetag, processorfamily, official, owner, |
8 | - supports_virtualized=False): |
9 | + supports_virtualized=False, enabled=True): |
10 | """Create a new port or DistroArchSeries for this DistroSeries.""" |
11 | |
12 | def copyTranslationsFromParent(ztm): |
13 | |
14 | === modified file 'lib/lp/registry/model/distroseries.py' |
15 | --- lib/lp/registry/model/distroseries.py 2011-02-22 08:20:21 +0000 |
16 | +++ lib/lp/registry/model/distroseries.py 2011-03-07 07:05:26 +0000 |
17 | @@ -1459,12 +1459,12 @@ |
18 | return DecoratedResultSet(package_caches, result_to_dsbp) |
19 | |
20 | def newArch(self, architecturetag, processorfamily, official, owner, |
21 | - supports_virtualized=False): |
22 | + supports_virtualized=False, enabled=True): |
23 | """See `IDistroSeries`.""" |
24 | distroarchseries = DistroArchSeries( |
25 | architecturetag=architecturetag, processorfamily=processorfamily, |
26 | official=official, distroseries=self, owner=owner, |
27 | - supports_virtualized=supports_virtualized) |
28 | + supports_virtualized=supports_virtualized, enabled=enabled) |
29 | return distroarchseries |
30 | |
31 | def newMilestone(self, name, dateexpected=None, summary=None, |
32 | |
33 | === modified file 'lib/lp/soyuz/interfaces/binarypackagerelease.py' |
34 | --- lib/lp/soyuz/interfaces/binarypackagerelease.py 2011-02-23 20:26:53 +0000 |
35 | +++ lib/lp/soyuz/interfaces/binarypackagerelease.py 2011-03-07 07:05:26 +0000 |
36 | @@ -44,6 +44,7 @@ |
37 | class IBinaryPackageRelease(Interface): |
38 | id = Int(title=_('ID'), required=True) |
39 | binarypackagename = Int(required=True) |
40 | + binarypackagenameID = Int(required=True) |
41 | version = TextLine(required=True, constraint=valid_debian_version) |
42 | summary = Text(required=True) |
43 | description = Text(required=True) |
44 | |
45 | === modified file 'lib/lp/soyuz/interfaces/publishing.py' |
46 | --- lib/lp/soyuz/interfaces/publishing.py 2011-03-03 00:43:44 +0000 |
47 | +++ lib/lp/soyuz/interfaces/publishing.py 2011-03-07 07:05:26 +0000 |
48 | @@ -678,6 +678,7 @@ |
49 | required=False, readonly=False, |
50 | ), |
51 | exported_as="distro_arch_series") |
52 | + distroseries = Attribute("The distroseries being published into") |
53 | component = Int( |
54 | title=_('The component being published into'), |
55 | required=False, readonly=False, |
56 | @@ -878,7 +879,25 @@ |
57 | publishing histories. |
58 | """ |
59 | |
60 | - def publishBinary(archive, binarypackagerelease, distroarchseries, |
61 | + def publishBinaries(archive, distroseries, pocket, binaries): |
62 | + """Efficiently publish multiple BinaryPackageReleases in an Archive. |
63 | + |
64 | + Creates `IBinaryPackagePublishingHistory` records for each binary, |
65 | + handling architecture-independent and debug packages, avoiding |
66 | + creation of duplicate publications, and leaving disabled |
67 | + architectures alone. |
68 | + |
69 | + :param archive: The target `IArchive`. |
70 | + :param distroseries: The target `IDistroSeries`. |
71 | + :param pocket: The target `PackagePublishingPocket`. |
72 | + :param binaries: A dict mapping `BinaryPackageReleases` to their |
73 | + desired overrides as (`Component`, `Section`, |
74 | + `PackagePublishingPriority`) tuples. |
75 | + |
76 | + :return: A list of new `IBinaryPackagePublishingHistory` records. |
77 | + """ |
78 | + |
79 | + def publishBinary(archive, binarypackagerelease, distroseries, |
80 | component, section, priority, pocket): |
81 | """Publish a `BinaryPackageRelease` in an archive. |
82 | |
83 | @@ -890,9 +909,7 @@ |
84 | |
85 | :param archive: The target `IArchive`. |
86 | :param binarypackagerelease: The `IBinaryPackageRelease` to copy. |
87 | - :param distroarchseries: An `IDistroArchSeries`. If the binary is |
88 | - architecture-independent, it will be published to all enabled |
89 | - architectures in this series. |
90 | + :param distroseries: An `IDistroSeries`. |
91 | :param component: The target `IComponent`. |
92 | :param section: The target `ISection`. |
93 | :param priority: The target `PackagePublishingPriority`. |
94 | |
95 | === modified file 'lib/lp/soyuz/model/archive.py' |
96 | --- lib/lp/soyuz/model/archive.py 2011-03-03 03:13:13 +0000 |
97 | +++ lib/lp/soyuz/model/archive.py 2011-03-07 07:05:26 +0000 |
98 | @@ -386,7 +386,7 @@ |
99 | query, clauseTables=clauseTables, orderBy=orderBy) |
100 | return dependencies |
101 | |
102 | - @property |
103 | + @cachedproperty |
104 | def debug_archive(self): |
105 | """See `IArchive`.""" |
106 | if self.purpose == ArchivePurpose.PRIMARY: |
107 | @@ -1933,6 +1933,11 @@ |
108 | if enabled == False: |
109 | new_archive.disable() |
110 | |
111 | + if purpose == ArchivePurpose.DEBUG: |
112 | + if distribution.main_archive is not None: |
113 | + del get_property_cache( |
114 | + distribution.main_archive).debug_archive |
115 | + |
116 | # Private teams cannot have public PPAs. |
117 | if owner.visibility == PersonVisibility.PRIVATE: |
118 | new_archive.buildd_secret = create_unique_token_for_table( |
119 | |
120 | === modified file 'lib/lp/soyuz/model/publishing.py' |
121 | --- lib/lp/soyuz/model/publishing.py 2011-03-03 08:01:34 +0000 |
122 | +++ lib/lp/soyuz/model/publishing.py 2011-03-07 07:05:26 +0000 |
123 | @@ -29,8 +29,10 @@ |
124 | StringCol, |
125 | ) |
126 | from storm.expr import ( |
127 | + And, |
128 | Desc, |
129 | LeftJoin, |
130 | + Or, |
131 | Sum, |
132 | ) |
133 | from storm.store import Store |
134 | @@ -63,7 +65,6 @@ |
135 | IStoreSelector, |
136 | MAIN_STORE, |
137 | ) |
138 | -from lp.app.errors import NotFoundError |
139 | from lp.buildmaster.enums import BuildStatus |
140 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
141 | from lp.buildmaster.model.packagebuild import PackageBuild |
142 | @@ -121,7 +122,7 @@ |
143 | 'pool', poolify(source_name, component_name)) |
144 | |
145 | |
146 | -def maybe_override_component(archive, distroseries, component): |
147 | +def get_component(archive, distroseries, component): |
148 | """Override the component to fit in the archive, if possible. |
149 | |
150 | If the archive has a default component, and it forbids use of the |
151 | @@ -136,6 +137,22 @@ |
152 | return component |
153 | |
154 | |
155 | +def get_archive(archive, bpr): |
156 | + """Get the archive in which this binary should be published. |
157 | + |
158 | + Debug packages live in a DEBUG archive instead of a PRIMARY archive. |
159 | + This helper implements that override. |
160 | + """ |
161 | + if bpr.binpackageformat == BinaryPackageFormat.DDEB: |
162 | + debug_archive = archive.debug_archive |
163 | + if debug_archive is None: |
164 | + raise QueueInconsistentStateError( |
165 | + "Could not find the corresponding DEBUG archive " |
166 | + "for %s" % (archive.displayname)) |
167 | + archive = debug_archive |
168 | + return archive |
169 | + |
170 | + |
171 | class FilePublishingBase: |
172 | """Base class to publish files in the archive.""" |
173 | |
174 | @@ -914,18 +931,23 @@ |
175 | binarypackagepublishing=self).prejoin(preJoins) |
176 | |
177 | @property |
178 | + def distroseries(self): |
179 | + """See `IBinaryPackagePublishingHistory`""" |
180 | + return self.distroarchseries.distroseries |
181 | + |
182 | + @property |
183 | def binary_package_name(self): |
184 | - """See `ISourcePackagePublishingHistory`""" |
185 | + """See `IBinaryPackagePublishingHistory`""" |
186 | return self.binarypackagerelease.name |
187 | |
188 | @property |
189 | def binary_package_version(self): |
190 | - """See `ISourcePackagePublishingHistory`""" |
191 | + """See `IBinaryPackagePublishingHistory`""" |
192 | return self.binarypackagerelease.version |
193 | |
194 | @property |
195 | def priority_name(self): |
196 | - """See `ISourcePackagePublishingHistory`""" |
197 | + """See `IBinaryPackagePublishingHistory`""" |
198 | return self.priority.name |
199 | |
200 | @property |
201 | @@ -1245,6 +1267,42 @@ |
202 | self.requestDeletion(removed_by, removal_comment) |
203 | |
204 | |
205 | +def expand_binary_requests(distroseries, binaries): |
206 | + """Architecture-expand a dict of binary publication requests. |
207 | + |
208 | + For architecture-independent binaries, a tuple will be returned for each |
209 | + enabled architecture in the series. |
210 | + For architecture-dependent binaries, a tuple will be returned only for the |
211 | + architecture corresponding to the build architecture, if it exists and is |
212 | + enabled. |
213 | + |
214 | + :param binaries: A dict mapping `BinaryPackageReleases` to tuples of their |
215 | + desired overrides. |
216 | + |
217 | + :return: The binaries and the architectures in which they should be |
218 | + published, as a sequence of (`DistroArchSeries`, |
219 | + `BinaryPackageRelease`, (overrides)) tuples. |
220 | + """ |
221 | + |
222 | + archs = list(distroseries.enabled_architectures) |
223 | + arch_map = dict((arch.architecturetag, arch) for arch in archs) |
224 | + |
225 | + expanded = [] |
226 | + for bpr, overrides in binaries.iteritems(): |
227 | + if bpr.architecturespecific: |
228 | + # Find the DAS in this series corresponding to the original |
229 | + # build arch tag. If it does not exist or is disabled, we should |
230 | + # not publish. |
231 | + target_arch = arch_map.get( |
232 | + bpr.build.distro_arch_series.architecturetag) |
233 | + target_archs = [target_arch] if target_arch is not None else [] |
234 | + else: |
235 | + target_archs = archs |
236 | + for target_arch in target_archs: |
237 | + expanded.append((target_arch, bpr, overrides)) |
238 | + return expanded |
239 | + |
240 | + |
241 | class PublishingSet: |
242 | """Utilities for manipulating publications in batches.""" |
243 | |
244 | @@ -1252,61 +1310,82 @@ |
245 | |
246 | def copyBinariesTo(self, binaries, distroseries, pocket, archive): |
247 | """See `IPublishingSet`.""" |
248 | - secure_copies = [] |
249 | - for binary in binaries: |
250 | - # This will go wrong if nominatedarchindep gets deleted in a |
251 | - # future series -- it will attempt to retrieve i386 from the |
252 | - # new series, fail, and skip the publication instead of |
253 | - # publishing the remaining archs. |
254 | - try: |
255 | - build = binary.binarypackagerelease.build |
256 | - target_architecture = distroseries[ |
257 | - build.distro_arch_series.architecturetag] |
258 | - except NotFoundError: |
259 | - continue |
260 | - if not target_architecture.enabled: |
261 | - continue |
262 | - secure_copies.extend( |
263 | - getUtility(IPublishingSet).publishBinary( |
264 | - archive, binary.binarypackagerelease, target_architecture, |
265 | - binary.component, binary.section, binary.priority, |
266 | - pocket)) |
267 | - return secure_copies |
268 | - |
269 | - def publishBinary(self, archive, binarypackagerelease, distroarchseries, |
270 | + return self.publishBinaries( |
271 | + archive, distroseries, pocket, |
272 | + dict( |
273 | + (bpph.binarypackagerelease, (bpph.component, bpph.section, |
274 | + bpph.priority)) for bpph in binaries)) |
275 | + |
276 | + def publishBinaries(self, archive, distroseries, pocket, |
277 | + binaries): |
278 | + """See `IPublishingSet`.""" |
279 | + # Expand the dict of binaries into a list of tuples including the |
280 | + # architecture. |
281 | + expanded = expand_binary_requests(distroseries, binaries) |
282 | + |
283 | + # Find existing publications. |
284 | + # We should really be able to just compare BPR.id, but |
285 | + # CopyChecker doesn't seem to ensure that there are no |
286 | + # conflicting binaries from other sources. |
287 | + def make_package_condition(archive, das, bpr): |
288 | + return And( |
289 | + BinaryPackagePublishingHistory.archiveID == |
290 | + get_archive(archive, bpr).id, |
291 | + BinaryPackagePublishingHistory.distroarchseriesID == das.id, |
292 | + BinaryPackageRelease.binarypackagenameID == |
293 | + bpr.binarypackagenameID, |
294 | + BinaryPackageRelease.version == bpr.version, |
295 | + ) |
296 | + candidates = ( |
297 | + make_package_condition(archive, das, bpr) |
298 | + for das, bpr, overrides in expanded) |
299 | + already_published = IMasterStore(BinaryPackagePublishingHistory).find( |
300 | + (BinaryPackagePublishingHistory.distroarchseriesID, |
301 | + BinaryPackageRelease.binarypackagenameID, |
302 | + BinaryPackageRelease.version), |
303 | + BinaryPackagePublishingHistory.pocket == pocket, |
304 | + BinaryPackagePublishingHistory.status.is_in( |
305 | + active_publishing_status), |
306 | + BinaryPackageRelease.id == |
307 | + BinaryPackagePublishingHistory.binarypackagereleaseID, |
308 | + Or(*candidates)).config(distinct=True) |
309 | + already_published = frozenset(already_published) |
310 | + |
311 | + needed = [ |
312 | + (das, bpr, overrides) for (das, bpr, overrides) in |
313 | + expanded if (das.id, bpr.binarypackagenameID, bpr.version) |
314 | + not in already_published] |
315 | + if not needed: |
316 | + return [] |
317 | + |
318 | + insert_head = """ |
319 | + INSERT INTO BinaryPackagePublishingHistory |
320 | + (archive, distroarchseries, pocket, binarypackagerelease, |
321 | + component, section, priority, status, datecreated) |
322 | + VALUES |
323 | + """ |
324 | + insert_pubs = ", ".join( |
325 | + "(%s)" % ", ".join(sqlvalues( |
326 | + get_archive(archive, bpr).id, das.id, pocket, bpr.id, |
327 | + get_component(archive, das.distroseries, component).id, |
328 | + section.id, priority, PackagePublishingStatus.PENDING, |
329 | + UTC_NOW)) |
330 | + for (das, bpr, (component, section, priority)) in needed) |
331 | + insert_tail = " RETURNING BinaryPackagePublishingHistory.id" |
332 | + new_ids = IMasterStore(BinaryPackagePublishingHistory).execute( |
333 | + insert_head + insert_pubs + insert_tail) |
334 | + |
335 | + publications = IMasterStore(BinaryPackagePublishingHistory).find( |
336 | + BinaryPackagePublishingHistory, |
337 | + BinaryPackagePublishingHistory.id.is_in(id[0] for id in new_ids)) |
338 | + return list(publications) |
339 | + |
340 | + def publishBinary(self, archive, binarypackagerelease, distroseries, |
341 | component, section, priority, pocket): |
342 | """See `IPublishingSet`.""" |
343 | - if not binarypackagerelease.architecturespecific: |
344 | - target_archs = distroarchseries.distroseries.enabled_architectures |
345 | - else: |
346 | - target_archs = [distroarchseries] |
347 | - |
348 | - # DDEBs targeted to the PRIMARY archive are published in the |
349 | - # corresponding DEBUG archive. |
350 | - if binarypackagerelease.binpackageformat == BinaryPackageFormat.DDEB: |
351 | - debug_archive = archive.debug_archive |
352 | - if debug_archive is None: |
353 | - raise QueueInconsistentStateError( |
354 | - "Could not find the corresponding DEBUG archive " |
355 | - "for %s" % (archive.displayname)) |
356 | - archive = debug_archive |
357 | - |
358 | - published_binaries = [] |
359 | - for target_arch in target_archs: |
360 | - # We only publish the binary if it doesn't already exist in |
361 | - # the destination. Note that this means we don't support |
362 | - # override changes on their own. |
363 | - binaries_in_destination = archive.getAllPublishedBinaries( |
364 | - name=binarypackagerelease.name, exact_match=True, |
365 | - version=binarypackagerelease.version, |
366 | - status=active_publishing_status, pocket=pocket, |
367 | - distroarchseries=target_arch) |
368 | - if not bool(binaries_in_destination): |
369 | - published_binaries.append( |
370 | - getUtility(IPublishingSet).newBinaryPublication( |
371 | - archive, binarypackagerelease, target_arch, component, |
372 | - section, priority, pocket)) |
373 | - return published_binaries |
374 | + return self.publishBinaries( |
375 | + archive, distroseries, pocket, |
376 | + {binarypackagerelease: (component, section, priority)}) |
377 | |
378 | def newBinaryPublication(self, archive, binarypackagerelease, |
379 | distroarchseries, component, section, priority, |
380 | @@ -1318,7 +1397,7 @@ |
381 | archive=archive, |
382 | binarypackagerelease=binarypackagerelease, |
383 | distroarchseries=distroarchseries, |
384 | - component=maybe_override_component( |
385 | + component=get_component( |
386 | archive, distroarchseries.distroseries, component), |
387 | section=section, |
388 | priority=priority, |
389 | @@ -1335,8 +1414,7 @@ |
390 | pocket=pocket, |
391 | archive=archive, |
392 | sourcepackagerelease=sourcepackagerelease, |
393 | - component=maybe_override_component( |
394 | - archive, distroseries, component), |
395 | + component=get_component(archive, distroseries, component), |
396 | section=section, |
397 | status=PackagePublishingStatus.PENDING, |
398 | datecreated=UTC_NOW, |
399 | |
400 | === modified file 'lib/lp/soyuz/model/queue.py' |
401 | --- lib/lp/soyuz/model/queue.py 2011-02-17 17:02:54 +0000 |
402 | +++ lib/lp/soyuz/model/queue.py 2011-03-07 07:05:26 +0000 |
403 | @@ -1457,12 +1457,9 @@ |
404 | """See `IPackageUploadBuild`.""" |
405 | # Determine the build's architecturetag |
406 | build_archtag = self.build.distro_arch_series.architecturetag |
407 | - # Determine the target arch series. |
408 | - # This will raise NotFoundError if anything odd happens. |
409 | - target_das = self.packageupload.distroseries[build_archtag] |
410 | + distroseries = self.packageupload.distroseries |
411 | debug(logger, "Publishing build to %s/%s/%s" % ( |
412 | - target_das.distroseries.distribution.name, |
413 | - target_das.distroseries.name, |
414 | + distroseries.distribution.name, distroseries.name, |
415 | build_archtag)) |
416 | |
417 | # First up, publish everything in this build into that dar. |
418 | @@ -1478,7 +1475,7 @@ |
419 | getUtility(IPublishingSet).publishBinary( |
420 | archive=self.packageupload.archive, |
421 | binarypackagerelease=binary, |
422 | - distroarchseries=target_das, |
423 | + distroseries=distroseries, |
424 | component=binary.component, |
425 | section=binary.section, |
426 | priority=binary.priority, |
427 | |
428 | === modified file 'lib/lp/soyuz/tests/test_publishing.py' |
429 | --- lib/lp/soyuz/tests/test_publishing.py 2011-03-03 08:01:34 +0000 |
430 | +++ lib/lp/soyuz/tests/test_publishing.py 2011-03-07 07:05:26 +0000 |
431 | @@ -53,6 +53,7 @@ |
432 | PackagePublishingPriority, |
433 | PackagePublishingStatus, |
434 | ) |
435 | +from lp.soyuz.interfaces.queue import QueueInconsistentStateError |
436 | from lp.soyuz.interfaces.section import ISectionSet |
437 | from lp.soyuz.model.processor import ProcessorFamily |
438 | from lp.soyuz.model.publishing import ( |
439 | @@ -1399,3 +1400,113 @@ |
440 | for bpf in files: |
441 | bpf.libraryfile.filename |
442 | self.assertThat(recorder, HasQueryCount(Equals(5))) |
443 | + |
444 | + |
445 | +class TestPublishBinaries(TestCaseWithFactory): |
446 | + """Test PublishingSet.publishBinary() works.""" |
447 | + |
448 | + layer = LaunchpadZopelessLayer |
449 | + |
450 | + def makeArgs(self, bprs, distroseries, archive=None): |
451 | + """Create a dict of arguments for publishBinary.""" |
452 | + if archive is None: |
453 | + archive = distroseries.main_archive |
454 | + return { |
455 | + 'archive': archive, |
456 | + 'distroseries': distroseries, |
457 | + 'pocket': PackagePublishingPocket.BACKPORTS, |
458 | + 'binaries': dict( |
459 | + (bpr, (self.factory.makeComponent(), |
460 | + self.factory.makeSection(), |
461 | + PackagePublishingPriority.REQUIRED)) for bpr in bprs), |
462 | + } |
463 | + |
464 | + def test_architecture_dependent(self): |
465 | + # Architecture-dependent binaries get created as PENDING in the |
466 | + # corresponding architecture of the destination series and pocket, |
467 | + # with the given overrides. |
468 | + arch_tag = self.factory.getUniqueString('arch-') |
469 | + orig_das = self.factory.makeDistroArchSeries( |
470 | + architecturetag=arch_tag) |
471 | + target_das = self.factory.makeDistroArchSeries( |
472 | + architecturetag=arch_tag) |
473 | + build = self.factory.makeBinaryPackageBuild(distroarchseries=orig_das) |
474 | + bpr = self.factory.makeBinaryPackageRelease( |
475 | + build=build, architecturespecific=True) |
476 | + args = self.makeArgs([bpr], target_das.distroseries) |
477 | + [bpph] = getUtility(IPublishingSet).publishBinaries(**args) |
478 | + overrides = args['binaries'][bpr] |
479 | + self.assertEqual(bpr, bpph.binarypackagerelease) |
480 | + self.assertEqual( |
481 | + (args['archive'], target_das, args['pocket']), |
482 | + (bpph.archive, bpph.distroarchseries, bpph.pocket)) |
483 | + self.assertEqual( |
484 | + overrides, (bpph.component, bpph.section, bpph.priority)) |
485 | + self.assertEqual(PackagePublishingStatus.PENDING, bpph.status) |
486 | + |
487 | + def test_architecture_independent(self): |
488 | + # Architecture-independent binaries get published to all enabled |
489 | + # DASes in the series. |
490 | + bpr = self.factory.makeBinaryPackageRelease( |
491 | + architecturespecific=False) |
492 | + # Create 3 architectures. The binary will not be published in |
493 | + # the disabled one. |
494 | + target_das_a = self.factory.makeDistroArchSeries() |
495 | + target_das_b = self.factory.makeDistroArchSeries( |
496 | + distroseries=target_das_a.distroseries) |
497 | + target_das_c = self.factory.makeDistroArchSeries( |
498 | + distroseries=target_das_a.distroseries, enabled=False) |
499 | + args = self.makeArgs([bpr], target_das_a.distroseries) |
500 | + bpphs = getUtility(IPublishingSet).publishBinaries( |
501 | + **args) |
502 | + self.assertEquals(2, len(bpphs)) |
503 | + self.assertEquals( |
504 | + set((target_das_a, target_das_b)), |
505 | + set(bpph.distroarchseries for bpph in bpphs)) |
506 | + |
507 | + def test_does_not_duplicate(self): |
508 | + # An attempt to copy something for a second time is ignored. |
509 | + bpr = self.factory.makeBinaryPackageRelease() |
510 | + target_das = self.factory.makeDistroArchSeries() |
511 | + args = self.makeArgs([bpr], target_das.distroseries) |
512 | + [new_bpph] = getUtility(IPublishingSet).publishBinaries(**args) |
513 | + [] = getUtility(IPublishingSet).publishBinaries(**args) |
514 | + |
515 | + # But changing the target (eg. to RELEASE instead of BACKPORTS) |
516 | + # causes a new publication to be created. |
517 | + args['pocket'] = PackagePublishingPocket.RELEASE |
518 | + [another_bpph] = getUtility(IPublishingSet).publishBinaries(**args) |
519 | + |
520 | + def test_ddebs_need_debug_archive(self): |
521 | + debug = self.factory.makeBinaryPackageRelease( |
522 | + binpackageformat=BinaryPackageFormat.DDEB) |
523 | + args = self.makeArgs( |
524 | + [debug], debug.build.distro_arch_series.distroseries) |
525 | + self.assertRaises( |
526 | + QueueInconsistentStateError, |
527 | + getUtility(IPublishingSet).publishBinaries, **args) |
528 | + |
529 | + def test_ddebs_go_to_debug_archive(self): |
530 | + # Normal packages go to the given archive, but debug packages go |
531 | + # to the corresponding debug archive. |
532 | + das = self.factory.makeDistroArchSeries() |
533 | + self.factory.makeArchive( |
534 | + purpose=ArchivePurpose.DEBUG, |
535 | + distribution=das.distroseries.distribution) |
536 | + build = self.factory.makeBinaryPackageBuild(distroarchseries=das) |
537 | + normal = self.factory.makeBinaryPackageRelease(build=build) |
538 | + debug = self.factory.makeBinaryPackageRelease( |
539 | + build=build, binpackageformat=BinaryPackageFormat.DDEB) |
540 | + args = self.makeArgs([normal, debug], das.distroseries) |
541 | + bpphs = getUtility(IPublishingSet).publishBinaries(**args) |
542 | + self.assertEquals(2, len(bpphs)) |
543 | + self.assertEquals( |
544 | + set((normal, debug)), |
545 | + set(bpph.binarypackagerelease for bpph in bpphs)) |
546 | + self.assertEquals( |
547 | + set((das.main_archive, das.main_archive.debug_archive)), |
548 | + set(bpph.archive for bpph in bpphs)) |
549 | + |
550 | + # A second copy does nothing, because it checks in the debug |
551 | + # archive too. |
552 | + [] = getUtility(IPublishingSet).publishBinaries(**args) |
553 | |
554 | === modified file 'lib/lp/testing/factory.py' |
555 | --- lib/lp/testing/factory.py 2011-03-03 10:58:03 +0000 |
556 | +++ lib/lp/testing/factory.py 2011-03-07 07:05:26 +0000 |
557 | @@ -2315,7 +2315,7 @@ |
558 | def makeDistroArchSeries(self, distroseries=None, |
559 | architecturetag=None, processorfamily=None, |
560 | official=True, owner=None, |
561 | - supports_virtualized=False): |
562 | + supports_virtualized=False, enabled=True): |
563 | """Create a new distroarchseries""" |
564 | |
565 | if distroseries is None: |
566 | @@ -2331,7 +2331,7 @@ |
567 | architecturetag = self.getUniqueString('arch') |
568 | return distroseries.newArch( |
569 | architecturetag, processorfamily, official, owner, |
570 | - supports_virtualized) |
571 | + supports_virtualized, enabled) |
572 | |
573 | def makeComponent(self, name=None): |
574 | """Make a new `IComponent`.""" |
A couple of bits can be improved.

> 302 + if len(needed) == 0:

`if not needed:` would be clearer.

The 8-line generator expression is getting tricky to read.