Merge lp:~cjwatson/launchpad/delete-unmodifiable-suite into lp:launchpad
- delete-unmodifiable-suite
- Merge into devel
Status: | Merged |
---|---|
Approved by: | William Grant |
Approved revision: | no longer in the source branch. |
Merged at revision: | 16526 |
Proposed branch: | lp:~cjwatson/launchpad/delete-unmodifiable-suite |
Merge into: | lp:launchpad |
Diff against target: |
694 lines (+125/-125) 4 files modified
lib/lp/soyuz/interfaces/publishing.py (+9/-6) lib/lp/soyuz/model/publishing.py (+30/-22) lib/lp/soyuz/tests/test_publishing.py (+84/-95) lib/lp/testing/factory.py (+2/-2) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/delete-unmodifiable-suite |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+152529@code.launchpad.net |
Commit message
Refuse to delete packages from unmodifiable suites.
Description of the change
== Summary ==
Bug 1152669: it's possible to delete packages from a release pocket in a stable distroseries, requiring (AFAICT) DB surgery to recover. This shouldn't be permitted since the deletion will never be published anyway.
== Proposed fix ==
Add a check using Archive.canModifySuite to the requestDeletion code paths, raising DeletionError when the target suite cannot be modified.
== Implementation details ==
I may have gone a little overboard to achieve LoC neutrality. You can always just look at the principal effective revision in the branch history instead (the link in the original proposal was truncated in this capture).
== Tests ==
bin/test -vvct lp.soyuz.tests.test_publishing
== Demo and Q/A ==
Try to delete a package from the quantal RELEASE pocket on qastaging (using the requestDeletion API method). This should fail with an error.
William Grant (wgrant) : | # |
Preview Diff
1 | === modified file 'lib/lp/soyuz/interfaces/publishing.py' |
2 | --- lib/lp/soyuz/interfaces/publishing.py 2013-02-28 09:41:40 +0000 |
3 | +++ lib/lp/soyuz/interfaces/publishing.py 2013-03-08 22:44:24 +0000 |
4 | @@ -6,6 +6,7 @@ |
5 | __metaclass__ = type |
6 | |
7 | __all__ = [ |
8 | + 'DeletionError', |
9 | 'IArchiveSafePublisher', |
10 | 'IBinaryPackageFilePublishing', |
11 | 'IBinaryPackagePublishingHistory', |
12 | @@ -106,6 +107,11 @@ |
13 | """Raised when an attempt to change an override fails.""" |
14 | |
15 | |
16 | +@error_status(httplib.BAD_REQUEST) |
17 | +class DeletionError(Exception): |
18 | + """Raised when an attempt to delete a publication fails.""" |
19 | + |
20 | + |
21 | name_priority_map = { |
22 | 'required': PackagePublishingPriority.REQUIRED, |
23 | 'important': PackagePublishingPriority.IMPORTANT, |
24 | @@ -284,19 +290,16 @@ |
25 | title=_('Component name'), required=True, readonly=True, |
26 | ) |
27 | publishingstatus = Int( |
28 | - title=_('Package publishing status'), required=True, |
29 | - readonly=True, |
30 | + title=_('Package publishing status'), required=True, readonly=True, |
31 | ) |
32 | pocket = Int( |
33 | - title=_('Package publishing pocket'), required=True, |
34 | - readonly=True, |
35 | + title=_('Package publishing pocket'), required=True, readonly=True, |
36 | ) |
37 | archive = Int( |
38 | title=_('Archive ID'), required=True, readonly=True, |
39 | ) |
40 | libraryfilealias = Int( |
41 | - title=_('Binarypackage file alias'), required=True, |
42 | - readonly=True, |
43 | + title=_('Binarypackage file alias'), required=True, readonly=True, |
44 | ) |
45 | libraryfilealiasfilename = TextLine( |
46 | title=_('File name'), required=True, readonly=True, |
47 | |
48 | === modified file 'lib/lp/soyuz/model/publishing.py' |
49 | --- lib/lp/soyuz/model/publishing.py 2013-02-28 09:41:40 +0000 |
50 | +++ lib/lp/soyuz/model/publishing.py 2013-03-08 22:44:24 +0000 |
51 | @@ -91,6 +91,7 @@ |
52 | ) |
53 | from lp.soyuz.interfaces.publishing import ( |
54 | active_publishing_status, |
55 | + DeletionError, |
56 | IBinaryPackageFilePublishing, |
57 | IBinaryPackagePublishingHistory, |
58 | IPublishingSet, |
59 | @@ -122,8 +123,7 @@ |
60 | # XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py |
61 | """Return the pool path for a given source name and component name.""" |
62 | from lp.archivepublisher.diskpool import poolify |
63 | - return os.path.join( |
64 | - 'pool', poolify(source_name, component_name)) |
65 | + return os.path.join('pool', poolify(source_name, component_name)) |
66 | |
67 | |
68 | def get_component(archive, distroseries, component): |
69 | @@ -160,8 +160,7 @@ |
70 | |
71 | def proxied_urls(files, parent): |
72 | """Run the files passed through `ProxiedLibraryFileAlias`.""" |
73 | - return [ |
74 | - ProxiedLibraryFileAlias(file, parent).http_url for file in files] |
75 | + return [ProxiedLibraryFileAlias(file, parent).http_url for file in files] |
76 | |
77 | |
78 | class FilePublishingBase: |
79 | @@ -178,15 +177,13 @@ |
80 | sha1 = filealias.content.sha1 |
81 | path = diskpool.pathFor(component, source, filename) |
82 | |
83 | - action = diskpool.addFile( |
84 | - component, source, filename, sha1, filealias) |
85 | + action = diskpool.addFile(component, source, filename, sha1, filealias) |
86 | if action == diskpool.results.FILE_ADDED: |
87 | log.debug("Added %s from library" % path) |
88 | elif action == diskpool.results.SYMLINK_ADDED: |
89 | log.debug("%s created as a symlink." % path) |
90 | elif action == diskpool.results.NONE: |
91 | - log.debug( |
92 | - "%s is already in pool with the same content." % path) |
93 | + log.debug("%s is already in pool with the same content." % path) |
94 | |
95 | @property |
96 | def archive_url(self): |
97 | @@ -435,8 +432,7 @@ |
98 | foreignKey='SourcePackageName', dbName='sourcepackagename') |
99 | sourcepackagerelease = ForeignKey( |
100 | foreignKey='SourcePackageRelease', dbName='sourcepackagerelease') |
101 | - distroseries = ForeignKey( |
102 | - foreignKey='DistroSeries', dbName='distroseries') |
103 | + distroseries = ForeignKey(foreignKey='DistroSeries', dbName='distroseries') |
104 | component = ForeignKey(foreignKey='Component', dbName='component') |
105 | section = ForeignKey(foreignKey='Section', dbName='section') |
106 | status = EnumCol(schema=PackagePublishingStatus) |
107 | @@ -732,8 +728,7 @@ |
108 | """See `IPublishing`.""" |
109 | release = self.sourcepackagerelease |
110 | name = release.sourcepackagename.name |
111 | - return "%s %s in %s" % (name, release.version, |
112 | - self.distroseries.name) |
113 | + return "%s %s in %s" % (name, release.version, self.distroseries.name) |
114 | |
115 | def buildIndexStanzaFields(self): |
116 | """See `IPublishing`.""" |
117 | @@ -925,6 +920,11 @@ |
118 | |
119 | def requestDeletion(self, removed_by, removal_comment=None): |
120 | """See `IPublishing`.""" |
121 | + if not self.archive.canModifySuite(self.distroseries, self.pocket): |
122 | + raise DeletionError( |
123 | + "Cannot delete publications from suite '%s'" % |
124 | + self.distroseries.getSuite(self.pocket)) |
125 | + |
126 | self.setDeleted(removed_by, removal_comment) |
127 | if self.archive.is_main: |
128 | dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource) |
129 | @@ -1053,8 +1053,7 @@ |
130 | # ... |
131 | # <DESCRIPTION LN> |
132 | descr_lines = [line.lstrip() for line in bpr.description.splitlines()] |
133 | - bin_description = ( |
134 | - '%s\n %s' % (bpr.summary, '\n '.join(descr_lines))) |
135 | + bin_description = '%s\n %s' % (bpr.summary, '\n '.join(descr_lines)) |
136 | |
137 | # Dealing with architecturespecific field. |
138 | # Present 'all' in every archive index for architecture |
139 | @@ -1329,11 +1328,9 @@ |
140 | ] |
141 | |
142 | if start_date is not None: |
143 | - clauses.append( |
144 | - BinaryPackageReleaseDownloadCount.day >= start_date) |
145 | + clauses.append(BinaryPackageReleaseDownloadCount.day >= start_date) |
146 | if end_date is not None: |
147 | - clauses.append( |
148 | - BinaryPackageReleaseDownloadCount.day <= end_date) |
149 | + clauses.append(BinaryPackageReleaseDownloadCount.day <= end_date) |
150 | |
151 | return clauses |
152 | |
153 | @@ -1373,6 +1370,11 @@ |
154 | |
155 | def requestDeletion(self, removed_by, removal_comment=None): |
156 | """See `IPublishing`.""" |
157 | + if not self.archive.canModifySuite(self.distroseries, self.pocket): |
158 | + raise DeletionError( |
159 | + "Cannot delete publications from suite '%s'" % |
160 | + self.distroseries.getSuite(self.pocket)) |
161 | + |
162 | self.setDeleted(removed_by, removal_comment) |
163 | |
164 | def binaryFileUrls(self, include_meta=False): |
165 | @@ -1415,8 +1417,7 @@ |
166 | # Find the DAS in this series corresponding to the original |
167 | # build arch tag. If it does not exist or is disabled, we should |
168 | # not publish. |
169 | - target_arch = arch_map.get( |
170 | - bpr.build.distro_arch_series.architecturetag) |
171 | + target_arch = arch_map.get(bpr.build.arch_tag) |
172 | target_archs = [target_arch] if target_arch is not None else [] |
173 | else: |
174 | target_archs = archs |
175 | @@ -1941,8 +1942,7 @@ |
176 | # separate query for now. |
177 | source_pubs.update(store.find( |
178 | SourcePackagePublishingHistory, |
179 | - SourcePackagePublishingHistory.id.is_in( |
180 | - pubs_without_builds), |
181 | + SourcePackagePublishingHistory.id.is_in(pubs_without_builds), |
182 | SourcePackagePublishingHistory.archive == archive)) |
183 | # For each source_pub found, provide an aggregate summary of its |
184 | # builds. |
185 | @@ -2043,6 +2043,14 @@ |
186 | return |
187 | assert len(sources) + len(binaries) == len(pubs) |
188 | |
189 | + locations = set( |
190 | + (pub.archive, pub.distroseries, pub.pocket) for pub in pubs) |
191 | + for archive, distroseries, pocket in locations: |
192 | + if not archive.canModifySuite(distroseries, pocket): |
193 | + raise DeletionError( |
194 | + "Cannot delete publications from suite '%s'" % |
195 | + distroseries.getSuite(pocket)) |
196 | + |
197 | spph_ids = [spph.id for spph in sources] |
198 | self.setMultipleDeleted( |
199 | SourcePackagePublishingHistory, spph_ids, removed_by, |
200 | |
201 | === modified file 'lib/lp/soyuz/tests/test_publishing.py' |
202 | --- lib/lp/soyuz/tests/test_publishing.py 2013-02-20 12:28:38 +0000 |
203 | +++ lib/lp/soyuz/tests/test_publishing.py 2013-03-08 22:44:24 +0000 |
204 | @@ -42,6 +42,7 @@ |
205 | from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet |
206 | from lp.soyuz.interfaces.component import IComponentSet |
207 | from lp.soyuz.interfaces.publishing import ( |
208 | + DeletionError, |
209 | IPublishingSet, |
210 | OverrideError, |
211 | PackagePublishingPriority, |
212 | @@ -504,11 +505,8 @@ |
213 | if new_version is None: |
214 | new_version = version |
215 | changesfile_content = '' |
216 | - handle = open(changesfile_path, 'r') |
217 | - try: |
218 | + with open(changesfile_path, 'r') as handle: |
219 | changesfile_content = handle.read() |
220 | - finally: |
221 | - handle.close() |
222 | |
223 | source = self.getPubSource( |
224 | sourcename=sourcename, archive=archive, version=new_version, |
225 | @@ -648,9 +646,9 @@ |
226 | dominant = supersededby.binarypackagerelease.build |
227 | else: |
228 | dominant = supersededby.sourcepackagerelease |
229 | - self.assertEquals(dominant, pub.supersededby) |
230 | + self.assertEqual(dominant, pub.supersededby) |
231 | else: |
232 | - self.assertIs(None, pub.supersededby) |
233 | + self.assertIsNone(pub.supersededby) |
234 | |
235 | |
236 | class TestNativePublishing(TestNativePublishingBase): |
237 | @@ -660,9 +658,7 @@ |
238 | # the corresponding files are dumped in the disk pool/. |
239 | pub_source = self.getPubSource(filecontent='Hello world') |
240 | pub_source.publish(self.disk_pool, self.logger) |
241 | - self.assertEqual( |
242 | - PackagePublishingStatus.PUBLISHED, |
243 | - pub_source.status) |
244 | + self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_source.status) |
245 | pool_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir |
246 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') |
247 | |
248 | @@ -671,9 +667,7 @@ |
249 | # the corresponding files are dumped in the disk pool/. |
250 | pub_binary = self.getPubBinaries(filecontent='Hello world')[0] |
251 | pub_binary.publish(self.disk_pool, self.logger) |
252 | - self.assertEqual( |
253 | - PackagePublishingStatus.PUBLISHED, |
254 | - pub_binary.status) |
255 | + self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_binary.status) |
256 | pool_path = "%s/main/f/foo/foo-bin_666_all.deb" % self.pool_dir |
257 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') |
258 | |
259 | @@ -688,9 +682,8 @@ |
260 | foo_path = os.path.join(self.pool_dir, 'main', 'f', 'foo') |
261 | os.makedirs(foo_path) |
262 | foo_dsc_path = os.path.join(foo_path, 'foo_666.dsc') |
263 | - foo_dsc = open(foo_dsc_path, 'w') |
264 | - foo_dsc.write('Hello world') |
265 | - foo_dsc.close() |
266 | + with open(foo_dsc_path, 'w') as foo_dsc: |
267 | + foo_dsc.write('Hello world') |
268 | |
269 | pub_source = self.getPubSource(filecontent="Something") |
270 | pub_source.publish(self.disk_pool, self.logger) |
271 | @@ -699,8 +692,7 @@ |
272 | self.assertEqual("PoolFileOverwriteError", self.oopses[0]['type']) |
273 | |
274 | self.layer.commit() |
275 | - self.assertEqual( |
276 | - pub_source.status, PackagePublishingStatus.PENDING) |
277 | + self.assertEqual(pub_source.status, PackagePublishingStatus.PENDING) |
278 | self.assertEqual(open(foo_dsc_path).read().strip(), 'Hello world') |
279 | |
280 | def testPublishingDifferentContents(self): |
281 | @@ -711,8 +703,7 @@ |
282 | |
283 | foo_name = "%s/main/f/foo/foo_666.dsc" % self.pool_dir |
284 | pub_source.sync() |
285 | - self.assertEqual( |
286 | - pub_source.status, PackagePublishingStatus.PUBLISHED) |
287 | + self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
288 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') |
289 | |
290 | # try to publish 'foo' again with a different content, it |
291 | @@ -723,8 +714,7 @@ |
292 | self.layer.commit() |
293 | |
294 | pub_source2.sync() |
295 | - self.assertEqual( |
296 | - pub_source2.status, PackagePublishingStatus.PENDING) |
297 | + self.assertEqual(pub_source2.status, PackagePublishingStatus.PENDING) |
298 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') |
299 | |
300 | def testPublishingAlreadyInPool(self): |
301 | @@ -740,16 +730,14 @@ |
302 | bar_name = "%s/main/b/bar/bar_666.dsc" % self.pool_dir |
303 | self.assertEqual(open(bar_name).read().strip(), 'bar is good') |
304 | pub_source.sync() |
305 | - self.assertEqual( |
306 | - pub_source.status, PackagePublishingStatus.PUBLISHED) |
307 | + self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
308 | |
309 | pub_source2 = self.getPubSource( |
310 | sourcename='bar', filecontent='bar is good') |
311 | pub_source2.publish(self.disk_pool, self.logger) |
312 | self.layer.commit() |
313 | pub_source2.sync() |
314 | - self.assertEqual( |
315 | - pub_source2.status, PackagePublishingStatus.PUBLISHED) |
316 | + self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED) |
317 | |
318 | def testPublishingSymlink(self): |
319 | """Test if publishOne moving publication between components. |
320 | @@ -759,8 +747,7 @@ |
321 | """ |
322 | content = 'am I a file or a symbolic link ?' |
323 | # publish sim.dsc in main and re-publish in universe |
324 | - pub_source = self.getPubSource( |
325 | - sourcename='sim', filecontent=content) |
326 | + pub_source = self.getPubSource(sourcename='sim', filecontent=content) |
327 | pub_source2 = self.getPubSource( |
328 | sourcename='sim', component='universe', filecontent=content) |
329 | pub_source.publish(self.disk_pool, self.logger) |
330 | @@ -769,10 +756,8 @@ |
331 | |
332 | pub_source.sync() |
333 | pub_source2.sync() |
334 | - self.assertEqual( |
335 | - pub_source.status, PackagePublishingStatus.PUBLISHED) |
336 | - self.assertEqual( |
337 | - pub_source2.status, PackagePublishingStatus.PUBLISHED) |
338 | + self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
339 | + self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED) |
340 | |
341 | # check the resulted symbolic link |
342 | sim_universe = "%s/universe/s/sim/sim_666.dsc" % self.pool_dir |
343 | @@ -788,8 +773,7 @@ |
344 | self.layer.commit() |
345 | |
346 | pub_source3.sync() |
347 | - self.assertEqual( |
348 | - pub_source3.status, PackagePublishingStatus.PENDING) |
349 | + self.assertEqual(pub_source3.status, PackagePublishingStatus.PENDING) |
350 | |
351 | def testPublishInAnotherArchive(self): |
352 | """Publication in another archive |
353 | @@ -882,10 +866,10 @@ |
354 | copies = (copied, ) |
355 | |
356 | for copy in copies: |
357 | - self.assertEquals(copy.component, pub_record.component) |
358 | + self.assertEqual(pub_record.component, copy.component) |
359 | copy.overrideFromAncestry() |
360 | self.layer.commit() |
361 | - self.assertEquals(copy.component.name, 'universe') |
362 | + self.assertEqual("universe", copy.component.name) |
363 | |
364 | def test_overrideFromAncestry_fallback_to_source_component(self): |
365 | # overrideFromAncestry on the lack of ancestry, falls back to the |
366 | @@ -966,7 +950,7 @@ |
367 | spph = self.factory.makeSourcePackagePublishingHistory( |
368 | sourcepackagerelease=spr, archive=ppa) |
369 | spph.overrideFromAncestry() |
370 | - self.assertEquals(spph.component.name, 'main') |
371 | + self.assertEqual("main", spph.component.name) |
372 | |
373 | def test_ppa_override_with_ancestry(self): |
374 | # Test a PPA publication with ancestry |
375 | @@ -978,7 +962,7 @@ |
376 | spph2 = self.factory.makeSourcePackagePublishingHistory( |
377 | sourcepackagerelease=spr, archive=ppa) |
378 | spph2.overrideFromAncestry() |
379 | - self.assertEquals(spph2.component.name, 'main') |
380 | + self.assertEqual("main", spph2.component.name) |
381 | |
382 | def test_copyTo_with_overrides(self): |
383 | # Specifying overrides with copyTo should result in the new |
384 | @@ -1009,8 +993,7 @@ |
385 | # SPPH's ancestor get's populated when a spph is copied over. |
386 | target_archive = self.factory.makeArchive() |
387 | spph = self.factory.makeSourcePackagePublishingHistory() |
388 | - copy = spph.copyTo( |
389 | - spph.distroseries, spph.pocket, target_archive) |
390 | + copy = spph.copyTo(spph.distroseries, spph.pocket, target_archive) |
391 | |
392 | self.assertEqual(spph, copy.ancestor) |
393 | |
394 | @@ -1059,7 +1042,8 @@ |
395 | """ |
396 | available_archs = [self.sparc_distroarch, self.avr_distroarch] |
397 | pubrec = self.getPubSource(architecturehintlist='any') |
398 | - self.assertEquals([self.sparc_distroarch], |
399 | + self.assertEqual( |
400 | + [self.sparc_distroarch], |
401 | pubrec._getAllowedArchitectures(available_archs)) |
402 | |
403 | def test__getAllowedArchitectures_restricted_override(self): |
404 | @@ -1071,7 +1055,8 @@ |
405 | available_archs = [self.sparc_distroarch, self.avr_distroarch] |
406 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) |
407 | pubrec = self.getPubSource(architecturehintlist='any') |
408 | - self.assertEquals([self.sparc_distroarch, self.avr_distroarch], |
409 | + self.assertEqual( |
410 | + [self.sparc_distroarch, self.avr_distroarch], |
411 | pubrec._getAllowedArchitectures(available_archs)) |
412 | |
413 | def test_createMissingBuilds_restricts_any(self): |
414 | @@ -1080,8 +1065,8 @@ |
415 | """ |
416 | pubrec = self.getPubSource(architecturehintlist='any') |
417 | builds = pubrec.createMissingBuilds() |
418 | - self.assertEquals(1, len(builds)) |
419 | - self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) |
420 | + self.assertEqual(1, len(builds)) |
421 | + self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
422 | |
423 | def test_createMissingBuilds_restricts_explicitlist(self): |
424 | """createMissingBuilds() limits builds targeted at a variety of |
425 | @@ -1089,8 +1074,8 @@ |
426 | """ |
427 | pubrec = self.getPubSource(architecturehintlist='sparc i386 avr') |
428 | builds = pubrec.createMissingBuilds() |
429 | - self.assertEquals(1, len(builds)) |
430 | - self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) |
431 | + self.assertEqual(1, len(builds)) |
432 | + self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
433 | |
434 | def test_createMissingBuilds_restricts_all(self): |
435 | """createMissingBuilds() should limit builds targeted at 'all' |
436 | @@ -1099,8 +1084,8 @@ |
437 | """ |
438 | pubrec = self.getPubSource(architecturehintlist='all') |
439 | builds = pubrec.createMissingBuilds() |
440 | - self.assertEquals(1, len(builds)) |
441 | - self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) |
442 | + self.assertEqual(1, len(builds)) |
443 | + self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
444 | |
445 | def test_createMissingBuilds_restrict_override(self): |
446 | """createMissingBuilds() should limit builds targeted at 'any' |
447 | @@ -1110,9 +1095,9 @@ |
448 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) |
449 | pubrec = self.getPubSource(architecturehintlist='any') |
450 | builds = pubrec.createMissingBuilds() |
451 | - self.assertEquals(2, len(builds)) |
452 | - self.assertEquals(self.avr_distroarch, builds[0].distro_arch_series) |
453 | - self.assertEquals(self.sparc_distroarch, builds[1].distro_arch_series) |
454 | + self.assertEqual(2, len(builds)) |
455 | + self.assertEqual(self.avr_distroarch, builds[0].distro_arch_series) |
456 | + self.assertEqual(self.sparc_distroarch, builds[1].distro_arch_series) |
457 | |
458 | |
459 | class PublishingSetTests(TestCaseWithFactory): |
460 | @@ -1175,59 +1160,69 @@ |
461 | |
462 | layer = ZopelessDatabaseLayer |
463 | |
464 | + def setUp(self): |
465 | + super(TestPublishingSetLite, self).setUp() |
466 | + self.person = self.factory.makePerson() |
467 | + |
468 | def test_requestDeletion_marks_SPPHs_deleted(self): |
469 | spph = self.factory.makeSourcePackagePublishingHistory() |
470 | - getUtility(IPublishingSet).requestDeletion( |
471 | - [spph], self.factory.makePerson()) |
472 | + getUtility(IPublishingSet).requestDeletion([spph], self.person) |
473 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) |
474 | |
475 | def test_requestDeletion_leaves_other_SPPHs_alone(self): |
476 | spph = self.factory.makeSourcePackagePublishingHistory() |
477 | other_spph = self.factory.makeSourcePackagePublishingHistory() |
478 | - getUtility(IPublishingSet).requestDeletion( |
479 | - [other_spph], self.factory.makePerson()) |
480 | + getUtility(IPublishingSet).requestDeletion([other_spph], self.person) |
481 | self.assertEqual(PackagePublishingStatus.PENDING, spph.status) |
482 | |
483 | def test_requestDeletion_marks_BPPHs_deleted(self): |
484 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
485 | - getUtility(IPublishingSet).requestDeletion( |
486 | - [bpph], self.factory.makePerson()) |
487 | + getUtility(IPublishingSet).requestDeletion([bpph], self.person) |
488 | self.assertEqual(PackagePublishingStatus.DELETED, bpph.status) |
489 | |
490 | def test_requestDeletion_marks_attached_BPPHs_deleted(self): |
491 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
492 | spph = self.factory.makeSPPHForBPPH(bpph) |
493 | - getUtility(IPublishingSet).requestDeletion( |
494 | - [spph], self.factory.makePerson()) |
495 | + getUtility(IPublishingSet).requestDeletion([spph], self.person) |
496 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) |
497 | |
498 | def test_requestDeletion_leaves_other_BPPHs_alone(self): |
499 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
500 | unrelated_spph = self.factory.makeSourcePackagePublishingHistory() |
501 | getUtility(IPublishingSet).requestDeletion( |
502 | - [unrelated_spph], self.factory.makePerson()) |
503 | + [unrelated_spph], self.person) |
504 | self.assertEqual(PackagePublishingStatus.PENDING, bpph.status) |
505 | |
506 | def test_requestDeletion_accepts_empty_sources_list(self): |
507 | - person = self.factory.makePerson() |
508 | - getUtility(IPublishingSet).requestDeletion([], person) |
509 | + getUtility(IPublishingSet).requestDeletion([], self.person) |
510 | # The test is that this does not fail. |
511 | - Store.of(person).flush() |
512 | + Store.of(self.person).flush() |
513 | |
514 | def test_requestDeletion_creates_DistroSeriesDifferenceJobs(self): |
515 | dsp = self.factory.makeDistroSeriesParent() |
516 | - series = dsp.derived_series |
517 | spph = self.factory.makeSourcePackagePublishingHistory( |
518 | - series, pocket=PackagePublishingPocket.RELEASE) |
519 | + dsp.derived_series, pocket=PackagePublishingPocket.RELEASE) |
520 | spn = spph.sourcepackagerelease.sourcepackagename |
521 | - |
522 | - getUtility(IPublishingSet).requestDeletion( |
523 | - [spph], self.factory.makePerson()) |
524 | - |
525 | + getUtility(IPublishingSet).requestDeletion([spph], self.person) |
526 | self.assertEqual( |
527 | 1, len(find_waiting_jobs( |
528 | dsp.derived_series, spn, dsp.parent_series))) |
529 | |
530 | + def test_requestDeletion_disallows_unmodifiable_suites(self): |
531 | + bpph = self.factory.makeBinaryPackagePublishingHistory( |
532 | + pocket=PackagePublishingPocket.RELEASE) |
533 | + spph = self.factory.makeSourcePackagePublishingHistory( |
534 | + distroseries=bpph.distroseries, |
535 | + pocket=PackagePublishingPocket.RELEASE) |
536 | + spph.distroseries.status = SeriesStatus.CURRENT |
537 | + message = "Cannot delete publications from suite '%s'" % ( |
538 | + spph.distroseries.getSuite(spph.pocket)) |
539 | + for pub in spph, bpph: |
540 | + self.assertRaisesWithContent( |
541 | + DeletionError, message, pub.requestDeletion, self.person) |
542 | + self.assertRaisesWithContent( |
543 | + DeletionError, message, pub.api_requestDeletion, self.person) |
544 | + |
545 | |
546 | class TestSourceDomination(TestNativePublishingBase): |
547 | """Test SourcePackagePublishingHistory.supersede() operates correctly.""" |
548 | @@ -1264,7 +1259,7 @@ |
549 | |
550 | self.assertRaises(AssertionError, source.supersede, super_source) |
551 | self.checkSuperseded([source], super_source) |
552 | - self.assertEquals(super_date, source.datesuperseded) |
553 | + self.assertEqual(super_date, source.datesuperseded) |
554 | |
555 | |
556 | class TestBinaryDomination(TestNativePublishingBase): |
557 | @@ -1343,7 +1338,7 @@ |
558 | |
559 | self.assertRaises(AssertionError, bin.supersede, super_bin) |
560 | self.checkSuperseded([bin], super_bin) |
561 | - self.assertEquals(super_date, bin.datesuperseded) |
562 | + self.assertEqual(super_date, bin.datesuperseded) |
563 | |
564 | def testSkipsSupersededArchIndependentBinary(self): |
565 | """Check that supersede() skips a superseded arch-indep binary. |
566 | @@ -1365,7 +1360,7 @@ |
567 | |
568 | bin.supersede(super_bin) |
569 | self.checkSuperseded([bin], super_bin) |
570 | - self.assertEquals(super_date, bin.datesuperseded) |
571 | + self.assertEqual(super_date, bin.datesuperseded) |
572 | |
573 | def testSupersedesCorrespondingDDEB(self): |
574 | """Check that supersede() takes with it any corresponding DDEB. |
575 | @@ -1379,8 +1374,7 @@ |
576 | |
577 | # Each of these will return (i386 deb, i386 ddeb, hppa deb, |
578 | # hppa ddeb). |
579 | - bins = self.getPubBinaries( |
580 | - architecturespecific=True, with_debug=True) |
581 | + bins = self.getPubBinaries(architecturespecific=True, with_debug=True) |
582 | super_bins = self.getPubBinaries( |
583 | architecturespecific=True, with_debug=True) |
584 | |
585 | @@ -1405,8 +1399,7 @@ |
586 | distribution=self.ubuntutest) |
587 | |
588 | # This will return (i386 deb, i386 ddeb, hppa deb, hppa ddeb). |
589 | - bins = self.getPubBinaries( |
590 | - architecturespecific=True, with_debug=True) |
591 | + bins = self.getPubBinaries(architecturespecific=True, with_debug=True) |
592 | self.assertRaises(AssertionError, bins[0].supersede, bins[1]) |
593 | |
594 | |
595 | @@ -1414,9 +1407,8 @@ |
596 | """Test BinaryPackagePublishingHistory._getOtherPublications() works.""" |
597 | |
598 | def checkOtherPublications(self, this, others): |
599 | - self.assertEquals( |
600 | - set(removeSecurityProxy(this)._getOtherPublications()), |
601 | - set(others)) |
602 | + self.assertContentEqual( |
603 | + removeSecurityProxy(this)._getOtherPublications(), others) |
604 | |
605 | def testFindsOtherArchIndepPublications(self): |
606 | """Arch-indep publications with the same overrides should be found.""" |
607 | @@ -1529,7 +1521,7 @@ |
608 | # SPPH, BPPHs and BPRs. |
609 | with StormStatementRecorder() as recorder: |
610 | bins = spph.getBuiltBinaries() |
611 | - self.assertEquals(0, len(bins)) |
612 | + self.assertEqual(0, len(bins)) |
613 | self.assertThat(recorder, HasQueryCount(Equals(3))) |
614 | |
615 | self.getPubBinaries(pub_source=spph) |
616 | @@ -1541,7 +1533,7 @@ |
617 | # BPF has no query penalty. |
618 | with StormStatementRecorder() as recorder: |
619 | bins = spph.getBuiltBinaries(want_files=True) |
620 | - self.assertEquals(2, len(bins)) |
621 | + self.assertEqual(2, len(bins)) |
622 | for bpph in bins: |
623 | files = bpph.binarypackagerelease.files |
624 | self.assertEqual(1, len(files)) |
625 | @@ -1595,8 +1587,7 @@ |
626 | def test_architecture_independent(self): |
627 | # Architecture-independent binaries get published to all enabled |
628 | # DASes in the series. |
629 | - bpr = self.factory.makeBinaryPackageRelease( |
630 | - architecturespecific=False) |
631 | + bpr = self.factory.makeBinaryPackageRelease(architecturespecific=False) |
632 | # Create 3 architectures. The binary will not be published in |
633 | # the disabled one. |
634 | target_das_a = self.factory.makeDistroArchSeries() |
635 | @@ -1606,12 +1597,11 @@ |
636 | self.factory.makeDistroArchSeries( |
637 | distroseries=target_das_a.distroseries, enabled=False) |
638 | args = self.makeArgs([bpr], target_das_a.distroseries) |
639 | - bpphs = getUtility(IPublishingSet).publishBinaries( |
640 | - **args) |
641 | - self.assertEquals(2, len(bpphs)) |
642 | - self.assertEquals( |
643 | - set((target_das_a, target_das_b)), |
644 | - set(bpph.distroarchseries for bpph in bpphs)) |
645 | + bpphs = getUtility(IPublishingSet).publishBinaries(**args) |
646 | + self.assertEqual(2, len(bpphs)) |
647 | + self.assertContentEqual( |
648 | + (target_das_a, target_das_b), |
649 | + [bpph.distroarchseries for bpph in bpphs]) |
650 | |
651 | def test_architecture_disabled(self): |
652 | # An empty list is return if the DistroArchSeries was disabled. |
653 | @@ -1664,13 +1654,12 @@ |
654 | build=build, binpackageformat=BinaryPackageFormat.DDEB) |
655 | args = self.makeArgs([normal, debug], das.distroseries) |
656 | bpphs = getUtility(IPublishingSet).publishBinaries(**args) |
657 | - self.assertEquals(2, len(bpphs)) |
658 | - self.assertEquals( |
659 | - set((normal, debug)), |
660 | - set(bpph.binarypackagerelease for bpph in bpphs)) |
661 | - self.assertEquals( |
662 | - set((das.main_archive, das.main_archive.debug_archive)), |
663 | - set(bpph.archive for bpph in bpphs)) |
664 | + self.assertEqual(2, len(bpphs)) |
665 | + self.assertContentEqual( |
666 | + (normal, debug), [bpph.binarypackagerelease for bpph in bpphs]) |
667 | + self.assertContentEqual( |
668 | + (das.main_archive, das.main_archive.debug_archive), |
669 | + [bpph.archive for bpph in bpphs]) |
670 | |
671 | # A second copy does nothing, because it checks in the debug |
672 | # archive too. |
673 | |
674 | === modified file 'lib/lp/testing/factory.py' |
675 | --- lib/lp/testing/factory.py 2013-01-23 06:54:49 +0000 |
676 | +++ lib/lp/testing/factory.py 2013-03-08 22:44:24 +0000 |
677 | @@ -2,7 +2,7 @@ |
678 | # NOTE: The first line above must stay first; do not move the copyright |
679 | # notice to the top. See http://www.python.org/dev/peps/pep-0263/. |
680 | # |
681 | -# Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
682 | +# Copyright 2009-2013 Canonical Ltd. This software is licensed under the |
683 | # GNU Affero General Public License version 3 (see the file LICENSE). |
684 | |
685 | """Testing infrastructure for the Launchpad application. |
686 | @@ -3678,7 +3678,7 @@ |
687 | initial source package release upload archive, or to the |
688 | distro series main archive. |
689 | :param pocket: The pocket to publish into. Can be specified as a |
690 | - string. Defaults to the RELEASE pocket. |
691 | + string. Defaults to the BACKPORTS pocket. |
692 | :param status: The publication status. Defaults to PENDING. If |
693 | set to PUBLISHED, the publisheddate will be set to now. |
694 | :param dateremoved: The removal date. |