Merge lp:~cjwatson/launchpad/delete-unmodifiable-suite into lp:launchpad
- delete-unmodifiable-suite
- Merge into devel
Status: | Merged |
---|---|
Approved by: | William Grant |
Approved revision: | no longer in the source branch. |
Merged at revision: | 16526 |
Proposed branch: | lp:~cjwatson/launchpad/delete-unmodifiable-suite |
Merge into: | lp:launchpad |
Diff against target: |
694 lines (+125/-125) 4 files modified
lib/lp/soyuz/interfaces/publishing.py (+9/-6) lib/lp/soyuz/model/publishing.py (+30/-22) lib/lp/soyuz/tests/test_publishing.py (+84/-95) lib/lp/testing/factory.py (+2/-2) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/delete-unmodifiable-suite |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+152529@code.launchpad.net |
Commit message
Refuse to delete packages from unmodifiable suites.
Description of the change
== Summary ==
Bug 1152669: it's possible to delete packages from a release pocket in a stable distroseries, requiring (AFAICT) DB surgery to recover. This shouldn't be permitted since the deletion will never be published anyway.
== Proposed fix ==
Add an Archive.canModifySuite check to the requestDeletion methods (on SourcePackagePublishingHistory, BinaryPackagePublishingHistory, and PublishingSet), raising DeletionError when the target suite cannot be modified.
== Implementation details ==
I may have gone a little overboard to achieve LoC neutrality. You can always just look at the principal effective revision in the branch history (the revision link was truncated in this export).
== Tests ==
bin/test -vvct lp.soyuz.tests.test_publishing (command truncated in the original export; this is the test module modified by the branch).
== Demo and Q/A ==
Try to delete a package from the quantal RELEASE pocket on qastaging (using the requestDeletion API method). This should fail with an error.
William Grant (wgrant) : | # |
Preview Diff
1 | === modified file 'lib/lp/soyuz/interfaces/publishing.py' | |||
2 | --- lib/lp/soyuz/interfaces/publishing.py 2013-02-28 09:41:40 +0000 | |||
3 | +++ lib/lp/soyuz/interfaces/publishing.py 2013-03-08 22:44:24 +0000 | |||
4 | @@ -6,6 +6,7 @@ | |||
5 | 6 | __metaclass__ = type | 6 | __metaclass__ = type |
6 | 7 | 7 | ||
7 | 8 | __all__ = [ | 8 | __all__ = [ |
8 | 9 | 'DeletionError', | ||
9 | 9 | 'IArchiveSafePublisher', | 10 | 'IArchiveSafePublisher', |
10 | 10 | 'IBinaryPackageFilePublishing', | 11 | 'IBinaryPackageFilePublishing', |
11 | 11 | 'IBinaryPackagePublishingHistory', | 12 | 'IBinaryPackagePublishingHistory', |
12 | @@ -106,6 +107,11 @@ | |||
13 | 106 | """Raised when an attempt to change an override fails.""" | 107 | """Raised when an attempt to change an override fails.""" |
14 | 107 | 108 | ||
15 | 108 | 109 | ||
16 | 110 | @error_status(httplib.BAD_REQUEST) | ||
17 | 111 | class DeletionError(Exception): | ||
18 | 112 | """Raised when an attempt to delete a publication fails.""" | ||
19 | 113 | |||
20 | 114 | |||
21 | 109 | name_priority_map = { | 115 | name_priority_map = { |
22 | 110 | 'required': PackagePublishingPriority.REQUIRED, | 116 | 'required': PackagePublishingPriority.REQUIRED, |
23 | 111 | 'important': PackagePublishingPriority.IMPORTANT, | 117 | 'important': PackagePublishingPriority.IMPORTANT, |
24 | @@ -284,19 +290,16 @@ | |||
25 | 284 | title=_('Component name'), required=True, readonly=True, | 290 | title=_('Component name'), required=True, readonly=True, |
26 | 285 | ) | 291 | ) |
27 | 286 | publishingstatus = Int( | 292 | publishingstatus = Int( |
30 | 287 | title=_('Package publishing status'), required=True, | 293 | title=_('Package publishing status'), required=True, readonly=True, |
29 | 288 | readonly=True, | ||
31 | 289 | ) | 294 | ) |
32 | 290 | pocket = Int( | 295 | pocket = Int( |
35 | 291 | title=_('Package publishing pocket'), required=True, | 296 | title=_('Package publishing pocket'), required=True, readonly=True, |
34 | 292 | readonly=True, | ||
36 | 293 | ) | 297 | ) |
37 | 294 | archive = Int( | 298 | archive = Int( |
38 | 295 | title=_('Archive ID'), required=True, readonly=True, | 299 | title=_('Archive ID'), required=True, readonly=True, |
39 | 296 | ) | 300 | ) |
40 | 297 | libraryfilealias = Int( | 301 | libraryfilealias = Int( |
43 | 298 | title=_('Binarypackage file alias'), required=True, | 302 | title=_('Binarypackage file alias'), required=True, readonly=True, |
42 | 299 | readonly=True, | ||
44 | 300 | ) | 303 | ) |
45 | 301 | libraryfilealiasfilename = TextLine( | 304 | libraryfilealiasfilename = TextLine( |
46 | 302 | title=_('File name'), required=True, readonly=True, | 305 | title=_('File name'), required=True, readonly=True, |
47 | 303 | 306 | ||
48 | === modified file 'lib/lp/soyuz/model/publishing.py' | |||
49 | --- lib/lp/soyuz/model/publishing.py 2013-02-28 09:41:40 +0000 | |||
50 | +++ lib/lp/soyuz/model/publishing.py 2013-03-08 22:44:24 +0000 | |||
51 | @@ -91,6 +91,7 @@ | |||
52 | 91 | ) | 91 | ) |
53 | 92 | from lp.soyuz.interfaces.publishing import ( | 92 | from lp.soyuz.interfaces.publishing import ( |
54 | 93 | active_publishing_status, | 93 | active_publishing_status, |
55 | 94 | DeletionError, | ||
56 | 94 | IBinaryPackageFilePublishing, | 95 | IBinaryPackageFilePublishing, |
57 | 95 | IBinaryPackagePublishingHistory, | 96 | IBinaryPackagePublishingHistory, |
58 | 96 | IPublishingSet, | 97 | IPublishingSet, |
59 | @@ -122,8 +123,7 @@ | |||
60 | 122 | # XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py | 123 | # XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py |
61 | 123 | """Return the pool path for a given source name and component name.""" | 124 | """Return the pool path for a given source name and component name.""" |
62 | 124 | from lp.archivepublisher.diskpool import poolify | 125 | from lp.archivepublisher.diskpool import poolify |
65 | 125 | return os.path.join( | 126 | return os.path.join('pool', poolify(source_name, component_name)) |
64 | 126 | 'pool', poolify(source_name, component_name)) | ||
66 | 127 | 127 | ||
67 | 128 | 128 | ||
68 | 129 | def get_component(archive, distroseries, component): | 129 | def get_component(archive, distroseries, component): |
69 | @@ -160,8 +160,7 @@ | |||
70 | 160 | 160 | ||
71 | 161 | def proxied_urls(files, parent): | 161 | def proxied_urls(files, parent): |
72 | 162 | """Run the files passed through `ProxiedLibraryFileAlias`.""" | 162 | """Run the files passed through `ProxiedLibraryFileAlias`.""" |
75 | 163 | return [ | 163 | return [ProxiedLibraryFileAlias(file, parent).http_url for file in files] |
74 | 164 | ProxiedLibraryFileAlias(file, parent).http_url for file in files] | ||
76 | 165 | 164 | ||
77 | 166 | 165 | ||
78 | 167 | class FilePublishingBase: | 166 | class FilePublishingBase: |
79 | @@ -178,15 +177,13 @@ | |||
80 | 178 | sha1 = filealias.content.sha1 | 177 | sha1 = filealias.content.sha1 |
81 | 179 | path = diskpool.pathFor(component, source, filename) | 178 | path = diskpool.pathFor(component, source, filename) |
82 | 180 | 179 | ||
85 | 181 | action = diskpool.addFile( | 180 | action = diskpool.addFile(component, source, filename, sha1, filealias) |
84 | 182 | component, source, filename, sha1, filealias) | ||
86 | 183 | if action == diskpool.results.FILE_ADDED: | 181 | if action == diskpool.results.FILE_ADDED: |
87 | 184 | log.debug("Added %s from library" % path) | 182 | log.debug("Added %s from library" % path) |
88 | 185 | elif action == diskpool.results.SYMLINK_ADDED: | 183 | elif action == diskpool.results.SYMLINK_ADDED: |
89 | 186 | log.debug("%s created as a symlink." % path) | 184 | log.debug("%s created as a symlink." % path) |
90 | 187 | elif action == diskpool.results.NONE: | 185 | elif action == diskpool.results.NONE: |
93 | 188 | log.debug( | 186 | log.debug("%s is already in pool with the same content." % path) |
92 | 189 | "%s is already in pool with the same content." % path) | ||
94 | 190 | 187 | ||
95 | 191 | @property | 188 | @property |
96 | 192 | def archive_url(self): | 189 | def archive_url(self): |
97 | @@ -435,8 +432,7 @@ | |||
98 | 435 | foreignKey='SourcePackageName', dbName='sourcepackagename') | 432 | foreignKey='SourcePackageName', dbName='sourcepackagename') |
99 | 436 | sourcepackagerelease = ForeignKey( | 433 | sourcepackagerelease = ForeignKey( |
100 | 437 | foreignKey='SourcePackageRelease', dbName='sourcepackagerelease') | 434 | foreignKey='SourcePackageRelease', dbName='sourcepackagerelease') |
103 | 438 | distroseries = ForeignKey( | 435 | distroseries = ForeignKey(foreignKey='DistroSeries', dbName='distroseries') |
102 | 439 | foreignKey='DistroSeries', dbName='distroseries') | ||
104 | 440 | component = ForeignKey(foreignKey='Component', dbName='component') | 436 | component = ForeignKey(foreignKey='Component', dbName='component') |
105 | 441 | section = ForeignKey(foreignKey='Section', dbName='section') | 437 | section = ForeignKey(foreignKey='Section', dbName='section') |
106 | 442 | status = EnumCol(schema=PackagePublishingStatus) | 438 | status = EnumCol(schema=PackagePublishingStatus) |
107 | @@ -732,8 +728,7 @@ | |||
108 | 732 | """See `IPublishing`.""" | 728 | """See `IPublishing`.""" |
109 | 733 | release = self.sourcepackagerelease | 729 | release = self.sourcepackagerelease |
110 | 734 | name = release.sourcepackagename.name | 730 | name = release.sourcepackagename.name |
113 | 735 | return "%s %s in %s" % (name, release.version, | 731 | return "%s %s in %s" % (name, release.version, self.distroseries.name) |
112 | 736 | self.distroseries.name) | ||
114 | 737 | 732 | ||
115 | 738 | def buildIndexStanzaFields(self): | 733 | def buildIndexStanzaFields(self): |
116 | 739 | """See `IPublishing`.""" | 734 | """See `IPublishing`.""" |
117 | @@ -925,6 +920,11 @@ | |||
118 | 925 | 920 | ||
119 | 926 | def requestDeletion(self, removed_by, removal_comment=None): | 921 | def requestDeletion(self, removed_by, removal_comment=None): |
120 | 927 | """See `IPublishing`.""" | 922 | """See `IPublishing`.""" |
121 | 923 | if not self.archive.canModifySuite(self.distroseries, self.pocket): | ||
122 | 924 | raise DeletionError( | ||
123 | 925 | "Cannot delete publications from suite '%s'" % | ||
124 | 926 | self.distroseries.getSuite(self.pocket)) | ||
125 | 927 | |||
126 | 928 | self.setDeleted(removed_by, removal_comment) | 928 | self.setDeleted(removed_by, removal_comment) |
127 | 929 | if self.archive.is_main: | 929 | if self.archive.is_main: |
128 | 930 | dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource) | 930 | dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource) |
129 | @@ -1053,8 +1053,7 @@ | |||
130 | 1053 | # ... | 1053 | # ... |
131 | 1054 | # <DESCRIPTION LN> | 1054 | # <DESCRIPTION LN> |
132 | 1055 | descr_lines = [line.lstrip() for line in bpr.description.splitlines()] | 1055 | descr_lines = [line.lstrip() for line in bpr.description.splitlines()] |
135 | 1056 | bin_description = ( | 1056 | bin_description = '%s\n %s' % (bpr.summary, '\n '.join(descr_lines)) |
134 | 1057 | '%s\n %s' % (bpr.summary, '\n '.join(descr_lines))) | ||
136 | 1058 | 1057 | ||
137 | 1059 | # Dealing with architecturespecific field. | 1058 | # Dealing with architecturespecific field. |
138 | 1060 | # Present 'all' in every archive index for architecture | 1059 | # Present 'all' in every archive index for architecture |
139 | @@ -1329,11 +1328,9 @@ | |||
140 | 1329 | ] | 1328 | ] |
141 | 1330 | 1329 | ||
142 | 1331 | if start_date is not None: | 1330 | if start_date is not None: |
145 | 1332 | clauses.append( | 1331 | clauses.append(BinaryPackageReleaseDownloadCount.day >= start_date) |
144 | 1333 | BinaryPackageReleaseDownloadCount.day >= start_date) | ||
146 | 1334 | if end_date is not None: | 1332 | if end_date is not None: |
149 | 1335 | clauses.append( | 1333 | clauses.append(BinaryPackageReleaseDownloadCount.day <= end_date) |
148 | 1336 | BinaryPackageReleaseDownloadCount.day <= end_date) | ||
150 | 1337 | 1334 | ||
151 | 1338 | return clauses | 1335 | return clauses |
152 | 1339 | 1336 | ||
153 | @@ -1373,6 +1370,11 @@ | |||
154 | 1373 | 1370 | ||
155 | 1374 | def requestDeletion(self, removed_by, removal_comment=None): | 1371 | def requestDeletion(self, removed_by, removal_comment=None): |
156 | 1375 | """See `IPublishing`.""" | 1372 | """See `IPublishing`.""" |
157 | 1373 | if not self.archive.canModifySuite(self.distroseries, self.pocket): | ||
158 | 1374 | raise DeletionError( | ||
159 | 1375 | "Cannot delete publications from suite '%s'" % | ||
160 | 1376 | self.distroseries.getSuite(self.pocket)) | ||
161 | 1377 | |||
162 | 1376 | self.setDeleted(removed_by, removal_comment) | 1378 | self.setDeleted(removed_by, removal_comment) |
163 | 1377 | 1379 | ||
164 | 1378 | def binaryFileUrls(self, include_meta=False): | 1380 | def binaryFileUrls(self, include_meta=False): |
165 | @@ -1415,8 +1417,7 @@ | |||
166 | 1415 | # Find the DAS in this series corresponding to the original | 1417 | # Find the DAS in this series corresponding to the original |
167 | 1416 | # build arch tag. If it does not exist or is disabled, we should | 1418 | # build arch tag. If it does not exist or is disabled, we should |
168 | 1417 | # not publish. | 1419 | # not publish. |
171 | 1418 | target_arch = arch_map.get( | 1420 | target_arch = arch_map.get(bpr.build.arch_tag) |
170 | 1419 | bpr.build.distro_arch_series.architecturetag) | ||
172 | 1420 | target_archs = [target_arch] if target_arch is not None else [] | 1421 | target_archs = [target_arch] if target_arch is not None else [] |
173 | 1421 | else: | 1422 | else: |
174 | 1422 | target_archs = archs | 1423 | target_archs = archs |
175 | @@ -1941,8 +1942,7 @@ | |||
176 | 1941 | # separate query for now. | 1942 | # separate query for now. |
177 | 1942 | source_pubs.update(store.find( | 1943 | source_pubs.update(store.find( |
178 | 1943 | SourcePackagePublishingHistory, | 1944 | SourcePackagePublishingHistory, |
181 | 1944 | SourcePackagePublishingHistory.id.is_in( | 1945 | SourcePackagePublishingHistory.id.is_in(pubs_without_builds), |
180 | 1945 | pubs_without_builds), | ||
182 | 1946 | SourcePackagePublishingHistory.archive == archive)) | 1946 | SourcePackagePublishingHistory.archive == archive)) |
183 | 1947 | # For each source_pub found, provide an aggregate summary of its | 1947 | # For each source_pub found, provide an aggregate summary of its |
184 | 1948 | # builds. | 1948 | # builds. |
185 | @@ -2043,6 +2043,14 @@ | |||
186 | 2043 | return | 2043 | return |
187 | 2044 | assert len(sources) + len(binaries) == len(pubs) | 2044 | assert len(sources) + len(binaries) == len(pubs) |
188 | 2045 | 2045 | ||
189 | 2046 | locations = set( | ||
190 | 2047 | (pub.archive, pub.distroseries, pub.pocket) for pub in pubs) | ||
191 | 2048 | for archive, distroseries, pocket in locations: | ||
192 | 2049 | if not archive.canModifySuite(distroseries, pocket): | ||
193 | 2050 | raise DeletionError( | ||
194 | 2051 | "Cannot delete publications from suite '%s'" % | ||
195 | 2052 | distroseries.getSuite(pocket)) | ||
196 | 2053 | |||
197 | 2046 | spph_ids = [spph.id for spph in sources] | 2054 | spph_ids = [spph.id for spph in sources] |
198 | 2047 | self.setMultipleDeleted( | 2055 | self.setMultipleDeleted( |
199 | 2048 | SourcePackagePublishingHistory, spph_ids, removed_by, | 2056 | SourcePackagePublishingHistory, spph_ids, removed_by, |
200 | 2049 | 2057 | ||
201 | === modified file 'lib/lp/soyuz/tests/test_publishing.py' | |||
202 | --- lib/lp/soyuz/tests/test_publishing.py 2013-02-20 12:28:38 +0000 | |||
203 | +++ lib/lp/soyuz/tests/test_publishing.py 2013-03-08 22:44:24 +0000 | |||
204 | @@ -42,6 +42,7 @@ | |||
205 | 42 | from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet | 42 | from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet |
206 | 43 | from lp.soyuz.interfaces.component import IComponentSet | 43 | from lp.soyuz.interfaces.component import IComponentSet |
207 | 44 | from lp.soyuz.interfaces.publishing import ( | 44 | from lp.soyuz.interfaces.publishing import ( |
208 | 45 | DeletionError, | ||
209 | 45 | IPublishingSet, | 46 | IPublishingSet, |
210 | 46 | OverrideError, | 47 | OverrideError, |
211 | 47 | PackagePublishingPriority, | 48 | PackagePublishingPriority, |
212 | @@ -504,11 +505,8 @@ | |||
213 | 504 | if new_version is None: | 505 | if new_version is None: |
214 | 505 | new_version = version | 506 | new_version = version |
215 | 506 | changesfile_content = '' | 507 | changesfile_content = '' |
218 | 507 | handle = open(changesfile_path, 'r') | 508 | with open(changesfile_path, 'r') as handle: |
217 | 508 | try: | ||
219 | 509 | changesfile_content = handle.read() | 509 | changesfile_content = handle.read() |
220 | 510 | finally: | ||
221 | 511 | handle.close() | ||
222 | 512 | 510 | ||
223 | 513 | source = self.getPubSource( | 511 | source = self.getPubSource( |
224 | 514 | sourcename=sourcename, archive=archive, version=new_version, | 512 | sourcename=sourcename, archive=archive, version=new_version, |
225 | @@ -648,9 +646,9 @@ | |||
226 | 648 | dominant = supersededby.binarypackagerelease.build | 646 | dominant = supersededby.binarypackagerelease.build |
227 | 649 | else: | 647 | else: |
228 | 650 | dominant = supersededby.sourcepackagerelease | 648 | dominant = supersededby.sourcepackagerelease |
230 | 651 | self.assertEquals(dominant, pub.supersededby) | 649 | self.assertEqual(dominant, pub.supersededby) |
231 | 652 | else: | 650 | else: |
233 | 653 | self.assertIs(None, pub.supersededby) | 651 | self.assertIsNone(pub.supersededby) |
234 | 654 | 652 | ||
235 | 655 | 653 | ||
236 | 656 | class TestNativePublishing(TestNativePublishingBase): | 654 | class TestNativePublishing(TestNativePublishingBase): |
237 | @@ -660,9 +658,7 @@ | |||
238 | 660 | # the corresponding files are dumped in the disk pool/. | 658 | # the corresponding files are dumped in the disk pool/. |
239 | 661 | pub_source = self.getPubSource(filecontent='Hello world') | 659 | pub_source = self.getPubSource(filecontent='Hello world') |
240 | 662 | pub_source.publish(self.disk_pool, self.logger) | 660 | pub_source.publish(self.disk_pool, self.logger) |
244 | 663 | self.assertEqual( | 661 | self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_source.status) |
242 | 664 | PackagePublishingStatus.PUBLISHED, | ||
243 | 665 | pub_source.status) | ||
245 | 666 | pool_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir | 662 | pool_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir |
246 | 667 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') | 663 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') |
247 | 668 | 664 | ||
248 | @@ -671,9 +667,7 @@ | |||
249 | 671 | # the corresponding files are dumped in the disk pool/. | 667 | # the corresponding files are dumped in the disk pool/. |
250 | 672 | pub_binary = self.getPubBinaries(filecontent='Hello world')[0] | 668 | pub_binary = self.getPubBinaries(filecontent='Hello world')[0] |
251 | 673 | pub_binary.publish(self.disk_pool, self.logger) | 669 | pub_binary.publish(self.disk_pool, self.logger) |
255 | 674 | self.assertEqual( | 670 | self.assertEqual(PackagePublishingStatus.PUBLISHED, pub_binary.status) |
253 | 675 | PackagePublishingStatus.PUBLISHED, | ||
254 | 676 | pub_binary.status) | ||
256 | 677 | pool_path = "%s/main/f/foo/foo-bin_666_all.deb" % self.pool_dir | 671 | pool_path = "%s/main/f/foo/foo-bin_666_all.deb" % self.pool_dir |
257 | 678 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') | 672 | self.assertEqual(open(pool_path).read().strip(), 'Hello world') |
258 | 679 | 673 | ||
259 | @@ -688,9 +682,8 @@ | |||
260 | 688 | foo_path = os.path.join(self.pool_dir, 'main', 'f', 'foo') | 682 | foo_path = os.path.join(self.pool_dir, 'main', 'f', 'foo') |
261 | 689 | os.makedirs(foo_path) | 683 | os.makedirs(foo_path) |
262 | 690 | foo_dsc_path = os.path.join(foo_path, 'foo_666.dsc') | 684 | foo_dsc_path = os.path.join(foo_path, 'foo_666.dsc') |
266 | 691 | foo_dsc = open(foo_dsc_path, 'w') | 685 | with open(foo_dsc_path, 'w') as foo_dsc: |
267 | 692 | foo_dsc.write('Hello world') | 686 | foo_dsc.write('Hello world') |
265 | 693 | foo_dsc.close() | ||
268 | 694 | 687 | ||
269 | 695 | pub_source = self.getPubSource(filecontent="Something") | 688 | pub_source = self.getPubSource(filecontent="Something") |
270 | 696 | pub_source.publish(self.disk_pool, self.logger) | 689 | pub_source.publish(self.disk_pool, self.logger) |
271 | @@ -699,8 +692,7 @@ | |||
272 | 699 | self.assertEqual("PoolFileOverwriteError", self.oopses[0]['type']) | 692 | self.assertEqual("PoolFileOverwriteError", self.oopses[0]['type']) |
273 | 700 | 693 | ||
274 | 701 | self.layer.commit() | 694 | self.layer.commit() |
277 | 702 | self.assertEqual( | 695 | self.assertEqual(pub_source.status, PackagePublishingStatus.PENDING) |
276 | 703 | pub_source.status, PackagePublishingStatus.PENDING) | ||
278 | 704 | self.assertEqual(open(foo_dsc_path).read().strip(), 'Hello world') | 696 | self.assertEqual(open(foo_dsc_path).read().strip(), 'Hello world') |
279 | 705 | 697 | ||
280 | 706 | def testPublishingDifferentContents(self): | 698 | def testPublishingDifferentContents(self): |
281 | @@ -711,8 +703,7 @@ | |||
282 | 711 | 703 | ||
283 | 712 | foo_name = "%s/main/f/foo/foo_666.dsc" % self.pool_dir | 704 | foo_name = "%s/main/f/foo/foo_666.dsc" % self.pool_dir |
284 | 713 | pub_source.sync() | 705 | pub_source.sync() |
287 | 714 | self.assertEqual( | 706 | self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
286 | 715 | pub_source.status, PackagePublishingStatus.PUBLISHED) | ||
288 | 716 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') | 707 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') |
289 | 717 | 708 | ||
290 | 718 | # try to publish 'foo' again with a different content, it | 709 | # try to publish 'foo' again with a different content, it |
291 | @@ -723,8 +714,7 @@ | |||
292 | 723 | self.layer.commit() | 714 | self.layer.commit() |
293 | 724 | 715 | ||
294 | 725 | pub_source2.sync() | 716 | pub_source2.sync() |
297 | 726 | self.assertEqual( | 717 | self.assertEqual(pub_source2.status, PackagePublishingStatus.PENDING) |
296 | 727 | pub_source2.status, PackagePublishingStatus.PENDING) | ||
298 | 728 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') | 718 | self.assertEqual(open(foo_name).read().strip(), 'foo is happy') |
299 | 729 | 719 | ||
300 | 730 | def testPublishingAlreadyInPool(self): | 720 | def testPublishingAlreadyInPool(self): |
301 | @@ -740,16 +730,14 @@ | |||
302 | 740 | bar_name = "%s/main/b/bar/bar_666.dsc" % self.pool_dir | 730 | bar_name = "%s/main/b/bar/bar_666.dsc" % self.pool_dir |
303 | 741 | self.assertEqual(open(bar_name).read().strip(), 'bar is good') | 731 | self.assertEqual(open(bar_name).read().strip(), 'bar is good') |
304 | 742 | pub_source.sync() | 732 | pub_source.sync() |
307 | 743 | self.assertEqual( | 733 | self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
306 | 744 | pub_source.status, PackagePublishingStatus.PUBLISHED) | ||
308 | 745 | 734 | ||
309 | 746 | pub_source2 = self.getPubSource( | 735 | pub_source2 = self.getPubSource( |
310 | 747 | sourcename='bar', filecontent='bar is good') | 736 | sourcename='bar', filecontent='bar is good') |
311 | 748 | pub_source2.publish(self.disk_pool, self.logger) | 737 | pub_source2.publish(self.disk_pool, self.logger) |
312 | 749 | self.layer.commit() | 738 | self.layer.commit() |
313 | 750 | pub_source2.sync() | 739 | pub_source2.sync() |
316 | 751 | self.assertEqual( | 740 | self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED) |
315 | 752 | pub_source2.status, PackagePublishingStatus.PUBLISHED) | ||
317 | 753 | 741 | ||
318 | 754 | def testPublishingSymlink(self): | 742 | def testPublishingSymlink(self): |
319 | 755 | """Test if publishOne moving publication between components. | 743 | """Test if publishOne moving publication between components. |
320 | @@ -759,8 +747,7 @@ | |||
321 | 759 | """ | 747 | """ |
322 | 760 | content = 'am I a file or a symbolic link ?' | 748 | content = 'am I a file or a symbolic link ?' |
323 | 761 | # publish sim.dsc in main and re-publish in universe | 749 | # publish sim.dsc in main and re-publish in universe |
326 | 762 | pub_source = self.getPubSource( | 750 | pub_source = self.getPubSource(sourcename='sim', filecontent=content) |
325 | 763 | sourcename='sim', filecontent=content) | ||
327 | 764 | pub_source2 = self.getPubSource( | 751 | pub_source2 = self.getPubSource( |
328 | 765 | sourcename='sim', component='universe', filecontent=content) | 752 | sourcename='sim', component='universe', filecontent=content) |
329 | 766 | pub_source.publish(self.disk_pool, self.logger) | 753 | pub_source.publish(self.disk_pool, self.logger) |
330 | @@ -769,10 +756,8 @@ | |||
331 | 769 | 756 | ||
332 | 770 | pub_source.sync() | 757 | pub_source.sync() |
333 | 771 | pub_source2.sync() | 758 | pub_source2.sync() |
338 | 772 | self.assertEqual( | 759 | self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) |
339 | 773 | pub_source.status, PackagePublishingStatus.PUBLISHED) | 760 | self.assertEqual(pub_source2.status, PackagePublishingStatus.PUBLISHED) |
336 | 774 | self.assertEqual( | ||
337 | 775 | pub_source2.status, PackagePublishingStatus.PUBLISHED) | ||
340 | 776 | 761 | ||
341 | 777 | # check the resulted symbolic link | 762 | # check the resulted symbolic link |
342 | 778 | sim_universe = "%s/universe/s/sim/sim_666.dsc" % self.pool_dir | 763 | sim_universe = "%s/universe/s/sim/sim_666.dsc" % self.pool_dir |
343 | @@ -788,8 +773,7 @@ | |||
344 | 788 | self.layer.commit() | 773 | self.layer.commit() |
345 | 789 | 774 | ||
346 | 790 | pub_source3.sync() | 775 | pub_source3.sync() |
349 | 791 | self.assertEqual( | 776 | self.assertEqual(pub_source3.status, PackagePublishingStatus.PENDING) |
348 | 792 | pub_source3.status, PackagePublishingStatus.PENDING) | ||
350 | 793 | 777 | ||
351 | 794 | def testPublishInAnotherArchive(self): | 778 | def testPublishInAnotherArchive(self): |
352 | 795 | """Publication in another archive | 779 | """Publication in another archive |
353 | @@ -882,10 +866,10 @@ | |||
354 | 882 | copies = (copied, ) | 866 | copies = (copied, ) |
355 | 883 | 867 | ||
356 | 884 | for copy in copies: | 868 | for copy in copies: |
358 | 885 | self.assertEquals(copy.component, pub_record.component) | 869 | self.assertEqual(pub_record.component, copy.component) |
359 | 886 | copy.overrideFromAncestry() | 870 | copy.overrideFromAncestry() |
360 | 887 | self.layer.commit() | 871 | self.layer.commit() |
362 | 888 | self.assertEquals(copy.component.name, 'universe') | 872 | self.assertEqual("universe", copy.component.name) |
363 | 889 | 873 | ||
364 | 890 | def test_overrideFromAncestry_fallback_to_source_component(self): | 874 | def test_overrideFromAncestry_fallback_to_source_component(self): |
365 | 891 | # overrideFromAncestry on the lack of ancestry, falls back to the | 875 | # overrideFromAncestry on the lack of ancestry, falls back to the |
366 | @@ -966,7 +950,7 @@ | |||
367 | 966 | spph = self.factory.makeSourcePackagePublishingHistory( | 950 | spph = self.factory.makeSourcePackagePublishingHistory( |
368 | 967 | sourcepackagerelease=spr, archive=ppa) | 951 | sourcepackagerelease=spr, archive=ppa) |
369 | 968 | spph.overrideFromAncestry() | 952 | spph.overrideFromAncestry() |
371 | 969 | self.assertEquals(spph.component.name, 'main') | 953 | self.assertEqual("main", spph.component.name) |
372 | 970 | 954 | ||
373 | 971 | def test_ppa_override_with_ancestry(self): | 955 | def test_ppa_override_with_ancestry(self): |
374 | 972 | # Test a PPA publication with ancestry | 956 | # Test a PPA publication with ancestry |
375 | @@ -978,7 +962,7 @@ | |||
376 | 978 | spph2 = self.factory.makeSourcePackagePublishingHistory( | 962 | spph2 = self.factory.makeSourcePackagePublishingHistory( |
377 | 979 | sourcepackagerelease=spr, archive=ppa) | 963 | sourcepackagerelease=spr, archive=ppa) |
378 | 980 | spph2.overrideFromAncestry() | 964 | spph2.overrideFromAncestry() |
380 | 981 | self.assertEquals(spph2.component.name, 'main') | 965 | self.assertEqual("main", spph2.component.name) |
381 | 982 | 966 | ||
382 | 983 | def test_copyTo_with_overrides(self): | 967 | def test_copyTo_with_overrides(self): |
383 | 984 | # Specifying overrides with copyTo should result in the new | 968 | # Specifying overrides with copyTo should result in the new |
384 | @@ -1009,8 +993,7 @@ | |||
385 | 1009 | # SPPH's ancestor get's populated when a spph is copied over. | 993 | # SPPH's ancestor get's populated when a spph is copied over. |
386 | 1010 | target_archive = self.factory.makeArchive() | 994 | target_archive = self.factory.makeArchive() |
387 | 1011 | spph = self.factory.makeSourcePackagePublishingHistory() | 995 | spph = self.factory.makeSourcePackagePublishingHistory() |
390 | 1012 | copy = spph.copyTo( | 996 | copy = spph.copyTo(spph.distroseries, spph.pocket, target_archive) |
389 | 1013 | spph.distroseries, spph.pocket, target_archive) | ||
391 | 1014 | 997 | ||
392 | 1015 | self.assertEqual(spph, copy.ancestor) | 998 | self.assertEqual(spph, copy.ancestor) |
393 | 1016 | 999 | ||
394 | @@ -1059,7 +1042,8 @@ | |||
395 | 1059 | """ | 1042 | """ |
396 | 1060 | available_archs = [self.sparc_distroarch, self.avr_distroarch] | 1043 | available_archs = [self.sparc_distroarch, self.avr_distroarch] |
397 | 1061 | pubrec = self.getPubSource(architecturehintlist='any') | 1044 | pubrec = self.getPubSource(architecturehintlist='any') |
399 | 1062 | self.assertEquals([self.sparc_distroarch], | 1045 | self.assertEqual( |
400 | 1046 | [self.sparc_distroarch], | ||
401 | 1063 | pubrec._getAllowedArchitectures(available_archs)) | 1047 | pubrec._getAllowedArchitectures(available_archs)) |
402 | 1064 | 1048 | ||
403 | 1065 | def test__getAllowedArchitectures_restricted_override(self): | 1049 | def test__getAllowedArchitectures_restricted_override(self): |
404 | @@ -1071,7 +1055,8 @@ | |||
405 | 1071 | available_archs = [self.sparc_distroarch, self.avr_distroarch] | 1055 | available_archs = [self.sparc_distroarch, self.avr_distroarch] |
406 | 1072 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) | 1056 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) |
407 | 1073 | pubrec = self.getPubSource(architecturehintlist='any') | 1057 | pubrec = self.getPubSource(architecturehintlist='any') |
409 | 1074 | self.assertEquals([self.sparc_distroarch, self.avr_distroarch], | 1058 | self.assertEqual( |
410 | 1059 | [self.sparc_distroarch, self.avr_distroarch], | ||
411 | 1075 | pubrec._getAllowedArchitectures(available_archs)) | 1060 | pubrec._getAllowedArchitectures(available_archs)) |
412 | 1076 | 1061 | ||
413 | 1077 | def test_createMissingBuilds_restricts_any(self): | 1062 | def test_createMissingBuilds_restricts_any(self): |
414 | @@ -1080,8 +1065,8 @@ | |||
415 | 1080 | """ | 1065 | """ |
416 | 1081 | pubrec = self.getPubSource(architecturehintlist='any') | 1066 | pubrec = self.getPubSource(architecturehintlist='any') |
417 | 1082 | builds = pubrec.createMissingBuilds() | 1067 | builds = pubrec.createMissingBuilds() |
420 | 1083 | self.assertEquals(1, len(builds)) | 1068 | self.assertEqual(1, len(builds)) |
421 | 1084 | self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) | 1069 | self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
422 | 1085 | 1070 | ||
423 | 1086 | def test_createMissingBuilds_restricts_explicitlist(self): | 1071 | def test_createMissingBuilds_restricts_explicitlist(self): |
424 | 1087 | """createMissingBuilds() limits builds targeted at a variety of | 1072 | """createMissingBuilds() limits builds targeted at a variety of |
425 | @@ -1089,8 +1074,8 @@ | |||
426 | 1089 | """ | 1074 | """ |
427 | 1090 | pubrec = self.getPubSource(architecturehintlist='sparc i386 avr') | 1075 | pubrec = self.getPubSource(architecturehintlist='sparc i386 avr') |
428 | 1091 | builds = pubrec.createMissingBuilds() | 1076 | builds = pubrec.createMissingBuilds() |
431 | 1092 | self.assertEquals(1, len(builds)) | 1077 | self.assertEqual(1, len(builds)) |
432 | 1093 | self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) | 1078 | self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
433 | 1094 | 1079 | ||
434 | 1095 | def test_createMissingBuilds_restricts_all(self): | 1080 | def test_createMissingBuilds_restricts_all(self): |
435 | 1096 | """createMissingBuilds() should limit builds targeted at 'all' | 1081 | """createMissingBuilds() should limit builds targeted at 'all' |
436 | @@ -1099,8 +1084,8 @@ | |||
437 | 1099 | """ | 1084 | """ |
438 | 1100 | pubrec = self.getPubSource(architecturehintlist='all') | 1085 | pubrec = self.getPubSource(architecturehintlist='all') |
439 | 1101 | builds = pubrec.createMissingBuilds() | 1086 | builds = pubrec.createMissingBuilds() |
442 | 1102 | self.assertEquals(1, len(builds)) | 1087 | self.assertEqual(1, len(builds)) |
443 | 1103 | self.assertEquals(self.sparc_distroarch, builds[0].distro_arch_series) | 1088 | self.assertEqual(self.sparc_distroarch, builds[0].distro_arch_series) |
444 | 1104 | 1089 | ||
445 | 1105 | def test_createMissingBuilds_restrict_override(self): | 1090 | def test_createMissingBuilds_restrict_override(self): |
446 | 1106 | """createMissingBuilds() should limit builds targeted at 'any' | 1091 | """createMissingBuilds() should limit builds targeted at 'any' |
447 | @@ -1110,9 +1095,9 @@ | |||
448 | 1110 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) | 1095 | getUtility(IArchiveArchSet).new(self.archive, self.avr_family) |
449 | 1111 | pubrec = self.getPubSource(architecturehintlist='any') | 1096 | pubrec = self.getPubSource(architecturehintlist='any') |
450 | 1112 | builds = pubrec.createMissingBuilds() | 1097 | builds = pubrec.createMissingBuilds() |
454 | 1113 | self.assertEquals(2, len(builds)) | 1098 | self.assertEqual(2, len(builds)) |
455 | 1114 | self.assertEquals(self.avr_distroarch, builds[0].distro_arch_series) | 1099 | self.assertEqual(self.avr_distroarch, builds[0].distro_arch_series) |
456 | 1115 | self.assertEquals(self.sparc_distroarch, builds[1].distro_arch_series) | 1100 | self.assertEqual(self.sparc_distroarch, builds[1].distro_arch_series) |
457 | 1116 | 1101 | ||
458 | 1117 | 1102 | ||
459 | 1118 | class PublishingSetTests(TestCaseWithFactory): | 1103 | class PublishingSetTests(TestCaseWithFactory): |
460 | @@ -1175,59 +1160,69 @@ | |||
461 | 1175 | 1160 | ||
462 | 1176 | layer = ZopelessDatabaseLayer | 1161 | layer = ZopelessDatabaseLayer |
463 | 1177 | 1162 | ||
464 | 1163 | def setUp(self): | ||
465 | 1164 | super(TestPublishingSetLite, self).setUp() | ||
466 | 1165 | self.person = self.factory.makePerson() | ||
467 | 1166 | |||
468 | 1178 | def test_requestDeletion_marks_SPPHs_deleted(self): | 1167 | def test_requestDeletion_marks_SPPHs_deleted(self): |
469 | 1179 | spph = self.factory.makeSourcePackagePublishingHistory() | 1168 | spph = self.factory.makeSourcePackagePublishingHistory() |
472 | 1180 | getUtility(IPublishingSet).requestDeletion( | 1169 | getUtility(IPublishingSet).requestDeletion([spph], self.person) |
471 | 1181 | [spph], self.factory.makePerson()) | ||
473 | 1182 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) | 1170 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) |
474 | 1183 | 1171 | ||
475 | 1184 | def test_requestDeletion_leaves_other_SPPHs_alone(self): | 1172 | def test_requestDeletion_leaves_other_SPPHs_alone(self): |
476 | 1185 | spph = self.factory.makeSourcePackagePublishingHistory() | 1173 | spph = self.factory.makeSourcePackagePublishingHistory() |
477 | 1186 | other_spph = self.factory.makeSourcePackagePublishingHistory() | 1174 | other_spph = self.factory.makeSourcePackagePublishingHistory() |
480 | 1187 | getUtility(IPublishingSet).requestDeletion( | 1175 | getUtility(IPublishingSet).requestDeletion([other_spph], self.person) |
479 | 1188 | [other_spph], self.factory.makePerson()) | ||
481 | 1189 | self.assertEqual(PackagePublishingStatus.PENDING, spph.status) | 1176 | self.assertEqual(PackagePublishingStatus.PENDING, spph.status) |
482 | 1190 | 1177 | ||
483 | 1191 | def test_requestDeletion_marks_BPPHs_deleted(self): | 1178 | def test_requestDeletion_marks_BPPHs_deleted(self): |
484 | 1192 | bpph = self.factory.makeBinaryPackagePublishingHistory() | 1179 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
487 | 1193 | getUtility(IPublishingSet).requestDeletion( | 1180 | getUtility(IPublishingSet).requestDeletion([bpph], self.person) |
486 | 1194 | [bpph], self.factory.makePerson()) | ||
488 | 1195 | self.assertEqual(PackagePublishingStatus.DELETED, bpph.status) | 1181 | self.assertEqual(PackagePublishingStatus.DELETED, bpph.status) |
489 | 1196 | 1182 | ||
490 | 1197 | def test_requestDeletion_marks_attached_BPPHs_deleted(self): | 1183 | def test_requestDeletion_marks_attached_BPPHs_deleted(self): |
491 | 1198 | bpph = self.factory.makeBinaryPackagePublishingHistory() | 1184 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
492 | 1199 | spph = self.factory.makeSPPHForBPPH(bpph) | 1185 | spph = self.factory.makeSPPHForBPPH(bpph) |
495 | 1200 | getUtility(IPublishingSet).requestDeletion( | 1186 | getUtility(IPublishingSet).requestDeletion([spph], self.person) |
494 | 1201 | [spph], self.factory.makePerson()) | ||
496 | 1202 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) | 1187 | self.assertEqual(PackagePublishingStatus.DELETED, spph.status) |
497 | 1203 | 1188 | ||
498 | 1204 | def test_requestDeletion_leaves_other_BPPHs_alone(self): | 1189 | def test_requestDeletion_leaves_other_BPPHs_alone(self): |
499 | 1205 | bpph = self.factory.makeBinaryPackagePublishingHistory() | 1190 | bpph = self.factory.makeBinaryPackagePublishingHistory() |
500 | 1206 | unrelated_spph = self.factory.makeSourcePackagePublishingHistory() | 1191 | unrelated_spph = self.factory.makeSourcePackagePublishingHistory() |
501 | 1207 | getUtility(IPublishingSet).requestDeletion( | 1192 | getUtility(IPublishingSet).requestDeletion( |
503 | 1208 | [unrelated_spph], self.factory.makePerson()) | 1193 | [unrelated_spph], self.person) |
504 | 1209 | self.assertEqual(PackagePublishingStatus.PENDING, bpph.status) | 1194 | self.assertEqual(PackagePublishingStatus.PENDING, bpph.status) |
505 | 1210 | 1195 | ||
506 | 1211 | def test_requestDeletion_accepts_empty_sources_list(self): | 1196 | def test_requestDeletion_accepts_empty_sources_list(self): |
509 | 1212 | person = self.factory.makePerson() | 1197 | getUtility(IPublishingSet).requestDeletion([], self.person) |
508 | 1213 | getUtility(IPublishingSet).requestDeletion([], person) | ||
510 | 1214 | # The test is that this does not fail. | 1198 | # The test is that this does not fail. |
512 | 1215 | Store.of(person).flush() | 1199 | Store.of(self.person).flush() |
513 | 1216 | 1200 | ||
514 | 1217 | def test_requestDeletion_creates_DistroSeriesDifferenceJobs(self): | 1201 | def test_requestDeletion_creates_DistroSeriesDifferenceJobs(self): |
515 | 1218 | dsp = self.factory.makeDistroSeriesParent() | 1202 | dsp = self.factory.makeDistroSeriesParent() |
516 | 1219 | series = dsp.derived_series | ||
517 | 1220 | spph = self.factory.makeSourcePackagePublishingHistory( | 1203 | spph = self.factory.makeSourcePackagePublishingHistory( |
519 | 1221 | series, pocket=PackagePublishingPocket.RELEASE) | 1204 | dsp.derived_series, pocket=PackagePublishingPocket.RELEASE) |
520 | 1222 | spn = spph.sourcepackagerelease.sourcepackagename | 1205 | spn = spph.sourcepackagerelease.sourcepackagename |
525 | 1223 | 1206 | getUtility(IPublishingSet).requestDeletion([spph], self.person) | |
522 | 1224 | getUtility(IPublishingSet).requestDeletion( | ||
523 | 1225 | [spph], self.factory.makePerson()) | ||
524 | 1226 | |||
526 | 1227 | self.assertEqual( | 1207 | self.assertEqual( |
527 | 1228 | 1, len(find_waiting_jobs( | 1208 | 1, len(find_waiting_jobs( |
528 | 1229 | dsp.derived_series, spn, dsp.parent_series))) | 1209 | dsp.derived_series, spn, dsp.parent_series))) |
529 | 1230 | 1210 | ||
530 | 1211 | def test_requestDeletion_disallows_unmodifiable_suites(self): | ||
531 | 1212 | bpph = self.factory.makeBinaryPackagePublishingHistory( | ||
532 | 1213 | pocket=PackagePublishingPocket.RELEASE) | ||
533 | 1214 | spph = self.factory.makeSourcePackagePublishingHistory( | ||
534 | 1215 | distroseries=bpph.distroseries, | ||
535 | 1216 | pocket=PackagePublishingPocket.RELEASE) | ||
536 | 1217 | spph.distroseries.status = SeriesStatus.CURRENT | ||
537 | 1218 | message = "Cannot delete publications from suite '%s'" % ( | ||
538 | 1219 | spph.distroseries.getSuite(spph.pocket)) | ||
539 | 1220 | for pub in spph, bpph: | ||
540 | 1221 | self.assertRaisesWithContent( | ||
541 | 1222 | DeletionError, message, pub.requestDeletion, self.person) | ||
542 | 1223 | self.assertRaisesWithContent( | ||
543 | 1224 | DeletionError, message, pub.api_requestDeletion, self.person) | ||
544 | 1225 | |||
545 | 1231 | 1226 | ||
546 | 1232 | class TestSourceDomination(TestNativePublishingBase): | 1227 | class TestSourceDomination(TestNativePublishingBase): |
547 | 1233 | """Test SourcePackagePublishingHistory.supersede() operates correctly.""" | 1228 | """Test SourcePackagePublishingHistory.supersede() operates correctly.""" |
548 | @@ -1264,7 +1259,7 @@ | |||
549 | 1264 | 1259 | ||
550 | 1265 | self.assertRaises(AssertionError, source.supersede, super_source) | 1260 | self.assertRaises(AssertionError, source.supersede, super_source) |
551 | 1266 | self.checkSuperseded([source], super_source) | 1261 | self.checkSuperseded([source], super_source) |
553 | 1267 | self.assertEquals(super_date, source.datesuperseded) | 1262 | self.assertEqual(super_date, source.datesuperseded) |
554 | 1268 | 1263 | ||
555 | 1269 | 1264 | ||
556 | 1270 | class TestBinaryDomination(TestNativePublishingBase): | 1265 | class TestBinaryDomination(TestNativePublishingBase): |
557 | @@ -1343,7 +1338,7 @@ | |||
558 | 1343 | 1338 | ||
559 | 1344 | self.assertRaises(AssertionError, bin.supersede, super_bin) | 1339 | self.assertRaises(AssertionError, bin.supersede, super_bin) |
560 | 1345 | self.checkSuperseded([bin], super_bin) | 1340 | self.checkSuperseded([bin], super_bin) |
562 | 1346 | self.assertEquals(super_date, bin.datesuperseded) | 1341 | self.assertEqual(super_date, bin.datesuperseded) |
563 | 1347 | 1342 | ||
564 | 1348 | def testSkipsSupersededArchIndependentBinary(self): | 1343 | def testSkipsSupersededArchIndependentBinary(self): |
565 | 1349 | """Check that supersede() skips a superseded arch-indep binary. | 1344 | """Check that supersede() skips a superseded arch-indep binary. |
566 | @@ -1365,7 +1360,7 @@ | |||
567 | 1365 | 1360 | ||
568 | 1366 | bin.supersede(super_bin) | 1361 | bin.supersede(super_bin) |
569 | 1367 | self.checkSuperseded([bin], super_bin) | 1362 | self.checkSuperseded([bin], super_bin) |
571 | 1368 | self.assertEquals(super_date, bin.datesuperseded) | 1363 | self.assertEqual(super_date, bin.datesuperseded) |
572 | 1369 | 1364 | ||
573 | 1370 | def testSupersedesCorrespondingDDEB(self): | 1365 | def testSupersedesCorrespondingDDEB(self): |
574 | 1371 | """Check that supersede() takes with it any corresponding DDEB. | 1366 | """Check that supersede() takes with it any corresponding DDEB. |
575 | @@ -1379,8 +1374,7 @@ | |||
576 | 1379 | 1374 | ||
577 | 1380 | # Each of these will return (i386 deb, i386 ddeb, hppa deb, | 1375 | # Each of these will return (i386 deb, i386 ddeb, hppa deb, |
578 | 1381 | # hppa ddeb). | 1376 | # hppa ddeb). |
581 | 1382 | bins = self.getPubBinaries( | 1377 | bins = self.getPubBinaries(architecturespecific=True, with_debug=True) |
580 | 1383 | architecturespecific=True, with_debug=True) | ||
582 | 1384 | super_bins = self.getPubBinaries( | 1378 | super_bins = self.getPubBinaries( |
583 | 1385 | architecturespecific=True, with_debug=True) | 1379 | architecturespecific=True, with_debug=True) |
584 | 1386 | 1380 | ||
585 | @@ -1405,8 +1399,7 @@ | |||
586 | 1405 | distribution=self.ubuntutest) | 1399 | distribution=self.ubuntutest) |
587 | 1406 | 1400 | ||
588 | 1407 | # This will return (i386 deb, i386 ddeb, hppa deb, hppa ddeb). | 1401 | # This will return (i386 deb, i386 ddeb, hppa deb, hppa ddeb). |
591 | 1408 | bins = self.getPubBinaries( | 1402 | bins = self.getPubBinaries(architecturespecific=True, with_debug=True) |
590 | 1409 | architecturespecific=True, with_debug=True) | ||
592 | 1410 | self.assertRaises(AssertionError, bins[0].supersede, bins[1]) | 1403 | self.assertRaises(AssertionError, bins[0].supersede, bins[1]) |
593 | 1411 | 1404 | ||
594 | 1412 | 1405 | ||
595 | @@ -1414,9 +1407,8 @@ | |||
596 | 1414 | """Test BinaryPackagePublishingHistory._getOtherPublications() works.""" | 1407 | """Test BinaryPackagePublishingHistory._getOtherPublications() works.""" |
597 | 1415 | 1408 | ||
598 | 1416 | def checkOtherPublications(self, this, others): | 1409 | def checkOtherPublications(self, this, others): |
602 | 1417 | self.assertEquals( | 1410 | self.assertContentEqual( |
603 | 1418 | set(removeSecurityProxy(this)._getOtherPublications()), | 1411 | removeSecurityProxy(this)._getOtherPublications(), others) |
601 | 1419 | set(others)) | ||
604 | 1420 | 1412 | ||
605 | 1421 | def testFindsOtherArchIndepPublications(self): | 1413 | def testFindsOtherArchIndepPublications(self): |
606 | 1422 | """Arch-indep publications with the same overrides should be found.""" | 1414 | """Arch-indep publications with the same overrides should be found.""" |
607 | @@ -1529,7 +1521,7 @@ | |||
608 | 1529 | # SPPH, BPPHs and BPRs. | 1521 | # SPPH, BPPHs and BPRs. |
609 | 1530 | with StormStatementRecorder() as recorder: | 1522 | with StormStatementRecorder() as recorder: |
610 | 1531 | bins = spph.getBuiltBinaries() | 1523 | bins = spph.getBuiltBinaries() |
612 | 1532 | self.assertEquals(0, len(bins)) | 1524 | self.assertEqual(0, len(bins)) |
613 | 1533 | self.assertThat(recorder, HasQueryCount(Equals(3))) | 1525 | self.assertThat(recorder, HasQueryCount(Equals(3))) |
614 | 1534 | 1526 | ||
615 | 1535 | self.getPubBinaries(pub_source=spph) | 1527 | self.getPubBinaries(pub_source=spph) |
616 | @@ -1541,7 +1533,7 @@ | |||
617 | 1541 | # BPF has no query penalty. | 1533 | # BPF has no query penalty. |
618 | 1542 | with StormStatementRecorder() as recorder: | 1534 | with StormStatementRecorder() as recorder: |
619 | 1543 | bins = spph.getBuiltBinaries(want_files=True) | 1535 | bins = spph.getBuiltBinaries(want_files=True) |
621 | 1544 | self.assertEquals(2, len(bins)) | 1536 | self.assertEqual(2, len(bins)) |
622 | 1545 | for bpph in bins: | 1537 | for bpph in bins: |
623 | 1546 | files = bpph.binarypackagerelease.files | 1538 | files = bpph.binarypackagerelease.files |
624 | 1547 | self.assertEqual(1, len(files)) | 1539 | self.assertEqual(1, len(files)) |
625 | @@ -1595,8 +1587,7 @@ | |||
626 | 1595 | def test_architecture_independent(self): | 1587 | def test_architecture_independent(self): |
627 | 1596 | # Architecture-independent binaries get published to all enabled | 1588 | # Architecture-independent binaries get published to all enabled |
628 | 1597 | # DASes in the series. | 1589 | # DASes in the series. |
631 | 1598 | bpr = self.factory.makeBinaryPackageRelease( | 1590 | bpr = self.factory.makeBinaryPackageRelease(architecturespecific=False) |
630 | 1599 | architecturespecific=False) | ||
632 | 1600 | # Create 3 architectures. The binary will not be published in | 1591 | # Create 3 architectures. The binary will not be published in |
633 | 1601 | # the disabled one. | 1592 | # the disabled one. |
634 | 1602 | target_das_a = self.factory.makeDistroArchSeries() | 1593 | target_das_a = self.factory.makeDistroArchSeries() |
635 | @@ -1606,12 +1597,11 @@ | |||
636 | 1606 | self.factory.makeDistroArchSeries( | 1597 | self.factory.makeDistroArchSeries( |
637 | 1607 | distroseries=target_das_a.distroseries, enabled=False) | 1598 | distroseries=target_das_a.distroseries, enabled=False) |
638 | 1608 | args = self.makeArgs([bpr], target_das_a.distroseries) | 1599 | args = self.makeArgs([bpr], target_das_a.distroseries) |
645 | 1609 | bpphs = getUtility(IPublishingSet).publishBinaries( | 1600 | bpphs = getUtility(IPublishingSet).publishBinaries(**args) |
646 | 1610 | **args) | 1601 | self.assertEqual(2, len(bpphs)) |
647 | 1611 | self.assertEquals(2, len(bpphs)) | 1602 | self.assertContentEqual( |
648 | 1612 | self.assertEquals( | 1603 | (target_das_a, target_das_b), |
649 | 1613 | set((target_das_a, target_das_b)), | 1604 | [bpph.distroarchseries for bpph in bpphs]) |
644 | 1614 | set(bpph.distroarchseries for bpph in bpphs)) | ||
650 | 1615 | 1605 | ||
651 | 1616 | def test_architecture_disabled(self): | 1606 | def test_architecture_disabled(self): |
652 | 1617 | # An empty list is return if the DistroArchSeries was disabled. | 1607 | # An empty list is return if the DistroArchSeries was disabled. |
653 | @@ -1664,13 +1654,12 @@ | |||
654 | 1664 | build=build, binpackageformat=BinaryPackageFormat.DDEB) | 1654 | build=build, binpackageformat=BinaryPackageFormat.DDEB) |
655 | 1665 | args = self.makeArgs([normal, debug], das.distroseries) | 1655 | args = self.makeArgs([normal, debug], das.distroseries) |
656 | 1666 | bpphs = getUtility(IPublishingSet).publishBinaries(**args) | 1656 | bpphs = getUtility(IPublishingSet).publishBinaries(**args) |
664 | 1667 | self.assertEquals(2, len(bpphs)) | 1657 | self.assertEqual(2, len(bpphs)) |
665 | 1668 | self.assertEquals( | 1658 | self.assertContentEqual( |
666 | 1669 | set((normal, debug)), | 1659 | (normal, debug), [bpph.binarypackagerelease for bpph in bpphs]) |
667 | 1670 | set(bpph.binarypackagerelease for bpph in bpphs)) | 1660 | self.assertContentEqual( |
668 | 1671 | self.assertEquals( | 1661 | (das.main_archive, das.main_archive.debug_archive), |
669 | 1672 | set((das.main_archive, das.main_archive.debug_archive)), | 1662 | [bpph.archive for bpph in bpphs]) |
663 | 1673 | set(bpph.archive for bpph in bpphs)) | ||
670 | 1674 | 1663 | ||
671 | 1675 | # A second copy does nothing, because it checks in the debug | 1664 | # A second copy does nothing, because it checks in the debug |
672 | 1676 | # archive too. | 1665 | # archive too. |
673 | 1677 | 1666 | ||
674 | === modified file 'lib/lp/testing/factory.py' | |||
675 | --- lib/lp/testing/factory.py 2013-01-23 06:54:49 +0000 | |||
676 | +++ lib/lp/testing/factory.py 2013-03-08 22:44:24 +0000 | |||
677 | @@ -2,7 +2,7 @@ | |||
678 | 2 | # NOTE: The first line above must stay first; do not move the copyright | 2 | # NOTE: The first line above must stay first; do not move the copyright |
679 | 3 | # notice to the top. See http://www.python.org/dev/peps/pep-0263/. | 3 | # notice to the top. See http://www.python.org/dev/peps/pep-0263/. |
680 | 4 | # | 4 | # |
682 | 5 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the | 5 | # Copyright 2009-2013 Canonical Ltd. This software is licensed under the |
683 | 6 | # GNU Affero General Public License version 3 (see the file LICENSE). | 6 | # GNU Affero General Public License version 3 (see the file LICENSE). |
684 | 7 | 7 | ||
685 | 8 | """Testing infrastructure for the Launchpad application. | 8 | """Testing infrastructure for the Launchpad application. |
686 | @@ -3678,7 +3678,7 @@ | |||
687 | 3678 | initial source package release upload archive, or to the | 3678 | initial source package release upload archive, or to the |
688 | 3679 | distro series main archive. | 3679 | distro series main archive. |
689 | 3680 | :param pocket: The pocket to publish into. Can be specified as a | 3680 | :param pocket: The pocket to publish into. Can be specified as a |
691 | 3681 | string. Defaults to the RELEASE pocket. | 3681 | string. Defaults to the BACKPORTS pocket. |
692 | 3682 | :param status: The publication status. Defaults to PENDING. If | 3682 | :param status: The publication status. Defaults to PENDING. If |
693 | 3683 | set to PUBLISHED, the publisheddate will be set to now. | 3683 | set to PUBLISHED, the publisheddate will be set to now. |
694 | 3684 | :param dateremoved: The removal date. | 3684 | :param dateremoved: The removal date. |