Merge launchpad:master into launchpad:db-devel
- Git
- lp:launchpad
- master
- Merge into db-devel
Proposed by
Colin Watson
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 7cc2b100ff6cee4a1af522ffb6bd19200b0bc011 |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | launchpad:master |
Merge into: | launchpad:db-devel |
Diff against target: |
1786 lines (+576/-268) 41 files modified
lib/lp/archivepublisher/archivegpgsigningkey.py (+55/-23) lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py (+105/-3) lib/lp/blueprints/browser/sprint.py (+3/-2) lib/lp/blueprints/browser/tests/test_views.py (+2/-1) lib/lp/blueprints/model/specification.py (+14/-13) lib/lp/blueprints/model/sprint.py (+96/-70) lib/lp/blueprints/model/sprintspecification.py (+39/-23) lib/lp/blueprints/vocabularies/sprint.py (+5/-9) lib/lp/code/mail/tests/test_codehandler.py (+2/-0) lib/lp/code/mail/tests/test_codereviewcomment.py (+1/-1) lib/lp/code/model/branchcollection.py (+2/-2) lib/lp/code/model/branchmergeproposal.py (+28/-15) lib/lp/code/model/codereviewcomment.py (+30/-14) lib/lp/code/model/codereviewvote.py (+36/-20) lib/lp/code/model/gitcollection.py (+2/-2) lib/lp/code/model/tests/test_branch.py (+4/-4) lib/lp/code/model/tests/test_gitrepository.py (+4/-4) lib/lp/code/stories/webservice/xx-branchmergeproposal.txt (+2/-2) lib/lp/codehosting/puller/tests/test_scheduler.py (+1/-1) lib/lp/registry/model/projectgroup.py (+13/-10) lib/lp/services/database/policy.py (+1/-1) lib/lp/services/gpg/handler.py (+3/-7) lib/lp/services/gpg/interfaces.py (+13/-2) lib/lp/services/librarianserver/tests/test_storage_db.py (+1/-1) lib/lp/services/mail/helpers.py (+11/-3) lib/lp/services/signing/tests/helpers.py (+2/-2) lib/lp/services/worlddata/vocabularies.py (+4/-1) lib/lp/soyuz/adapters/tests/test_archivedependencies.py (+1/-1) lib/lp/soyuz/configure.zcml (+0/-1) lib/lp/soyuz/interfaces/archive.py (+2/-0) lib/lp/soyuz/model/archive.py (+7/-6) lib/lp/soyuz/scripts/ppakeygenerator.py (+3/-3) lib/lp/soyuz/scripts/tests/test_ppakeygenerator.py (+5/-5) lib/lp/soyuz/stories/soyuz/xx-person-packages.txt (+1/-1) lib/lp/soyuz/tests/test_archive.py (+1/-1) lib/lp/testing/factory.py (+4/-4) lib/lp/translations/pottery/tests/test_detect_intltool.py (+14/-0) scripts/rosetta/pottery-generate-intltool.py (+56/-0) utilities/launchpad-database-setup (+0/-7) utilities/sourcedeps.cache (+2/-2) 
utilities/sourcedeps.conf (+1/-1) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+391043@code.launchpad.net |
Commit message
Manually merge from master to fix tests with PostgreSQL 10 base image
Description of the change
Now that buildbot is on LXD with fresh base images, db-devel can't pass tests until https://code.launchpad.net/… [URL truncated in extraction] lands.
To post a comment you must log in.
Revision history for this message
Colin Watson (cjwatson) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/lib/lp/archivepublisher/archivegpgsigningkey.py b/lib/lp/archivepublisher/archivegpgsigningkey.py | |||
2 | index a342332..20a4d79 100644 | |||
3 | --- a/lib/lp/archivepublisher/archivegpgsigningkey.py | |||
4 | +++ b/lib/lp/archivepublisher/archivegpgsigningkey.py | |||
5 | @@ -37,17 +37,24 @@ from lp.archivepublisher.run_parts import ( | |||
6 | 37 | from lp.registry.interfaces.gpg import IGPGKeySet | 37 | from lp.registry.interfaces.gpg import IGPGKeySet |
7 | 38 | from lp.services.config import config | 38 | from lp.services.config import config |
8 | 39 | from lp.services.features import getFeatureFlag | 39 | from lp.services.features import getFeatureFlag |
10 | 40 | from lp.services.gpg.interfaces import IGPGHandler | 40 | from lp.services.gpg.interfaces import ( |
11 | 41 | IGPGHandler, | ||
12 | 42 | IPymeKey, | ||
13 | 43 | ) | ||
14 | 41 | from lp.services.osutils import remove_if_exists | 44 | from lp.services.osutils import remove_if_exists |
15 | 42 | from lp.services.propertycache import ( | 45 | from lp.services.propertycache import ( |
16 | 43 | cachedproperty, | 46 | cachedproperty, |
17 | 44 | get_property_cache, | 47 | get_property_cache, |
18 | 45 | ) | 48 | ) |
19 | 46 | from lp.services.signing.enums import ( | 49 | from lp.services.signing.enums import ( |
20 | 50 | OpenPGPKeyAlgorithm, | ||
21 | 47 | SigningKeyType, | 51 | SigningKeyType, |
22 | 48 | SigningMode, | 52 | SigningMode, |
23 | 49 | ) | 53 | ) |
25 | 50 | from lp.services.signing.interfaces.signingkey import ISigningKeySet | 54 | from lp.services.signing.interfaces.signingkey import ( |
26 | 55 | ISigningKey, | ||
27 | 56 | ISigningKeySet, | ||
28 | 57 | ) | ||
29 | 51 | 58 | ||
30 | 52 | 59 | ||
31 | 53 | @implementer(ISignableArchive) | 60 | @implementer(ISignableArchive) |
32 | @@ -72,7 +79,7 @@ class SignableArchive: | |||
33 | 72 | def can_sign(self): | 79 | def can_sign(self): |
34 | 73 | """See `ISignableArchive`.""" | 80 | """See `ISignableArchive`.""" |
35 | 74 | return ( | 81 | return ( |
37 | 75 | self.archive.signing_key is not None or | 82 | self.archive.signing_key_fingerprint is not None or |
38 | 76 | self._run_parts_dir is not None) | 83 | self._run_parts_dir is not None) |
39 | 77 | 84 | ||
40 | 78 | @cachedproperty | 85 | @cachedproperty |
41 | @@ -237,9 +244,9 @@ class ArchiveGPGSigningKey(SignableArchive): | |||
42 | 237 | with open(export_path, 'wb') as export_file: | 244 | with open(export_path, 'wb') as export_file: |
43 | 238 | export_file.write(key.export()) | 245 | export_file.write(key.export()) |
44 | 239 | 246 | ||
46 | 240 | def generateSigningKey(self, log=None): | 247 | def generateSigningKey(self, log=None, async_keyserver=False): |
47 | 241 | """See `IArchiveGPGSigningKey`.""" | 248 | """See `IArchiveGPGSigningKey`.""" |
49 | 242 | assert self.archive.signing_key is None, ( | 249 | assert self.archive.signing_key_fingerprint is None, ( |
50 | 243 | "Cannot override signing_keys.") | 250 | "Cannot override signing_keys.") |
51 | 244 | 251 | ||
52 | 245 | # Always generate signing keys for the default PPA, even if it | 252 | # Always generate signing keys for the default PPA, even if it |
53 | @@ -257,13 +264,26 @@ class ArchiveGPGSigningKey(SignableArchive): | |||
54 | 257 | 264 | ||
55 | 258 | key_displayname = ( | 265 | key_displayname = ( |
56 | 259 | "Launchpad PPA for %s" % self.archive.owner.displayname) | 266 | "Launchpad PPA for %s" % self.archive.owner.displayname) |
60 | 260 | secret_key = getUtility(IGPGHandler).generateKey( | 267 | if getFeatureFlag(PUBLISHER_GPG_USES_SIGNING_SERVICE): |
61 | 261 | key_displayname, logger=log) | 268 | try: |
62 | 262 | self._setupSigningKey(secret_key) | 269 | signing_key = getUtility(ISigningKeySet).generate( |
63 | 270 | SigningKeyType.OPENPGP, key_displayname, | ||
64 | 271 | openpgp_key_algorithm=OpenPGPKeyAlgorithm.RSA, length=4096) | ||
65 | 272 | except Exception as e: | ||
66 | 273 | if log is not None: | ||
67 | 274 | log.exception( | ||
68 | 275 | "Error generating signing key for %s: %s %s" % | ||
69 | 276 | (self.archive.reference, e.__class__.__name__, e)) | ||
70 | 277 | raise | ||
71 | 278 | else: | ||
72 | 279 | signing_key = getUtility(IGPGHandler).generateKey( | ||
73 | 280 | key_displayname, logger=log) | ||
74 | 281 | return self._setupSigningKey( | ||
75 | 282 | signing_key, async_keyserver=async_keyserver) | ||
76 | 263 | 283 | ||
77 | 264 | def setSigningKey(self, key_path, async_keyserver=False): | 284 | def setSigningKey(self, key_path, async_keyserver=False): |
78 | 265 | """See `IArchiveGPGSigningKey`.""" | 285 | """See `IArchiveGPGSigningKey`.""" |
80 | 266 | assert self.archive.signing_key is None, ( | 286 | assert self.archive.signing_key_fingerprint is None, ( |
81 | 267 | "Cannot override signing_keys.") | 287 | "Cannot override signing_keys.") |
82 | 268 | assert os.path.exists(key_path), ( | 288 | assert os.path.exists(key_path), ( |
83 | 269 | "%s does not exist" % key_path) | 289 | "%s does not exist" % key_path) |
84 | @@ -274,34 +294,46 @@ class ArchiveGPGSigningKey(SignableArchive): | |||
85 | 274 | return self._setupSigningKey( | 294 | return self._setupSigningKey( |
86 | 275 | secret_key, async_keyserver=async_keyserver) | 295 | secret_key, async_keyserver=async_keyserver) |
87 | 276 | 296 | ||
89 | 277 | def _uploadPublicSigningKey(self, secret_key): | 297 | def _uploadPublicSigningKey(self, signing_key): |
90 | 278 | """Upload the public half of a signing key to the keyserver.""" | 298 | """Upload the public half of a signing key to the keyserver.""" |
91 | 279 | # The handler's security proxying doesn't protect anything useful | 299 | # The handler's security proxying doesn't protect anything useful |
92 | 280 | # here, and when we're running in a thread we don't have an | 300 | # here, and when we're running in a thread we don't have an |
93 | 281 | # interaction. | 301 | # interaction. |
94 | 282 | gpghandler = removeSecurityProxy(getUtility(IGPGHandler)) | 302 | gpghandler = removeSecurityProxy(getUtility(IGPGHandler)) |
98 | 283 | pub_key = gpghandler.retrieveKey(secret_key.fingerprint) | 303 | if IPymeKey.providedBy(signing_key): |
99 | 284 | gpghandler.uploadPublicKey(pub_key.fingerprint) | 304 | pub_key = gpghandler.retrieveKey(signing_key.fingerprint) |
100 | 285 | return pub_key | 305 | gpghandler.uploadPublicKey(pub_key.fingerprint) |
101 | 306 | return pub_key | ||
102 | 307 | else: | ||
103 | 308 | assert ISigningKey.providedBy(signing_key) | ||
104 | 309 | gpghandler.submitKey(removeSecurityProxy(signing_key).public_key) | ||
105 | 310 | return signing_key | ||
106 | 286 | 311 | ||
107 | 287 | def _storeSigningKey(self, pub_key): | 312 | def _storeSigningKey(self, pub_key): |
108 | 288 | """Store signing key reference in the database.""" | 313 | """Store signing key reference in the database.""" |
109 | 289 | key_owner = getUtility(ILaunchpadCelebrities).ppa_key_guard | 314 | key_owner = getUtility(ILaunchpadCelebrities).ppa_key_guard |
113 | 290 | key, _ = getUtility(IGPGKeySet).activate( | 315 | if IPymeKey.providedBy(pub_key): |
114 | 291 | key_owner, pub_key, pub_key.can_encrypt) | 316 | key, _ = getUtility(IGPGKeySet).activate( |
115 | 292 | self.archive.signing_key_owner = key.owner | 317 | key_owner, pub_key, pub_key.can_encrypt) |
116 | 318 | else: | ||
117 | 319 | assert ISigningKey.providedBy(pub_key) | ||
118 | 320 | key = pub_key | ||
119 | 321 | self.archive.signing_key_owner = key_owner | ||
120 | 293 | self.archive.signing_key_fingerprint = key.fingerprint | 322 | self.archive.signing_key_fingerprint = key.fingerprint |
121 | 294 | del get_property_cache(self.archive).signing_key | 323 | del get_property_cache(self.archive).signing_key |
122 | 295 | 324 | ||
124 | 296 | def _setupSigningKey(self, secret_key, async_keyserver=False): | 325 | def _setupSigningKey(self, signing_key, async_keyserver=False): |
125 | 297 | """Mandatory setup for signing keys. | 326 | """Mandatory setup for signing keys. |
126 | 298 | 327 | ||
128 | 299 | * Export the secret key into the protected disk location. | 328 | * Export the secret key into the protected disk location (for |
129 | 329 | locally-generated keys). | ||
130 | 300 | * Upload public key to the keyserver. | 330 | * Upload public key to the keyserver. |
133 | 301 | * Store the public GPGKey reference in the database and update | 331 | * Store the public GPGKey reference in the database (for |
134 | 302 | the context archive.signing_key. | 332 | locally-generated keys) and update the context |
135 | 333 | archive.signing_key. | ||
136 | 303 | """ | 334 | """ |
138 | 304 | self.exportSecretKey(secret_key) | 335 | if IPymeKey.providedBy(signing_key): |
139 | 336 | self.exportSecretKey(signing_key) | ||
140 | 305 | if async_keyserver: | 337 | if async_keyserver: |
141 | 306 | # If we have an asynchronous keyserver running in the current | 338 | # If we have an asynchronous keyserver running in the current |
142 | 307 | # thread using Twisted, then we need some contortions to ensure | 339 | # thread using Twisted, then we need some contortions to ensure |
143 | @@ -310,10 +342,10 @@ class ArchiveGPGSigningKey(SignableArchive): | |||
144 | 310 | # Since that thread won't have a Zope interaction, we need to | 342 | # Since that thread won't have a Zope interaction, we need to |
145 | 311 | # unwrap the security proxy for it. | 343 | # unwrap the security proxy for it. |
146 | 312 | d = deferToThread( | 344 | d = deferToThread( |
148 | 313 | self._uploadPublicSigningKey, removeSecurityProxy(secret_key)) | 345 | self._uploadPublicSigningKey, removeSecurityProxy(signing_key)) |
149 | 314 | d.addCallback(ProxyFactory) | 346 | d.addCallback(ProxyFactory) |
150 | 315 | d.addCallback(self._storeSigningKey) | 347 | d.addCallback(self._storeSigningKey) |
151 | 316 | return d | 348 | return d |
152 | 317 | else: | 349 | else: |
154 | 318 | pub_key = self._uploadPublicSigningKey(secret_key) | 350 | pub_key = self._uploadPublicSigningKey(signing_key) |
155 | 319 | self._storeSigningKey(pub_key) | 351 | self._storeSigningKey(pub_key) |
156 | diff --git a/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py b/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py | |||
157 | index 87359e8..67366bd 100644 | |||
158 | --- a/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py | |||
159 | +++ b/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py | |||
160 | @@ -14,8 +14,15 @@ from testtools.matchers import ( | |||
161 | 14 | FileContains, | 14 | FileContains, |
162 | 15 | StartsWith, | 15 | StartsWith, |
163 | 16 | ) | 16 | ) |
166 | 17 | from testtools.twistedsupport import AsynchronousDeferredRunTest | 17 | from testtools.twistedsupport import ( |
167 | 18 | from twisted.internet import defer | 18 | AsynchronousDeferredRunTest, |
168 | 19 | AsynchronousDeferredRunTestForBrokenTwisted, | ||
169 | 20 | ) | ||
170 | 21 | import treq | ||
171 | 22 | from twisted.internet import ( | ||
172 | 23 | defer, | ||
173 | 24 | reactor, | ||
174 | 25 | ) | ||
175 | 19 | from zope.component import getUtility | 26 | from zope.component import getUtility |
176 | 20 | 27 | ||
177 | 21 | from lp.archivepublisher.config import getPubConfig | 28 | from lp.archivepublisher.config import getPubConfig |
178 | @@ -26,18 +33,27 @@ from lp.archivepublisher.interfaces.archivegpgsigningkey import ( | |||
179 | 26 | ) | 33 | ) |
180 | 27 | from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet | 34 | from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet |
181 | 28 | from lp.archivepublisher.tests.test_run_parts import RunPartsMixin | 35 | from lp.archivepublisher.tests.test_run_parts import RunPartsMixin |
182 | 36 | from lp.registry.interfaces.gpg import IGPGKeySet | ||
183 | 29 | from lp.services.compat import mock | 37 | from lp.services.compat import mock |
184 | 30 | from lp.services.features.testing import FeatureFixture | 38 | from lp.services.features.testing import FeatureFixture |
185 | 39 | from lp.services.gpg.interfaces import IGPGHandler | ||
186 | 40 | from lp.services.gpg.tests.test_gpghandler import FakeGenerateKey | ||
187 | 31 | from lp.services.log.logger import BufferLogger | 41 | from lp.services.log.logger import BufferLogger |
188 | 32 | from lp.services.osutils import write_file | 42 | from lp.services.osutils import write_file |
189 | 33 | from lp.services.signing.enums import ( | 43 | from lp.services.signing.enums import ( |
190 | 34 | SigningKeyType, | 44 | SigningKeyType, |
191 | 35 | SigningMode, | 45 | SigningMode, |
192 | 36 | ) | 46 | ) |
193 | 47 | from lp.services.signing.interfaces.signingkey import ISigningKeySet | ||
194 | 37 | from lp.services.signing.tests.helpers import SigningServiceClientFixture | 48 | from lp.services.signing.tests.helpers import SigningServiceClientFixture |
195 | 49 | from lp.services.twistedsupport.testing import TReqFixture | ||
196 | 50 | from lp.services.twistedsupport.treq import check_status | ||
197 | 38 | from lp.soyuz.enums import ArchivePurpose | 51 | from lp.soyuz.enums import ArchivePurpose |
198 | 39 | from lp.testing import TestCaseWithFactory | 52 | from lp.testing import TestCaseWithFactory |
200 | 40 | from lp.testing.gpgkeys import gpgkeysdir | 53 | from lp.testing.gpgkeys import ( |
201 | 54 | gpgkeysdir, | ||
202 | 55 | test_pubkey_from_email, | ||
203 | 56 | ) | ||
204 | 41 | from lp.testing.keyserver import InProcessKeyServerFixture | 57 | from lp.testing.keyserver import InProcessKeyServerFixture |
205 | 42 | from lp.testing.layers import ZopelessDatabaseLayer | 58 | from lp.testing.layers import ZopelessDatabaseLayer |
206 | 43 | 59 | ||
207 | @@ -271,3 +287,89 @@ class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory): | |||
208 | 271 | FileContains( | 287 | FileContains( |
209 | 272 | "detached signature of %s (%s, %s/%s)\n" % | 288 | "detached signature of %s (%s, %s/%s)\n" % |
210 | 273 | (filename, self.archive_root, self.distro.name, self.suite))) | 289 | (filename, self.archive_root, self.distro.name, self.suite))) |
211 | 290 | |||
212 | 291 | |||
213 | 292 | class TestArchiveGPGSigningKey(TestCaseWithFactory): | ||
214 | 293 | |||
215 | 294 | layer = ZopelessDatabaseLayer | ||
216 | 295 | # treq.content doesn't close the connection before yielding control back | ||
217 | 296 | # to the test, so we need to spin the reactor at the end to finish | ||
218 | 297 | # things off. | ||
219 | 298 | run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory( | ||
220 | 299 | timeout=10000) | ||
221 | 300 | |||
222 | 301 | @defer.inlineCallbacks | ||
223 | 302 | def setUp(self): | ||
224 | 303 | super(TestArchiveGPGSigningKey, self).setUp() | ||
225 | 304 | self.temp_dir = self.makeTemporaryDirectory() | ||
226 | 305 | self.pushConfig("personalpackagearchive", root=self.temp_dir) | ||
227 | 306 | self.keyserver = self.useFixture(InProcessKeyServerFixture()) | ||
228 | 307 | yield self.keyserver.start() | ||
229 | 308 | |||
230 | 309 | @defer.inlineCallbacks | ||
231 | 310 | def test_generateSigningKey_local(self): | ||
232 | 311 | # Generating a signing key locally using GPGHandler stores it in the | ||
233 | 312 | # database and pushes it to the keyserver. | ||
234 | 313 | self.useFixture(FakeGenerateKey("ppa-sample@canonical.com.sec")) | ||
235 | 314 | logger = BufferLogger() | ||
236 | 315 | # Use a display name that matches the pregenerated sample key. | ||
237 | 316 | owner = self.factory.makePerson( | ||
238 | 317 | displayname="Celso \xe1\xe9\xed\xf3\xfa Providelo") | ||
239 | 318 | archive = self.factory.makeArchive(owner=owner) | ||
240 | 319 | yield IArchiveGPGSigningKey(archive).generateSigningKey( | ||
241 | 320 | log=logger, async_keyserver=True) | ||
242 | 321 | # The key is stored in the database. | ||
243 | 322 | self.assertIsNotNone(archive.signing_key_owner) | ||
244 | 323 | self.assertIsNotNone(archive.signing_key_fingerprint) | ||
245 | 324 | # The key is stored as a GPGKey, not a SigningKey. | ||
246 | 325 | self.assertIsNotNone( | ||
247 | 326 | getUtility(IGPGKeySet).getByFingerprint( | ||
248 | 327 | archive.signing_key_fingerprint)) | ||
249 | 328 | self.assertIsNone( | ||
250 | 329 | getUtility(ISigningKeySet).get( | ||
251 | 330 | SigningKeyType.OPENPGP, archive.signing_key_fingerprint)) | ||
252 | 331 | # The key is uploaded to the keyserver. | ||
253 | 332 | client = self.useFixture(TReqFixture(reactor)).client | ||
254 | 333 | response = yield client.get( | ||
255 | 334 | getUtility(IGPGHandler).getURLForKeyInServer( | ||
256 | 335 | archive.signing_key_fingerprint, "get")) | ||
257 | 336 | yield check_status(response) | ||
258 | 337 | content = yield treq.content(response) | ||
259 | 338 | self.assertIn(b"-----BEGIN PGP PUBLIC KEY BLOCK-----\n", content) | ||
260 | 339 | |||
261 | 340 | @defer.inlineCallbacks | ||
262 | 341 | def test_generateSigningKey_signing_service(self): | ||
263 | 342 | # Generating a signing key on the signing service stores it in the | ||
264 | 343 | # database and pushes it to the keyserver. | ||
265 | 344 | self.useFixture( | ||
266 | 345 | FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"})) | ||
267 | 346 | signing_service_client = self.useFixture( | ||
268 | 347 | SigningServiceClientFixture(self.factory)) | ||
269 | 348 | signing_service_client.generate.side_effect = None | ||
270 | 349 | test_key = test_pubkey_from_email("ftpmaster@canonical.com") | ||
271 | 350 | signing_service_client.generate.return_value = { | ||
272 | 351 | "fingerprint": "33C0A61893A5DC5EB325B29E415A12CAC2F30234", | ||
273 | 352 | "public-key": test_key, | ||
274 | 353 | } | ||
275 | 354 | logger = BufferLogger() | ||
276 | 355 | archive = self.factory.makeArchive() | ||
277 | 356 | yield IArchiveGPGSigningKey(archive).generateSigningKey( | ||
278 | 357 | log=logger, async_keyserver=True) | ||
279 | 358 | # The key is stored in the database. | ||
280 | 359 | self.assertIsNotNone(archive.signing_key_owner) | ||
281 | 360 | self.assertIsNotNone(archive.signing_key_fingerprint) | ||
282 | 361 | # The key is stored as a SigningKey, not a GPGKey. | ||
283 | 362 | self.assertIsNone( | ||
284 | 363 | getUtility(IGPGKeySet).getByFingerprint( | ||
285 | 364 | archive.signing_key_fingerprint)) | ||
286 | 365 | signing_key = getUtility(ISigningKeySet).get( | ||
287 | 366 | SigningKeyType.OPENPGP, archive.signing_key_fingerprint) | ||
288 | 367 | self.assertEqual(test_key, signing_key.public_key) | ||
289 | 368 | # The key is uploaded to the keyserver. | ||
290 | 369 | client = self.useFixture(TReqFixture(reactor)).client | ||
291 | 370 | response = yield client.get( | ||
292 | 371 | getUtility(IGPGHandler).getURLForKeyInServer( | ||
293 | 372 | archive.signing_key_fingerprint, "get")) | ||
294 | 373 | yield check_status(response) | ||
295 | 374 | content = yield treq.content(response) | ||
296 | 375 | self.assertIn(test_key, content) | ||
297 | diff --git a/lib/lp/blueprints/browser/sprint.py b/lib/lp/blueprints/browser/sprint.py | |||
298 | index aae704a..da94d43 100644 | |||
299 | --- a/lib/lp/blueprints/browser/sprint.py | |||
300 | +++ b/lib/lp/blueprints/browser/sprint.py | |||
301 | @@ -1,4 +1,4 @@ | |||
303 | 1 | # Copyright 2009-2018 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2020 Canonical Ltd. This software is licensed under the |
304 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
305 | 3 | 3 | ||
306 | 4 | """Sprint views.""" | 4 | """Sprint views.""" |
307 | @@ -27,8 +27,8 @@ from collections import defaultdict | |||
308 | 27 | import csv | 27 | import csv |
309 | 28 | 28 | ||
310 | 29 | from lazr.restful.utils import smartquote | 29 | from lazr.restful.utils import smartquote |
311 | 30 | import six | ||
312 | 31 | import pytz | 30 | import pytz |
313 | 31 | import six | ||
314 | 32 | from zope.component import getUtility | 32 | from zope.component import getUtility |
315 | 33 | from zope.formlib.widget import CustomWidgetFactory | 33 | from zope.formlib.widget import CustomWidgetFactory |
316 | 34 | from zope.formlib.widgets import TextAreaWidget | 34 | from zope.formlib.widgets import TextAreaWidget |
317 | @@ -462,6 +462,7 @@ class SprintTopicSetView(HasSpecificationsView, LaunchpadView): | |||
318 | 462 | # only a single item was selected, but we want to deal with a | 462 | # only a single item was selected, but we want to deal with a |
319 | 463 | # list for the general case, so convert it to a list | 463 | # list for the general case, so convert it to a list |
320 | 464 | selected_specs = [selected_specs] | 464 | selected_specs = [selected_specs] |
321 | 465 | selected_specs = [int(speclink) for speclink in selected_specs] | ||
322 | 465 | 466 | ||
323 | 466 | if action == 'Accepted': | 467 | if action == 'Accepted': |
324 | 467 | action_fn = self.context.acceptSpecificationLinks | 468 | action_fn = self.context.acceptSpecificationLinks |
325 | diff --git a/lib/lp/blueprints/browser/tests/test_views.py b/lib/lp/blueprints/browser/tests/test_views.py | |||
326 | index 5b68b37..cbbe2e5 100644 | |||
327 | --- a/lib/lp/blueprints/browser/tests/test_views.py | |||
328 | +++ b/lib/lp/blueprints/browser/tests/test_views.py | |||
329 | @@ -110,7 +110,8 @@ def test_suite(): | |||
330 | 110 | for filename in filenames: | 110 | for filename in filenames: |
331 | 111 | path = filename | 111 | path = filename |
332 | 112 | one_test = LayeredDocFileSuite( | 112 | one_test = LayeredDocFileSuite( |
334 | 113 | path, setUp=setUp, tearDown=tearDown, | 113 | path, |
335 | 114 | setUp=lambda test: setUp(test, future=True), tearDown=tearDown, | ||
336 | 114 | layer=DatabaseFunctionalLayer, | 115 | layer=DatabaseFunctionalLayer, |
337 | 115 | stdout_logging_level=logging.WARNING) | 116 | stdout_logging_level=logging.WARNING) |
338 | 116 | suite.addTest(one_test) | 117 | suite.addTest(one_test) |
339 | diff --git a/lib/lp/blueprints/model/specification.py b/lib/lp/blueprints/model/specification.py | |||
340 | index 40cf6ab..4b37d10 100644 | |||
341 | --- a/lib/lp/blueprints/model/specification.py | |||
342 | +++ b/lib/lp/blueprints/model/specification.py | |||
343 | @@ -23,14 +23,15 @@ from sqlobject import ( | |||
344 | 23 | SQLRelatedJoin, | 23 | SQLRelatedJoin, |
345 | 24 | StringCol, | 24 | StringCol, |
346 | 25 | ) | 25 | ) |
348 | 26 | from storm.expr import ( | 26 | from storm.locals import ( |
349 | 27 | Count, | 27 | Count, |
350 | 28 | Desc, | 28 | Desc, |
351 | 29 | Join, | 29 | Join, |
352 | 30 | Or, | 30 | Or, |
353 | 31 | ReferenceSet, | ||
354 | 31 | SQL, | 32 | SQL, |
355 | 33 | Store, | ||
356 | 32 | ) | 34 | ) |
357 | 33 | from storm.store import Store | ||
358 | 34 | from zope.component import getUtility | 35 | from zope.component import getUtility |
359 | 35 | from zope.event import notify | 36 | from zope.event import notify |
360 | 36 | from zope.interface import implementer | 37 | from zope.interface import implementer |
361 | @@ -237,11 +238,13 @@ class Specification(SQLBase, BugLinkTargetMixin, InformationTypeMixin): | |||
362 | 237 | joinColumn='specification', otherColumn='person', | 238 | joinColumn='specification', otherColumn='person', |
363 | 238 | intermediateTable='SpecificationSubscription', | 239 | intermediateTable='SpecificationSubscription', |
364 | 239 | orderBy=['display_name', 'name']) | 240 | orderBy=['display_name', 'name']) |
370 | 240 | sprint_links = SQLMultipleJoin('SprintSpecification', orderBy='id', | 241 | sprint_links = ReferenceSet( |
371 | 241 | joinColumn='specification') | 242 | '<primary key>', 'SprintSpecification.specification_id', |
372 | 242 | sprints = SQLRelatedJoin('Sprint', orderBy='name', | 243 | order_by='SprintSpecification.id') |
373 | 243 | joinColumn='specification', otherColumn='sprint', | 244 | sprints = ReferenceSet( |
374 | 244 | intermediateTable='SprintSpecification') | 245 | '<primary key>', 'SprintSpecification.specification_id', |
375 | 246 | 'SprintSpecification.sprint_id', 'Sprint.id', | ||
376 | 247 | order_by='Sprint.name') | ||
377 | 245 | spec_dependency_links = SQLMultipleJoin('SpecificationDependency', | 248 | spec_dependency_links = SQLMultipleJoin('SpecificationDependency', |
378 | 246 | joinColumn='specification', orderBy='id') | 249 | joinColumn='specification', orderBy='id') |
379 | 247 | 250 | ||
380 | @@ -827,13 +830,11 @@ class Specification(SQLBase, BugLinkTargetMixin, InformationTypeMixin): | |||
381 | 827 | 830 | ||
382 | 828 | def unlinkSprint(self, sprint): | 831 | def unlinkSprint(self, sprint): |
383 | 829 | """See ISpecification.""" | 832 | """See ISpecification.""" |
384 | 830 | from lp.blueprints.model.sprintspecification import ( | ||
385 | 831 | SprintSpecification) | ||
386 | 832 | for sprint_link in self.sprint_links: | 833 | for sprint_link in self.sprint_links: |
387 | 833 | # sprints have unique names | 834 | # sprints have unique names |
388 | 834 | if sprint_link.sprint.name == sprint.name: | 835 | if sprint_link.sprint.name == sprint.name: |
391 | 835 | SprintSpecification.delete(sprint_link.id) | 836 | sprint_link.destroySelf() |
392 | 836 | return sprint_link | 837 | return |
393 | 837 | 838 | ||
394 | 838 | # dependencies | 839 | # dependencies |
395 | 839 | def createDependency(self, specification): | 840 | def createDependency(self, specification): |
396 | @@ -1060,8 +1061,8 @@ class SpecificationSet(HasSpecificationsMixin): | |||
397 | 1060 | def coming_sprints(self): | 1061 | def coming_sprints(self): |
398 | 1061 | """See ISpecificationSet.""" | 1062 | """See ISpecificationSet.""" |
399 | 1062 | from lp.blueprints.model.sprint import Sprint | 1063 | from lp.blueprints.model.sprint import Sprint |
402 | 1063 | return Sprint.select("time_ends > 'NOW'", orderBy='time_starts', | 1064 | rows = IStore(Sprint).find(Sprint, Sprint.time_ends > UTC_NOW) |
403 | 1064 | limit=5) | 1065 | return rows.order_by(Sprint.time_starts).config(limit=5) |
404 | 1065 | 1066 | ||
405 | 1066 | def new(self, name, title, specurl, summary, definition_status, | 1067 | def new(self, name, title, specurl, summary, definition_status, |
406 | 1067 | owner, target, approver=None, assignee=None, drafter=None, | 1068 | owner, target, approver=None, assignee=None, drafter=None, |
407 | diff --git a/lib/lp/blueprints/model/sprint.py b/lib/lp/blueprints/model/sprint.py | |||
408 | index 2446437..ed6748a 100644 | |||
409 | --- a/lib/lp/blueprints/model/sprint.py | |||
410 | +++ b/lib/lp/blueprints/model/sprint.py | |||
411 | @@ -1,4 +1,4 @@ | |||
413 | 1 | # Copyright 2009-2017 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2020 Canonical Ltd. This software is licensed under the |
414 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
415 | 3 | 3 | ||
416 | 4 | __metaclass__ = type | 4 | __metaclass__ = type |
417 | @@ -8,17 +8,17 @@ __all__ = [ | |||
418 | 8 | 'HasSprintsMixin', | 8 | 'HasSprintsMixin', |
419 | 9 | ] | 9 | ] |
420 | 10 | 10 | ||
427 | 11 | 11 | import pytz | |
422 | 12 | from sqlobject import ( | ||
423 | 13 | BoolCol, | ||
424 | 14 | ForeignKey, | ||
425 | 15 | StringCol, | ||
426 | 16 | ) | ||
428 | 17 | from storm.locals import ( | 12 | from storm.locals import ( |
429 | 13 | Bool, | ||
430 | 14 | DateTime, | ||
431 | 18 | Desc, | 15 | Desc, |
432 | 16 | Int, | ||
433 | 19 | Join, | 17 | Join, |
434 | 20 | Or, | 18 | Or, |
435 | 19 | Reference, | ||
436 | 21 | Store, | 20 | Store, |
437 | 21 | Unicode, | ||
438 | 22 | ) | 22 | ) |
439 | 23 | from zope.component import getUtility | 23 | from zope.component import getUtility |
440 | 24 | from zope.interface import implementer | 24 | from zope.interface import implementer |
441 | @@ -38,7 +38,10 @@ from lp.blueprints.interfaces.sprint import ( | |||
442 | 38 | ISprint, | 38 | ISprint, |
443 | 39 | ISprintSet, | 39 | ISprintSet, |
444 | 40 | ) | 40 | ) |
446 | 41 | from lp.blueprints.model.specification import HasSpecificationsMixin | 41 | from lp.blueprints.model.specification import ( |
447 | 42 | HasSpecificationsMixin, | ||
448 | 43 | Specification, | ||
449 | 44 | ) | ||
450 | 42 | from lp.blueprints.model.specificationsearch import ( | 45 | from lp.blueprints.model.specificationsearch import ( |
451 | 43 | get_specification_active_product_filter, | 46 | get_specification_active_product_filter, |
452 | 44 | get_specification_filters, | 47 | get_specification_filters, |
453 | @@ -51,46 +54,66 @@ from lp.registry.interfaces.person import ( | |||
454 | 51 | validate_public_person, | 54 | validate_public_person, |
455 | 52 | ) | 55 | ) |
456 | 53 | from lp.registry.model.hasdrivers import HasDriversMixin | 56 | from lp.registry.model.hasdrivers import HasDriversMixin |
463 | 54 | from lp.services.database.constants import DEFAULT | 57 | from lp.services.database.constants import ( |
464 | 55 | from lp.services.database.datetimecol import UtcDateTimeCol | 58 | DEFAULT, |
465 | 56 | from lp.services.database.sqlbase import ( | 59 | UTC_NOW, |
460 | 57 | flush_database_updates, | ||
461 | 58 | quote, | ||
462 | 59 | SQLBase, | ||
466 | 60 | ) | 60 | ) |
467 | 61 | from lp.services.database.interfaces import IStore | ||
468 | 62 | from lp.services.database.sqlbase import flush_database_updates | ||
469 | 63 | from lp.services.database.stormbase import StormBase | ||
470 | 61 | from lp.services.propertycache import cachedproperty | 64 | from lp.services.propertycache import cachedproperty |
471 | 62 | 65 | ||
472 | 63 | 66 | ||
473 | 64 | @implementer(ISprint, IHasLogo, IHasMugshot, IHasIcon) | 67 | @implementer(ISprint, IHasLogo, IHasMugshot, IHasIcon) |
475 | 65 | class Sprint(SQLBase, HasDriversMixin, HasSpecificationsMixin): | 68 | class Sprint(StormBase, HasDriversMixin, HasSpecificationsMixin): |
476 | 66 | """See `ISprint`.""" | 69 | """See `ISprint`.""" |
477 | 67 | 70 | ||
479 | 68 | _defaultOrder = ['name'] | 71 | __storm_table__ = 'Sprint' |
480 | 72 | __storm_order__ = ['name'] | ||
481 | 69 | 73 | ||
482 | 70 | # db field names | 74 | # db field names |
506 | 71 | owner = ForeignKey( | 75 | id = Int(primary=True) |
507 | 72 | dbName='owner', foreignKey='Person', | 76 | owner_id = Int( |
508 | 73 | storm_validator=validate_public_person, notNull=True) | 77 | name='owner', validator=validate_public_person, allow_none=False) |
509 | 74 | name = StringCol(notNull=True, alternateID=True) | 78 | owner = Reference(owner_id, 'Person.id') |
510 | 75 | title = StringCol(notNull=True) | 79 | name = Unicode(allow_none=False) |
511 | 76 | summary = StringCol(notNull=True) | 80 | title = Unicode(allow_none=False) |
512 | 77 | driver = ForeignKey( | 81 | summary = Unicode(allow_none=False) |
513 | 78 | dbName='driver', foreignKey='Person', | 82 | driver_id = Int(name='driver', validator=validate_public_person) |
514 | 79 | storm_validator=validate_public_person) | 83 | driver = Reference(driver_id, 'Person.id') |
515 | 80 | home_page = StringCol(notNull=False, default=None) | 84 | home_page = Unicode(allow_none=True, default=None) |
516 | 81 | homepage_content = StringCol(default=None) | 85 | homepage_content = Unicode(default=None) |
517 | 82 | icon = ForeignKey( | 86 | icon_id = Int(name='icon', default=None) |
518 | 83 | dbName='icon', foreignKey='LibraryFileAlias', default=None) | 87 | icon = Reference(icon_id, 'LibraryFileAlias.id') |
519 | 84 | logo = ForeignKey( | 88 | logo_id = Int(name='logo', default=None) |
520 | 85 | dbName='logo', foreignKey='LibraryFileAlias', default=None) | 89 | logo = Reference(logo_id, 'LibraryFileAlias.id') |
521 | 86 | mugshot = ForeignKey( | 90 | mugshot_id = Int(name='mugshot', default=None) |
522 | 87 | dbName='mugshot', foreignKey='LibraryFileAlias', default=None) | 91 | mugshot = Reference(mugshot_id, 'LibraryFileAlias.id') |
523 | 88 | address = StringCol(notNull=False, default=None) | 92 | address = Unicode(allow_none=True, default=None) |
524 | 89 | datecreated = UtcDateTimeCol(notNull=True, default=DEFAULT) | 93 | datecreated = DateTime(tzinfo=pytz.UTC, allow_none=False, default=DEFAULT) |
525 | 90 | time_zone = StringCol(notNull=True) | 94 | time_zone = Unicode(allow_none=False) |
526 | 91 | time_starts = UtcDateTimeCol(notNull=True) | 95 | time_starts = DateTime(tzinfo=pytz.UTC, allow_none=False) |
527 | 92 | time_ends = UtcDateTimeCol(notNull=True) | 96 | time_ends = DateTime(tzinfo=pytz.UTC, allow_none=False) |
528 | 93 | is_physical = BoolCol(notNull=True, default=True) | 97 | is_physical = Bool(allow_none=False, default=True) |
529 | 98 | |||
530 | 99 | def __init__(self, owner, name, title, time_zone, time_starts, time_ends, | ||
531 | 100 | summary, address=None, driver=None, home_page=None, | ||
532 | 101 | mugshot=None, logo=None, icon=None, is_physical=True): | ||
533 | 102 | super(Sprint, self).__init__() | ||
534 | 103 | self.owner = owner | ||
535 | 104 | self.name = name | ||
536 | 105 | self.title = title | ||
537 | 106 | self.time_zone = time_zone | ||
538 | 107 | self.time_starts = time_starts | ||
539 | 108 | self.time_ends = time_ends | ||
540 | 109 | self.summary = summary | ||
541 | 110 | self.address = address | ||
542 | 111 | self.driver = driver | ||
543 | 112 | self.home_page = home_page | ||
544 | 113 | self.mugshot = mugshot | ||
545 | 114 | self.logo = logo | ||
546 | 115 | self.icon = icon | ||
547 | 116 | self.is_physical = is_physical | ||
548 | 94 | 117 | ||
549 | 95 | # attributes | 118 | # attributes |
550 | 96 | 119 | ||
551 | @@ -128,7 +151,7 @@ class Sprint(SQLBase, HasDriversMixin, HasSpecificationsMixin): | |||
552 | 128 | tables.append(Join( | 151 | tables.append(Join( |
553 | 129 | SprintSpecification, | 152 | SprintSpecification, |
554 | 130 | SprintSpecification.specification == Specification.id)) | 153 | SprintSpecification.specification == Specification.id)) |
556 | 131 | query.append(SprintSpecification.sprintID == self.id) | 154 | query.append(SprintSpecification.sprint == self) |
557 | 132 | 155 | ||
558 | 133 | if not filter: | 156 | if not filter: |
559 | 134 | # filter could be None or [] then we decide the default | 157 | # filter could be None or [] then we decide the default |
560 | @@ -209,7 +232,7 @@ class Sprint(SQLBase, HasDriversMixin, HasSpecificationsMixin): | |||
561 | 209 | context. Here we are a sprint that could cover many products and/or | 232 | context. Here we are a sprint that could cover many products and/or |
562 | 210 | distros. | 233 | distros. |
563 | 211 | """ | 234 | """ |
565 | 212 | speclink = SprintSpecification.get(speclink_id) | 235 | speclink = Store.of(self).get(SprintSpecification, speclink_id) |
566 | 213 | assert (speclink.sprint.id == self.id) | 236 | assert (speclink.sprint.id == self.id) |
567 | 214 | return speclink | 237 | return speclink |
568 | 215 | 238 | ||
569 | @@ -303,15 +326,16 @@ class SprintSet: | |||
570 | 303 | 326 | ||
571 | 304 | def __getitem__(self, name): | 327 | def __getitem__(self, name): |
572 | 305 | """See `ISprintSet`.""" | 328 | """See `ISprintSet`.""" |
574 | 306 | return Sprint.selectOneBy(name=name) | 329 | return IStore(Sprint).find(Sprint, name=name).one() |
575 | 307 | 330 | ||
576 | 308 | def __iter__(self): | 331 | def __iter__(self): |
577 | 309 | """See `ISprintSet`.""" | 332 | """See `ISprintSet`.""" |
579 | 310 | return iter(Sprint.select("time_ends > 'NOW'", orderBy='time_starts')) | 333 | return iter(IStore(Sprint).find( |
580 | 334 | Sprint, Sprint.time_ends > UTC_NOW).order_by(Sprint.time_starts)) | ||
581 | 311 | 335 | ||
582 | 312 | @property | 336 | @property |
583 | 313 | def all(self): | 337 | def all(self): |
585 | 314 | return Sprint.select(orderBy='-time_starts') | 338 | return IStore(Sprint).find(Sprint).order_by(Sprint.time_starts) |
586 | 315 | 339 | ||
587 | 316 | def new(self, owner, name, title, time_zone, time_starts, time_ends, | 340 | def new(self, owner, name, title, time_zone, time_starts, time_ends, |
588 | 317 | summary, address=None, driver=None, home_page=None, | 341 | summary, address=None, driver=None, home_page=None, |
589 | @@ -329,48 +353,50 @@ class HasSprintsMixin: | |||
590 | 329 | implementing IHasSprints. | 353 | implementing IHasSprints. |
591 | 330 | """ | 354 | """ |
592 | 331 | 355 | ||
596 | 332 | def _getBaseQueryAndClauseTablesForQueryingSprints(self): | 356 | def _getBaseClausesForQueryingSprints(self): |
597 | 333 | """Return the base SQL query and the clauseTables to be used when | 357 | """Return the base Storm clauses to be used when querying sprints |
598 | 334 | querying sprints related to this object. | 358 | related to this object. |
599 | 335 | 359 | ||
600 | 336 | Subclasses must overwrite this method if it doesn't suit them. | 360 | Subclasses must overwrite this method if it doesn't suit them. |
601 | 337 | """ | 361 | """ |
610 | 338 | query = """ | 362 | try: |
611 | 339 | Specification.%s = %s | 363 | table = getattr(self, "__storm_table__") |
612 | 340 | AND Specification.id = SprintSpecification.specification | 364 | except AttributeError: |
613 | 341 | AND SprintSpecification.sprint = Sprint.id | 365 | # XXX cjwatson 2020-09-10: Remove this once all inheritors have |
614 | 342 | AND SprintSpecification.status = %s | 366 | # been converted from SQLObject to Storm. |
615 | 343 | """ % (self._table, self.id, | 367 | table = getattr(self, "_table") |
616 | 344 | quote(SprintSpecificationStatus.ACCEPTED)) | 368 | return [ |
617 | 345 | return query, ['Specification', 'SprintSpecification'] | 369 | getattr(Specification, table.lower()) == self, |
618 | 370 | Specification.id == SprintSpecification.specification_id, | ||
619 | 371 | SprintSpecification.sprint == Sprint.id, | ||
620 | 372 | SprintSpecification.status == SprintSpecificationStatus.ACCEPTED, | ||
621 | 373 | ] | ||
622 | 346 | 374 | ||
623 | 347 | def getSprints(self): | 375 | def getSprints(self): |
627 | 348 | query, tables = self._getBaseQueryAndClauseTablesForQueryingSprints() | 376 | clauses = self._getBaseClausesForQueryingSprints() |
628 | 349 | return Sprint.select( | 377 | return IStore(Sprint).find(Sprint, *clauses).order_by( |
629 | 350 | query, clauseTables=tables, orderBy='-time_starts', distinct=True) | 378 | Desc(Sprint.time_starts)).config(distinct=True) |
630 | 351 | 379 | ||
631 | 352 | @cachedproperty | 380 | @cachedproperty |
632 | 353 | def sprints(self): | 381 | def sprints(self): |
633 | 354 | """See IHasSprints.""" | 382 | """See IHasSprints.""" |
634 | 355 | return list(self.getSprints()) | 383 | return list(self.getSprints()) |
635 | 356 | 384 | ||
642 | 357 | def getComingSprings(self): | 385 | def getComingSprints(self): |
643 | 358 | query, tables = self._getBaseQueryAndClauseTablesForQueryingSprints() | 386 | clauses = self._getBaseClausesForQueryingSprints() |
644 | 359 | query += " AND Sprint.time_ends > 'NOW'" | 387 | clauses.append(Sprint.time_ends > UTC_NOW) |
645 | 360 | return Sprint.select( | 388 | return IStore(Sprint).find(Sprint, *clauses).order_by( |
646 | 361 | query, clauseTables=tables, orderBy='time_starts', | 389 | Sprint.time_starts).config(distinct=True, limit=5) |
641 | 362 | distinct=True, limit=5) | ||
647 | 363 | 390 | ||
648 | 364 | @cachedproperty | 391 | @cachedproperty |
649 | 365 | def coming_sprints(self): | 392 | def coming_sprints(self): |
650 | 366 | """See IHasSprints.""" | 393 | """See IHasSprints.""" |
652 | 367 | return list(self.getComingSprings()) | 394 | return list(self.getComingSprints()) |
653 | 368 | 395 | ||
654 | 369 | @property | 396 | @property |
655 | 370 | def past_sprints(self): | 397 | def past_sprints(self): |
656 | 371 | """See IHasSprints.""" | 398 | """See IHasSprints.""" |
662 | 372 | query, tables = self._getBaseQueryAndClauseTablesForQueryingSprints() | 399 | clauses = self._getBaseClausesForQueryingSprints() |
663 | 373 | query += " AND Sprint.time_ends <= 'NOW'" | 400 | clauses.append(Sprint.time_ends <= UTC_NOW) |
664 | 374 | return Sprint.select( | 401 | return IStore(Sprint).find(Sprint, *clauses).order_by( |
665 | 375 | query, clauseTables=tables, orderBy='-time_starts', | 402 | Desc(Sprint.time_starts)).config(distinct=True) |
661 | 376 | distinct=True) | ||
666 | diff --git a/lib/lp/blueprints/model/sprintspecification.py b/lib/lp/blueprints/model/sprintspecification.py | |||
667 | index 46e691a..eed7649 100644 | |||
668 | --- a/lib/lp/blueprints/model/sprintspecification.py | |||
669 | +++ b/lib/lp/blueprints/model/sprintspecification.py | |||
670 | @@ -1,13 +1,17 @@ | |||
672 | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2020 Canonical Ltd. This software is licensed under the |
673 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
674 | 3 | 3 | ||
675 | 4 | __metaclass__ = type | 4 | __metaclass__ = type |
676 | 5 | 5 | ||
677 | 6 | __all__ = ['SprintSpecification'] | 6 | __all__ = ['SprintSpecification'] |
678 | 7 | 7 | ||
682 | 8 | from sqlobject import ( | 8 | import pytz |
683 | 9 | ForeignKey, | 9 | from storm.locals import ( |
684 | 10 | StringCol, | 10 | DateTime, |
685 | 11 | Int, | ||
686 | 12 | Reference, | ||
687 | 13 | Store, | ||
688 | 14 | Unicode, | ||
689 | 11 | ) | 15 | ) |
690 | 12 | from zope.interface import implementer | 16 | from zope.interface import implementer |
691 | 13 | 17 | ||
692 | @@ -18,32 +22,41 @@ from lp.services.database.constants import ( | |||
693 | 18 | DEFAULT, | 22 | DEFAULT, |
694 | 19 | UTC_NOW, | 23 | UTC_NOW, |
695 | 20 | ) | 24 | ) |
699 | 21 | from lp.services.database.datetimecol import UtcDateTimeCol | 25 | from lp.services.database.enumcol import DBEnum |
700 | 22 | from lp.services.database.enumcol import EnumCol | 26 | from lp.services.database.stormbase import StormBase |
698 | 23 | from lp.services.database.sqlbase import SQLBase | ||
701 | 24 | 27 | ||
702 | 25 | 28 | ||
703 | 26 | @implementer(ISprintSpecification) | 29 | @implementer(ISprintSpecification) |
705 | 27 | class SprintSpecification(SQLBase): | 30 | class SprintSpecification(StormBase): |
706 | 28 | """A link between a sprint and a specification.""" | 31 | """A link between a sprint and a specification.""" |
707 | 29 | 32 | ||
709 | 30 | _table = 'SprintSpecification' | 33 | __storm_table__ = 'SprintSpecification' |
710 | 31 | 34 | ||
716 | 32 | sprint = ForeignKey(dbName='sprint', foreignKey='Sprint', | 35 | id = Int(primary=True) |
717 | 33 | notNull=True) | 36 | |
718 | 34 | specification = ForeignKey(dbName='specification', | 37 | sprint_id = Int(name='sprint', allow_none=False) |
719 | 35 | foreignKey='Specification', notNull=True) | 38 | sprint = Reference(sprint_id, 'Sprint.id') |
720 | 36 | status = EnumCol(schema=SprintSpecificationStatus, notNull=True, | 39 | specification_id = Int(name='specification', allow_none=False) |
721 | 40 | specification = Reference(specification_id, 'Specification.id') | ||
722 | 41 | status = DBEnum( | ||
723 | 42 | enum=SprintSpecificationStatus, allow_none=False, | ||
724 | 37 | default=SprintSpecificationStatus.PROPOSED) | 43 | default=SprintSpecificationStatus.PROPOSED) |
734 | 38 | whiteboard = StringCol(notNull=False, default=None) | 44 | whiteboard = Unicode(allow_none=True, default=None) |
735 | 39 | registrant = ForeignKey( | 45 | registrant_id = Int( |
736 | 40 | dbName='registrant', foreignKey='Person', | 46 | name='registrant', validator=validate_public_person, allow_none=False) |
737 | 41 | storm_validator=validate_public_person, notNull=True) | 47 | registrant = Reference(registrant_id, 'Person.id') |
738 | 42 | date_created = UtcDateTimeCol(notNull=True, default=DEFAULT) | 48 | date_created = DateTime(tzinfo=pytz.UTC, allow_none=False, default=DEFAULT) |
739 | 43 | decider = ForeignKey( | 49 | decider_id = Int( |
740 | 44 | dbName='decider', foreignKey='Person', | 50 | name='decider', validator=validate_public_person, allow_none=True, |
741 | 45 | storm_validator=validate_public_person, notNull=False, default=None) | 51 | default=None) |
742 | 46 | date_decided = UtcDateTimeCol(notNull=False, default=None) | 52 | decider = Reference(decider_id, 'Person.id') |
743 | 53 | date_decided = DateTime(tzinfo=pytz.UTC, allow_none=True, default=None) | ||
744 | 54 | |||
745 | 55 | def __init__(self, sprint, specification, registrant): | ||
746 | 56 | super(SprintSpecification, self).__init__() | ||
747 | 57 | self.sprint = sprint | ||
748 | 58 | self.specification = specification | ||
749 | 59 | self.registrant = registrant | ||
750 | 47 | 60 | ||
751 | 48 | @property | 61 | @property |
752 | 49 | def is_confirmed(self): | 62 | def is_confirmed(self): |
753 | @@ -66,3 +79,6 @@ class SprintSpecification(SQLBase): | |||
754 | 66 | self.status = SprintSpecificationStatus.DECLINED | 79 | self.status = SprintSpecificationStatus.DECLINED |
755 | 67 | self.decider = decider | 80 | self.decider = decider |
756 | 68 | self.date_decided = UTC_NOW | 81 | self.date_decided = UTC_NOW |
757 | 82 | |||
758 | 83 | def destroySelf(self): | ||
759 | 84 | Store.of(self).remove(self) | ||
760 | diff --git a/lib/lp/blueprints/vocabularies/sprint.py b/lib/lp/blueprints/vocabularies/sprint.py | |||
761 | index f300b62..f98df43 100644 | |||
762 | --- a/lib/lp/blueprints/vocabularies/sprint.py | |||
763 | +++ b/lib/lp/blueprints/vocabularies/sprint.py | |||
764 | @@ -9,21 +9,17 @@ __all__ = [ | |||
765 | 9 | 'SprintVocabulary', | 9 | 'SprintVocabulary', |
766 | 10 | ] | 10 | ] |
767 | 11 | 11 | ||
768 | 12 | |||
769 | 13 | from lp.blueprints.model.sprint import Sprint | 12 | from lp.blueprints.model.sprint import Sprint |
771 | 14 | from lp.services.webapp.vocabulary import NamedSQLObjectVocabulary | 13 | from lp.services.database.constants import UTC_NOW |
772 | 14 | from lp.services.webapp.vocabulary import NamedStormVocabulary | ||
773 | 15 | 15 | ||
774 | 16 | 16 | ||
776 | 17 | class FutureSprintVocabulary(NamedSQLObjectVocabulary): | 17 | class FutureSprintVocabulary(NamedStormVocabulary): |
777 | 18 | """A vocab of all sprints that have not yet finished.""" | 18 | """A vocab of all sprints that have not yet finished.""" |
778 | 19 | 19 | ||
779 | 20 | _table = Sprint | 20 | _table = Sprint |
785 | 21 | 21 | _clauses = [Sprint.time_ends > UTC_NOW] | |
781 | 22 | def __iter__(self): | ||
782 | 23 | future_sprints = Sprint.select("time_ends > 'NOW'") | ||
783 | 24 | for sprint in future_sprints: | ||
784 | 25 | yield(self.toTerm(sprint)) | ||
786 | 26 | 22 | ||
787 | 27 | 23 | ||
789 | 28 | class SprintVocabulary(NamedSQLObjectVocabulary): | 24 | class SprintVocabulary(NamedStormVocabulary): |
790 | 29 | _table = Sprint | 25 | _table = Sprint |
791 | diff --git a/lib/lp/code/mail/tests/test_codehandler.py b/lib/lp/code/mail/tests/test_codehandler.py | |||
792 | index a02cc4d..97feda0 100644 | |||
793 | --- a/lib/lp/code/mail/tests/test_codehandler.py | |||
794 | +++ b/lib/lp/code/mail/tests/test_codehandler.py | |||
795 | @@ -3,6 +3,8 @@ | |||
796 | 3 | 3 | ||
797 | 4 | """Testing the CodeHandler.""" | 4 | """Testing the CodeHandler.""" |
798 | 5 | 5 | ||
799 | 6 | from __future__ import absolute_import, print_function, unicode_literals | ||
800 | 7 | |||
801 | 6 | __metaclass__ = type | 8 | __metaclass__ = type |
802 | 7 | 9 | ||
803 | 8 | from textwrap import dedent | 10 | from textwrap import dedent |
804 | diff --git a/lib/lp/code/mail/tests/test_codereviewcomment.py b/lib/lp/code/mail/tests/test_codereviewcomment.py | |||
805 | index 930753e..a692a97 100644 | |||
806 | --- a/lib/lp/code/mail/tests/test_codereviewcomment.py | |||
807 | +++ b/lib/lp/code/mail/tests/test_codereviewcomment.py | |||
808 | @@ -243,7 +243,7 @@ class TestCodeReviewComment(TestCaseWithFactory): | |||
809 | 243 | def test_generateEmailWithVoteAndTag(self): | 243 | def test_generateEmailWithVoteAndTag(self): |
810 | 244 | """Ensure that vote tags are displayed.""" | 244 | """Ensure that vote tags are displayed.""" |
811 | 245 | mailer, subscriber = self.makeMailer( | 245 | mailer, subscriber = self.makeMailer( |
813 | 246 | vote=CodeReviewVote.APPROVE, vote_tag='DBTAG') | 246 | vote=CodeReviewVote.APPROVE, vote_tag=u'DBTAG') |
814 | 247 | ctrl = mailer.generateEmail( | 247 | ctrl = mailer.generateEmail( |
815 | 248 | subscriber.preferredemail.email, subscriber) | 248 | subscriber.preferredemail.email, subscriber) |
816 | 249 | self.assertEqual('Review: Approve dbtag', ctrl.body.splitlines()[0]) | 249 | self.assertEqual('Review: Approve dbtag', ctrl.body.splitlines()[0]) |
817 | diff --git a/lib/lp/code/model/branchcollection.py b/lib/lp/code/model/branchcollection.py | |||
818 | index 7482778..bc3a026 100644 | |||
819 | --- a/lib/lp/code/model/branchcollection.py | |||
820 | +++ b/lib/lp/code/model/branchcollection.py | |||
821 | @@ -484,10 +484,10 @@ class GenericBranchCollection: | |||
822 | 484 | tables = [ | 484 | tables = [ |
823 | 485 | BranchMergeProposal, | 485 | BranchMergeProposal, |
824 | 486 | Join(CodeReviewVoteReference, | 486 | Join(CodeReviewVoteReference, |
826 | 487 | CodeReviewVoteReference.branch_merge_proposalID == \ | 487 | CodeReviewVoteReference.branch_merge_proposal == |
827 | 488 | BranchMergeProposal.id), | 488 | BranchMergeProposal.id), |
828 | 489 | LeftJoin(CodeReviewComment, | 489 | LeftJoin(CodeReviewComment, |
830 | 490 | CodeReviewVoteReference.commentID == CodeReviewComment.id)] | 490 | CodeReviewVoteReference.comment == CodeReviewComment.id)] |
831 | 491 | 491 | ||
832 | 492 | expressions = [ | 492 | expressions = [ |
833 | 493 | CodeReviewVoteReference.reviewer == reviewer, | 493 | CodeReviewVoteReference.reviewer == reviewer, |
834 | diff --git a/lib/lp/code/model/branchmergeproposal.py b/lib/lp/code/model/branchmergeproposal.py | |||
835 | index 338cc29..2a346b4 100644 | |||
836 | --- a/lib/lp/code/model/branchmergeproposal.py | |||
837 | +++ b/lib/lp/code/model/branchmergeproposal.py | |||
838 | @@ -46,6 +46,7 @@ from zope.interface import implementer | |||
839 | 46 | from zope.security.interfaces import Unauthorized | 46 | from zope.security.interfaces import Unauthorized |
840 | 47 | 47 | ||
841 | 48 | from lp.app.enums import PRIVATE_INFORMATION_TYPES | 48 | from lp.app.enums import PRIVATE_INFORMATION_TYPES |
842 | 49 | from lp.app.errors import NotFoundError | ||
843 | 49 | from lp.app.interfaces.launchpad import ILaunchpadCelebrities | 50 | from lp.app.interfaces.launchpad import ILaunchpadCelebrities |
844 | 50 | from lp.bugs.interfaces.bugtask import IBugTaskSet | 51 | from lp.bugs.interfaces.bugtask import IBugTaskSet |
845 | 51 | from lp.bugs.interfaces.bugtaskfilter import filter_bugtasks_by_context | 52 | from lp.bugs.interfaces.bugtaskfilter import filter_bugtasks_by_context |
846 | @@ -565,11 +566,14 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
847 | 565 | @property | 566 | @property |
848 | 566 | def all_comments(self): | 567 | def all_comments(self): |
849 | 567 | """See `IBranchMergeProposal`.""" | 568 | """See `IBranchMergeProposal`.""" |
851 | 568 | return CodeReviewComment.selectBy(branch_merge_proposal=self.id) | 569 | return IStore(CodeReviewComment).find( |
852 | 570 | CodeReviewComment, branch_merge_proposal=self) | ||
853 | 569 | 571 | ||
854 | 570 | def getComment(self, id): | 572 | def getComment(self, id): |
855 | 571 | """See `IBranchMergeProposal`.""" | 573 | """See `IBranchMergeProposal`.""" |
857 | 572 | comment = CodeReviewComment.get(id) | 574 | comment = IStore(CodeReviewComment).get(CodeReviewComment, id) |
858 | 575 | if comment is None: | ||
859 | 576 | raise NotFoundError(id) | ||
860 | 573 | if comment.branch_merge_proposal != self: | 577 | if comment.branch_merge_proposal != self: |
861 | 574 | raise WrongBranchMergeProposal | 578 | raise WrongBranchMergeProposal |
862 | 575 | return comment | 579 | return comment |
863 | @@ -583,7 +587,10 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
864 | 583 | 587 | ||
865 | 584 | def setCommentVisibility(self, user, comment_number, visible): | 588 | def setCommentVisibility(self, user, comment_number, visible): |
866 | 585 | """See `IBranchMergeProposal`.""" | 589 | """See `IBranchMergeProposal`.""" |
868 | 586 | comment = CodeReviewComment.get(comment_number) | 590 | comment = IStore(CodeReviewComment).get( |
869 | 591 | CodeReviewComment, comment_number) | ||
870 | 592 | if comment is None: | ||
871 | 593 | raise NotFoundError(comment_number) | ||
872 | 587 | if comment.branch_merge_proposal != self: | 594 | if comment.branch_merge_proposal != self: |
873 | 588 | raise WrongBranchMergeProposal | 595 | raise WrongBranchMergeProposal |
874 | 589 | if not comment.userCanSetCommentVisibility(user): | 596 | if not comment.userCanSetCommentVisibility(user): |
875 | @@ -596,7 +603,9 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
876 | 596 | """See `IBranchMergeProposal`. | 603 | """See `IBranchMergeProposal`. |
877 | 597 | 604 | ||
878 | 598 | This function can raise WrongBranchMergeProposal.""" | 605 | This function can raise WrongBranchMergeProposal.""" |
880 | 599 | vote = CodeReviewVoteReference.get(id) | 606 | vote = IStore(CodeReviewVoteReference).get(CodeReviewVoteReference, id) |
881 | 607 | if vote is None: | ||
882 | 608 | raise NotFoundError(id) | ||
883 | 600 | if vote.branch_merge_proposal != self: | 609 | if vote.branch_merge_proposal != self: |
884 | 601 | raise WrongBranchMergeProposal | 610 | raise WrongBranchMergeProposal |
885 | 602 | return vote | 611 | return vote |
886 | @@ -932,6 +941,7 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
887 | 932 | date_created=_date_created) | 941 | date_created=_date_created) |
888 | 933 | self._ensureAssociatedBranchesVisibleToReviewer(reviewer) | 942 | self._ensureAssociatedBranchesVisibleToReviewer(reviewer) |
889 | 934 | vote_reference.review_type = review_type | 943 | vote_reference.review_type = review_type |
890 | 944 | Store.of(vote_reference).flush() | ||
891 | 935 | if _notify_listeners: | 945 | if _notify_listeners: |
892 | 936 | notify(ReviewerNominatedEvent(vote_reference)) | 946 | notify(ReviewerNominatedEvent(vote_reference)) |
893 | 937 | return vote_reference | 947 | return vote_reference |
894 | @@ -1098,11 +1108,13 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
895 | 1098 | if team_ref is not None: | 1108 | if team_ref is not None: |
896 | 1099 | return team_ref | 1109 | return team_ref |
897 | 1100 | # Create a new reference. | 1110 | # Create a new reference. |
899 | 1101 | return CodeReviewVoteReference( | 1111 | vote_reference = CodeReviewVoteReference( |
900 | 1102 | branch_merge_proposal=self, | 1112 | branch_merge_proposal=self, |
901 | 1103 | registrant=user, | 1113 | registrant=user, |
902 | 1104 | reviewer=user, | 1114 | reviewer=user, |
903 | 1105 | review_type=review_type) | 1115 | review_type=review_type) |
904 | 1116 | Store.of(vote_reference).flush() | ||
905 | 1117 | return vote_reference | ||
906 | 1106 | 1118 | ||
907 | 1107 | def createCommentFromMessage(self, message, vote, review_type, | 1119 | def createCommentFromMessage(self, message, vote, review_type, |
908 | 1108 | original_email, _notify_listeners=True, | 1120 | original_email, _notify_listeners=True, |
909 | @@ -1126,6 +1138,7 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
910 | 1126 | vote_reference.reviewer = message.owner | 1138 | vote_reference.reviewer = message.owner |
911 | 1127 | vote_reference.review_type = review_type | 1139 | vote_reference.review_type = review_type |
912 | 1128 | vote_reference.comment = code_review_message | 1140 | vote_reference.comment = code_review_message |
913 | 1141 | Store.of(code_review_message).flush() | ||
914 | 1129 | if _notify_listeners: | 1142 | if _notify_listeners: |
915 | 1130 | notify(ObjectCreatedEvent(code_review_message)) | 1143 | notify(ObjectCreatedEvent(code_review_message)) |
916 | 1131 | return code_review_message | 1144 | return code_review_message |
917 | @@ -1389,15 +1402,15 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin): | |||
918 | 1389 | if include_votes: | 1402 | if include_votes: |
919 | 1390 | votes = load_referencing( | 1403 | votes = load_referencing( |
920 | 1391 | CodeReviewVoteReference, branch_merge_proposals, | 1404 | CodeReviewVoteReference, branch_merge_proposals, |
922 | 1392 | ['branch_merge_proposalID']) | 1405 | ['branch_merge_proposal_id']) |
923 | 1393 | votes_map = defaultdict(list) | 1406 | votes_map = defaultdict(list) |
924 | 1394 | for vote in votes: | 1407 | for vote in votes: |
926 | 1395 | votes_map[vote.branch_merge_proposalID].append(vote) | 1408 | votes_map[vote.branch_merge_proposal_id].append(vote) |
927 | 1396 | for mp in branch_merge_proposals: | 1409 | for mp in branch_merge_proposals: |
928 | 1397 | get_property_cache(mp).votes = votes_map[mp.id] | 1410 | get_property_cache(mp).votes = votes_map[mp.id] |
932 | 1398 | comments = load_related(CodeReviewComment, votes, ['commentID']) | 1411 | comments = load_related(CodeReviewComment, votes, ['comment_id']) |
933 | 1399 | load_related(Message, comments, ['messageID']) | 1412 | load_related(Message, comments, ['message_id']) |
934 | 1400 | person_ids.update(vote.reviewerID for vote in votes) | 1413 | person_ids.update(vote.reviewer_id for vote in votes) |
935 | 1401 | 1414 | ||
936 | 1402 | # we also provide a summary of diffs, so load them | 1415 | # we also provide a summary of diffs, so load them |
937 | 1403 | load_related(LibraryFileAlias, diffs, ['diff_textID']) | 1416 | load_related(LibraryFileAlias, diffs, ['diff_textID']) |
938 | @@ -1439,8 +1452,8 @@ class BranchMergeProposalGetter: | |||
939 | 1439 | BranchMergeProposal.registrantID == participant.id) | 1452 | BranchMergeProposal.registrantID == participant.id) |
940 | 1440 | 1453 | ||
941 | 1441 | review_select = Select( | 1454 | review_select = Select( |
944 | 1442 | [CodeReviewVoteReference.branch_merge_proposalID], | 1455 | [CodeReviewVoteReference.branch_merge_proposal_id], |
945 | 1443 | [CodeReviewVoteReference.reviewerID == participant.id]) | 1456 | [CodeReviewVoteReference.reviewer == participant]) |
946 | 1444 | 1457 | ||
947 | 1445 | query = Store.of(participant).find( | 1458 | query = Store.of(participant).find( |
948 | 1446 | BranchMergeProposal, | 1459 | BranchMergeProposal, |
949 | @@ -1463,13 +1476,13 @@ class BranchMergeProposalGetter: | |||
950 | 1463 | # the actual vote for that person. | 1476 | # the actual vote for that person. |
951 | 1464 | tables = [ | 1477 | tables = [ |
952 | 1465 | CodeReviewVoteReference, | 1478 | CodeReviewVoteReference, |
954 | 1466 | Join(Person, CodeReviewVoteReference.reviewerID == Person.id), | 1479 | Join(Person, CodeReviewVoteReference.reviewer == Person.id), |
955 | 1467 | LeftJoin( | 1480 | LeftJoin( |
956 | 1468 | CodeReviewComment, | 1481 | CodeReviewComment, |
958 | 1469 | CodeReviewVoteReference.commentID == CodeReviewComment.id)] | 1482 | CodeReviewVoteReference.comment == CodeReviewComment.id)] |
959 | 1470 | results = store.using(*tables).find( | 1483 | results = store.using(*tables).find( |
960 | 1471 | (CodeReviewVoteReference, Person, CodeReviewComment), | 1484 | (CodeReviewVoteReference, Person, CodeReviewComment), |
962 | 1472 | CodeReviewVoteReference.branch_merge_proposalID.is_in(ids)) | 1485 | CodeReviewVoteReference.branch_merge_proposal_id.is_in(ids)) |
963 | 1473 | for reference, person, comment in results: | 1486 | for reference, person, comment in results: |
964 | 1474 | result[reference.branch_merge_proposal].append(reference) | 1487 | result[reference.branch_merge_proposal].append(reference) |
965 | 1475 | return result | 1488 | return result |
966 | diff --git a/lib/lp/code/model/codereviewcomment.py b/lib/lp/code/model/codereviewcomment.py | |||
967 | index 47f640c..1029b4e 100644 | |||
968 | --- a/lib/lp/code/model/codereviewcomment.py | |||
969 | +++ b/lib/lp/code/model/codereviewcomment.py | |||
970 | @@ -10,9 +10,11 @@ __all__ = [ | |||
971 | 10 | 10 | ||
972 | 11 | from textwrap import TextWrapper | 11 | from textwrap import TextWrapper |
973 | 12 | 12 | ||
977 | 13 | from sqlobject import ( | 13 | from storm.locals import ( |
978 | 14 | ForeignKey, | 14 | Int, |
979 | 15 | StringCol, | 15 | Reference, |
980 | 16 | Store, | ||
981 | 17 | Unicode, | ||
982 | 16 | ) | 18 | ) |
983 | 17 | from zope.interface import implementer | 19 | from zope.interface import implementer |
984 | 18 | 20 | ||
985 | @@ -22,8 +24,8 @@ from lp.code.interfaces.codereviewcomment import ( | |||
986 | 22 | ICodeReviewComment, | 24 | ICodeReviewComment, |
987 | 23 | ICodeReviewCommentDeletion, | 25 | ICodeReviewCommentDeletion, |
988 | 24 | ) | 26 | ) |
991 | 25 | from lp.services.database.enumcol import EnumCol | 27 | from lp.services.database.enumcol import DBEnum |
992 | 26 | from lp.services.database.sqlbase import SQLBase | 28 | from lp.services.database.stormbase import StormBase |
993 | 27 | from lp.services.mail.signedmessage import signed_message_from_string | 29 | from lp.services.mail.signedmessage import signed_message_from_string |
994 | 28 | 30 | ||
995 | 29 | 31 | ||
996 | @@ -60,17 +62,27 @@ def quote_text_as_email(text, width=80): | |||
997 | 60 | 62 | ||
998 | 61 | 63 | ||
999 | 62 | @implementer(ICodeReviewComment, ICodeReviewCommentDeletion, IHasBranchTarget) | 64 | @implementer(ICodeReviewComment, ICodeReviewCommentDeletion, IHasBranchTarget) |
1001 | 63 | class CodeReviewComment(SQLBase): | 65 | class CodeReviewComment(StormBase): |
1002 | 64 | """A table linking branch merge proposals and messages.""" | 66 | """A table linking branch merge proposals and messages.""" |
1003 | 65 | 67 | ||
1012 | 66 | _table = 'CodeReviewMessage' | 68 | __storm_table__ = 'CodeReviewMessage' |
1013 | 67 | 69 | ||
1014 | 68 | branch_merge_proposal = ForeignKey( | 70 | id = Int(primary=True) |
1015 | 69 | dbName='branch_merge_proposal', foreignKey='BranchMergeProposal', | 71 | branch_merge_proposal_id = Int( |
1016 | 70 | notNull=True) | 72 | name='branch_merge_proposal', allow_none=False) |
1017 | 71 | message = ForeignKey(dbName='message', foreignKey='Message', notNull=True) | 73 | branch_merge_proposal = Reference( |
1018 | 72 | vote = EnumCol(dbName='vote', notNull=False, schema=CodeReviewVote) | 74 | branch_merge_proposal_id, 'BranchMergeProposal.id') |
1019 | 73 | vote_tag = StringCol(default=None) | 75 | message_id = Int(name='message', allow_none=False) |
1020 | 76 | message = Reference(message_id, 'Message.id') | ||
1021 | 77 | vote = DBEnum(name='vote', allow_none=True, enum=CodeReviewVote) | ||
1022 | 78 | vote_tag = Unicode(default=None) | ||
1023 | 79 | |||
1024 | 80 | def __init__(self, branch_merge_proposal, message, vote=None, | ||
1025 | 81 | vote_tag=None): | ||
1026 | 82 | self.branch_merge_proposal = branch_merge_proposal | ||
1027 | 83 | self.message = message | ||
1028 | 84 | self.vote = vote | ||
1029 | 85 | self.vote_tag = vote_tag | ||
1030 | 74 | 86 | ||
1031 | 75 | @property | 87 | @property |
1032 | 76 | def author(self): | 88 | def author(self): |
1033 | @@ -134,3 +146,7 @@ class CodeReviewComment(SQLBase): | |||
1034 | 134 | return ( | 146 | return ( |
1035 | 135 | self.branch_merge_proposal.userCanSetCommentVisibility(user) or | 147 | self.branch_merge_proposal.userCanSetCommentVisibility(user) or |
1036 | 136 | (user is not None and user.inTeam(self.author))) | 148 | (user is not None and user.inTeam(self.author))) |
1037 | 149 | |||
1038 | 150 | def destroySelf(self): | ||
1039 | 151 | """Delete this comment.""" | ||
1040 | 152 | Store.of(self).remove(self) | ||
1041 | diff --git a/lib/lp/code/model/codereviewvote.py b/lib/lp/code/model/codereviewvote.py | |||
1042 | index d2e5c53..b695ebe 100644 | |||
1043 | --- a/lib/lp/code/model/codereviewvote.py | |||
1044 | +++ b/lib/lp/code/model/codereviewvote.py | |||
1045 | @@ -8,12 +8,15 @@ __all__ = [ | |||
1046 | 8 | 'CodeReviewVoteReference', | 8 | 'CodeReviewVoteReference', |
1047 | 9 | ] | 9 | ] |
1048 | 10 | 10 | ||
1052 | 11 | from sqlobject import ( | 11 | import pytz |
1053 | 12 | ForeignKey, | 12 | from storm.locals import ( |
1054 | 13 | StringCol, | 13 | DateTime, |
1055 | 14 | Int, | ||
1056 | 15 | Reference, | ||
1057 | 16 | Store, | ||
1058 | 17 | Unicode, | ||
1059 | 14 | ) | 18 | ) |
1060 | 15 | from zope.interface import implementer | 19 | from zope.interface import implementer |
1061 | 16 | from zope.schema import Int | ||
1062 | 17 | 20 | ||
1063 | 18 | from lp.code.errors import ( | 21 | from lp.code.errors import ( |
1064 | 19 | ClaimReviewFailed, | 22 | ClaimReviewFailed, |
1065 | @@ -22,27 +25,36 @@ from lp.code.errors import ( | |||
1066 | 22 | ) | 25 | ) |
1067 | 23 | from lp.code.interfaces.codereviewvote import ICodeReviewVoteReference | 26 | from lp.code.interfaces.codereviewvote import ICodeReviewVoteReference |
1068 | 24 | from lp.services.database.constants import DEFAULT | 27 | from lp.services.database.constants import DEFAULT |
1071 | 25 | from lp.services.database.datetimecol import UtcDateTimeCol | 28 | from lp.services.database.stormbase import StormBase |
1070 | 26 | from lp.services.database.sqlbase import SQLBase | ||
1072 | 27 | 29 | ||
1073 | 28 | 30 | ||
1074 | 29 | @implementer(ICodeReviewVoteReference) | 31 | @implementer(ICodeReviewVoteReference) |
1076 | 30 | class CodeReviewVoteReference(SQLBase): | 32 | class CodeReviewVoteReference(StormBase): |
1077 | 31 | """See `ICodeReviewVote`""" | 33 | """See `ICodeReviewVote`""" |
1078 | 32 | 34 | ||
1092 | 33 | _table = 'CodeReviewVote' | 35 | __storm_table__ = 'CodeReviewVote' |
1093 | 34 | id = Int() | 36 | |
1094 | 35 | branch_merge_proposal = ForeignKey( | 37 | id = Int(primary=True) |
1095 | 36 | dbName='branch_merge_proposal', foreignKey='BranchMergeProposal', | 38 | branch_merge_proposal_id = Int( |
1096 | 37 | notNull=True) | 39 | name='branch_merge_proposal', allow_none=False) |
1097 | 38 | date_created = UtcDateTimeCol(notNull=True, default=DEFAULT) | 40 | branch_merge_proposal = Reference( |
1098 | 39 | registrant = ForeignKey( | 41 | branch_merge_proposal_id, 'BranchMergeProposal.id') |
1099 | 40 | dbName='registrant', foreignKey='Person', notNull=True) | 42 | date_created = DateTime(tzinfo=pytz.UTC, allow_none=False, default=DEFAULT) |
1100 | 41 | reviewer = ForeignKey( | 43 | registrant_id = Int(name='registrant', allow_none=False) |
1101 | 42 | dbName='reviewer', foreignKey='Person', notNull=True) | 44 | registrant = Reference(registrant_id, 'Person.id') |
1102 | 43 | review_type = StringCol(default=None) | 45 | reviewer_id = Int(name='reviewer', allow_none=False) |
1103 | 44 | comment = ForeignKey( | 46 | reviewer = Reference(reviewer_id, 'Person.id') |
1104 | 45 | dbName='vote_message', foreignKey='CodeReviewComment', default=None) | 47 | review_type = Unicode(default=None) |
1105 | 48 | comment_id = Int(name='vote_message', default=None) | ||
1106 | 49 | comment = Reference(comment_id, 'CodeReviewComment.id') | ||
1107 | 50 | |||
1108 | 51 | def __init__(self, branch_merge_proposal, registrant, reviewer, | ||
1109 | 52 | review_type=None, date_created=DEFAULT): | ||
1110 | 53 | self.branch_merge_proposal = branch_merge_proposal | ||
1111 | 54 | self.registrant = registrant | ||
1112 | 55 | self.reviewer = reviewer | ||
1113 | 56 | self.review_type = review_type | ||
1114 | 57 | self.date_created = date_created | ||
1115 | 46 | 58 | ||
1116 | 47 | @property | 59 | @property |
1117 | 48 | def is_pending(self): | 60 | def is_pending(self): |
1118 | @@ -96,6 +108,10 @@ class CodeReviewVoteReference(SQLBase): | |||
1119 | 96 | self.validateReasignReview(reviewer) | 108 | self.validateReasignReview(reviewer) |
1120 | 97 | self.reviewer = reviewer | 109 | self.reviewer = reviewer |
1121 | 98 | 110 | ||
1122 | 111 | def destroySelf(self): | ||
1123 | 112 | """Delete this vote.""" | ||
1124 | 113 | Store.of(self).remove(self) | ||
1125 | 114 | |||
1126 | 99 | def delete(self): | 115 | def delete(self): |
1127 | 100 | """See `ICodeReviewVote`""" | 116 | """See `ICodeReviewVote`""" |
1128 | 101 | if not self.is_pending: | 117 | if not self.is_pending: |
1129 | diff --git a/lib/lp/code/model/gitcollection.py b/lib/lp/code/model/gitcollection.py | |||
1130 | index 4095d5e..6447d0b 100644 | |||
1131 | --- a/lib/lp/code/model/gitcollection.py | |||
1132 | +++ b/lib/lp/code/model/gitcollection.py | |||
1133 | @@ -412,10 +412,10 @@ class GenericGitCollection: | |||
1134 | 412 | tables = [ | 412 | tables = [ |
1135 | 413 | BranchMergeProposal, | 413 | BranchMergeProposal, |
1136 | 414 | Join(CodeReviewVoteReference, | 414 | Join(CodeReviewVoteReference, |
1138 | 415 | CodeReviewVoteReference.branch_merge_proposalID == \ | 415 | CodeReviewVoteReference.branch_merge_proposal == |
1139 | 416 | BranchMergeProposal.id), | 416 | BranchMergeProposal.id), |
1140 | 417 | LeftJoin(CodeReviewComment, | 417 | LeftJoin(CodeReviewComment, |
1142 | 418 | CodeReviewVoteReference.commentID == CodeReviewComment.id)] | 418 | CodeReviewVoteReference.comment == CodeReviewComment.id)] |
1143 | 419 | 419 | ||
1144 | 420 | expressions = [ | 420 | expressions = [ |
1145 | 421 | CodeReviewVoteReference.reviewer == reviewer, | 421 | CodeReviewVoteReference.reviewer == reviewer, |
1146 | diff --git a/lib/lp/code/model/tests/test_branch.py b/lib/lp/code/model/tests/test_branch.py | |||
1147 | index 1978e88..e2421ba 100644 | |||
1148 | --- a/lib/lp/code/model/tests/test_branch.py | |||
1149 | +++ b/lib/lp/code/model/tests/test_branch.py | |||
1150 | @@ -1566,8 +1566,8 @@ class TestBranchDeletionConsequences(TestCase): | |||
1151 | 1566 | comment_id = comment.id | 1566 | comment_id = comment.id |
1152 | 1567 | branch = comment.branch_merge_proposal.source_branch | 1567 | branch = comment.branch_merge_proposal.source_branch |
1153 | 1568 | branch.destroySelf(break_references=True) | 1568 | branch.destroySelf(break_references=True) |
1156 | 1569 | self.assertRaises( | 1569 | self.assertIsNone( |
1157 | 1570 | SQLObjectNotFound, CodeReviewComment.get, comment_id) | 1570 | IStore(CodeReviewComment).get(CodeReviewComment, comment_id)) |
1158 | 1571 | 1571 | ||
1159 | 1572 | def test_deleteTargetCodeReviewComment(self): | 1572 | def test_deleteTargetCodeReviewComment(self): |
1160 | 1573 | """Deletion of branches that have CodeReviewComments works.""" | 1573 | """Deletion of branches that have CodeReviewComments works.""" |
1161 | @@ -1575,8 +1575,8 @@ class TestBranchDeletionConsequences(TestCase): | |||
1162 | 1575 | comment_id = comment.id | 1575 | comment_id = comment.id |
1163 | 1576 | branch = comment.branch_merge_proposal.target_branch | 1576 | branch = comment.branch_merge_proposal.target_branch |
1164 | 1577 | branch.destroySelf(break_references=True) | 1577 | branch.destroySelf(break_references=True) |
1167 | 1578 | self.assertRaises( | 1578 | self.assertIsNone( |
1168 | 1579 | SQLObjectNotFound, CodeReviewComment.get, comment_id) | 1579 | IStore(CodeReviewComment).get(CodeReviewComment, comment_id)) |
1169 | 1580 | 1580 | ||
1170 | 1581 | def test_branchWithBugRequirements(self): | 1581 | def test_branchWithBugRequirements(self): |
1171 | 1582 | """Deletion requirements for a branch with a bug are right.""" | 1582 | """Deletion requirements for a branch with a bug are right.""" |
1172 | diff --git a/lib/lp/code/model/tests/test_gitrepository.py b/lib/lp/code/model/tests/test_gitrepository.py | |||
1173 | index a8b9c35..62bf865 100644 | |||
1174 | --- a/lib/lp/code/model/tests/test_gitrepository.py | |||
1175 | +++ b/lib/lp/code/model/tests/test_gitrepository.py | |||
1176 | @@ -1086,8 +1086,8 @@ class TestGitRepositoryDeletionConsequences(TestCaseWithFactory): | |||
1177 | 1086 | comment_id = comment.id | 1086 | comment_id = comment.id |
1178 | 1087 | repository = comment.branch_merge_proposal.source_git_repository | 1087 | repository = comment.branch_merge_proposal.source_git_repository |
1179 | 1088 | repository.destroySelf(break_references=True) | 1088 | repository.destroySelf(break_references=True) |
1182 | 1089 | self.assertRaises( | 1089 | self.assertIsNone( |
1183 | 1090 | SQLObjectNotFound, CodeReviewComment.get, comment_id) | 1090 | IStore(CodeReviewComment).get(CodeReviewComment, comment_id)) |
1184 | 1091 | 1091 | ||
1185 | 1092 | def test_delete_target_CodeReviewComment(self): | 1092 | def test_delete_target_CodeReviewComment(self): |
1186 | 1093 | # Deletion of target repositories that have CodeReviewComments works. | 1093 | # Deletion of target repositories that have CodeReviewComments works. |
1187 | @@ -1095,8 +1095,8 @@ class TestGitRepositoryDeletionConsequences(TestCaseWithFactory): | |||
1188 | 1095 | comment_id = comment.id | 1095 | comment_id = comment.id |
1189 | 1096 | repository = comment.branch_merge_proposal.target_git_repository | 1096 | repository = comment.branch_merge_proposal.target_git_repository |
1190 | 1097 | repository.destroySelf(break_references=True) | 1097 | repository.destroySelf(break_references=True) |
1193 | 1098 | self.assertRaises( | 1098 | self.assertIsNone( |
1194 | 1099 | SQLObjectNotFound, CodeReviewComment.get, comment_id) | 1099 | IStore(CodeReviewComment).get(CodeReviewComment, comment_id)) |
1195 | 1100 | 1100 | ||
1196 | 1101 | def test_sourceBranchWithCodeReviewVoteReference(self): | 1101 | def test_sourceBranchWithCodeReviewVoteReference(self): |
1197 | 1102 | # break_references handles CodeReviewVoteReference source repository. | 1102 | # break_references handles CodeReviewVoteReference source repository. |
1198 | diff --git a/lib/lp/code/stories/webservice/xx-branchmergeproposal.txt b/lib/lp/code/stories/webservice/xx-branchmergeproposal.txt | |||
1199 | index 1580ea4..98483a4 100644 | |||
1200 | --- a/lib/lp/code/stories/webservice/xx-branchmergeproposal.txt | |||
1201 | +++ b/lib/lp/code/stories/webservice/xx-branchmergeproposal.txt | |||
1202 | @@ -463,7 +463,7 @@ which is the one we want the method to return. | |||
1203 | 463 | ... product=blob, set_state=BranchMergeProposalStatus.NEEDS_REVIEW, | 463 | ... product=blob, set_state=BranchMergeProposalStatus.NEEDS_REVIEW, |
1204 | 464 | ... registrant=branch_owner, source_branch=source_branch) | 464 | ... registrant=branch_owner, source_branch=source_branch) |
1205 | 465 | >>> proposal.nominateReviewer(target_owner, branch_owner) | 465 | >>> proposal.nominateReviewer(target_owner, branch_owner) |
1207 | 466 | <CodeReviewVoteReference at ...> | 466 | <lp.code.model.codereviewvote.CodeReviewVoteReference object at ...> |
1208 | 467 | 467 | ||
1209 | 468 | And then we propose a merge the other way, so that the owner is target, | 468 | And then we propose a merge the other way, so that the owner is target, |
1210 | 469 | but they have not been asked to review, meaning that the method shouldn't | 469 | but they have not been asked to review, meaning that the method shouldn't |
1211 | @@ -474,7 +474,7 @@ return this review. | |||
1212 | 474 | ... product=blob, set_state=BranchMergeProposalStatus.NEEDS_REVIEW, | 474 | ... product=blob, set_state=BranchMergeProposalStatus.NEEDS_REVIEW, |
1213 | 475 | ... registrant=target_owner, source_branch=target_branch) | 475 | ... registrant=target_owner, source_branch=target_branch) |
1214 | 476 | >>> proposal.nominateReviewer(branch_owner, target_owner) | 476 | >>> proposal.nominateReviewer(branch_owner, target_owner) |
1216 | 477 | <CodeReviewVoteReference at ...> | 477 | <lp.code.model.codereviewvote.CodeReviewVoteReference object at ...> |
1217 | 478 | >>> logout() | 478 | >>> logout() |
1218 | 479 | 479 | ||
1219 | 480 | >>> proposals = webservice.named_get('/~target', 'getRequestedReviews' | 480 | >>> proposals = webservice.named_get('/~target', 'getRequestedReviews' |
1220 | diff --git a/lib/lp/codehosting/puller/tests/test_scheduler.py b/lib/lp/codehosting/puller/tests/test_scheduler.py | |||
1221 | index 2b08509..34d53b6 100644 | |||
1222 | --- a/lib/lp/codehosting/puller/tests/test_scheduler.py | |||
1223 | +++ b/lib/lp/codehosting/puller/tests/test_scheduler.py | |||
1224 | @@ -553,7 +553,7 @@ class TestPullerMasterIntegration(PullerBranchTestCase): | |||
1225 | 553 | """Tests for the puller master that launch sub-processes.""" | 553 | """Tests for the puller master that launch sub-processes.""" |
1226 | 554 | 554 | ||
1227 | 555 | layer = ZopelessAppServerLayer | 555 | layer = ZopelessAppServerLayer |
1229 | 556 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=10) | 556 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30) |
1230 | 557 | 557 | ||
1231 | 558 | def setUp(self): | 558 | def setUp(self): |
1232 | 559 | super(TestPullerMasterIntegration, self).setUp() | 559 | super(TestPullerMasterIntegration, self).setUp() |
1233 | diff --git a/lib/lp/registry/model/projectgroup.py b/lib/lp/registry/model/projectgroup.py | |||
1234 | index 8a22fa8..bc6c5e5 100644 | |||
1235 | --- a/lib/lp/registry/model/projectgroup.py | |||
1236 | +++ b/lib/lp/registry/model/projectgroup.py | |||
1237 | @@ -50,7 +50,11 @@ from lp.blueprints.model.specification import ( | |||
1238 | 50 | Specification, | 50 | Specification, |
1239 | 51 | ) | 51 | ) |
1240 | 52 | from lp.blueprints.model.specificationsearch import search_specifications | 52 | from lp.blueprints.model.specificationsearch import search_specifications |
1242 | 53 | from lp.blueprints.model.sprint import HasSprintsMixin | 53 | from lp.blueprints.model.sprint import ( |
1243 | 54 | HasSprintsMixin, | ||
1244 | 55 | Sprint, | ||
1245 | 56 | ) | ||
1246 | 57 | from lp.blueprints.model.sprintspecification import SprintSpecification | ||
1247 | 54 | from lp.bugs.interfaces.bugsummary import IBugSummaryDimension | 58 | from lp.bugs.interfaces.bugsummary import IBugSummaryDimension |
1248 | 55 | from lp.bugs.model.bugtarget import ( | 59 | from lp.bugs.model.bugtarget import ( |
1249 | 56 | BugTargetBase, | 60 | BugTargetBase, |
1250 | @@ -239,15 +243,14 @@ class ProjectGroup(SQLBase, BugTargetBase, HasSpecificationsMixin, | |||
1251 | 239 | """ See `IProjectGroup`.""" | 243 | """ See `IProjectGroup`.""" |
1252 | 240 | return not self.getBranches().is_empty() | 244 | return not self.getBranches().is_empty() |
1253 | 241 | 245 | ||
1263 | 242 | def _getBaseQueryAndClauseTablesForQueryingSprints(self): | 246 | def _getBaseClausesForQueryingSprints(self): |
1264 | 243 | query = """ | 247 | return [ |
1265 | 244 | Product.project = %s | 248 | Product.projectgroup == self, |
1266 | 245 | AND Specification.product = Product.id | 249 | Specification.product == Product.id, |
1267 | 246 | AND Specification.id = SprintSpecification.specification | 250 | Specification.id == SprintSpecification.specification_id, |
1268 | 247 | AND SprintSpecification.sprint = Sprint.id | 251 | SprintSpecification.sprint == Sprint.id, |
1269 | 248 | AND SprintSpecification.status = %s | 252 | SprintSpecification.status == SprintSpecificationStatus.ACCEPTED, |
1270 | 249 | """ % sqlvalues(self, SprintSpecificationStatus.ACCEPTED) | 253 | ] |
1262 | 250 | return query, ['Product', 'Specification', 'SprintSpecification'] | ||
1271 | 251 | 254 | ||
1272 | 252 | def specifications(self, user, sort=None, quantity=None, filter=None, | 255 | def specifications(self, user, sort=None, quantity=None, filter=None, |
1273 | 253 | series=None, need_people=True, need_branches=True, | 256 | series=None, need_people=True, need_branches=True, |
1274 | diff --git a/lib/lp/services/database/policy.py b/lib/lp/services/database/policy.py | |||
1275 | index 066fdb3..691baf4 100644 | |||
1276 | --- a/lib/lp/services/database/policy.py | |||
1277 | +++ b/lib/lp/services/database/policy.py | |||
1278 | @@ -358,7 +358,7 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy): | |||
1279 | 358 | slave_store = self.getStore(MAIN_STORE, SLAVE_FLAVOR) | 358 | slave_store = self.getStore(MAIN_STORE, SLAVE_FLAVOR) |
1280 | 359 | hot_standby, streaming_lag = slave_store.execute(""" | 359 | hot_standby, streaming_lag = slave_store.execute(""" |
1281 | 360 | SELECT | 360 | SELECT |
1283 | 361 | current_setting('hot_standby') = 'on', | 361 | pg_is_in_recovery(), |
1284 | 362 | now() - pg_last_xact_replay_timestamp() | 362 | now() - pg_last_xact_replay_timestamp() |
1285 | 363 | """).get_one() | 363 | """).get_one() |
1286 | 364 | if hot_standby and streaming_lag is not None: | 364 | if hot_standby and streaming_lag is not None: |
1287 | diff --git a/lib/lp/services/gpg/handler.py b/lib/lp/services/gpg/handler.py | |||
1288 | index b2bcbad..e3eba45 100644 | |||
1289 | --- a/lib/lp/services/gpg/handler.py | |||
1290 | +++ b/lib/lp/services/gpg/handler.py | |||
1291 | @@ -489,12 +489,8 @@ class GPGHandler: | |||
1292 | 489 | raise GPGKeyExpired(key) | 489 | raise GPGKeyExpired(key) |
1293 | 490 | return key | 490 | return key |
1294 | 491 | 491 | ||
1301 | 492 | def _submitKey(self, content): | 492 | def submitKey(self, content): |
1302 | 493 | """Submit an ASCII-armored public key export to the keyserver. | 493 | """See `IGPGHandler`.""" |
1297 | 494 | |||
1298 | 495 | It issues a POST at /pks/add on the keyserver specified in the | ||
1299 | 496 | configuration. | ||
1300 | 497 | """ | ||
1303 | 498 | keyserver_http_url = '%s:%s' % ( | 494 | keyserver_http_url = '%s:%s' % ( |
1304 | 499 | config.gpghandler.host, config.gpghandler.port) | 495 | config.gpghandler.host, config.gpghandler.port) |
1305 | 500 | 496 | ||
1306 | @@ -527,7 +523,7 @@ class GPGHandler: | |||
1307 | 527 | return | 523 | return |
1308 | 528 | 524 | ||
1309 | 529 | pub_key = self.retrieveKey(fingerprint) | 525 | pub_key = self.retrieveKey(fingerprint) |
1311 | 530 | self._submitKey(pub_key.export()) | 526 | self.submitKey(pub_key.export()) |
1312 | 531 | 527 | ||
1313 | 532 | def getURLForKeyInServer(self, fingerprint, action='index', public=False): | 528 | def getURLForKeyInServer(self, fingerprint, action='index', public=False): |
1314 | 533 | """See IGPGHandler""" | 529 | """See IGPGHandler""" |
1315 | diff --git a/lib/lp/services/gpg/interfaces.py b/lib/lp/services/gpg/interfaces.py | |||
1316 | index 78b44c8..d6f0f73 100644 | |||
1317 | --- a/lib/lp/services/gpg/interfaces.py | |||
1318 | +++ b/lib/lp/services/gpg/interfaces.py | |||
1319 | @@ -357,6 +357,17 @@ class IGPGHandler(Interface): | |||
1320 | 357 | :return: a `PymeKey`object containing the key information. | 357 | :return: a `PymeKey`object containing the key information. |
1321 | 358 | """ | 358 | """ |
1322 | 359 | 359 | ||
1323 | 360 | def submitKey(content): | ||
1324 | 361 | """Submit an ASCII-armored public key export to the keyserver. | ||
1325 | 362 | |||
1326 | 363 | It issues a POST at /pks/add on the keyserver specified in the | ||
1327 | 364 | configuration. | ||
1328 | 365 | |||
1329 | 366 | :param content: The exported public key, as a byte string. | ||
1330 | 367 | :raise GPGUploadFailure: if the keyserver could not be reached. | ||
1331 | 368 | :raise AssertionError: if the POST request failed. | ||
1332 | 369 | """ | ||
1333 | 370 | |||
1334 | 360 | def uploadPublicKey(fingerprint): | 371 | def uploadPublicKey(fingerprint): |
1335 | 361 | """Upload the specified public key to a keyserver. | 372 | """Upload the specified public key to a keyserver. |
1336 | 362 | 373 | ||
1337 | @@ -365,8 +376,8 @@ class IGPGHandler(Interface): | |||
1338 | 365 | 376 | ||
1339 | 366 | :param fingerprint: The key fingerprint, which must be an hexadecimal | 377 | :param fingerprint: The key fingerprint, which must be an hexadecimal |
1340 | 367 | string. | 378 | string. |
1343 | 368 | :raise GPGUploadFailure: if the keyserver could not be reaches. | 379 | :raise GPGUploadFailure: if the keyserver could not be reached. |
1344 | 369 | :raise AssertionError: if the POST request doesn't succeed. | 380 | :raise AssertionError: if the POST request failed. |
1345 | 370 | """ | 381 | """ |
1346 | 371 | 382 | ||
1347 | 372 | def localKeys(filter=None, secret=False): | 383 | def localKeys(filter=None, secret=False): |
1348 | diff --git a/lib/lp/services/librarianserver/tests/test_storage_db.py b/lib/lp/services/librarianserver/tests/test_storage_db.py | |||
1349 | index 47fe847..b24d87b 100644 | |||
1350 | --- a/lib/lp/services/librarianserver/tests/test_storage_db.py | |||
1351 | +++ b/lib/lp/services/librarianserver/tests/test_storage_db.py | |||
1352 | @@ -146,7 +146,7 @@ class LibrarianStorageDBTests(TestCase): | |||
1353 | 146 | class LibrarianStorageSwiftTests(TestCase): | 146 | class LibrarianStorageSwiftTests(TestCase): |
1354 | 147 | 147 | ||
1355 | 148 | layer = LaunchpadZopelessLayer | 148 | layer = LaunchpadZopelessLayer |
1357 | 149 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=10) | 149 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30) |
1358 | 150 | 150 | ||
1359 | 151 | def setUp(self): | 151 | def setUp(self): |
1360 | 152 | super(LibrarianStorageSwiftTests, self).setUp() | 152 | super(LibrarianStorageSwiftTests, self).setUp() |
1361 | diff --git a/lib/lp/services/mail/helpers.py b/lib/lp/services/mail/helpers.py | |||
1362 | index 82640aa..80f68d3 100644 | |||
1363 | --- a/lib/lp/services/mail/helpers.py | |||
1364 | +++ b/lib/lp/services/mail/helpers.py | |||
1365 | @@ -35,7 +35,13 @@ class IncomingEmailError(Exception): | |||
1366 | 35 | 35 | ||
1367 | 36 | 36 | ||
1368 | 37 | def get_main_body(signed_msg): | 37 | def get_main_body(signed_msg): |
1370 | 38 | """Returns the first text part of the email.""" | 38 | """Returns the first text part of the email. |
1371 | 39 | |||
1372 | 40 | This always returns text (or None if the email has no text parts at | ||
1373 | 41 | all). It decodes using the character set in the text part's | ||
1374 | 42 | Content-Type, or ISO-8859-1 if unspecified (in order to minimise the | ||
1375 | 43 | chances of `UnicodeDecodeError`s). | ||
1376 | 44 | """ | ||
1377 | 39 | msg = getattr(signed_msg, 'signedMessage', None) | 45 | msg = getattr(signed_msg, 'signedMessage', None) |
1378 | 40 | if msg is None: | 46 | if msg is None: |
1379 | 41 | # The email wasn't signed. | 47 | # The email wasn't signed. |
1380 | @@ -43,9 +49,11 @@ def get_main_body(signed_msg): | |||
1381 | 43 | if msg.is_multipart(): | 49 | if msg.is_multipart(): |
1382 | 44 | for part in msg.walk(): | 50 | for part in msg.walk(): |
1383 | 45 | if part.get_content_type() == 'text/plain': | 51 | if part.get_content_type() == 'text/plain': |
1385 | 46 | return part.get_payload(decode=True) | 52 | charset = part.get_content_charset('ISO-8859-1') |
1386 | 53 | return part.get_payload(decode=True).decode(charset) | ||
1387 | 47 | else: | 54 | else: |
1389 | 48 | return msg.get_payload(decode=True) | 55 | charset = msg.get_content_charset('ISO-8859-1') |
1390 | 56 | return msg.get_payload(decode=True).decode(charset) | ||
1391 | 49 | 57 | ||
1392 | 50 | 58 | ||
1393 | 51 | def guess_bugtask(bug, person): | 59 | def guess_bugtask(bug, person): |
1394 | diff --git a/lib/lp/services/signing/tests/helpers.py b/lib/lp/services/signing/tests/helpers.py | |||
1395 | index f819745..6831edb 100644 | |||
1396 | --- a/lib/lp/services/signing/tests/helpers.py | |||
1397 | +++ b/lib/lp/services/signing/tests/helpers.py | |||
1398 | @@ -49,7 +49,7 @@ class SigningServiceClientFixture(fixtures.Fixture): | |||
1399 | 49 | openpgp_key_algorithm=None, length=None): | 49 | openpgp_key_algorithm=None, length=None): |
1400 | 50 | key = bytes(PrivateKey.generate().public_key) | 50 | key = bytes(PrivateKey.generate().public_key) |
1401 | 51 | data = { | 51 | data = { |
1403 | 52 | "fingerprint": self.factory.getUniqueHexString(40), | 52 | "fingerprint": self.factory.getUniqueHexString(40).upper(), |
1404 | 53 | "public-key": key, | 53 | "public-key": key, |
1405 | 54 | } | 54 | } |
1406 | 55 | self.generate_returns.append((key_type, data)) | 55 | self.generate_returns.append((key_type, data)) |
1407 | @@ -69,7 +69,7 @@ class SigningServiceClientFixture(fixtures.Fixture): | |||
1408 | 69 | 69 | ||
1409 | 70 | def _inject(self, key_type, private_key, public_key, description, | 70 | def _inject(self, key_type, private_key, public_key, description, |
1410 | 71 | created_at): | 71 | created_at): |
1412 | 72 | data = {'fingerprint': self.factory.getUniqueHexString(40)} | 72 | data = {'fingerprint': self.factory.getUniqueHexString(40).upper()} |
1413 | 73 | self.inject_returns.append(data) | 73 | self.inject_returns.append(data) |
1414 | 74 | return data | 74 | return data |
1415 | 75 | 75 | ||
1416 | diff --git a/lib/lp/services/worlddata/vocabularies.py b/lib/lp/services/worlddata/vocabularies.py | |||
1417 | index 58d2c8e..be963f4 100644 | |||
1418 | --- a/lib/lp/services/worlddata/vocabularies.py | |||
1419 | +++ b/lib/lp/services/worlddata/vocabularies.py | |||
1420 | @@ -1,6 +1,8 @@ | |||
1421 | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the |
1422 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
1423 | 3 | 3 | ||
1424 | 4 | from __future__ import absolute_import, print_function, unicode_literals | ||
1425 | 5 | |||
1426 | 4 | __all__ = [ | 6 | __all__ = [ |
1427 | 5 | 'CountryNameVocabulary', | 7 | 'CountryNameVocabulary', |
1428 | 6 | 'LanguageVocabulary', | 8 | 'LanguageVocabulary', |
1429 | @@ -10,6 +12,7 @@ __all__ = [ | |||
1430 | 10 | __metaclass__ = type | 12 | __metaclass__ = type |
1431 | 11 | 13 | ||
1432 | 12 | import pytz | 14 | import pytz |
1433 | 15 | import six | ||
1434 | 13 | from sqlobject import SQLObjectNotFound | 16 | from sqlobject import SQLObjectNotFound |
1435 | 14 | from zope.interface import alsoProvides | 17 | from zope.interface import alsoProvides |
1436 | 15 | from zope.schema.vocabulary import ( | 18 | from zope.schema.vocabulary import ( |
1437 | @@ -24,7 +27,7 @@ from lp.services.worlddata.model.country import Country | |||
1438 | 24 | from lp.services.worlddata.model.language import Language | 27 | from lp.services.worlddata.model.language import Language |
1439 | 25 | 28 | ||
1440 | 26 | # create a sorted list of the common time zone names, with UTC at the start | 29 | # create a sorted list of the common time zone names, with UTC at the start |
1442 | 27 | _values = sorted(pytz.common_timezones) | 30 | _values = sorted(six.ensure_text(tz) for tz in pytz.common_timezones) |
1443 | 28 | _values.remove('UTC') | 31 | _values.remove('UTC') |
1444 | 29 | _values.insert(0, 'UTC') | 32 | _values.insert(0, 'UTC') |
1445 | 30 | 33 | ||
1446 | diff --git a/lib/lp/soyuz/adapters/tests/test_archivedependencies.py b/lib/lp/soyuz/adapters/tests/test_archivedependencies.py | |||
1447 | index 7cd3cad..d5a2068 100644 | |||
1448 | --- a/lib/lp/soyuz/adapters/tests/test_archivedependencies.py | |||
1449 | +++ b/lib/lp/soyuz/adapters/tests/test_archivedependencies.py | |||
1450 | @@ -128,7 +128,7 @@ class TestSourcesList(TestCaseWithFactory): | |||
1451 | 128 | """Test sources.list contents for building, and related mechanisms.""" | 128 | """Test sources.list contents for building, and related mechanisms.""" |
1452 | 129 | 129 | ||
1453 | 130 | layer = LaunchpadZopelessLayer | 130 | layer = LaunchpadZopelessLayer |
1455 | 131 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=10) | 131 | run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30) |
1456 | 132 | 132 | ||
1457 | 133 | ubuntu_components = [ | 133 | ubuntu_components = [ |
1458 | 134 | "main", "restricted", "universe", "multiverse", "partner"] | 134 | "main", "restricted", "universe", "multiverse", "partner"] |
1459 | diff --git a/lib/lp/soyuz/configure.zcml b/lib/lp/soyuz/configure.zcml | |||
1460 | index 2e97958..643a85b 100644 | |||
1461 | --- a/lib/lp/soyuz/configure.zcml | |||
1462 | +++ b/lib/lp/soyuz/configure.zcml | |||
1463 | @@ -373,7 +373,6 @@ | |||
1464 | 373 | set_schema="lp.soyuz.interfaces.archive.IArchiveRestricted"/> | 373 | set_schema="lp.soyuz.interfaces.archive.IArchiveRestricted"/> |
1465 | 374 | <require | 374 | <require |
1466 | 375 | permission="launchpad.InternalScriptsOnly" | 375 | permission="launchpad.InternalScriptsOnly" |
1467 | 376 | attributes="signing_key_owner" | ||
1468 | 377 | set_attributes="dirty_suites distribution signing_key_owner | 376 | set_attributes="dirty_suites distribution signing_key_owner |
1469 | 378 | signing_key_fingerprint"/> | 377 | signing_key_fingerprint"/> |
1470 | 379 | </class> | 378 | </class> |
1471 | diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py | |||
1472 | index 407f953..b6cc663 100644 | |||
1473 | --- a/lib/lp/soyuz/interfaces/archive.py | |||
1474 | +++ b/lib/lp/soyuz/interfaces/archive.py | |||
1475 | @@ -461,6 +461,8 @@ class IArchiveSubscriberView(Interface): | |||
1476 | 461 | "explicit publish flag and any other constraints.")) | 461 | "explicit publish flag and any other constraints.")) |
1477 | 462 | series_with_sources = Attribute( | 462 | series_with_sources = Attribute( |
1478 | 463 | "DistroSeries to which this archive has published sources") | 463 | "DistroSeries to which this archive has published sources") |
1479 | 464 | signing_key_owner = Reference( | ||
1480 | 465 | title=_("Archive signing key owner"), required=False, schema=IPerson) | ||
1481 | 464 | signing_key_fingerprint = exported( | 466 | signing_key_fingerprint = exported( |
1482 | 465 | Text( | 467 | Text( |
1483 | 466 | title=_("Archive signing key fingerprint"), required=False, | 468 | title=_("Archive signing key fingerprint"), required=False, |
1484 | diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py | |||
1485 | index a37b737..0d58e56 100644 | |||
1486 | --- a/lib/lp/soyuz/model/archive.py | |||
1487 | +++ b/lib/lp/soyuz/model/archive.py | |||
1488 | @@ -466,7 +466,7 @@ class Archive(SQLBase): | |||
1489 | 466 | return ( | 466 | return ( |
1490 | 467 | not config.personalpackagearchive.require_signing_keys or | 467 | not config.personalpackagearchive.require_signing_keys or |
1491 | 468 | not self.is_ppa or | 468 | not self.is_ppa or |
1493 | 469 | self.signing_key is not None) | 469 | self.signing_key_fingerprint is not None) |
1494 | 470 | 470 | ||
1495 | 471 | @property | 471 | @property |
1496 | 472 | def reference(self): | 472 | def reference(self): |
1497 | @@ -2717,10 +2717,12 @@ class ArchiveSet: | |||
1498 | 2717 | (owner.name, distribution.name, name)) | 2717 | (owner.name, distribution.name, name)) |
1499 | 2718 | 2718 | ||
1500 | 2719 | # Signing-key for the default PPA is reused when it's already present. | 2719 | # Signing-key for the default PPA is reused when it's already present. |
1502 | 2720 | signing_key = None | 2720 | signing_key_owner = None |
1503 | 2721 | signing_key_fingerprint = None | ||
1504 | 2721 | if purpose == ArchivePurpose.PPA: | 2722 | if purpose == ArchivePurpose.PPA: |
1505 | 2722 | if owner.archive is not None: | 2723 | if owner.archive is not None: |
1507 | 2723 | signing_key = owner.archive.signing_key | 2724 | signing_key_owner = owner.archive.signing_key_owner |
1508 | 2725 | signing_key_fingerprint = owner.archive.signing_key_fingerprint | ||
1509 | 2724 | else: | 2726 | else: |
1510 | 2725 | # owner.archive is a cached property and we've just cached it. | 2727 | # owner.archive is a cached property and we've just cached it. |
1511 | 2726 | del get_property_cache(owner).archive | 2728 | del get_property_cache(owner).archive |
1512 | @@ -2729,9 +2731,8 @@ class ArchiveSet: | |||
1513 | 2729 | owner=owner, distribution=distribution, name=name, | 2731 | owner=owner, distribution=distribution, name=name, |
1514 | 2730 | displayname=displayname, description=description, | 2732 | displayname=displayname, description=description, |
1515 | 2731 | purpose=purpose, publish=publish, | 2733 | purpose=purpose, publish=publish, |
1519 | 2732 | signing_key_owner=signing_key.owner if signing_key else None, | 2734 | signing_key_owner=signing_key_owner, |
1520 | 2733 | signing_key_fingerprint=( | 2735 | signing_key_fingerprint=signing_key_fingerprint, |
1518 | 2734 | signing_key.fingerprint if signing_key else None), | ||
1521 | 2735 | require_virtualized=require_virtualized) | 2736 | require_virtualized=require_virtualized) |
1522 | 2736 | 2737 | ||
1523 | 2737 | # Upon creation archives are enabled by default. | 2738 | # Upon creation archives are enabled by default. |
1524 | diff --git a/lib/lp/soyuz/scripts/ppakeygenerator.py b/lib/lp/soyuz/scripts/ppakeygenerator.py | |||
1525 | index 190b4a0..88e1d84 100644 | |||
1526 | --- a/lib/lp/soyuz/scripts/ppakeygenerator.py | |||
1527 | +++ b/lib/lp/soyuz/scripts/ppakeygenerator.py | |||
1528 | @@ -34,7 +34,7 @@ class PPAKeyGenerator(LaunchpadCronScript): | |||
1529 | 34 | (archive.reference, archive.displayname)) | 34 | (archive.reference, archive.displayname)) |
1530 | 35 | archive_signing_key = IArchiveGPGSigningKey(archive) | 35 | archive_signing_key = IArchiveGPGSigningKey(archive) |
1531 | 36 | archive_signing_key.generateSigningKey(log=self.logger) | 36 | archive_signing_key.generateSigningKey(log=self.logger) |
1533 | 37 | self.logger.info("Key %s" % archive.signing_key.fingerprint) | 37 | self.logger.info("Key %s" % archive.signing_key_fingerprint) |
1534 | 38 | 38 | ||
1535 | 39 | def main(self): | 39 | def main(self): |
1536 | 40 | """Generate signing keys for the selected PPAs.""" | 40 | """Generate signing keys for the selected PPAs.""" |
1537 | @@ -45,11 +45,11 @@ class PPAKeyGenerator(LaunchpadCronScript): | |||
1538 | 45 | raise LaunchpadScriptFailure( | 45 | raise LaunchpadScriptFailure( |
1539 | 46 | "No archive named '%s' could be found." | 46 | "No archive named '%s' could be found." |
1540 | 47 | % self.options.archive) | 47 | % self.options.archive) |
1542 | 48 | if archive.signing_key is not None: | 48 | if archive.signing_key_fingerprint is not None: |
1543 | 49 | raise LaunchpadScriptFailure( | 49 | raise LaunchpadScriptFailure( |
1544 | 50 | "%s (%s) already has a signing_key (%s)" | 50 | "%s (%s) already has a signing_key (%s)" |
1545 | 51 | % (archive.reference, archive.displayname, | 51 | % (archive.reference, archive.displayname, |
1547 | 52 | archive.signing_key.fingerprint)) | 52 | archive.signing_key_fingerprint)) |
1548 | 53 | archives = [archive] | 53 | archives = [archive] |
1549 | 54 | else: | 54 | else: |
1550 | 55 | archive_set = getUtility(IArchiveSet) | 55 | archive_set = getUtility(IArchiveSet) |
1551 | diff --git a/lib/lp/soyuz/scripts/tests/test_ppakeygenerator.py b/lib/lp/soyuz/scripts/tests/test_ppakeygenerator.py | |||
1552 | index 56e8710..a5d3caf 100644 | |||
1553 | --- a/lib/lp/soyuz/scripts/tests/test_ppakeygenerator.py | |||
1554 | +++ b/lib/lp/soyuz/scripts/tests/test_ppakeygenerator.py | |||
1555 | @@ -83,7 +83,7 @@ class TestPPAKeyGenerator(TestCase): | |||
1556 | 83 | LaunchpadScriptFailure, | 83 | LaunchpadScriptFailure, |
1557 | 84 | ("~cprov/ubuntu/ppa (PPA for Celso Providelo) already has a " | 84 | ("~cprov/ubuntu/ppa (PPA for Celso Providelo) already has a " |
1558 | 85 | "signing_key (%s)" % | 85 | "signing_key (%s)" % |
1560 | 86 | cprov.archive.signing_key.fingerprint), | 86 | cprov.archive.signing_key_fingerprint), |
1561 | 87 | key_generator.main) | 87 | key_generator.main) |
1562 | 88 | 88 | ||
1563 | 89 | def testGenerateKeyForASinglePPA(self): | 89 | def testGenerateKeyForASinglePPA(self): |
1564 | @@ -95,14 +95,14 @@ class TestPPAKeyGenerator(TestCase): | |||
1565 | 95 | cprov = getUtility(IPersonSet).getByName('cprov') | 95 | cprov = getUtility(IPersonSet).getByName('cprov') |
1566 | 96 | self._fixArchiveForKeyGeneration(cprov.archive) | 96 | self._fixArchiveForKeyGeneration(cprov.archive) |
1567 | 97 | 97 | ||
1569 | 98 | self.assertTrue(cprov.archive.signing_key is None) | 98 | self.assertIsNone(cprov.archive.signing_key_fingerprint) |
1570 | 99 | 99 | ||
1571 | 100 | txn = FakeTransaction() | 100 | txn = FakeTransaction() |
1572 | 101 | key_generator = self._getKeyGenerator( | 101 | key_generator = self._getKeyGenerator( |
1573 | 102 | archive_reference='~cprov/ubuntutest/ppa', txn=txn) | 102 | archive_reference='~cprov/ubuntutest/ppa', txn=txn) |
1574 | 103 | key_generator.main() | 103 | key_generator.main() |
1575 | 104 | 104 | ||
1577 | 105 | self.assertTrue(cprov.archive.signing_key is not None) | 105 | self.assertIsNotNone(cprov.archive.signing_key_fingerprint) |
1578 | 106 | self.assertEqual(txn.commit_count, 1) | 106 | self.assertEqual(txn.commit_count, 1) |
1579 | 107 | 107 | ||
1580 | 108 | def testGenerateKeyForAllPPA(self): | 108 | def testGenerateKeyForAllPPA(self): |
1581 | @@ -115,13 +115,13 @@ class TestPPAKeyGenerator(TestCase): | |||
1582 | 115 | 115 | ||
1583 | 116 | for archive in archives: | 116 | for archive in archives: |
1584 | 117 | self._fixArchiveForKeyGeneration(archive) | 117 | self._fixArchiveForKeyGeneration(archive) |
1586 | 118 | self.assertTrue(archive.signing_key is None) | 118 | self.assertIsNone(archive.signing_key_fingerprint) |
1587 | 119 | 119 | ||
1588 | 120 | txn = FakeTransaction() | 120 | txn = FakeTransaction() |
1589 | 121 | key_generator = self._getKeyGenerator(txn=txn) | 121 | key_generator = self._getKeyGenerator(txn=txn) |
1590 | 122 | key_generator.main() | 122 | key_generator.main() |
1591 | 123 | 123 | ||
1592 | 124 | for archive in archives: | 124 | for archive in archives: |
1594 | 125 | self.assertTrue(archive.signing_key is not None) | 125 | self.assertIsNotNone(archive.signing_key_fingerprint) |
1595 | 126 | 126 | ||
1596 | 127 | self.assertEqual(txn.commit_count, len(archives)) | 127 | self.assertEqual(txn.commit_count, len(archives)) |
1597 | diff --git a/lib/lp/soyuz/stories/soyuz/xx-person-packages.txt b/lib/lp/soyuz/stories/soyuz/xx-person-packages.txt | |||
1598 | index d815121..95ff407 100644 | |||
1599 | --- a/lib/lp/soyuz/stories/soyuz/xx-person-packages.txt | |||
1600 | +++ b/lib/lp/soyuz/stories/soyuz/xx-person-packages.txt | |||
1601 | @@ -400,7 +400,7 @@ Then delete the 'source2' package. | |||
1602 | 400 | ... print(extract_text(empty_section)) | 400 | ... print(extract_text(empty_section)) |
1603 | 401 | >>> print_ppa_packages(admin_browser.contents) | 401 | >>> print_ppa_packages(admin_browser.contents) |
1604 | 402 | Source Published Status Series Section Build Status | 402 | Source Published Status Series Section Build Status |
1606 | 403 | source2 - 666... a moment ago Deleted ... | 403 | source2 - 666... Deleted ... |
1607 | 404 | >>> update_cached_records() | 404 | >>> update_cached_records() |
1608 | 405 | 405 | ||
1609 | 406 | Now re-list the PPA's packages, 'source2' was deleted but still | 406 | Now re-list the PPA's packages, 'source2' was deleted but still |
1610 | diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py | |||
1611 | index 91acf53..5c3f3b6 100644 | |||
1612 | --- a/lib/lp/soyuz/tests/test_archive.py | |||
1613 | +++ b/lib/lp/soyuz/tests/test_archive.py | |||
1614 | @@ -4046,7 +4046,7 @@ class TestSigningKeyPropagation(TestCaseWithFactory): | |||
1615 | 4046 | 4046 | ||
1616 | 4047 | def test_ppa_created_with_no_signing_key(self): | 4047 | def test_ppa_created_with_no_signing_key(self): |
1617 | 4048 | ppa = self.factory.makeArchive(purpose=ArchivePurpose.PPA) | 4048 | ppa = self.factory.makeArchive(purpose=ArchivePurpose.PPA) |
1619 | 4049 | self.assertIsNone(ppa.signing_key) | 4049 | self.assertIsNone(ppa.signing_key_fingerprint) |
1620 | 4050 | 4050 | ||
1621 | 4051 | def test_default_signing_key_propagated_to_new_ppa(self): | 4051 | def test_default_signing_key_propagated_to_new_ppa(self): |
1622 | 4052 | person = self.factory.makePerson() | 4052 | person = self.factory.makePerson() |
1623 | diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py | |||
1624 | index f5ad237..c109014 100644 | |||
1625 | --- a/lib/lp/testing/factory.py | |||
1626 | +++ b/lib/lp/testing/factory.py | |||
1627 | @@ -1121,14 +1121,14 @@ class BareLaunchpadObjectFactory(ObjectFactory): | |||
1628 | 1121 | def makeSprint(self, title=None, name=None): | 1121 | def makeSprint(self, title=None, name=None): |
1629 | 1122 | """Make a sprint.""" | 1122 | """Make a sprint.""" |
1630 | 1123 | if title is None: | 1123 | if title is None: |
1632 | 1124 | title = self.getUniqueString('title') | 1124 | title = self.getUniqueUnicode('title') |
1633 | 1125 | owner = self.makePerson() | 1125 | owner = self.makePerson() |
1634 | 1126 | if name is None: | 1126 | if name is None: |
1636 | 1127 | name = self.getUniqueString('name') | 1127 | name = self.getUniqueUnicode('name') |
1637 | 1128 | time_starts = datetime(2009, 1, 1, tzinfo=pytz.UTC) | 1128 | time_starts = datetime(2009, 1, 1, tzinfo=pytz.UTC) |
1638 | 1129 | time_ends = datetime(2009, 1, 2, tzinfo=pytz.UTC) | 1129 | time_ends = datetime(2009, 1, 2, tzinfo=pytz.UTC) |
1641 | 1130 | time_zone = 'UTC' | 1130 | time_zone = u'UTC' |
1642 | 1131 | summary = self.getUniqueString('summary') | 1131 | summary = self.getUniqueUnicode('summary') |
1643 | 1132 | return getUtility(ISprintSet).new( | 1132 | return getUtility(ISprintSet).new( |
1644 | 1133 | owner=owner, name=name, title=title, time_zone=time_zone, | 1133 | owner=owner, name=name, title=title, time_zone=time_zone, |
1645 | 1134 | time_starts=time_starts, time_ends=time_ends, summary=summary) | 1134 | time_starts=time_starts, time_ends=time_ends, summary=summary) |
1646 | diff --git a/lib/lp/translations/pottery/tests/test_detect_intltool.py b/lib/lp/translations/pottery/tests/test_detect_intltool.py | |||
1647 | index cc0d5d7..21dce33 100644 | |||
1648 | --- a/lib/lp/translations/pottery/tests/test_detect_intltool.py | |||
1649 | +++ b/lib/lp/translations/pottery/tests/test_detect_intltool.py | |||
1650 | @@ -6,9 +6,11 @@ __metaclass__ = type | |||
1651 | 6 | import errno | 6 | import errno |
1652 | 7 | import os | 7 | import os |
1653 | 8 | import tarfile | 8 | import tarfile |
1654 | 9 | from textwrap import dedent | ||
1655 | 9 | 10 | ||
1656 | 10 | from breezy.controldir import ControlDir | 11 | from breezy.controldir import ControlDir |
1657 | 11 | 12 | ||
1658 | 13 | from lp.services.scripts.tests import run_script | ||
1659 | 12 | from lp.testing import TestCase | 14 | from lp.testing import TestCase |
1660 | 13 | from lp.translations.pottery.detect_intltool import is_intltool_structure | 15 | from lp.translations.pottery.detect_intltool import is_intltool_structure |
1661 | 14 | 16 | ||
1662 | @@ -52,6 +54,18 @@ class SetupTestPackageMixin: | |||
1663 | 52 | with open(path, 'w') as the_file: | 54 | with open(path, 'w') as the_file: |
1664 | 53 | the_file.write(content) | 55 | the_file.write(content) |
1665 | 54 | 56 | ||
1666 | 57 | def test_pottery_generate_intltool_script(self): | ||
1667 | 58 | # Let the script run to see it works fine. | ||
1668 | 59 | self.prepare_package("intltool_POTFILES_in_2") | ||
1669 | 60 | |||
1670 | 61 | return_code, stdout, stderr = run_script( | ||
1671 | 62 | 'scripts/rosetta/pottery-generate-intltool.py', []) | ||
1672 | 63 | |||
1673 | 64 | self.assertEqual(dedent("""\ | ||
1674 | 65 | module1/po/messages.pot | ||
1675 | 66 | po/messages.pot | ||
1676 | 67 | """), stdout) | ||
1677 | 68 | |||
1678 | 55 | 69 | ||
1679 | 56 | class TestDetectIntltoolInBzrTree(TestCase, SetupTestPackageMixin): | 70 | class TestDetectIntltoolInBzrTree(TestCase, SetupTestPackageMixin): |
1680 | 57 | 71 | ||
1681 | diff --git a/scripts/rosetta/pottery-generate-intltool.py b/scripts/rosetta/pottery-generate-intltool.py | |||
1682 | 58 | new file mode 100755 | 72 | new file mode 100755 |
1683 | index 0000000..4557676 | |||
1684 | --- /dev/null | |||
1685 | +++ b/scripts/rosetta/pottery-generate-intltool.py | |||
1686 | @@ -0,0 +1,56 @@ | |||
1687 | 1 | #!/usr/bin/python2 -S | ||
1688 | 2 | # | ||
1689 | 3 | # Copyright 2009-2020 Canonical Ltd. This software is licensed under the | ||
1690 | 4 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
1691 | 5 | |||
1692 | 6 | """Print a list of directories that contain a valid intltool structure.""" | ||
1693 | 7 | |||
1694 | 8 | from __future__ import absolute_import, print_function, unicode_literals | ||
1695 | 9 | |||
1696 | 10 | import _pythonpath | ||
1697 | 11 | |||
1698 | 12 | import os.path | ||
1699 | 13 | |||
1700 | 14 | from lpbuildd.pottery.intltool import generate_pots | ||
1701 | 15 | from lpbuildd.tests.fakeslave import UncontainedBackend as _UncontainedBackend | ||
1702 | 16 | |||
1703 | 17 | from lp.services.scripts.base import LaunchpadScript | ||
1704 | 18 | |||
1705 | 19 | |||
1706 | 20 | class UncontainedBackend(_UncontainedBackend): | ||
1707 | 21 | """Like UncontainedBackend, except avoid executing "test". | ||
1708 | 22 | |||
1709 | 23 | Otherwise we can end up with confusion between the Unix "test" utility | ||
1710 | 24 | and Launchpad's bin/test. | ||
1711 | 25 | """ | ||
1712 | 26 | |||
1713 | 27 | def path_exists(self, path): | ||
1714 | 28 | """See `Backend`.""" | ||
1715 | 29 | return os.path.exists(path) | ||
1716 | 30 | |||
1717 | 31 | def isdir(self, path): | ||
1718 | 32 | """See `Backend`.""" | ||
1719 | 33 | return os.path.isdir(path) | ||
1720 | 34 | |||
1721 | 35 | def islink(self, path): | ||
1722 | 36 | """See `Backend`.""" | ||
1723 | 37 | return os.path.islink(path) | ||
1724 | 38 | |||
1725 | 39 | |||
1726 | 40 | class PotteryGenerateIntltool(LaunchpadScript): | ||
1727 | 41 | """Print a list of directories that contain a valid intltool structure.""" | ||
1728 | 42 | |||
1729 | 43 | def add_my_options(self): | ||
1730 | 44 | """See `LaunchpadScript`.""" | ||
1731 | 45 | self.parser.usage = "%prog [options] [PATH]" | ||
1732 | 46 | |||
1733 | 47 | def main(self): | ||
1734 | 48 | """See `LaunchpadScript`.""" | ||
1735 | 49 | path = self.args[0] if self.args else "." | ||
1736 | 50 | backend = UncontainedBackend("dummy") | ||
1737 | 51 | print("\n".join(generate_pots(backend, path))) | ||
1738 | 52 | |||
1739 | 53 | |||
1740 | 54 | if __name__ == "__main__": | ||
1741 | 55 | script = PotteryGenerateIntltool(name="pottery-generate-intltool") | ||
1742 | 56 | script.run() | ||
1743 | diff --git a/utilities/launchpad-database-setup b/utilities/launchpad-database-setup | |||
1744 | index 46a36ce..83b79b4 100755 | |||
1745 | --- a/utilities/launchpad-database-setup | |||
1746 | +++ b/utilities/launchpad-database-setup | |||
1747 | @@ -43,13 +43,6 @@ if ! sudo grep -q "port.*5432" /etc/postgresql/$pgversion/main/postgresql.conf; | |||
1748 | 43 | echo "ensure postgres is running on port 5432." | 43 | echo "ensure postgres is running on port 5432." |
1749 | 44 | fi; | 44 | fi; |
1750 | 45 | 45 | ||
1751 | 46 | if [ -e /etc/init.d/postgresql-$pgversion ]; then | ||
1752 | 47 | sudo /etc/init.d/postgresql-$pgversion stop | ||
1753 | 48 | else | ||
1754 | 49 | # This is Maverick. | ||
1755 | 50 | sudo /etc/init.d/postgresql stop $pgversion | ||
1756 | 51 | fi | ||
1757 | 52 | |||
1758 | 53 | echo Purging postgresql data... | 46 | echo Purging postgresql data... |
1759 | 54 | sudo pg_dropcluster $pgversion main --stop-server | 47 | sudo pg_dropcluster $pgversion main --stop-server |
1760 | 55 | echo Re-creating postgresql database... | 48 | echo Re-creating postgresql database... |
1761 | diff --git a/utilities/sourcedeps.cache b/utilities/sourcedeps.cache | |||
1762 | index ff07c4c..ca3b453 100644 | |||
1763 | --- a/utilities/sourcedeps.cache | |||
1764 | +++ b/utilities/sourcedeps.cache | |||
1765 | @@ -24,8 +24,8 @@ | |||
1766 | 24 | "cjwatson@canonical.com-20190614154330-091l9edcnubsjmsx" | 24 | "cjwatson@canonical.com-20190614154330-091l9edcnubsjmsx" |
1767 | 25 | ], | 25 | ], |
1768 | 26 | "loggerhead": [ | 26 | "loggerhead": [ |
1771 | 27 | 506, | 27 | 511, |
1772 | 28 | "cjwatson@canonical.com-20200710095850-o3aa6eo5a22jhuun" | 28 | "otto-copilot@canonical.com-20200918084828-dljpy2eewt6umnmd" |
1773 | 29 | ], | 29 | ], |
1774 | 30 | "pygettextpo": [ | 30 | "pygettextpo": [ |
1775 | 31 | 25, | 31 | 25, |
1776 | diff --git a/utilities/sourcedeps.conf b/utilities/sourcedeps.conf | |||
1777 | index 2377277..2815420 100644 | |||
1778 | --- a/utilities/sourcedeps.conf | |||
1779 | +++ b/utilities/sourcedeps.conf | |||
1780 | @@ -13,5 +13,5 @@ bzr-git lp:~launchpad-pqm/bzr-git/devel;revno=280 | |||
1781 | 13 | bzr-svn lp:~launchpad-pqm/bzr-svn/devel;revno=2725 | 13 | bzr-svn lp:~launchpad-pqm/bzr-svn/devel;revno=2725 |
1782 | 14 | cscvs lp:~launchpad-pqm/launchpad-cscvs/devel;revno=433 | 14 | cscvs lp:~launchpad-pqm/launchpad-cscvs/devel;revno=433 |
1783 | 15 | difftacular lp:~launchpad/difftacular/trunk;revno=11 | 15 | difftacular lp:~launchpad/difftacular/trunk;revno=11 |
1785 | 16 | loggerhead lp:~loggerhead-team/loggerhead/trunk-rich;revno=506 | 16 | loggerhead lp:~loggerhead-team/loggerhead/trunk-rich;revno=511 |
1786 | 17 | pygettextpo lp:~launchpad-pqm/pygettextpo/trunk;revno=25 | 17 | pygettextpo lp:~launchpad-pqm/pygettextpo/trunk;revno=25 |