Merge lp:~stub/launchpad/update-storm into lp:launchpad/db-devel
- update-storm
- Merge into db-devel
Proposed by
Stuart Bishop
on 2010-11-07
| Status: | Merged | ||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Approved by: | Stuart Bishop on 2010-11-09 | ||||||||||||||||||||
| Approved revision: | no longer in the source branch. | ||||||||||||||||||||
| Merged at revision: | 9959 | ||||||||||||||||||||
| Proposed branch: | lp:~stub/launchpad/update-storm | ||||||||||||||||||||
| Merge into: | lp:launchpad/db-devel | ||||||||||||||||||||
| Diff against target: |
2644 lines (+362/-446) 44 files modified
lib/canonical/launchpad/database/account.py (+7/-7) lib/canonical/launchpad/database/stormsugar.py (+1/-19) lib/canonical/launchpad/database/tests/test_stormextensions.py (+0/-101) lib/canonical/launchpad/helpers.py (+45/-0) lib/canonical/launchpad/vocabularies/dbobjects.py (+7/-2) lib/canonical/launchpad/webapp/pgsession.py (+16/-14) lib/canonical/launchpad/webapp/vocabulary.py (+5/-4) lib/lp/archivepublisher/ftparchive.py (+15/-9) lib/lp/bugs/model/bug.py (+25/-17) lib/lp/bugs/model/bugtask.py (+13/-8) lib/lp/bugs/model/bugwatch.py (+20/-24) lib/lp/code/model/branch.py (+2/-2) lib/lp/hardwaredb/model/hwdb.py (+17/-13) lib/lp/registry/browser/distributionsourcepackage.py (+2/-1) lib/lp/registry/doc/vocabularies.txt (+1/-1) lib/lp/registry/model/distribution.py (+9/-8) lib/lp/registry/model/distributionsourcepackage.py (+2/-4) lib/lp/registry/model/person.py (+18/-33) lib/lp/registry/model/pillar.py (+4/-1) lib/lp/registry/model/projectgroup.py (+2/-3) lib/lp/registry/model/sourcepackage.py (+2/-3) lib/lp/registry/model/sourcepackagename.py (+3/-1) lib/lp/registry/vocabularies.py (+15/-16) lib/lp/scripts/garbo.py (+5/-5) lib/lp/scripts/utilities/sanitizedb.py (+1/-1) lib/lp/services/database/bulk.py (+1/-2) lib/lp/services/worlddata/model/language.py (+5/-4) lib/lp/soyuz/doc/gina.txt (+1/-1) lib/lp/soyuz/doc/packageset.txt (+2/-2) lib/lp/soyuz/model/archivepermission.py (+12/-13) lib/lp/soyuz/model/binarypackagebuild.py (+2/-3) lib/lp/soyuz/model/binarypackagename.py (+9/-5) lib/lp/soyuz/model/distroarchseriesbinarypackage.py (+4/-10) lib/lp/soyuz/model/packagediff.py (+5/-6) lib/lp/soyuz/model/packageset.py (+17/-17) lib/lp/soyuz/model/publishing.py (+22/-30) lib/lp/soyuz/model/queue.py (+28/-40) lib/lp/soyuz/scripts/initialise_distroseries.py (+3/-1) lib/lp/translations/model/pofile.py (+1/-2) lib/lp/translations/model/potemplate.py (+1/-2) lib/lp/translations/model/translationimportqueue.py (+1/-2) lib/lp/translations/scripts/migrate_variants.py (+9/-9) setup.py 
(+1/-0) versions.cfg (+1/-0) |
||||||||||||||||||||
| To merge this branch: | bzr merge lp:~stub/launchpad/update-storm | ||||||||||||||||||||
| Related bugs: |
|
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Launchpad code reviewers | 2010-11-07 | Pending | |
|
Review via email:
|
|||
Commit Message
Update to psycopg2 2.2.2
Description of the Change
Code changes to migrate us to modern psycopg2, such as the one packaged in Lucid. And delinting.
Rather than change the tests, I elected to cast to Unicode in the main code - fixing the tests might not be enough as other call sites might still be sending str on untested code paths.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
| 1 | === modified file 'lib/canonical/launchpad/database/account.py' |
| 2 | --- lib/canonical/launchpad/database/account.py 2010-10-03 15:30:06 +0000 |
| 3 | +++ lib/canonical/launchpad/database/account.py 2010-11-09 15:16:26 +0000 |
| 4 | @@ -16,14 +16,12 @@ |
| 5 | from zope.interface import implements |
| 6 | from zope.security.proxy import removeSecurityProxy |
| 7 | |
| 8 | -from canonical.database.constants import ( |
| 9 | - DEFAULT, |
| 10 | - UTC_NOW, |
| 11 | - ) |
| 12 | +from canonical.database.constants import UTC_NOW |
| 13 | from canonical.database.datetimecol import UtcDateTimeCol |
| 14 | from canonical.database.enumcol import EnumCol |
| 15 | from canonical.database.sqlbase import SQLBase |
| 16 | from canonical.launchpad.database.emailaddress import EmailAddress |
| 17 | +from canonical.launchpad.helpers import ensure_unicode |
| 18 | from canonical.launchpad.interfaces import ( |
| 19 | IMasterObject, |
| 20 | IMasterStore, |
| 21 | @@ -290,10 +288,12 @@ |
| 22 | |
| 23 | def getByEmail(self, email): |
| 24 | """See `IAccountSet`.""" |
| 25 | - conditions = [EmailAddress.account == Account.id, |
| 26 | - EmailAddress.email.lower() == email.lower().strip()] |
| 27 | store = IStore(Account) |
| 28 | - account = store.find(Account, *conditions).one() |
| 29 | + account = store.find( |
| 30 | + Account, |
| 31 | + EmailAddress.account == Account.id, |
| 32 | + EmailAddress.email.lower() |
| 33 | + == ensure_unicode(email).strip().lower()).one() |
| 34 | if account is None: |
| 35 | raise LookupError(email) |
| 36 | return account |
| 37 | |
| 38 | === modified file 'lib/canonical/launchpad/database/stormsugar.py' |
| 39 | --- lib/canonical/launchpad/database/stormsugar.py 2010-08-20 20:31:18 +0000 |
| 40 | +++ lib/canonical/launchpad/database/stormsugar.py 2010-11-09 15:16:26 +0000 |
| 41 | @@ -14,16 +14,11 @@ |
| 42 | __all__ = [ |
| 43 | 'ForeignKey', |
| 44 | 'ObjectNotFound', |
| 45 | - 'StartsWith', |
| 46 | 'Sugar', |
| 47 | 'UnknownProperty', |
| 48 | ] |
| 49 | |
| 50 | |
| 51 | -from storm.expr import ( |
| 52 | - Like, |
| 53 | - SQLRaw, |
| 54 | - ) |
| 55 | from storm.locals import ( |
| 56 | Int, |
| 57 | Reference, |
| 58 | @@ -64,22 +59,9 @@ |
| 59 | Reference.__init__(self, None, remote_key) |
| 60 | |
| 61 | |
| 62 | -class StartsWith(Like): |
| 63 | - """Allow Like matching but only at the beginning of a string. |
| 64 | - |
| 65 | - The string is properly escaped. |
| 66 | - """ |
| 67 | - def __init__(self, expr, string): |
| 68 | - # Escape instances of !, _, and % so they don't interfere with the |
| 69 | - # underlying LIKE operation. Use ! as the escape character. |
| 70 | - string = string.replace("!", "!!") \ |
| 71 | - .replace("_", "!_") \ |
| 72 | - .replace("%", "!%") |
| 73 | - Like.__init__(self, expr, string+"%", escape=SQLRaw("'!'")) |
| 74 | - |
| 75 | - |
| 76 | # Use Storm.__metaclass__ because storm.properties.PropertyPublisherMeta isn't |
| 77 | # in an __all__. |
| 78 | + |
| 79 | class Sugary(Storm.__metaclass__): |
| 80 | """Metaclass that adds support for ForeignKey.""" |
| 81 | |
| 82 | |
| 83 | === removed file 'lib/canonical/launchpad/database/tests/test_stormextensions.py' |
| 84 | --- lib/canonical/launchpad/database/tests/test_stormextensions.py 2010-08-20 20:31:18 +0000 |
| 85 | +++ lib/canonical/launchpad/database/tests/test_stormextensions.py 1970-01-01 00:00:00 +0000 |
| 86 | @@ -1,101 +0,0 @@ |
| 87 | -# Copyright 2009 Canonical Ltd. This software is licensed under the |
| 88 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
| 89 | - |
| 90 | -"""Tests for extensions in stormsugar, but not stormsugar proper.""" |
| 91 | - |
| 92 | -__metaclass__ = type |
| 93 | - |
| 94 | - |
| 95 | -from unittest import TestLoader |
| 96 | - |
| 97 | -from storm.expr import Lower |
| 98 | -from zope.component import getUtility |
| 99 | - |
| 100 | -from canonical.launchpad.database.stormsugar import StartsWith |
| 101 | -from canonical.launchpad.webapp.interfaces import ( |
| 102 | - IStoreSelector, |
| 103 | - MAIN_STORE, |
| 104 | - MASTER_FLAVOR, |
| 105 | - ) |
| 106 | -from canonical.testing.layers import DatabaseFunctionalLayer |
| 107 | -from lp.registry.model.person import Person |
| 108 | -from lp.testing import TestCaseWithFactory |
| 109 | - |
| 110 | - |
| 111 | -class TestStormExpressions(TestCaseWithFactory): |
| 112 | - |
| 113 | - layer = DatabaseFunctionalLayer |
| 114 | - |
| 115 | - def setUp(self): |
| 116 | - TestCaseWithFactory.setUp(self) |
| 117 | - selector = getUtility(IStoreSelector) |
| 118 | - self.store = selector.get(MAIN_STORE, MASTER_FLAVOR) |
| 119 | - |
| 120 | - def test_StartsWith_SQLGeneration(self): |
| 121 | - from storm.databases.postgres import compile |
| 122 | - |
| 123 | - # Show that the SQL generated uses LIKE with the '!' as the escape |
| 124 | - # character. |
| 125 | - expr = StartsWith("name", "value") |
| 126 | - sql = compile(expr) |
| 127 | - self.assertEqual(sql, "? LIKE ? ESCAPE '!'") |
| 128 | - |
| 129 | - # Unlike Storm's Like, StartsWith does not accept a case_sensitive |
| 130 | - # flag. |
| 131 | - self.assertRaises(TypeError, StartsWith, "name", "value", |
| 132 | - case_sensitive=False) |
| 133 | - |
| 134 | - def test_StartsWithUse(self): |
| 135 | - """StartWith correctly performs searches.""" |
| 136 | - |
| 137 | - person1 = self.factory.makePerson(name='aa', displayname="John Doe") |
| 138 | - person2 = self.factory.makePerson(name='bb', displayname="Johan Doe") |
| 139 | - person3 = self.factory.makePerson(name='cc', displayname="Joh%n Doe") |
| 140 | - |
| 141 | - # Successful search from the start of the name. |
| 142 | - expr = StartsWith(Person.displayname, 'John') |
| 143 | - results = self.store.find(Person, expr) |
| 144 | - self.assertEqual([person1], [p for p in results]) |
| 145 | - |
| 146 | - # Searching for a missing pattern returns no result. |
| 147 | - expr = StartsWith(Person.displayname, 'John Roe') |
| 148 | - results = self.store.find(Person, expr) |
| 149 | - self.assertEqual([], [p for p in results]) |
| 150 | - |
| 151 | - |
| 152 | - # Searching for a non-initial pattern returns no result. |
| 153 | - expr = StartsWith(Person.displayname, 'Roe') |
| 154 | - results = self.store.find(Person, expr) |
| 155 | - self.assertEqual([], [p for p in results]) |
| 156 | - |
| 157 | - # Multiple matches are returned. |
| 158 | - expr = StartsWith(Person.displayname, 'Joh') |
| 159 | - results = self.store.find(Person, expr) |
| 160 | - results.order_by('name') |
| 161 | - self.assertEqual([person1, person2, person3], [p for p in results]) |
| 162 | - |
| 163 | - # Wildcards are properly escaped. No need for quote_like or |
| 164 | - # equivalent. |
| 165 | - expr = StartsWith(Person.displayname, 'Joh%n') |
| 166 | - results = self.store.find(Person, expr) |
| 167 | - self.assertEqual([person3], [p for p in results]) |
| 168 | - |
| 169 | - # Searches are case-sensitive. |
| 170 | - expr = StartsWith(Person.displayname, 'john') |
| 171 | - results = self.store.find(Person, expr) |
| 172 | - self.assertEqual([], [p for p in results]) |
| 173 | - |
| 174 | - # Use of .lower allows case-insensitive searching. |
| 175 | - expr = StartsWith(Person.displayname.lower(), 'john') |
| 176 | - results = self.store.find(Person, expr) |
| 177 | - self.assertEqual([person1], [p for p in results]) |
| 178 | - |
| 179 | - # Use of Lower allows case-insensitive searching. |
| 180 | - expr = StartsWith(Lower(Person.displayname), 'john') |
| 181 | - results = self.store.find(Person, expr) |
| 182 | - self.assertEqual([person1], [p for p in results]) |
| 183 | - |
| 184 | - |
| 185 | - |
| 186 | -def test_suite(): |
| 187 | - return TestLoader().loadTestsFromName(__name__) |
| 188 | |
| 189 | === modified file 'lib/canonical/launchpad/helpers.py' |
| 190 | --- lib/canonical/launchpad/helpers.py 2010-10-03 15:30:06 +0000 |
| 191 | +++ lib/canonical/launchpad/helpers.py 2010-11-09 15:16:26 +0000 |
| 192 | @@ -582,3 +582,48 @@ |
| 193 | else: |
| 194 | items[-1] = '%s %s' % (conjunction, items[-1]) |
| 195 | return ', '.join(items) |
| 196 | + |
| 197 | + |
| 198 | +def ensure_unicode(string): |
| 199 | + r"""Return input as unicode. None is passed through unharmed. |
| 200 | + |
| 201 | + Do not use this method. This method exists only to help migration |
| 202 | + of legacy code where str objects were being passed into contexts |
| 203 | + where unicode objects are required. All invocations of |
| 204 | + ensure_unicode() should eventually be removed. |
| 205 | + |
| 206 | + This differs from the builtin unicode() function, as a TypeError |
| 207 | + exception will be raised if the parameter is not a basestring or if |
| 208 | + a raw string is not ASCII. |
| 209 | + |
| 210 | + >>> ensure_unicode(u'hello') |
| 211 | + u'hello' |
| 212 | + |
| 213 | + >>> ensure_unicode('hello') |
| 214 | + u'hello' |
| 215 | + |
| 216 | + >>> ensure_unicode(u'A'.encode('utf-16')) # Not ASCII |
| 217 | + Traceback (most recent call last): |
| 218 | + ... |
| 219 | + TypeError: '\xff\xfeA\x00' is not US-ASCII |
| 220 | + |
| 221 | + >>> ensure_unicode(42) |
| 222 | + Traceback (most recent call last): |
| 223 | + ... |
| 224 | + TypeError: 42 is not a basestring (<type 'int'>) |
| 225 | + |
| 226 | + >>> ensure_unicode(None) is None |
| 227 | + True |
| 228 | + """ |
| 229 | + if string is None: |
| 230 | + return None |
| 231 | + elif isinstance(string, unicode): |
| 232 | + return string |
| 233 | + elif isinstance(string, basestring): |
| 234 | + try: |
| 235 | + return string.decode('US-ASCII') |
| 236 | + except UnicodeDecodeError: |
| 237 | + raise TypeError("%s is not US-ASCII" % repr(string)) |
| 238 | + else: |
| 239 | + raise TypeError( |
| 240 | + "%r is not a basestring (%r)" % (string, type(string))) |
| 241 | |
| 242 | === modified file 'lib/canonical/launchpad/vocabularies/dbobjects.py' |
| 243 | --- lib/canonical/launchpad/vocabularies/dbobjects.py 2010-09-28 03:01:37 +0000 |
| 244 | +++ lib/canonical/launchpad/vocabularies/dbobjects.py 2010-11-09 15:16:26 +0000 |
| 245 | @@ -64,7 +64,10 @@ |
| 246 | quote, |
| 247 | sqlvalues, |
| 248 | ) |
| 249 | -from canonical.launchpad.helpers import shortlist |
| 250 | +from canonical.launchpad.helpers import ( |
| 251 | + ensure_unicode, |
| 252 | + shortlist, |
| 253 | + ) |
| 254 | from canonical.launchpad.interfaces.lpstorm import IStore |
| 255 | from canonical.launchpad.webapp.interfaces import ILaunchBag |
| 256 | from canonical.launchpad.webapp.vocabulary import ( |
| 257 | @@ -118,6 +121,7 @@ |
| 258 | |
| 259 | # Country.name may have non-ASCII characters, so we can't use |
| 260 | # NamedSQLObjectVocabulary here. |
| 261 | + |
| 262 | class CountryNameVocabulary(SQLObjectVocabularyBase): |
| 263 | """A vocabulary for country names.""" |
| 264 | |
| 265 | @@ -159,7 +163,7 @@ |
| 266 | |
| 267 | def search(self, query): |
| 268 | """Search for web bug trackers.""" |
| 269 | - query = query.lower() |
| 270 | + query = ensure_unicode(query).lower() |
| 271 | results = IStore(self._table).find( |
| 272 | self._table, And( |
| 273 | self._filter, |
| 274 | @@ -347,6 +351,7 @@ |
| 275 | yield self.toTerm(watch) |
| 276 | |
| 277 | def toTerm(self, watch): |
| 278 | + |
| 279 | def escape(string): |
| 280 | return cgi.escape(string, quote=True) |
| 281 | |
| 282 | |
| 283 | === modified file 'lib/canonical/launchpad/webapp/pgsession.py' |
| 284 | --- lib/canonical/launchpad/webapp/pgsession.py 2010-08-20 20:31:18 +0000 |
| 285 | +++ lib/canonical/launchpad/webapp/pgsession.py 2010-11-09 15:16:26 +0000 |
| 286 | @@ -5,11 +5,6 @@ |
| 287 | |
| 288 | __metaclass__ = type |
| 289 | |
| 290 | -from datetime import ( |
| 291 | - datetime, |
| 292 | - timedelta, |
| 293 | - ) |
| 294 | -from random import random |
| 295 | import time |
| 296 | from UserDict import DictMixin |
| 297 | |
| 298 | @@ -25,6 +20,7 @@ |
| 299 | ISessionPkgData, |
| 300 | ) |
| 301 | |
| 302 | +from canonical.launchpad.helpers import ensure_unicode |
| 303 | from canonical.launchpad.webapp.publisher import get_current_browser_request |
| 304 | |
| 305 | |
| 306 | @@ -95,7 +91,7 @@ |
| 307 | |
| 308 | def __init__(self, session_data_container, client_id): |
| 309 | self.session_data_container = session_data_container |
| 310 | - self.client_id = client_id |
| 311 | + self.client_id = ensure_unicode(client_id) |
| 312 | self.lastAccessTime = time.time() |
| 313 | |
| 314 | # Update the last access time in the db if it is out of date |
| 315 | @@ -105,7 +101,7 @@ |
| 316 | WHERE client_id = ? |
| 317 | AND last_accessed < CURRENT_TIMESTAMP - '%d seconds'::interval |
| 318 | """ % (table_name, session_data_container.resolution) |
| 319 | - self.store.execute(query, (client_id,), noresult=True) |
| 320 | + self.store.execute(query, (self.client_id,), noresult=True) |
| 321 | |
| 322 | def _ensureClientId(self): |
| 323 | if self._have_ensured_client_id: |
| 324 | @@ -170,7 +166,7 @@ |
| 325 | |
| 326 | def __init__(self, session_data, product_id): |
| 327 | self.session_data = session_data |
| 328 | - self.product_id = product_id |
| 329 | + self.product_id = ensure_unicode(product_id) |
| 330 | self.table_name = ( |
| 331 | session_data.session_data_container.session_pkg_data_table_name) |
| 332 | self._populate() |
| 333 | @@ -193,12 +189,15 @@ |
| 334 | return self._data_cache[key] |
| 335 | |
| 336 | def __setitem__(self, key, value): |
| 337 | - pickled_value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) |
| 338 | + key = ensure_unicode(key) |
| 339 | + pickled_value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) |
| 340 | |
| 341 | self.session_data._ensureClientId() |
| 342 | - self.store.execute("SELECT set_session_pkg_data(?, ?, ?, ?)", |
| 343 | - (self.session_data.client_id, self.product_id, |
| 344 | - key, pickled_value), noresult=True) |
| 345 | + self.store.execute( |
| 346 | + "SELECT set_session_pkg_data(?, ?, ?, ?)", |
| 347 | + (self.session_data.client_id, |
| 348 | + self.product_id, key, pickled_value), |
| 349 | + noresult=True) |
| 350 | |
| 351 | # Store the value in the cache too |
| 352 | self._data_cache[key] = value |
| 353 | @@ -219,8 +218,11 @@ |
| 354 | query = """ |
| 355 | DELETE FROM %s WHERE client_id = ? AND product_id = ? AND key = ? |
| 356 | """ % self.table_name |
| 357 | - self.store.execute(query, (self.session_data.client_id, |
| 358 | - self.product_id, key), noresult=True) |
| 359 | + self.store.execute( |
| 360 | + query, |
| 361 | + (self.session_data.client_id, |
| 362 | + self.product_id, ensure_unicode(key)), |
| 363 | + noresult=True) |
| 364 | |
| 365 | def keys(self): |
| 366 | return self._data_cache.keys() |
| 367 | |
| 368 | === modified file 'lib/canonical/launchpad/webapp/vocabulary.py' |
| 369 | --- lib/canonical/launchpad/webapp/vocabulary.py 2010-08-20 20:31:18 +0000 |
| 370 | +++ lib/canonical/launchpad/webapp/vocabulary.py 2010-11-09 15:16:26 +0000 |
| 371 | @@ -41,6 +41,7 @@ |
| 372 | from zope.security.proxy import isinstance as zisinstance |
| 373 | |
| 374 | from canonical.database.sqlbase import SQLBase |
| 375 | +from canonical.launchpad.helpers import ensure_unicode |
| 376 | |
| 377 | |
| 378 | class ForgivingSimpleVocabulary(SimpleVocabulary): |
| 379 | @@ -53,7 +54,6 @@ |
| 380 | raise TypeError('required argument "default_term" not provided') |
| 381 | return super(ForgivingSimpleVocabulary, self).__init__(*args, **kws) |
| 382 | |
| 383 | - |
| 384 | def getTerm(self, value): |
| 385 | """Look up a value, returning the default if it is not found.""" |
| 386 | try: |
| 387 | @@ -86,7 +86,6 @@ |
| 388 | # XXX: JonathanLange 2009-02-23: This should probably be fused with or at |
| 389 | # least adapted from storm.zope.interfaces.IResultSet. Or maybe just |
| 390 | # deleted in favour of passing around Storm ResultSets. |
| 391 | - |
| 392 | def count(): |
| 393 | """Return the number of items in the iterator.""" |
| 394 | |
| 395 | @@ -102,12 +101,14 @@ |
| 396 | # work; we should probably change that to either check for the |
| 397 | # presence of a count() method, or for a simpler interface than |
| 398 | # ISelectResults, but I'm not going to do that today. |
| 399 | + pass |
| 400 | |
| 401 | def __getslice__(argument): |
| 402 | """Return a slice of the collection.""" |
| 403 | # Python will use __getitem__ if this method is not implemented, |
| 404 | # but it is convenient to define it in the interface for |
| 405 | # allowing access to the attributes through the security proxy. |
| 406 | + pass |
| 407 | |
| 408 | |
| 409 | class CountableIterator: |
| 410 | @@ -351,7 +352,7 @@ |
| 411 | def search(self, query): |
| 412 | """Return terms where query is a substring of the name.""" |
| 413 | if query: |
| 414 | - clause = CONTAINSSTRING(self._table.q.name, query) |
| 415 | + clause = CONTAINSSTRING(self._table.q.name, ensure_unicode(query)) |
| 416 | if self._filter: |
| 417 | clause = AND(clause, self._filter) |
| 418 | return self._table.select(clause, orderBy=self._orderBy) |
| 419 | @@ -384,7 +385,7 @@ |
| 420 | if not query: |
| 421 | return self.emptySelectResults() |
| 422 | |
| 423 | - query = query.lower() |
| 424 | + query = ensure_unicode(query).lower() |
| 425 | clause = CONTAINSSTRING(self._table.q.name, query) |
| 426 | if self._filter: |
| 427 | clause = AND(clause, self._filter) |
| 428 | |
| 429 | === modified file 'lib/lp/archivepublisher/ftparchive.py' |
| 430 | --- lib/lp/archivepublisher/ftparchive.py 2010-11-02 21:44:42 +0000 |
| 431 | +++ lib/lp/archivepublisher/ftparchive.py 2010-11-09 15:16:26 +0000 |
| 432 | @@ -8,7 +8,6 @@ |
| 433 | |
| 434 | from storm.expr import ( |
| 435 | Desc, |
| 436 | - In, |
| 437 | Join, |
| 438 | ) |
| 439 | from storm.store import EmptyResultSet |
| 440 | @@ -50,6 +49,7 @@ |
| 441 | # XXX malcc 2006-09-20 : Move this somewhere useful. If generalised with |
| 442 | # timeout handling and stderr passthrough, could be a single method used for |
| 443 | # this and the similar requirement in test_on_merge.py. |
| 444 | + |
| 445 | def run_subprocess_with_logging(process_and_args, log, prefix): |
| 446 | """Run a subprocess, gathering the output as it runs and logging it. |
| 447 | |
| 448 | @@ -140,6 +140,7 @@ |
| 449 | Generates file lists and configuration for apt-ftparchive, and kicks |
| 450 | off generation of the Sources and Releases files. |
| 451 | """ |
| 452 | + |
| 453 | def __init__(self, log, config, diskpool, distro, publisher): |
| 454 | self.log = log |
| 455 | self._config = config |
| 456 | @@ -188,7 +189,6 @@ |
| 457 | # |
| 458 | # Empty Pocket Requests |
| 459 | # |
| 460 | - |
| 461 | def createEmptyPocketRequests(self, fullpublish=False): |
| 462 | """Write out empty file lists etc for pockets. |
| 463 | |
| 464 | @@ -251,7 +251,6 @@ |
| 465 | # |
| 466 | # Override Generation |
| 467 | # |
| 468 | - |
| 469 | def getSourcesForOverrides(self, distroseries, pocket): |
| 470 | """Fetch override information about all published sources. |
| 471 | |
| 472 | @@ -292,6 +291,7 @@ |
| 473 | PackagePublishingStatus.PUBLISHED) |
| 474 | |
| 475 | suite = distroseries.getSuite(pocket) |
| 476 | + |
| 477 | def add_suite(result): |
| 478 | name, component, section = result |
| 479 | return (name, suite, component, section) |
| 480 | @@ -340,13 +340,14 @@ |
| 481 | (BinaryPackageName.name, Component.name, Section.name, |
| 482 | BinaryPackagePublishingHistory.priority), |
| 483 | BinaryPackagePublishingHistory.archive == self.publisher.archive, |
| 484 | - In(BinaryPackagePublishingHistory.distroarchseriesID, |
| 485 | - architectures_ids), |
| 486 | + BinaryPackagePublishingHistory.distroarchseriesID.is_in( |
| 487 | + architectures_ids), |
| 488 | BinaryPackagePublishingHistory.pocket == pocket, |
| 489 | BinaryPackagePublishingHistory.status == |
| 490 | PackagePublishingStatus.PUBLISHED) |
| 491 | |
| 492 | suite = distroseries.getSuite(pocket) |
| 493 | + |
| 494 | def add_suite(result): |
| 495 | name, component, section, priority = result |
| 496 | return (name, suite, component, section, priority.title.lower()) |
| 497 | @@ -437,12 +438,14 @@ |
| 498 | # Process huge iterations (more than 200k records) in batches. |
| 499 | # See `PublishingTunableLoop`. |
| 500 | self.log.debug("Calculating source overrides") |
| 501 | + |
| 502 | def update_source_override(pub_details): |
| 503 | updateOverride(*pub_details) |
| 504 | process_in_batches( |
| 505 | source_publications, update_source_override, self.log) |
| 506 | |
| 507 | self.log.debug("Calculating binary overrides") |
| 508 | + |
| 509 | def update_binary_override(pub_details): |
| 510 | updateOverride(*pub_details) |
| 511 | process_in_batches( |
| 512 | @@ -489,7 +492,7 @@ |
| 513 | |
| 514 | # Start to write the files out |
| 515 | ef = open(ef_override, "w") |
| 516 | - f = open(main_override , "w") |
| 517 | + f = open(main_override, "w") |
| 518 | for package, priority, section in bin_overrides: |
| 519 | origin = "\t".join([package, "Origin", "Ubuntu"]) |
| 520 | bugs = "\t".join([package, "Bugs", |
| 521 | @@ -542,7 +545,6 @@ |
| 522 | # |
| 523 | # File List Generation |
| 524 | # |
| 525 | - |
| 526 | def getSourceFiles(self, distroseries, pocket): |
| 527 | """Fetch publishing information about all published source files. |
| 528 | |
| 529 | @@ -572,6 +574,7 @@ |
| 530 | PackagePublishingStatus.PUBLISHED) |
| 531 | |
| 532 | suite = distroseries.getSuite(pocket) |
| 533 | + |
| 534 | def add_suite(result): |
| 535 | name, filename, component = result |
| 536 | return (name, suite, filename, component) |
| 537 | @@ -609,6 +612,7 @@ |
| 538 | PackagePublishingStatus.PUBLISHED) |
| 539 | |
| 540 | suite = distroseries.getSuite(pocket) |
| 541 | + |
| 542 | def add_suite(result): |
| 543 | name, filename, component, architecturetag = result |
| 544 | architecture = 'binary-' + architecturetag |
| 545 | @@ -657,12 +661,14 @@ |
| 546 | # Process huge iterations (more than 200K records) in batches. |
| 547 | # See `PublishingTunableLoop`. |
| 548 | self.log.debug("Calculating source filelist.") |
| 549 | + |
| 550 | def update_source_filelist(file_details): |
| 551 | updateFileList(*file_details) |
| 552 | process_in_batches( |
| 553 | sourcefiles, update_source_filelist, self.log) |
| 554 | |
| 555 | self.log.debug("Calculating binary filelist.") |
| 556 | + |
| 557 | def update_binary_filelist(file_details): |
| 558 | updateFileList(*file_details) |
| 559 | process_in_batches( |
| 560 | @@ -677,7 +683,8 @@ |
| 561 | series, pocket = ( |
| 562 | self.distro.getDistroSeriesAndPocket(suite)) |
| 563 | if (architecture != 'source' and |
| 564 | - not series.getDistroArchSeries(architecture[7:]).enabled): |
| 565 | + not series.getDistroArchSeries( |
| 566 | + architecture[7:]).enabled): |
| 567 | continue |
| 568 | self.writeFileList(architecture, file_names, |
| 569 | suite, component) |
| 570 | @@ -723,7 +730,6 @@ |
| 571 | # |
| 572 | # Config Generation |
| 573 | # |
| 574 | - |
| 575 | def generateConfig(self, fullpublish=False): |
| 576 | """Generate an APT FTPArchive configuration from the provided |
| 577 | config object and the paths we either know or have given to us. |
| 578 | |
| 579 | === modified file 'lib/lp/bugs/model/bug.py' |
| 580 | --- lib/lp/bugs/model/bug.py 2010-11-04 02:32:16 +0000 |
| 581 | +++ lib/lp/bugs/model/bug.py 2010-11-09 15:16:26 +0000 |
| 582 | @@ -48,7 +48,6 @@ |
| 583 | And, |
| 584 | Count, |
| 585 | Func, |
| 586 | - In, |
| 587 | LeftJoin, |
| 588 | Max, |
| 589 | Not, |
| 590 | @@ -224,8 +223,8 @@ |
| 591 | |
| 592 | :return: A list of tuples, (tag name, open bug count). |
| 593 | """ |
| 594 | - open_statuses_condition = In( |
| 595 | - BugTask.status, sqlvalues(*UNRESOLVED_BUGTASK_STATUSES)) |
| 596 | + open_statuses_condition = BugTask.status.is_in( |
| 597 | + UNRESOLVED_BUGTASK_STATUSES) |
| 598 | columns = [ |
| 599 | BugTag.tag, |
| 600 | Count(), |
| 601 | @@ -422,7 +421,7 @@ |
| 602 | """See `IBug`.""" |
| 603 | return Store.of(self).find( |
| 604 | Person, |
| 605 | - In(Person.id, self.user_ids_affected_with_dupes)) |
| 606 | + Person.id.is_in(self.user_ids_affected_with_dupes)) |
| 607 | |
| 608 | @property |
| 609 | def users_affected_count_with_dupes(self): |
| 610 | @@ -439,9 +438,9 @@ |
| 611 | |
| 612 | :param include_content: If True retrieve the content for the messages |
| 613 | too. |
| 614 | - :param include_parents: If True retrieve the object for parent messages |
| 615 | - too. If False the parent attribute will be *forced* to None to |
| 616 | - reduce database lookups. |
| 617 | + :param include_parents: If True retrieve the object for parent |
| 618 | + messages too. If False the parent attribute will be *forced* to |
| 619 | + None to reduce database lookups. |
| 620 | """ |
| 621 | # Make all messages be 'in' the main bugtask. |
| 622 | inside = self.default_bugtask |
| 623 | @@ -451,16 +450,18 @@ |
| 624 | to_messages = lambda rows: [row[0] for row in rows] |
| 625 | else: |
| 626 | to_messages = lambda rows: rows |
| 627 | + |
| 628 | def eager_load_owners(messages): |
| 629 | - # Because we may have multiple owners, we spend less time in storm |
| 630 | - # with very large bugs by not joining and instead querying a second |
| 631 | - # time. If this starts to show high db time, we can left outer join |
| 632 | - # instead. |
| 633 | + # Because we may have multiple owners, we spend less time |
| 634 | + # in storm with very large bugs by not joining and instead |
| 635 | + # querying a second time. If this starts to show high db |
| 636 | + # time, we can left outer join instead. |
| 637 | owner_ids = set(message.ownerID for message in messages) |
| 638 | owner_ids.discard(None) |
| 639 | if not owner_ids: |
| 640 | return |
| 641 | list(store.find(Person, Person.id.is_in(owner_ids))) |
| 642 | + |
| 643 | def eager_load_content(messages): |
| 644 | # To avoid the complexity of having multiple rows per |
| 645 | # message, or joining in the database (though perhaps in |
| 646 | @@ -480,11 +481,13 @@ |
| 647 | cache = get_property_cache(message) |
| 648 | cache.text_contents = Message.chunks_text( |
| 649 | chunk_map[message.id]) |
| 650 | + |
| 651 | def eager_load(rows, slice_info): |
| 652 | messages = to_messages(rows) |
| 653 | eager_load_owners(messages) |
| 654 | if include_content: |
| 655 | eager_load_content(messages) |
| 656 | + |
| 657 | def index_message(row, index): |
| 658 | # convert row to an IndexedMessage |
| 659 | if include_parents: |
| 660 | @@ -913,10 +916,13 @@ |
| 661 | |
| 662 | def getSubscribersForPerson(self, person): |
| 663 | """See `IBug`.""" |
| 664 | + |
| 665 | assert person is not None |
| 666 | + |
| 667 | def cache_unsubscribed(rows): |
| 668 | if not rows: |
| 669 | self._unsubscribed_cache.add(person) |
| 670 | + |
| 671 | def cache_subscriber(row): |
| 672 | _, subscriber, subscription = row |
| 673 | if subscription.bug_id == self.id: |
| 674 | @@ -1911,15 +1917,17 @@ |
| 675 | def attachments(self): |
| 676 | """See `IBug`. |
| 677 | |
| 678 | - This property does eager loading of the index_messages so that the API |
| 679 | - which wants the message_link for the attachment can answer that without |
| 680 | - O(N^2) overhead. As such it is moderately expensive to call (it |
| 681 | - currently retrieves all messages before any attachments, and does this |
| 682 | - when attachments is evaluated, not when the resultset is processed). |
| 683 | + This property does eager loading of the index_messages so that |
| 684 | + the API which wants the message_link for the attachment can |
| 685 | + answer that without O(N^2) overhead. As such it is moderately |
| 686 | + expensive to call (it currently retrieves all messages before |
| 687 | + any attachments, and does this when attachments is evaluated, |
| 688 | + not when the resultset is processed). |
| 689 | """ |
| 690 | message_to_indexed = {} |
| 691 | for message in self._indexed_messages(include_parents=False): |
| 692 | message_to_indexed[message.id] = message |
| 693 | + |
| 694 | def set_indexed_message(row): |
| 695 | attachment = row[0] |
| 696 | # row[1] - the LibraryFileAlias is now in the storm cache and |
| 697 | @@ -2196,7 +2204,7 @@ |
| 698 | if bug_numbers is None or len(bug_numbers) < 1: |
| 699 | return EmptyResultSet() |
| 700 | store = IStore(Bug) |
| 701 | - result_set = store.find(Bug, In(Bug.id, bug_numbers)) |
| 702 | + result_set = store.find(Bug, Bug.id.is_in(bug_numbers)) |
| 703 | return result_set.order_by('id') |
| 704 | |
| 705 | def dangerousGetAllBugs(self): |
| 706 | |
| 707 | === modified file 'lib/lp/bugs/model/bugtask.py' |
| 708 | --- lib/lp/bugs/model/bugtask.py 2010-11-08 05:54:02 +0000 |
| 709 | +++ lib/lp/bugs/model/bugtask.py 2010-11-09 15:16:26 +0000 |
| 710 | @@ -37,7 +37,6 @@ |
| 711 | And, |
| 712 | AutoTables, |
| 713 | Desc, |
| 714 | - In, |
| 715 | Join, |
| 716 | LeftJoin, |
| 717 | Or, |
| 718 | @@ -1317,6 +1316,7 @@ |
| 719 | :seealso: get_bug_privacy_filter_with_decorator |
| 720 | """ |
| 721 | userid = user.id |
| 722 | + |
| 723 | def cache_user_can_view_bug(bugtask): |
| 724 | get_property_cache(bugtask.bug)._known_viewers = set([userid]) |
| 725 | return bugtask |
| 726 | @@ -1490,13 +1490,14 @@ |
| 727 | |
| 728 | bug_ids = list(set(bugtask.bugID for bugtask in bugtasks)) |
| 729 | bug_ids_with_specifications = set(IStore(SpecificationBug).find( |
| 730 | - SpecificationBug.bugID, In(SpecificationBug.bugID, bug_ids))) |
| 731 | + SpecificationBug.bugID, |
| 732 | + SpecificationBug.bugID.is_in(bug_ids))) |
| 733 | bug_ids_with_branches = set(IStore(BugBranch).find( |
| 734 | - BugBranch.bugID, In(BugBranch.bugID, bug_ids))) |
| 735 | + BugBranch.bugID, BugBranch.bugID.is_in(bug_ids))) |
| 736 | |
| 737 | # Cache all bugs at once to avoid one query per bugtask. We |
| 738 | # could rely on the Storm cache, but this is explicit. |
| 739 | - bugs = dict(IStore(Bug).find((Bug.id, Bug), In(Bug.id, bug_ids))) |
| 740 | + bugs = dict(IStore(Bug).find((Bug.id, Bug), Bug.id.is_in(bug_ids))) |
| 741 | |
| 742 | badge_properties = {} |
| 743 | for bugtask in bugtasks: |
| 744 | @@ -1739,7 +1740,8 @@ |
| 745 | BugTask.productseries = StructuralSubscription.productseries |
| 746 | AND StructuralSubscription.subscriber = %(personid)s |
| 747 | UNION ALL |
| 748 | - SELECT BugTask.id FROM BugTask, StructuralSubscription, Product |
| 749 | + SELECT BugTask.id |
| 750 | + FROM BugTask, StructuralSubscription, Product |
| 751 | WHERE |
| 752 | BugTask.product = Product.id |
| 753 | AND Product.project = StructuralSubscription.project |
| 754 | @@ -1921,6 +1923,7 @@ |
| 755 | if not decorators: |
| 756 | decorator = lambda x: x |
| 757 | else: |
| 758 | + |
| 759 | def decorator(obj): |
| 760 | for decor in decorators: |
| 761 | obj = decor(obj) |
| 762 | @@ -2216,6 +2219,7 @@ |
| 763 | # This may need revisiting if e.g. searches on behalf of different |
| 764 | # users are combined. |
| 765 | decorators.append(decorator) |
| 766 | + |
| 767 | def decorator(row): |
| 768 | bugtask = row[0] |
| 769 | for decorator in decorators: |
| 770 | @@ -2283,7 +2287,7 @@ |
| 771 | else: |
| 772 | store = search_results._store |
| 773 | milestones = store.find( |
| 774 | - Milestone, In(Milestone.id, milestone_ids)) |
| 775 | + Milestone, Milestone.id.is_in(milestone_ids)) |
| 776 | return sorted(milestones, key=milestone_sort_key, reverse=True) |
| 777 | |
| 778 | def createTask(self, bug, owner, product=None, productseries=None, |
| 779 | @@ -2849,8 +2853,9 @@ |
| 780 | for subscription in subscriptions)) |
| 781 | |
| 782 | if recipients is not None: |
| 783 | - # We need to process subscriptions, so pull all the subscribes into |
| 784 | - # the cache, then update recipients with the subscriptions. |
| 785 | + # We need to process subscriptions, so pull all the |
| 786 | + # subscribes into the cache, then update recipients with |
| 787 | + # the subscriptions. |
| 788 | subscribers = list(subscribers) |
| 789 | for subscription in subscriptions: |
| 790 | recipients.addStructuralSubscriber( |
| 791 | |
| 792 | === modified file 'lib/lp/bugs/model/bugwatch.py' |
| 793 | --- lib/lp/bugs/model/bugwatch.py 2010-09-29 19:14:41 +0000 |
| 794 | +++ lib/lp/bugs/model/bugwatch.py 2010-11-09 15:16:26 +0000 |
| 795 | @@ -29,7 +29,6 @@ |
| 796 | from storm.base import Storm |
| 797 | from storm.expr import ( |
| 798 | Desc, |
| 799 | - In, |
| 800 | Not, |
| 801 | ) |
| 802 | from storm.locals import ( |
| 803 | @@ -83,21 +82,22 @@ |
| 804 | |
| 805 | |
| 806 | BUG_TRACKER_URL_FORMATS = { |
| 807 | - BugTrackerType.BUGZILLA: 'show_bug.cgi?id=%s', |
| 808 | - BugTrackerType.DEBBUGS: 'cgi-bin/bugreport.cgi?bug=%s', |
| 809 | + BugTrackerType.BUGZILLA: 'show_bug.cgi?id=%s', |
| 810 | + BugTrackerType.DEBBUGS: 'cgi-bin/bugreport.cgi?bug=%s', |
| 811 | BugTrackerType.GOOGLE_CODE: 'detail?id=%s', |
| 812 | - BugTrackerType.MANTIS: 'view.php?id=%s', |
| 813 | - BugTrackerType.ROUNDUP: 'issue%s', |
| 814 | - BugTrackerType.RT: 'Ticket/Display.html?id=%s', |
| 815 | + BugTrackerType.MANTIS: 'view.php?id=%s', |
| 816 | + BugTrackerType.ROUNDUP: 'issue%s', |
| 817 | + BugTrackerType.RT: 'Ticket/Display.html?id=%s', |
| 818 | BugTrackerType.SOURCEFORGE: 'support/tracker.php?aid=%s', |
| 819 | - BugTrackerType.TRAC: 'ticket/%s', |
| 820 | - BugTrackerType.SAVANE: 'bugs/?%s', |
| 821 | - BugTrackerType.PHPPROJECT: 'bug.php?id=%s', |
| 822 | + BugTrackerType.TRAC: 'ticket/%s', |
| 823 | + BugTrackerType.SAVANE: 'bugs/?%s', |
| 824 | + BugTrackerType.PHPPROJECT: 'bug.php?id=%s', |
| 825 | } |
| 826 | |
| 827 | |
| 828 | WATCH_RESCHEDULE_THRESHOLD = 0.6 |
| 829 | |
| 830 | + |
| 831 | def get_bug_watch_ids(references): |
| 832 | """Yield bug watch IDs from any given iterator. |
| 833 | |
| 834 | @@ -105,6 +105,7 @@ |
| 835 | IBugWatch, and yields if it is an integer. Everything else is |
| 836 | discarded. |
| 837 | """ |
| 838 | + |
| 839 | for reference in references: |
| 840 | if IBugWatch.providedBy(reference): |
| 841 | yield reference.id |
| 842 | @@ -360,14 +361,11 @@ |
| 843 | |
| 844 | @property |
| 845 | def failed_activity(self): |
| 846 | - store = Store.of(self) |
| 847 | - success_status_ids = [ |
| 848 | - status.value for status in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES] |
| 849 | - |
| 850 | - return store.find( |
| 851 | + return Store.of(self).find( |
| 852 | BugWatchActivity, |
| 853 | BugWatchActivity.bug_watch == self, |
| 854 | - Not(In(BugWatchActivity.result, success_status_ids))).order_by( |
| 855 | + Not(BugWatchActivity.result.is_in( |
| 856 | + BUG_WATCH_ACTIVITY_SUCCESS_STATUSES))).order_by( |
| 857 | Desc('activity_date')) |
| 858 | |
| 859 | def setNextCheck(self, next_check): |
| 860 | @@ -398,7 +396,7 @@ |
| 861 | self.title = 'A set of bug watches' |
| 862 | self.bugtracker_parse_functions = { |
| 863 | BugTrackerType.BUGZILLA: self.parseBugzillaURL, |
| 864 | - BugTrackerType.DEBBUGS: self.parseDebbugsURL, |
| 865 | + BugTrackerType.DEBBUGS: self.parseDebbugsURL, |
| 866 | BugTrackerType.EMAILADDRESS: self.parseEmailAddressURL, |
| 867 | BugTrackerType.GOOGLE_CODE: self.parseGoogleCodeURL, |
| 868 | BugTrackerType.MANTIS: self.parseMantisURL, |
| 869 | @@ -408,14 +406,14 @@ |
| 870 | BugTrackerType.SAVANE: self.parseSavaneURL, |
| 871 | BugTrackerType.SOURCEFORGE: self.parseSourceForgeLikeURL, |
| 872 | BugTrackerType.TRAC: self.parseTracURL, |
| 873 | - } |
| 874 | + } |
| 875 | |
| 876 | def get(self, watch_id): |
| 877 | """See `IBugWatch`Set.""" |
| 878 | try: |
| 879 | return BugWatch.get(watch_id) |
| 880 | except SQLObjectNotFound: |
| 881 | - raise NotFoundError, watch_id |
| 882 | + raise NotFoundError(watch_id) |
| 883 | |
| 884 | def search(self): |
| 885 | return BugWatch.select() |
| 886 | @@ -630,8 +628,7 @@ |
| 887 | # Launchpad, so we return that one if the hostname matches. |
| 888 | savannah_tracker = getUtility(ILaunchpadCelebrities).savannah_tracker |
| 889 | savannah_hosts = [ |
| 890 | - urlsplit(alias)[1] for alias in savannah_tracker.aliases |
| 891 | - ] |
| 892 | + urlsplit(alias)[1] for alias in savannah_tracker.aliases] |
| 893 | savannah_hosts.append(urlsplit(savannah_tracker.baseurl)[1]) |
| 894 | |
| 895 | # The remote bug is actually a key in the query dict rather than |
| 896 | @@ -724,7 +721,7 @@ |
| 897 | query = IStore(BugWatch).find( |
| 898 | BugWatch, BugWatch.remotebug == remote_bug) |
| 899 | if bug_watch_ids is not None: |
| 900 | - query = query.find(In(BugWatch.id, bug_watch_ids)) |
| 901 | + query = query.find(BugWatch.id.is_in(bug_watch_ids)) |
| 902 | return query |
| 903 | |
| 904 | def bulkSetError(self, references, last_error_type=None): |
| 905 | @@ -732,7 +729,7 @@ |
| 906 | bug_watch_ids = set(get_bug_watch_ids(references)) |
| 907 | if len(bug_watch_ids) > 0: |
| 908 | bug_watches_in_database = IStore(BugWatch).find( |
| 909 | - BugWatch, In(BugWatch.id, list(bug_watch_ids))) |
| 910 | + BugWatch, BugWatch.id.is_in(bug_watch_ids)) |
| 911 | bug_watches_in_database.set( |
| 912 | lastchecked=UTC_NOW, |
| 913 | last_error_type=last_error_type, |
| 914 | @@ -748,8 +745,7 @@ |
| 915 | "INSERT INTO BugWatchActivity" |
| 916 | " (bug_watch, result, message, oops_id) " |
| 917 | "SELECT BugWatch.id, %s, %s, %s FROM BugWatch" |
| 918 | - " WHERE BugWatch.id IN %s" |
| 919 | - ) |
| 920 | + " WHERE BugWatch.id IN %s") |
| 921 | IStore(BugWatch).execute( |
| 922 | insert_activity_statement % sqlvalues( |
| 923 | result, message, oops_id, bug_watch_ids)) |
| 924 | |
| 925 | === modified file 'lib/lp/code/model/branch.py' |
| 926 | --- lib/lp/code/model/branch.py 2010-11-08 17:17:45 +0000 |
| 927 | +++ lib/lp/code/model/branch.py 2010-11-09 15:16:26 +0000 |
| 928 | @@ -1083,12 +1083,12 @@ |
| 929 | name = "date_trunc" |
| 930 | |
| 931 | results = Store.of(self).find( |
| 932 | - (DateTrunc('day', Revision.revision_date), Count(Revision.id)), |
| 933 | + (DateTrunc(u'day', Revision.revision_date), Count(Revision.id)), |
| 934 | Revision.id == BranchRevision.revision_id, |
| 935 | Revision.revision_date > since, |
| 936 | BranchRevision.branch == self) |
| 937 | results = results.group_by( |
| 938 | - DateTrunc('day', Revision.revision_date)) |
| 939 | + DateTrunc(u'day', Revision.revision_date)) |
| 940 | return sorted(results) |
| 941 | |
| 942 | @property |
| 943 | |
| 944 | === modified file 'lib/lp/hardwaredb/model/hwdb.py' |
| 945 | --- lib/lp/hardwaredb/model/hwdb.py 2010-09-14 15:32:53 +0000 |
| 946 | +++ lib/lp/hardwaredb/model/hwdb.py 2010-11-09 15:16:26 +0000 |
| 947 | @@ -45,7 +45,6 @@ |
| 948 | Alias, |
| 949 | And, |
| 950 | Count, |
| 951 | - In, |
| 952 | Not, |
| 953 | Or, |
| 954 | Select, |
| 955 | @@ -386,7 +385,7 @@ |
| 956 | columns=[HWSubmissionDevice.submissionID], |
| 957 | tables=device_tables, where=And(*device_clauses)) |
| 958 | |
| 959 | - clauses.append(In(HWSubmission.id, submission_ids)) |
| 960 | + clauses.append(HWSubmission.id.is_in(submission_ids)) |
| 961 | submissions_with_device = Select( |
| 962 | columns=[target_column], tables=tables, where=And(*clauses), |
| 963 | distinct=True) |
| 964 | @@ -448,11 +447,11 @@ |
| 965 | |
| 966 | tables.append(Bug) |
| 967 | if bug_ids is not None and bug_ids is not []: |
| 968 | - clauses.append(In(Bug.id, bug_ids)) |
| 969 | + clauses.append(Bug.id.is_in(bug_ids)) |
| 970 | |
| 971 | if bug_tags is not None and bug_tags is not []: |
| 972 | clauses.extend([ |
| 973 | - Bug.id == BugTag.bugID, In(BugTag.tag, bug_tags)]) |
| 974 | + Bug.id == BugTag.bugID, BugTag.tag.is_in(bug_tags)]) |
| 975 | tables.append(BugTag) |
| 976 | |
| 977 | # If we OR-combine the search for bug owners, subscribers |
| 978 | @@ -460,6 +459,9 @@ |
| 979 | # So let's run the queries separately and join the results |
| 980 | # on Python level. |
| 981 | |
| 982 | + # This would be quicker still if we did it as a single query |
| 983 | + # using UNION. |
| 984 | + |
| 985 | owner_query = Select( |
| 986 | columns=[HWSubmission.ownerID], tables=tables, |
| 987 | where=And(*(clauses + [Bug.ownerID == HWSubmission.ownerID]))) |
| 988 | @@ -493,7 +495,8 @@ |
| 989 | if len(user_ids) == 0: |
| 990 | result = store.find(Person, False) |
| 991 | else: |
| 992 | - result = store.find(Person, In(Person.id, list(user_ids))) |
| 993 | + user_ids = [row[0] for row in user_ids] |
| 994 | + result = store.find(Person, Person.id.is_in(user_ids)) |
| 995 | result.order_by(Person.displayname) |
| 996 | return result |
| 997 | |
| 998 | @@ -517,14 +520,14 @@ |
| 999 | HWSubmissionDevice.submission == HWSubmission.id, |
| 1000 | HWSubmissionDevice.device_driver_link == HWDeviceDriverLink.id, |
| 1001 | HWDeviceDriverLink.device == HWDevice.id, |
| 1002 | - HWDevice.bus_vendor == HWVendorID.id |
| 1003 | - ] |
| 1004 | + HWDevice.bus_vendor == HWVendorID.id] |
| 1005 | |
| 1006 | if bug_ids is not None and bug_ids is not []: |
| 1007 | - clauses.append(In(Bug.id, bug_ids)) |
| 1008 | + clauses.append(Bug.id.is_in(bug_ids)) |
| 1009 | |
| 1010 | if bug_tags is not None and bug_tags is not []: |
| 1011 | - clauses.extend([Bug.id == BugTag.bugID, In(BugTag.tag, bug_tags)]) |
| 1012 | + clauses.extend( |
| 1013 | + [Bug.id == BugTag.bugID, BugTag.tag.is_in(bug_tags)]) |
| 1014 | |
| 1015 | clauses.append(_userCanAccessSubmissionStormClause(user)) |
| 1016 | |
| 1017 | @@ -545,8 +548,7 @@ |
| 1018 | query = Select( |
| 1019 | columns=[ |
| 1020 | Person.name, HWVendorID.bus, |
| 1021 | - HWVendorID.vendor_id_for_bus, HWDevice.bus_product_id |
| 1022 | - ], |
| 1023 | + HWVendorID.vendor_id_for_bus, HWDevice.bus_product_id], |
| 1024 | tables=tables, where=And(*clauses), distinct=True, |
| 1025 | order_by=[HWVendorID.bus, HWVendorID.vendor_id_for_bus, |
| 1026 | HWDevice.bus_product_id, Person.name]) |
| 1027 | @@ -631,6 +633,7 @@ |
| 1028 | HWBus.SCSI: scsi_product, |
| 1029 | } |
| 1030 | |
| 1031 | + |
| 1032 | def isValidVendorID(bus, id): |
| 1033 | """Check that the string id is a valid vendor ID for this bus. |
| 1034 | |
| 1035 | @@ -1271,8 +1274,7 @@ |
| 1036 | HWVendorID.vendor_id_for_bus == vendor_id, |
| 1037 | HWDevice.bus_vendor == HWVendorID.id, |
| 1038 | HWDeviceDriverLink.device == HWDevice.id, |
| 1039 | - HWDevice.bus_product_id == product_id |
| 1040 | - ]) |
| 1041 | + HWDevice.bus_product_id == product_id]) |
| 1042 | |
| 1043 | if driver_name is None and package_name is None: |
| 1044 | where_clauses.append(HWDeviceDriverLink.driver == None) |
| 1045 | @@ -1294,6 +1296,7 @@ |
| 1046 | |
| 1047 | return tables, where_clauses |
| 1048 | |
| 1049 | + |
| 1050 | def make_distro_target_clause(distro_target): |
| 1051 | """Create a where expression and a table list to limit results to a |
| 1052 | distro target. |
| 1053 | @@ -1324,6 +1327,7 @@ |
| 1054 | 'IDistroSeries or IDistroArchSeries') |
| 1055 | return ([], []) |
| 1056 | |
| 1057 | + |
| 1058 | def _userCanAccessSubmissionStormClause(user): |
| 1059 | """Limit results of HWSubmission queries to rows the user can access. |
| 1060 | """ |
| 1061 | |
| 1062 | === modified file 'lib/lp/registry/browser/distributionsourcepackage.py' |
| 1063 | --- lib/lp/registry/browser/distributionsourcepackage.py 2010-10-18 15:47:18 +0000 |
| 1064 | +++ lib/lp/registry/browser/distributionsourcepackage.py 2010-11-09 15:16:26 +0000 |
| 1065 | @@ -258,7 +258,8 @@ |
| 1066 | if not_empty(spr.changelog_entry)]) |
| 1067 | unique_bugs = extract_bug_numbers(the_changelog) |
| 1068 | self._bug_data = list( |
| 1069 | - getUtility(IBugSet).getByNumbers(unique_bugs.keys())) |
| 1070 | + getUtility(IBugSet).getByNumbers( |
| 1071 | + [int(key) for key in unique_bugs.keys()])) |
| 1072 | # Preload email/person data only if user is logged on. In the opposite |
| 1073 | # case the emails in the changelog will be obfuscated anyway and thus |
| 1074 | # cause no database lookups. |
| 1075 | |
| 1076 | === modified file 'lib/lp/registry/doc/vocabularies.txt' |
| 1077 | --- lib/lp/registry/doc/vocabularies.txt 2010-10-03 15:30:06 +0000 |
| 1078 | +++ lib/lp/registry/doc/vocabularies.txt 2010-11-09 15:16:26 +0000 |
| 1079 | @@ -189,7 +189,7 @@ |
| 1080 | `name` |
| 1081 | |
| 1082 | >>> distroseries_vocabulary = get_naked_vocab( |
| 1083 | - ... None,"DistroSeries") |
| 1084 | + ... None, "DistroSeries") |
| 1085 | >>> for term in distroseries_vocabulary: |
| 1086 | ... print "%30s %s" % (term.token, term.title) |
| 1087 | ubuntu/breezy-autotest Ubuntu: Breezy Badger Autotest |
| 1088 | |
| 1089 | === modified file 'lib/lp/registry/model/distribution.py' |
| 1090 | --- lib/lp/registry/model/distribution.py 2010-11-05 14:56:34 +0000 |
| 1091 | +++ lib/lp/registry/model/distribution.py 2010-11-09 15:16:26 +0000 |
| 1092 | @@ -20,7 +20,6 @@ |
| 1093 | from sqlobject.sqlbuilder import SQLConstant |
| 1094 | from storm.locals import ( |
| 1095 | Desc, |
| 1096 | - In, |
| 1097 | Int, |
| 1098 | Join, |
| 1099 | Or, |
| 1100 | @@ -51,7 +50,10 @@ |
| 1101 | Match, |
| 1102 | RANK, |
| 1103 | ) |
| 1104 | -from canonical.launchpad.helpers import shortlist |
| 1105 | +from canonical.launchpad.helpers import ( |
| 1106 | + ensure_unicode, |
| 1107 | + shortlist, |
| 1108 | + ) |
| 1109 | from canonical.launchpad.interfaces.launchpad import ( |
| 1110 | IHasIcon, |
| 1111 | IHasLogo, |
| 1112 | @@ -1220,8 +1222,7 @@ |
| 1113 | SourcePackageRelease.sourcepackagename == SourcePackageName.id, |
| 1114 | DistributionSourcePackageCache.sourcepackagename == |
| 1115 | SourcePackageName.id, |
| 1116 | - In( |
| 1117 | - DistributionSourcePackageCache.archiveID, |
| 1118 | + DistributionSourcePackageCache.archiveID.is_in( |
| 1119 | self.all_distro_archive_ids)) |
| 1120 | |
| 1121 | def searchBinaryPackages(self, package_name, exact_match=False): |
| 1122 | @@ -1232,7 +1233,8 @@ |
| 1123 | |
| 1124 | if exact_match: |
| 1125 | find_spec = self._binaryPackageSearchClause + ( |
| 1126 | - BinaryPackageRelease.binarypackagename == BinaryPackageName.id, |
| 1127 | + BinaryPackageRelease.binarypackagename |
| 1128 | + == BinaryPackageName.id, |
| 1129 | ) |
| 1130 | match_clause = (BinaryPackageName.name == package_name,) |
| 1131 | else: |
| 1132 | @@ -1241,8 +1243,7 @@ |
| 1133 | # DistributionSourcePackageCache records. |
| 1134 | find_spec = ( |
| 1135 | DistributionSourcePackageCache.distribution == self, |
| 1136 | - In( |
| 1137 | - DistributionSourcePackageCache.archiveID, |
| 1138 | + DistributionSourcePackageCache.archiveID.is_in( |
| 1139 | self.all_distro_archive_ids)) |
| 1140 | match_clause = ( |
| 1141 | DistributionSourcePackageCache.binpkgnames.like( |
| 1142 | @@ -1256,7 +1257,7 @@ |
| 1143 | def searchBinaryPackagesFTI(self, package_name): |
| 1144 | """See `IDistribution`.""" |
| 1145 | search_vector_column = DistroSeriesPackageCache.fti |
| 1146 | - query_function = FTQ(package_name) |
| 1147 | + query_function = FTQ(ensure_unicode(package_name)) |
| 1148 | rank = RANK(search_vector_column, query_function) |
| 1149 | |
| 1150 | extra_clauses = ( |
| 1151 | |
| 1152 | === modified file 'lib/lp/registry/model/distributionsourcepackage.py' |
| 1153 | --- lib/lp/registry/model/distributionsourcepackage.py 2010-10-24 21:00:11 +0000 |
| 1154 | +++ lib/lp/registry/model/distributionsourcepackage.py 2010-11-09 15:16:26 +0000 |
| 1155 | @@ -19,9 +19,7 @@ |
| 1156 | And, |
| 1157 | Count, |
| 1158 | Desc, |
| 1159 | - In, |
| 1160 | Join, |
| 1161 | - Lower, |
| 1162 | Max, |
| 1163 | Sum, |
| 1164 | ) |
| 1165 | @@ -428,7 +426,7 @@ |
| 1166 | (SourcePackageRelease, SourcePackagePublishingHistory), |
| 1167 | SourcePackagePublishingHistory.distroseries == DistroSeries.id, |
| 1168 | DistroSeries.distribution == self.distribution, |
| 1169 | - In(SourcePackagePublishingHistory.archiveID, |
| 1170 | + SourcePackagePublishingHistory.archiveID.is_in( |
| 1171 | self.distribution.all_distro_archive_ids), |
| 1172 | SourcePackagePublishingHistory.sourcepackagerelease == |
| 1173 | SourcePackageRelease.id, |
| 1174 | @@ -542,7 +540,7 @@ |
| 1175 | # Get all persons whose email addresses are in the list. |
| 1176 | result_set = store.using(*origin).find( |
| 1177 | (EmailAddress, Person), |
| 1178 | - In(Lower(EmailAddress.email), email_addresses)) |
| 1179 | + EmailAddress.email.lower().is_in(email_addresses)) |
| 1180 | return result_set |
| 1181 | |
| 1182 | @classmethod |
| 1183 | |
| 1184 | === modified file 'lib/lp/registry/model/person.py' |
| 1185 | --- lib/lp/registry/model/person.py 2010-11-08 01:08:15 +0000 |
| 1186 | +++ lib/lp/registry/model/person.py 2010-11-09 15:16:26 +0000 |
| 1187 | @@ -53,10 +53,8 @@ |
| 1188 | And, |
| 1189 | Desc, |
| 1190 | Exists, |
| 1191 | - In, |
| 1192 | Join, |
| 1193 | LeftJoin, |
| 1194 | - Lower, |
| 1195 | Min, |
| 1196 | Not, |
| 1197 | Or, |
| 1198 | @@ -115,12 +113,12 @@ |
| 1199 | OAuthAccessToken, |
| 1200 | OAuthRequestToken, |
| 1201 | ) |
| 1202 | -from canonical.launchpad.database.stormsugar import StartsWith |
| 1203 | from canonical.launchpad.event.interfaces import ( |
| 1204 | IJoinTeamEvent, |
| 1205 | ITeamInvitationEvent, |
| 1206 | ) |
| 1207 | from canonical.launchpad.helpers import ( |
| 1208 | + ensure_unicode, |
| 1209 | get_contact_email_addresses, |
| 1210 | get_email_template, |
| 1211 | shortlist, |
| 1212 | @@ -180,10 +178,7 @@ |
| 1213 | IllegalRelatedBugTasksParams, |
| 1214 | ) |
| 1215 | from lp.bugs.model.bugtarget import HasBugsBase |
| 1216 | -from lp.bugs.model.bugtask import ( |
| 1217 | - BugTask, |
| 1218 | - get_related_bugtasks_search_params, |
| 1219 | - ) |
| 1220 | +from lp.bugs.model.bugtask import get_related_bugtasks_search_params |
| 1221 | from lp.code.model.hasbranches import ( |
| 1222 | HasBranchesMixin, |
| 1223 | HasMergeProposalsMixin, |
| 1224 | @@ -2851,7 +2846,8 @@ |
| 1225 | email, account = ( |
| 1226 | join.find( |
| 1227 | (EmailAddress, Account), |
| 1228 | - Lower(EmailAddress.email) == Lower(email_address)).one() |
| 1229 | + EmailAddress.email.lower() == |
| 1230 | + ensure_unicode(email_address).lower()).one() |
| 1231 | or (None, None)) |
| 1232 | identifier = store.find( |
| 1233 | OpenIdIdentifier, identifier=openid_identifier).one() |
| 1234 | @@ -3154,7 +3150,7 @@ |
| 1235 | Not(Person.teamowner == None), |
| 1236 | Person.merged == None, |
| 1237 | EmailAddress.person == Person.id, |
| 1238 | - StartsWith(Lower(EmailAddress.email), text)) |
| 1239 | + EmailAddress.email.lower().startswith(ensure_unicode(text))) |
| 1240 | return team_email_query |
| 1241 | |
| 1242 | def _teamNameQuery(self, text): |
| 1243 | @@ -3177,9 +3173,7 @@ |
| 1244 | return EmptyResultSet() |
| 1245 | |
| 1246 | orderBy = Person._sortingColumnsForSetOperations |
| 1247 | - text = text.lower() |
| 1248 | - inactive_statuses = tuple( |
| 1249 | - status.value for status in INACTIVE_ACCOUNT_STATUSES) |
| 1250 | + text = ensure_unicode(text).lower() |
| 1251 | # Teams may not have email addresses, so we need to either use a LEFT |
| 1252 | # OUTER JOIN or do a UNION between four queries. Using a UNION makes |
| 1253 | # it a lot faster than with a LEFT OUTER JOIN. |
| 1254 | @@ -3188,8 +3182,8 @@ |
| 1255 | Person.merged == None, |
| 1256 | EmailAddress.person == Person.id, |
| 1257 | Person.account == Account.id, |
| 1258 | - Not(In(Account.status, inactive_statuses)), |
| 1259 | - StartsWith(Lower(EmailAddress.email), text)) |
| 1260 | + Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES)), |
| 1261 | + EmailAddress.email.lower().startswith(text)) |
| 1262 | |
| 1263 | store = IStore(Person) |
| 1264 | |
| 1265 | @@ -3206,7 +3200,7 @@ |
| 1266 | Person.teamowner == None, |
| 1267 | Person.merged == None, |
| 1268 | Person.account == Account.id, |
| 1269 | - Not(In(Account.status, inactive_statuses)), |
| 1270 | + Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES)), |
| 1271 | SQL("Person.fti @@ ftq(?)", (text, )) |
| 1272 | ) |
| 1273 | |
| 1274 | @@ -3226,10 +3220,8 @@ |
| 1275 | must_have_email=False, created_after=None, created_before=None): |
| 1276 | """See `IPersonSet`.""" |
| 1277 | orderBy = Person._sortingColumnsForSetOperations |
| 1278 | - text = text.lower() |
| 1279 | + text = ensure_unicode(text).lower() |
| 1280 | store = IStore(Person) |
| 1281 | - inactive_statuses = tuple( |
| 1282 | - status.value for status in INACTIVE_ACCOUNT_STATUSES) |
| 1283 | base_query = And( |
| 1284 | Person.teamowner == None, |
| 1285 | Person.merged == None) |
| 1286 | @@ -3241,7 +3233,7 @@ |
| 1287 | base_query = And( |
| 1288 | base_query, |
| 1289 | Person.account == Account.id, |
| 1290 | - Not(In(Account.status, inactive_statuses))) |
| 1291 | + Not(Account.status.is_in(INACTIVE_ACCOUNT_STATUSES))) |
| 1292 | email_clause_tables = clause_tables + ['EmailAddress'] |
| 1293 | if must_have_email: |
| 1294 | clause_tables = email_clause_tables |
| 1295 | @@ -3268,7 +3260,7 @@ |
| 1296 | email_query = And( |
| 1297 | base_query, |
| 1298 | EmailAddress.person == Person.id, |
| 1299 | - StartsWith(Lower(EmailAddress.email), text)) |
| 1300 | + EmailAddress.email.lower().startswith(ensure_unicode(text))) |
| 1301 | |
| 1302 | name_query = And( |
| 1303 | base_query, |
| 1304 | @@ -3281,7 +3273,7 @@ |
| 1305 | def findTeam(self, text=""): |
| 1306 | """See `IPersonSet`.""" |
| 1307 | orderBy = Person._sortingColumnsForSetOperations |
| 1308 | - text = text.lower() |
| 1309 | + text = ensure_unicode(text).lower() |
| 1310 | # Teams may not have email addresses, so we need to either use a LEFT |
| 1311 | # OUTER JOIN or do a UNION between two queries. Using a UNION makes |
| 1312 | # it a lot faster than with a LEFT OUTER JOIN. |
| 1313 | @@ -3302,18 +3294,11 @@ |
| 1314 | |
| 1315 | def getByEmail(self, email): |
| 1316 | """See `IPersonSet`.""" |
| 1317 | - # We lookup the EmailAddress in the auth store so we can |
| 1318 | - # lookup a Person by EmailAddress in the same transaction |
| 1319 | - # that the Person or EmailAddress was created. This is not |
| 1320 | - # optimal for production as it requires two database lookups, |
| 1321 | - # but is required by much of the test suite. |
| 1322 | - conditions = (Lower(EmailAddress.email) == email.lower().strip()) |
| 1323 | - email_address = IStore(EmailAddress).find( |
| 1324 | - EmailAddress, conditions).one() |
| 1325 | - if email_address is None: |
| 1326 | - return None |
| 1327 | - else: |
| 1328 | - return IStore(Person).get(Person, email_address.personID) |
| 1329 | + email = ensure_unicode(email).strip().lower() |
| 1330 | + return IStore(Person).find( |
| 1331 | + Person, |
| 1332 | + Person.id == EmailAddress.personID, |
| 1333 | + EmailAddress.email.lower() == email).one() |
| 1334 | |
| 1335 | def latest_teams(self, limit=5): |
| 1336 | """See `IPersonSet`.""" |
| 1337 | |
| 1338 | === modified file 'lib/lp/registry/model/pillar.py' |
| 1339 | --- lib/lp/registry/model/pillar.py 2010-09-03 06:36:45 +0000 |
| 1340 | +++ lib/lp/registry/model/pillar.py 2010-11-09 15:16:26 +0000 |
| 1341 | @@ -30,6 +30,7 @@ |
| 1342 | SQLBase, |
| 1343 | sqlvalues, |
| 1344 | ) |
| 1345 | +from canonical.launchpad.helpers import ensure_unicode |
| 1346 | from canonical.launchpad.webapp.interfaces import ( |
| 1347 | DEFAULT_FLAVOR, |
| 1348 | IStoreSelector, |
| 1349 | @@ -85,6 +86,7 @@ |
| 1350 | def __contains__(self, name): |
| 1351 | """See `IPillarNameSet`.""" |
| 1352 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 1353 | + name = ensure_unicode(name) |
| 1354 | result = store.execute(""" |
| 1355 | SELECT TRUE |
| 1356 | FROM PillarName |
| 1357 | @@ -127,6 +129,7 @@ |
| 1358 | """ |
| 1359 | if ignore_inactive: |
| 1360 | query += " AND active IS TRUE" |
| 1361 | + name = ensure_unicode(name) |
| 1362 | result = store.execute(query, [name, name]) |
| 1363 | row = result.get_one() |
| 1364 | if row is None: |
| 1365 | @@ -177,7 +180,7 @@ |
| 1366 | Distribution.fti @@ ftq(%(text)s) OR |
| 1367 | lower(Distribution.title) = lower(%(text)s) |
| 1368 | ) |
| 1369 | - ''' % sqlvalues(text=text)) |
| 1370 | + ''' % sqlvalues(text=ensure_unicode(text))) |
| 1371 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 1372 | columns = [ |
| 1373 | PillarName, OtherPillarName, Product, ProjectGroup, Distribution] |
| 1374 | |
| 1375 | === modified file 'lib/lp/registry/model/projectgroup.py' |
| 1376 | --- lib/lp/registry/model/projectgroup.py 2010-11-05 14:56:34 +0000 |
| 1377 | +++ lib/lp/registry/model/projectgroup.py 2010-11-09 15:16:26 +0000 |
| 1378 | @@ -20,7 +20,6 @@ |
| 1379 | ) |
| 1380 | from storm.expr import ( |
| 1381 | And, |
| 1382 | - In, |
| 1383 | SQL, |
| 1384 | ) |
| 1385 | from storm.locals import Int |
| 1386 | @@ -331,9 +330,9 @@ |
| 1387 | """See `IHasBugs`.""" |
| 1388 | if not self.products: |
| 1389 | return [] |
| 1390 | - product_ids = sqlvalues(*self.products) |
| 1391 | + product_ids = [product.id for product in self.products] |
| 1392 | return get_bug_tags_open_count( |
| 1393 | - In(BugTask.productID, product_ids), user) |
| 1394 | + BugTask.productID.is_in(product_ids), user) |
| 1395 | |
| 1396 | def _getBugTaskContextClause(self): |
| 1397 | """See `HasBugsBase`.""" |
| 1398 | |
| 1399 | === modified file 'lib/lp/registry/model/sourcepackage.py' |
| 1400 | --- lib/lp/registry/model/sourcepackage.py 2010-11-08 12:12:29 +0000 |
| 1401 | +++ lib/lp/registry/model/sourcepackage.py 2010-11-09 15:16:26 +0000 |
| 1402 | @@ -17,7 +17,6 @@ |
| 1403 | from storm.locals import ( |
| 1404 | And, |
| 1405 | Desc, |
| 1406 | - In, |
| 1407 | Select, |
| 1408 | SQL, |
| 1409 | Store, |
| 1410 | @@ -380,12 +379,12 @@ |
| 1411 | SourcePackageRelease.id, |
| 1412 | SourcePackageRelease.sourcepackagename == |
| 1413 | self.sourcepackagename, |
| 1414 | - In(SourcePackagePublishingHistory.archiveID, |
| 1415 | + SourcePackagePublishingHistory.archiveID.is_in( |
| 1416 | self.distribution.all_distro_archive_ids))) |
| 1417 | |
| 1418 | return IStore(SourcePackageRelease).find( |
| 1419 | SourcePackageRelease, |
| 1420 | - In(SourcePackageRelease.id, subselect)).order_by(Desc( |
| 1421 | + SourcePackageRelease.id.is_in(subselect)).order_by(Desc( |
| 1422 | SQL("debversion_sort_key(SourcePackageRelease.version)"))) |
| 1423 | |
| 1424 | @property |
| 1425 | |
| 1426 | === modified file 'lib/lp/registry/model/sourcepackagename.py' |
| 1427 | --- lib/lp/registry/model/sourcepackagename.py 2010-10-04 20:46:55 +0000 |
| 1428 | +++ lib/lp/registry/model/sourcepackagename.py 2010-11-09 15:16:26 +0000 |
| 1429 | @@ -23,6 +23,7 @@ |
| 1430 | SQLBase, |
| 1431 | sqlvalues, |
| 1432 | ) |
| 1433 | +from canonical.launchpad.helpers import ensure_unicode |
| 1434 | from lp.app.errors import NotFoundError |
| 1435 | from lp.registry.errors import NoSuchSourcePackageName |
| 1436 | from lp.registry.interfaces.sourcepackagename import ( |
| 1437 | @@ -62,6 +63,7 @@ |
| 1438 | |
| 1439 | def __getitem__(self, name): |
| 1440 | """See canonical.launchpad.interfaces.ISourcePackageNameSet.""" |
| 1441 | + name = ensure_unicode(name) |
| 1442 | try: |
| 1443 | return SourcePackageName.byName(name) |
| 1444 | except SQLObjectNotFound: |
| 1445 | @@ -145,7 +147,7 @@ |
| 1446 | |
| 1447 | descriptions = {} |
| 1448 | for binarypackagename, sourcepackagename in cur.fetchall(): |
| 1449 | - if not descriptions.has_key(sourcepackagename): |
| 1450 | + if not sourcepackagename in descriptions: |
| 1451 | descriptions[sourcepackagename] = ( |
| 1452 | "Source of: %s" % binarypackagename) |
| 1453 | else: |
| 1454 | |
| 1455 | === modified file 'lib/lp/registry/vocabularies.py' |
| 1456 | --- lib/lp/registry/vocabularies.py 2010-10-20 03:13:10 +0000 |
| 1457 | +++ lib/lp/registry/vocabularies.py 2010-11-09 15:16:26 +0000 |
| 1458 | @@ -70,7 +70,6 @@ |
| 1459 | Desc, |
| 1460 | Join, |
| 1461 | LeftJoin, |
| 1462 | - Lower, |
| 1463 | Not, |
| 1464 | Or, |
| 1465 | SQL, |
| 1466 | @@ -95,8 +94,10 @@ |
| 1467 | sqlvalues, |
| 1468 | ) |
| 1469 | from canonical.launchpad.database.emailaddress import EmailAddress |
| 1470 | -from canonical.launchpad.database.stormsugar import StartsWith |
| 1471 | -from canonical.launchpad.helpers import shortlist |
| 1472 | +from canonical.launchpad.helpers import ( |
| 1473 | + ensure_unicode, |
| 1474 | + shortlist, |
| 1475 | + ) |
| 1476 | from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus |
| 1477 | from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities |
| 1478 | from canonical.launchpad.interfaces.lpstorm import IStore |
| 1479 | @@ -187,13 +188,11 @@ |
| 1480 | If the token contains an '@', treat it like an email. Otherwise, |
| 1481 | treat it like a name. |
| 1482 | """ |
| 1483 | + token = ensure_unicode(token) |
| 1484 | if "@" in token: |
| 1485 | # This looks like an email token, so let's do an object |
| 1486 | # lookup based on that. |
| 1487 | - # We retrieve the email address via the main store, so |
| 1488 | - # we can easily traverse to email.person to retrieve the |
| 1489 | - # result from the main Store as expected by our call sites. |
| 1490 | - email = IStore(Person).find( |
| 1491 | + email = IStore(EmailAddress).find( |
| 1492 | EmailAddress, |
| 1493 | EmailAddress.email.lower() == token.strip().lower()).one() |
| 1494 | if email is None: |
| 1495 | @@ -256,7 +255,7 @@ |
| 1496 | if query is None or an empty string. |
| 1497 | """ |
| 1498 | if query: |
| 1499 | - query = query.lower() |
| 1500 | + query = ensure_unicode(query).lower() |
| 1501 | like_query = "'%%' || %s || '%%'" % quote_like(query) |
| 1502 | fti_query = quote(query) |
| 1503 | sql = "active = 't' AND (name LIKE %s OR fti @@ ftq(%s))" % ( |
| 1504 | @@ -301,7 +300,7 @@ |
| 1505 | if query is None or an empty string. |
| 1506 | """ |
| 1507 | if query: |
| 1508 | - query = query.lower() |
| 1509 | + query = ensure_unicode(query).lower() |
| 1510 | like_query = "'%%' || %s || '%%'" % quote_like(query) |
| 1511 | fti_query = quote(query) |
| 1512 | sql = "active = 't' AND (name LIKE %s OR fti @@ ftq(%s))" % ( |
| 1513 | @@ -378,7 +377,7 @@ |
| 1514 | if not text: |
| 1515 | return self.emptySelectResults() |
| 1516 | |
| 1517 | - return self._select(text.lower()) |
| 1518 | + return self._select(ensure_unicode(text).lower()) |
| 1519 | |
| 1520 | |
| 1521 | class PersonAccountToMergeVocabulary( |
| 1522 | @@ -411,7 +410,7 @@ |
| 1523 | if not text: |
| 1524 | return self.emptySelectResults() |
| 1525 | |
| 1526 | - text = text.lower() |
| 1527 | + text = ensure_unicode(text).lower() |
| 1528 | return self._select(text) |
| 1529 | |
| 1530 | |
| 1531 | @@ -641,7 +640,7 @@ |
| 1532 | else: |
| 1533 | return self.emptySelectResults() |
| 1534 | |
| 1535 | - text = text.lower() |
| 1536 | + text = ensure_unicode(text).lower() |
| 1537 | return self._doSearch(text=text) |
| 1538 | |
| 1539 | def searchForTerms(self, query=None): |
| 1540 | @@ -686,7 +685,7 @@ |
| 1541 | |
| 1542 | email_storm_query = self.store.find( |
| 1543 | EmailAddress.personID, |
| 1544 | - StartsWith(Lower(EmailAddress.email), text)) |
| 1545 | + EmailAddress.email.lower().startswith(text)) |
| 1546 | email_subquery = Alias(email_storm_query._get_select(), |
| 1547 | 'EmailAddress') |
| 1548 | tables += [ |
| 1549 | @@ -1011,7 +1010,7 @@ |
| 1550 | if not query: |
| 1551 | return self.emptySelectResults() |
| 1552 | |
| 1553 | - query = query.lower() |
| 1554 | + query = ensure_unicode(query).lower() |
| 1555 | objs = self._table.select( |
| 1556 | AND( |
| 1557 | Milestone.q.id == ProductRelease.q.milestoneID, |
| 1558 | @@ -1066,7 +1065,7 @@ |
| 1559 | if not query: |
| 1560 | return self.emptySelectResults() |
| 1561 | |
| 1562 | - query = query.lower().strip('/') |
| 1563 | + query = ensure_unicode(query).lower().strip('/') |
| 1564 | # If there is a slash splitting the product and productseries |
| 1565 | # names, they must both match. If there is no slash, we don't |
| 1566 | # know whether it is matching the product or the productseries |
| 1567 | @@ -1409,7 +1408,7 @@ |
| 1568 | if not query: |
| 1569 | return self.emptySelectResults() |
| 1570 | |
| 1571 | - query = query.lower() |
| 1572 | + query = ensure_unicode(query).lower() |
| 1573 | objs = self._table.select( |
| 1574 | AND( |
| 1575 | Distribution.q.id == DistroSeries.q.distributionID, |
| 1576 | |
| 1577 | === modified file 'lib/lp/scripts/garbo.py' |
| 1578 | --- lib/lp/scripts/garbo.py 2010-10-23 16:46:30 +0000 |
| 1579 | +++ lib/lp/scripts/garbo.py 2010-11-09 15:16:26 +0000 |
| 1580 | @@ -18,7 +18,6 @@ |
| 1581 | from psycopg2 import IntegrityError |
| 1582 | import pytz |
| 1583 | from storm.locals import ( |
| 1584 | - In, |
| 1585 | Max, |
| 1586 | Min, |
| 1587 | Select, |
| 1588 | @@ -203,7 +202,8 @@ |
| 1589 | class CodeImportEventPruner(TunableLoop): |
| 1590 | """Prune `CodeImportEvent`s that are more than a month old. |
| 1591 | |
| 1592 | - Events that happened more than 30 days ago are really of no interest to us. |
| 1593 | + Events that happened more than 30 days ago are really of no |
| 1594 | + interest to us. |
| 1595 | """ |
| 1596 | |
| 1597 | maximum_chunk_size = 10000 |
| 1598 | @@ -547,7 +547,7 @@ |
| 1599 | ids_to_remove = list(self._to_remove()[:chunk_size]) |
| 1600 | num_removed = IMasterStore(BugNotification).find( |
| 1601 | BugNotification, |
| 1602 | - In(BugNotification.id, ids_to_remove)).remove() |
| 1603 | + BugNotification.id.is_in(ids_to_remove)).remove() |
| 1604 | transaction.commit() |
| 1605 | self.log.debug("Removed %d rows" % num_removed) |
| 1606 | |
| 1607 | @@ -579,7 +579,7 @@ |
| 1608 | # constraint is ON DELETE CASCADE. |
| 1609 | IMasterStore(Job).find( |
| 1610 | Job, |
| 1611 | - In(Job.id, ids_to_remove)).remove() |
| 1612 | + Job.id.is_in(ids_to_remove)).remove() |
| 1613 | else: |
| 1614 | self._is_done = True |
| 1615 | transaction.commit() |
| 1616 | @@ -719,7 +719,7 @@ |
| 1617 | chunk_size = int(chunk_size) |
| 1618 | ids_to_remove = list(self._to_remove()[:chunk_size]) |
| 1619 | self.store.find( |
| 1620 | - BugAttachment, In(BugAttachment.id, ids_to_remove)).remove() |
| 1621 | + BugAttachment, BugAttachment.id.is_in(ids_to_remove)).remove() |
| 1622 | transaction.commit() |
| 1623 | |
| 1624 | |
| 1625 | |
| 1626 | === modified file 'lib/lp/scripts/utilities/sanitizedb.py' |
| 1627 | --- lib/lp/scripts/utilities/sanitizedb.py 2010-08-30 06:46:39 +0000 |
| 1628 | +++ lib/lp/scripts/utilities/sanitizedb.py 2010-11-09 15:16:26 +0000 |
| 1629 | @@ -380,7 +380,7 @@ |
| 1630 | EmailAddress.status == EmailAddressStatus.NEW, |
| 1631 | EmailAddress.status == EmailAddressStatus.OLD, |
| 1632 | EmailAddress.email.lower().like( |
| 1633 | - '%@example.com', case_sensitive=True))).remove() |
| 1634 | + u'%@example.com', case_sensitive=True))).remove() |
| 1635 | self.store.flush() |
| 1636 | self.logger.info( |
| 1637 | "Removed %d invalid, unvalidated and old email addresses.", count) |
| 1638 | |
| 1639 | === modified file 'lib/lp/services/database/bulk.py' |
| 1640 | --- lib/lp/services/database/bulk.py 2010-08-20 20:31:18 +0000 |
| 1641 | +++ lib/lp/services/database/bulk.py 2010-11-09 15:16:26 +0000 |
| 1642 | @@ -12,7 +12,6 @@ |
| 1643 | from collections import defaultdict |
| 1644 | |
| 1645 | from storm.base import Storm |
| 1646 | -from storm.expr import In |
| 1647 | from storm.info import get_cls_info |
| 1648 | from storm.store import Store |
| 1649 | from zope.security.proxy import removeSecurityProxy |
| 1650 | @@ -55,7 +54,7 @@ |
| 1651 | primary_key_column_getter = primary_key_column.__get__ |
| 1652 | for store, objects in collate(objects, Store.of): |
| 1653 | primary_keys = map(primary_key_column_getter, objects) |
| 1654 | - condition = In(primary_key_column, primary_keys) |
| 1655 | + condition = primary_key_column.is_in(primary_keys) |
| 1656 | yield store.find(object_type, condition) |
| 1657 | |
| 1658 | |
| 1659 | |
| 1660 | === modified file 'lib/lp/services/worlddata/model/language.py' |
| 1661 | --- lib/lp/services/worlddata/model/language.py 2010-10-23 16:45:43 +0000 |
| 1662 | +++ lib/lp/services/worlddata/model/language.py 2010-11-09 15:16:26 +0000 |
| 1663 | @@ -12,7 +12,6 @@ |
| 1664 | |
| 1665 | from sqlobject import ( |
| 1666 | BoolCol, |
| 1667 | - CONTAINSSTRING, |
| 1668 | IntCol, |
| 1669 | SQLObjectNotFound, |
| 1670 | SQLRelatedJoin, |
| 1671 | @@ -26,6 +25,7 @@ |
| 1672 | SQLBase, |
| 1673 | sqlvalues, |
| 1674 | ) |
| 1675 | +from canonical.launchpad.helpers import ensure_unicode |
| 1676 | from canonical.launchpad.interfaces.lpstorm import ISlaveStore |
| 1677 | from lp.app.errors import NotFoundError |
| 1678 | from lp.services.worlddata.interfaces.language import ( |
| 1679 | @@ -243,11 +243,12 @@ |
| 1680 | def search(self, text): |
| 1681 | """See `ILanguageSet`.""" |
| 1682 | if text: |
| 1683 | + text = ensure_unicode(text).lower() |
| 1684 | results = ISlaveStore(Language).find( |
| 1685 | Language, Or( |
| 1686 | - CONTAINSSTRING(Language.code.lower(), text.lower()), |
| 1687 | - CONTAINSSTRING(Language.englishname.lower(), text.lower()) |
| 1688 | - )).order_by(Language.englishname) |
| 1689 | + Language.code.lower().contains_string(text), |
| 1690 | + Language.englishname.lower().contains_string( |
| 1691 | + text))).order_by(Language.englishname) |
| 1692 | else: |
| 1693 | results = None |
| 1694 | |
| 1695 | |
| 1696 | === modified file 'lib/lp/soyuz/doc/gina.txt' |
| 1697 | --- lib/lp/soyuz/doc/gina.txt 2010-10-20 13:33:24 +0000 |
| 1698 | +++ lib/lp/soyuz/doc/gina.txt 2010-11-09 15:16:26 +0000 |
| 1699 | @@ -439,7 +439,7 @@ |
| 1700 | 2 being uploaded by mdz and 2 by doko). |
| 1701 | |
| 1702 | >>> from sqlobject import LIKE |
| 1703 | - >>> p = Person.selectOne(LIKE(Person.q.name, "cjwatson%")) |
| 1704 | + >>> p = Person.selectOne(LIKE(Person.q.name, u"cjwatson%")) |
| 1705 | >>> print p.name |
| 1706 | cjwatson |
| 1707 | >>> print Person.select().count() - orig_person_count |
| 1708 | |
| 1709 | === modified file 'lib/lp/soyuz/doc/packageset.txt' |
| 1710 | --- lib/lp/soyuz/doc/packageset.txt 2010-10-09 16:36:22 +0000 |
| 1711 | +++ lib/lp/soyuz/doc/packageset.txt 2010-11-09 15:16:26 +0000 |
| 1712 | @@ -1119,7 +1119,7 @@ |
| 1713 | note that non-existent package sets (e.g. 'not-there') are simply ignored. |
| 1714 | |
| 1715 | >>> to_be_added = ( |
| 1716 | - ... 'gnome', 'x-win', 'universe', 'multiverse', 'not-there') |
| 1717 | + ... u'gnome', u'x-win', u'universe', u'multiverse', u'not-there') |
| 1718 | >>> umbrella_ps.addSubsets(to_be_added) |
| 1719 | >>> print_data(umbrella_ps.setsIncluded(direct_inclusion=True)) |
| 1720 | 4 -> mozilla |
| 1721 | @@ -1131,7 +1131,7 @@ |
| 1722 | Package subsets can be removed in a similar fashion. Non-existent sets |
| 1723 | or sets which are not (direct) subsets are ignored again. |
| 1724 | |
| 1725 | - >>> to_be_removed = ('umbrella', 'universe', 'multiverse', 'not-mine') |
| 1726 | + >>> to_be_removed = (u'umbrella', u'universe', u'multiverse', u'not-mine') |
| 1727 | >>> umbrella_ps.removeSubsets(to_be_removed) |
| 1728 | >>> print_data(umbrella_ps.setsIncluded(direct_inclusion=True)) |
| 1729 | 4 -> mozilla |
| 1730 | |
| 1731 | === modified file 'lib/lp/soyuz/model/archivepermission.py' |
| 1732 | --- lib/lp/soyuz/model/archivepermission.py 2010-08-23 17:16:35 +0000 |
| 1733 | +++ lib/lp/soyuz/model/archivepermission.py 2010-11-09 15:16:26 +0000 |
| 1734 | @@ -14,10 +14,7 @@ |
| 1735 | BoolCol, |
| 1736 | ForeignKey, |
| 1737 | ) |
| 1738 | -from storm.expr import ( |
| 1739 | - In, |
| 1740 | - SQL, |
| 1741 | - ) |
| 1742 | +from storm.expr import SQL |
| 1743 | from storm.locals import ( |
| 1744 | Int, |
| 1745 | Reference, |
| 1746 | @@ -116,7 +113,7 @@ |
| 1747 | elif self.permission == ArchivePermissionType.QUEUE_ADMIN: |
| 1748 | alsoProvides(self, IArchiveQueueAdmin) |
| 1749 | else: |
| 1750 | - raise AssertionError, ( |
| 1751 | + raise AssertionError( |
| 1752 | "Unknown permission type %s" % self.permission) |
| 1753 | |
| 1754 | @property |
| 1755 | @@ -163,8 +160,7 @@ |
| 1756 | ArchivePermission.permission = %s AND |
| 1757 | ArchivePermission.person = TeamParticipation.team AND |
| 1758 | TeamParticipation.person = %s |
| 1759 | - """ % sqlvalues(archive, permission, person) |
| 1760 | - ] |
| 1761 | + """ % sqlvalues(archive, permission, person)] |
| 1762 | |
| 1763 | prejoins = [] |
| 1764 | |
| 1765 | @@ -248,8 +244,7 @@ |
| 1766 | clauses = [""" |
| 1767 | ArchivePermission.archive = %s AND |
| 1768 | ArchivePermission.permission = %s |
| 1769 | - """ % sqlvalues(archive, ArchivePermissionType.UPLOAD) |
| 1770 | - ] |
| 1771 | + """ % sqlvalues(archive, ArchivePermissionType.UPLOAD)] |
| 1772 | |
| 1773 | if component is not None: |
| 1774 | component = self._nameToComponent(component) |
| 1775 | @@ -386,7 +381,8 @@ |
| 1776 | AND ap.packageset IS NOT NULL |
| 1777 | ''' |
| 1778 | query = SQL(query, (person.id, archive.id)) |
| 1779 | - return store.find(ArchivePermission, In(ArchivePermission.id, query)) |
| 1780 | + return store.find( |
| 1781 | + ArchivePermission, ArchivePermission.id.is_in(query)) |
| 1782 | |
| 1783 | def uploadersForPackageset( |
| 1784 | self, archive, packageset, direct_permissions=True): |
| 1785 | @@ -405,7 +401,8 @@ |
| 1786 | ''' |
| 1787 | query += " AND ap.archive = ?" |
| 1788 | query = SQL(query, (packageset.id, archive.id)) |
| 1789 | - return store.find(ArchivePermission, In(ArchivePermission.id, query)) |
| 1790 | + return store.find( |
| 1791 | + ArchivePermission, ArchivePermission.id.is_in(query)) |
| 1792 | |
| 1793 | def newPackagesetUploader( |
| 1794 | self, archive, person, packageset, explicit=False): |
| 1795 | @@ -424,7 +421,8 @@ |
| 1796 | ''' |
| 1797 | query = SQL(query, (person.id, packageset.id, archive.id)) |
| 1798 | permissions = list( |
| 1799 | - store.find(ArchivePermission, In(ArchivePermission.id, query))) |
| 1800 | + store.find( |
| 1801 | + ArchivePermission, ArchivePermission.id.is_in(query))) |
| 1802 | if len(permissions) > 0: |
| 1803 | # Found permissions in the database, does the 'explicit' flag |
| 1804 | # have the requested value? |
| 1805 | @@ -496,7 +494,8 @@ |
| 1806 | ''' |
| 1807 | query = SQL( |
| 1808 | query, (person.id, sourcepackagename.id, archive.id)) |
| 1809 | - return store.find(ArchivePermission, In(ArchivePermission.id, query)) |
| 1810 | + return store.find( |
| 1811 | + ArchivePermission, ArchivePermission.id.is_in(query)) |
| 1812 | |
| 1813 | def packagesetsForSource( |
| 1814 | self, archive, sourcepackagename, direct_permissions=True): |
| 1815 | |
| 1816 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' |
| 1817 | --- lib/lp/soyuz/model/binarypackagebuild.py 2010-10-06 11:46:51 +0000 |
| 1818 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2010-11-09 15:16:26 +0000 |
| 1819 | @@ -14,7 +14,6 @@ |
| 1820 | from sqlobject import SQLObjectNotFound |
| 1821 | from storm.expr import ( |
| 1822 | Desc, |
| 1823 | - In, |
| 1824 | Join, |
| 1825 | LeftJoin, |
| 1826 | ) |
| 1827 | @@ -821,7 +820,7 @@ |
| 1828 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 1829 | return store.find( |
| 1830 | BinaryPackageBuild, |
| 1831 | - In(BinaryPackageBuild.distro_arch_series_id, archseries_ids), |
| 1832 | + BinaryPackageBuild.distro_arch_series_id.is_in(archseries_ids), |
| 1833 | BinaryPackageBuild.package_build == PackageBuild.id, |
| 1834 | PackageBuild.build_farm_job == BuildFarmJob.id, |
| 1835 | BuildFarmJob.status == BuildStatus.NEEDSBUILD) |
| 1836 | @@ -1177,7 +1176,7 @@ |
| 1837 | result_set = store.using(*origin).find( |
| 1838 | (SourcePackageRelease, LibraryFileAlias, SourcePackageName, |
| 1839 | LibraryFileContent, Builder, PackageBuild, BuildFarmJob), |
| 1840 | - In(BinaryPackageBuild.id, build_ids)) |
| 1841 | + BinaryPackageBuild.id.is_in(build_ids)) |
| 1842 | |
| 1843 | # Force query execution so that the ancillary data gets fetched |
| 1844 | # and added to StupidCache. |
| 1845 | |
| 1846 | === modified file 'lib/lp/soyuz/model/binarypackagename.py' |
| 1847 | --- lib/lp/soyuz/model/binarypackagename.py 2010-10-03 15:30:06 +0000 |
| 1848 | +++ lib/lp/soyuz/model/binarypackagename.py 2010-11-09 15:16:26 +0000 |
| 1849 | @@ -13,7 +13,6 @@ |
| 1850 | |
| 1851 | # SQLObject/SQLBase |
| 1852 | from sqlobject import ( |
| 1853 | - CONTAINSSTRING, |
| 1854 | SQLObjectNotFound, |
| 1855 | StringCol, |
| 1856 | ) |
| 1857 | @@ -26,6 +25,8 @@ |
| 1858 | SQLBase, |
| 1859 | sqlvalues, |
| 1860 | ) |
| 1861 | +from canonical.launchpad.helpers import ensure_unicode |
| 1862 | +from canonical.launchpad.interfaces.lpstorm import IStore |
| 1863 | from canonical.launchpad.webapp.vocabulary import ( |
| 1864 | BatchedCountableIterator, |
| 1865 | NamedSQLObjectHugeVocabulary, |
| 1866 | @@ -66,14 +67,16 @@ |
| 1867 | |
| 1868 | def findByName(self, name): |
| 1869 | """Find binarypackagenames by its name or part of it.""" |
| 1870 | - return BinaryPackageName.select( |
| 1871 | - CONTAINSSTRING(BinaryPackageName.q.name, name)) |
| 1872 | + return IStore(BinaryPackageName).find( |
| 1873 | + BinaryPackageName, |
| 1874 | + BinaryPackageName.name.contains_string(ensure_unicode(name))) |
| 1875 | |
| 1876 | def queryByName(self, name): |
| 1877 | - return BinaryPackageName.selectOneBy(name=name) |
| 1878 | + return IStore(BinaryPackageName).find( |
| 1879 | + BinaryPackageName, name=ensure_unicode(name)).one() |
| 1880 | |
| 1881 | def new(self, name): |
| 1882 | - return BinaryPackageName(name=name) |
| 1883 | + return BinaryPackageName(name=ensure_unicode(name)) |
| 1884 | |
| 1885 | def ensure(self, name): |
| 1886 | """Ensure that the given BinaryPackageName exists, creating it |
| 1887 | @@ -81,6 +84,7 @@ |
| 1888 | |
| 1889 | Returns the BinaryPackageName |
| 1890 | """ |
| 1891 | + name = ensure_unicode(name) |
| 1892 | try: |
| 1893 | return self[name] |
| 1894 | except NotFoundError: |
| 1895 | |
| 1896 | === modified file 'lib/lp/soyuz/model/distroarchseriesbinarypackage.py' |
| 1897 | --- lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2010-11-05 14:17:11 +0000 |
| 1898 | +++ lib/lp/soyuz/model/distroarchseriesbinarypackage.py 2010-11-09 15:16:26 +0000 |
| 1899 | @@ -11,10 +11,7 @@ |
| 1900 | 'DistroArchSeriesBinaryPackage', |
| 1901 | ] |
| 1902 | |
| 1903 | -from storm.locals import ( |
| 1904 | - Desc, |
| 1905 | - In, |
| 1906 | - ) |
| 1907 | +from storm.locals import Desc |
| 1908 | from zope.interface import implements |
| 1909 | |
| 1910 | from canonical.database.sqlbase import sqlvalues |
| 1911 | @@ -180,7 +177,7 @@ |
| 1912 | orderBy='-datecreated', |
| 1913 | limit=1, |
| 1914 | distinct=True, |
| 1915 | - clauseTables=['BinaryPackagePublishingHistory',]) |
| 1916 | + clauseTables=['BinaryPackagePublishingHistory']) |
| 1917 | |
| 1918 | # Listify to limit the SQL queries to one only. |
| 1919 | results = list(releases) |
| 1920 | @@ -198,12 +195,10 @@ |
| 1921 | BinaryPackageRelease.binarypackagename == self.binarypackagename, |
| 1922 | BinaryPackagePublishingHistory.distroarchseries == |
| 1923 | self.distroarchseries, |
| 1924 | - In( |
| 1925 | - BinaryPackagePublishingHistory.archiveID, |
| 1926 | + BinaryPackagePublishingHistory.archiveID.is_in( |
| 1927 | self.distribution.all_distro_archive_ids), |
| 1928 | BinaryPackagePublishingHistory.binarypackagereleaseID == |
| 1929 | - BinaryPackageRelease.id |
| 1930 | - ).config(distinct=True).order_by( |
| 1931 | + BinaryPackageRelease.id).config(distinct=True).order_by( |
| 1932 | Desc(BinaryPackagePublishingHistory.datecreated)) |
| 1933 | |
| 1934 | @property |
| 1935 | @@ -246,4 +241,3 @@ |
| 1936 | return None |
| 1937 | else: |
| 1938 | return src_pkg_release.sourcepackage |
| 1939 | - |
| 1940 | |
| 1941 | === modified file 'lib/lp/soyuz/model/packagediff.py' |
| 1942 | --- lib/lp/soyuz/model/packagediff.py 2010-08-24 12:05:25 +0000 |
| 1943 | +++ lib/lp/soyuz/model/packagediff.py 2010-11-09 15:16:26 +0000 |
| 1944 | @@ -14,10 +14,7 @@ |
| 1945 | import tempfile |
| 1946 | |
| 1947 | from sqlobject import ForeignKey |
| 1948 | -from storm.expr import ( |
| 1949 | - Desc, |
| 1950 | - In, |
| 1951 | - ) |
| 1952 | +from storm.expr import Desc |
| 1953 | from storm.store import EmptyResultSet |
| 1954 | from zope.component import getUtility |
| 1955 | from zope.interface import implements |
| 1956 | @@ -135,7 +132,8 @@ |
| 1957 | ancestry_identifier = "%s (in %s)" % ( |
| 1958 | self.from_source.version, |
| 1959 | ancestry_archive.distribution.name.capitalize()) |
| 1960 | - return 'diff from %s to %s' % (ancestry_identifier, self.to_source.version) |
| 1961 | + return 'diff from %s to %s' % ( |
| 1962 | + ancestry_identifier, self.to_source.version) |
| 1963 | |
| 1964 | @property |
| 1965 | def private(self): |
| 1966 | @@ -280,7 +278,8 @@ |
| 1967 | return EmptyResultSet() |
| 1968 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 1969 | spr_ids = [spr.id for spr in sprs] |
| 1970 | - result = store.find(PackageDiff, In(PackageDiff.to_sourceID, spr_ids)) |
| 1971 | + result = store.find( |
| 1972 | + PackageDiff, PackageDiff.to_sourceID.is_in(spr_ids)) |
| 1973 | result.order_by(PackageDiff.to_sourceID, |
| 1974 | Desc(PackageDiff.date_requested)) |
| 1975 | return result |
| 1976 | |
| 1977 | === modified file 'lib/lp/soyuz/model/packageset.py' |
| 1978 | --- lib/lp/soyuz/model/packageset.py 2010-08-20 20:31:18 +0000 |
| 1979 | +++ lib/lp/soyuz/model/packageset.py 2010-11-09 15:16:26 +0000 |
| 1980 | @@ -6,10 +6,7 @@ |
| 1981 | |
| 1982 | import pytz |
| 1983 | from storm.exceptions import IntegrityError |
| 1984 | -from storm.expr import ( |
| 1985 | - In, |
| 1986 | - SQL, |
| 1987 | - ) |
| 1988 | +from storm.expr import SQL |
| 1989 | from storm.locals import ( |
| 1990 | DateTime, |
| 1991 | Int, |
| 1992 | @@ -20,6 +17,7 @@ |
| 1993 | from zope.component import getUtility |
| 1994 | from zope.interface import implements |
| 1995 | |
| 1996 | +from canonical.launchpad.helpers import ensure_unicode |
| 1997 | from canonical.launchpad.interfaces.lpstorm import ( |
| 1998 | IMasterStore, |
| 1999 | IStore, |
| 2000 | @@ -164,7 +162,7 @@ |
| 2001 | store = IStore(Packageset) |
| 2002 | source_names = SQL(source_name_query, (self.id,)) |
| 2003 | result_set = store.find( |
| 2004 | - SourcePackageName, In(SourcePackageName.id, source_names)) |
| 2005 | + SourcePackageName, SourcePackageName.id.is_in(source_names)) |
| 2006 | return _order_result_set(result_set) |
| 2007 | |
| 2008 | def getSourcesIncluded(self, direct_inclusion=False): |
| 2009 | @@ -191,7 +189,7 @@ |
| 2010 | params = (self.id,) |
| 2011 | store = IStore(Packageset) |
| 2012 | predecessors = SQL(query, params) |
| 2013 | - result_set = store.find(Packageset, In(Packageset.id, predecessors)) |
| 2014 | + result_set = store.find(Packageset, Packageset.id.is_in(predecessors)) |
| 2015 | return _order_result_set(result_set) |
| 2016 | |
| 2017 | def setsIncluded(self, direct_inclusion=False): |
| 2018 | @@ -213,7 +211,7 @@ |
| 2019 | params = (self.id,) |
| 2020 | store = IStore(Packageset) |
| 2021 | successors = SQL(query, params) |
| 2022 | - result_set = store.find(Packageset, In(Packageset.id, successors)) |
| 2023 | + result_set = store.find(Packageset, Packageset.id.is_in(successors)) |
| 2024 | return _order_result_set(result_set) |
| 2025 | |
| 2026 | def sourcesSharedBy(self, other_package_set, direct_inclusion=False): |
| 2027 | @@ -240,7 +238,7 @@ |
| 2028 | store = IStore(Packageset) |
| 2029 | source_names = SQL(query, (self.id, other_package_set.id)) |
| 2030 | result_set = store.find( |
| 2031 | - SourcePackageName, In(SourcePackageName.id, source_names)) |
| 2032 | + SourcePackageName, SourcePackageName.id.is_in(source_names)) |
| 2033 | return _order_result_set(result_set) |
| 2034 | |
| 2035 | def getSourcesSharedBy(self, other_package_set, direct_inclusion=False): |
| 2036 | @@ -253,13 +251,13 @@ |
| 2037 | if direct_inclusion == False: |
| 2038 | query = ''' |
| 2039 | SELECT pss_this.sourcepackagename |
| 2040 | - FROM packagesetsources pss_this, |
| 2041 | + FROM packagesetsources pss_this, |
| 2042 | flatpackagesetinclusion fpsi_this |
| 2043 | WHERE pss_this.packageset = fpsi_this.child |
| 2044 | AND fpsi_this.parent = ? |
| 2045 | EXCEPT |
| 2046 | SELECT pss_other.sourcepackagename |
| 2047 | - FROM packagesetsources pss_other, |
| 2048 | + FROM packagesetsources pss_other, |
| 2049 | flatpackagesetinclusion fpsi_other |
| 2050 | WHERE pss_other.packageset = fpsi_other.child |
| 2051 | AND fpsi_other.parent = ? |
| 2052 | @@ -276,7 +274,7 @@ |
| 2053 | store = IStore(Packageset) |
| 2054 | source_names = SQL(query, (self.id, other_package_set.id)) |
| 2055 | result_set = store.find( |
| 2056 | - SourcePackageName, In(SourcePackageName.id, source_names)) |
| 2057 | + SourcePackageName, SourcePackageName.id.is_in(source_names)) |
| 2058 | return _order_result_set(result_set) |
| 2059 | |
| 2060 | def getSourcesNotSharedBy( |
| 2061 | @@ -295,25 +293,27 @@ |
| 2062 | |
| 2063 | def addSources(self, names): |
| 2064 | """See `IPackageset`.""" |
| 2065 | - clauses = (SourcePackageName, In(SourcePackageName.name, names)) |
| 2066 | + if isinstance(names, basestring): |
| 2067 | + names = [ensure_unicode(names)] |
| 2068 | + clauses = (SourcePackageName, SourcePackageName.name.is_in(names)) |
| 2069 | self._api_add_or_remove(clauses, self._addSourcePackageNames) |
| 2070 | |
| 2071 | def removeSources(self, names): |
| 2072 | """See `IPackageset`.""" |
| 2073 | - clauses = (SourcePackageName, In(SourcePackageName.name, names)) |
| 2074 | + clauses = (SourcePackageName, SourcePackageName.name.is_in(names)) |
| 2075 | self._api_add_or_remove(clauses, self._removeSourcePackageNames) |
| 2076 | |
| 2077 | def addSubsets(self, names): |
| 2078 | """See `IPackageset`.""" |
| 2079 | clauses = ( |
| 2080 | - Packageset, In(Packageset.name, names), |
| 2081 | + Packageset, Packageset.name.is_in(names), |
| 2082 | Packageset.distroseries == self.distroseries) |
| 2083 | self._api_add_or_remove(clauses, self._addDirectSuccessors) |
| 2084 | |
| 2085 | def removeSubsets(self, names): |
| 2086 | """See `IPackageset`.""" |
| 2087 | clauses = ( |
| 2088 | - Packageset, In(Packageset.name, names), |
| 2089 | + Packageset, Packageset.name.is_in(names), |
| 2090 | Packageset.distroseries == self.distroseries) |
| 2091 | self._api_add_or_remove(clauses, self._removeDirectSuccessors) |
| 2092 | |
| 2093 | @@ -381,7 +381,7 @@ |
| 2094 | if not isinstance(name, unicode): |
| 2095 | name = unicode(name, 'utf-8') |
| 2096 | |
| 2097 | - ubuntu = getUtility(IDistributionSet).getByName('ubuntu') |
| 2098 | + ubuntu = getUtility(IDistributionSet).getByName(u'ubuntu') |
| 2099 | extra_args = [] |
| 2100 | if distroseries is not None: |
| 2101 | # If the user just passed a distro series name, look it up. |
| 2102 | @@ -439,7 +439,7 @@ |
| 2103 | ''' |
| 2104 | store = IStore(Packageset) |
| 2105 | psets = SQL(query, (sourcepackagename.id,)) |
| 2106 | - clauses = [In(Packageset.id, psets)] |
| 2107 | + clauses = [Packageset.id.is_in(psets)] |
| 2108 | if distroseries: |
| 2109 | clauses.append(Packageset.distroseries == distroseries) |
| 2110 | |
| 2111 | |
| 2112 | === modified file 'lib/lp/soyuz/model/publishing.py' |
| 2113 | --- lib/lp/soyuz/model/publishing.py 2010-11-09 00:01:48 +0000 |
| 2114 | +++ lib/lp/soyuz/model/publishing.py 2010-11-09 15:16:26 +0000 |
| 2115 | @@ -31,7 +31,6 @@ |
| 2116 | ) |
| 2117 | from storm.expr import ( |
| 2118 | Desc, |
| 2119 | - In, |
| 2120 | LeftJoin, |
| 2121 | Sum, |
| 2122 | ) |
| 2123 | @@ -116,6 +115,7 @@ |
| 2124 | |
| 2125 | |
| 2126 | # XXX cprov 2006-08-18: move it away, perhaps archivepublisher/pool.py |
| 2127 | + |
| 2128 | def makePoolPath(source_name, component_name): |
| 2129 | """Return the pool path for a given source name and component name.""" |
| 2130 | from lp.archivepublisher.diskpool import poolify |
| 2131 | @@ -137,7 +137,8 @@ |
| 2132 | sha1 = filealias.content.sha1 |
| 2133 | path = diskpool.pathFor(component, source, filename) |
| 2134 | |
| 2135 | - action = diskpool.addFile(component, source, filename, sha1, filealias) |
| 2136 | + action = diskpool.addFile( |
| 2137 | + component, source, filename, sha1, filealias) |
| 2138 | if action == diskpool.results.FILE_ADDED: |
| 2139 | log.debug("Added %s from library" % path) |
| 2140 | elif action == diskpool.results.SYMLINK_ADDED: |
| 2141 | @@ -295,7 +296,7 @@ |
| 2142 | for pub_file in self.files: |
| 2143 | pub_file.publish(diskpool, log) |
| 2144 | except PoolFileOverwriteError, e: |
| 2145 | - message = "PoolFileOverwriteError: %s, skipping." % e |
| 2146 | + message = "PoolFileOverwriteError: %s, skipping." % e |
| 2147 | properties = [('error-explanation', message)] |
| 2148 | request = ScriptRequest(properties) |
| 2149 | error_utility = ErrorReportingUtility() |
| 2150 | @@ -635,22 +636,19 @@ |
| 2151 | def meta_sourcepackage(self): |
| 2152 | """see `ISourcePackagePublishingHistory`.""" |
| 2153 | return self.distroseries.getSourcePackage( |
| 2154 | - self.sourcepackagerelease.sourcepackagename |
| 2155 | - ) |
| 2156 | + self.sourcepackagerelease.sourcepackagename) |
| 2157 | |
| 2158 | @property |
| 2159 | def meta_sourcepackagerelease(self): |
| 2160 | """see `ISourcePackagePublishingHistory`.""" |
| 2161 | return self.distroseries.distribution.getSourcePackageRelease( |
| 2162 | - self.sourcepackagerelease |
| 2163 | - ) |
| 2164 | + self.sourcepackagerelease) |
| 2165 | |
| 2166 | @property |
| 2167 | def meta_distroseriessourcepackagerelease(self): |
| 2168 | """see `ISourcePackagePublishingHistory`.""" |
| 2169 | return self.distroseries.getSourcePackageRelease( |
| 2170 | - self.sourcepackagerelease |
| 2171 | - ) |
| 2172 | + self.sourcepackagerelease) |
| 2173 | |
| 2174 | @property |
| 2175 | def meta_supersededby(self): |
| 2176 | @@ -658,8 +656,7 @@ |
| 2177 | if not self.supersededby: |
| 2178 | return None |
| 2179 | return self.distroseries.distribution.getSourcePackageRelease( |
| 2180 | - self.supersededby |
| 2181 | - ) |
| 2182 | + self.supersededby) |
| 2183 | |
| 2184 | @property |
| 2185 | def source_package_name(self): |
| 2186 | @@ -778,8 +775,7 @@ |
| 2187 | distroseries, |
| 2188 | self.component, |
| 2189 | self.section, |
| 2190 | |
| 2191 | - ) |
| 2192 | + pocket) |
| 2193 | |
| 2194 | def getStatusSummaryForBuilds(self): |
| 2195 | """See `ISourcePackagePublishingHistory`.""" |
| 2196 | @@ -1006,7 +1002,8 @@ |
| 2197 | by new overrides from superseding itself. |
| 2198 | """ |
| 2199 | available_architectures = [ |
| 2200 | - das.id for das in self.distroarchseries.distroseries.architectures] |
| 2201 | + das.id for das in |
| 2202 | + self.distroarchseries.distroseries.architectures] |
| 2203 | return IMasterStore(BinaryPackagePublishingHistory).find( |
| 2204 | BinaryPackagePublishingHistory, |
| 2205 | BinaryPackagePublishingHistory.status.is_in( |
| 2206 | @@ -1370,8 +1367,7 @@ |
| 2207 | DistroArchSeries.distroseriesID, |
| 2208 | SourcePackagePublishingHistory.sourcepackagereleaseID == |
| 2209 | BinaryPackageBuild.source_package_release_id, |
| 2210 | - In(SourcePackagePublishingHistory.id, source_publication_ids) |
| 2211 | - ) |
| 2212 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)) |
| 2213 | |
| 2214 | # First, we'll find the builds that were built in the same |
| 2215 | # archive context as the published sources. |
| 2216 | @@ -1486,16 +1482,14 @@ |
| 2217 | SourcePackagePublishingHistory.pocket, |
| 2218 | BinaryPackagePublishingHistory.archiveID == |
| 2219 | SourcePackagePublishingHistory.archiveID, |
| 2220 | - In(SourcePackagePublishingHistory.id, source_publication_ids) |
| 2221 | - ] |
| 2222 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)] |
| 2223 | |
| 2224 | # If the call-site requested to join only on binaries published |
| 2225 | # with an active publishing status then we need to further restrict |
| 2226 | # the join. |
| 2227 | if active_binaries_only: |
| 2228 | - join.append( |
| 2229 | - In(BinaryPackagePublishingHistory.status, |
| 2230 | - [enum.value for enum in active_publishing_status])) |
| 2231 | + join.append(BinaryPackagePublishingHistory.status.is_in( |
| 2232 | + active_publishing_status)) |
| 2233 | |
| 2234 | return join |
| 2235 | |
| 2236 | @@ -1519,11 +1513,9 @@ |
| 2237 | one_or_more_source_publications) |
| 2238 | |
| 2239 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
| 2240 | - published_builds = store.find(( |
| 2241 | - SourcePackagePublishingHistory, |
| 2242 | - BinaryPackageBuild, |
| 2243 | - DistroArchSeries |
| 2244 | - ), |
| 2245 | + published_builds = store.find( |
| 2246 | + (SourcePackagePublishingHistory, BinaryPackageBuild, |
| 2247 | + DistroArchSeries), |
| 2248 | self._getSourceBinaryJoinForSources( |
| 2249 | source_publication_ids, active_binaries_only=False), |
| 2250 | BinaryPackagePublishingHistory.datepublished != None, |
| 2251 | @@ -1567,7 +1559,7 @@ |
| 2252 | BinaryPackageRelease.id, |
| 2253 | BinaryPackagePublishingHistory.archiveID == |
| 2254 | SourcePackagePublishingHistory.archiveID, |
| 2255 | - In(SourcePackagePublishingHistory.id, source_publication_ids)) |
| 2256 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)) |
| 2257 | |
| 2258 | return binary_result.order_by(LibraryFileAlias.id) |
| 2259 | |
| 2260 | @@ -1584,7 +1576,7 @@ |
| 2261 | LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID, |
| 2262 | SourcePackageReleaseFile.sourcepackagerelease == |
| 2263 | SourcePackagePublishingHistory.sourcepackagereleaseID, |
| 2264 | - In(SourcePackagePublishingHistory.id, source_publication_ids)) |
| 2265 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)) |
| 2266 | |
| 2267 | binary_result = self.getBinaryFilesForSources( |
| 2268 | one_or_more_source_publications) |
| 2269 | @@ -1638,7 +1630,7 @@ |
| 2270 | LibraryFileAlias, LibraryFileContent), |
| 2271 | SourcePackagePublishingHistory.sourcepackagereleaseID == |
| 2272 | PackageDiff.to_sourceID, |
| 2273 | - In(SourcePackagePublishingHistory.id, source_publication_ids)) |
| 2274 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)) |
| 2275 | |
| 2276 | result_set.order_by( |
| 2277 | SourcePackagePublishingHistory.id, |
| 2278 | @@ -1676,7 +1668,7 @@ |
| 2279 | SourcePackageRelease.id, |
| 2280 | SourcePackageRelease.id == |
| 2281 | SourcePackagePublishingHistory.sourcepackagereleaseID, |
| 2282 | - In(SourcePackagePublishingHistory.id, source_publication_ids)) |
| 2283 | + SourcePackagePublishingHistory.id.is_in(source_publication_ids)) |
| 2284 | |
| 2285 | result_set.order_by(SourcePackagePublishingHistory.id) |
| 2286 | return result_set |
| 2287 | |
| 2288 | === modified file 'lib/lp/soyuz/model/queue.py' |
| 2289 | --- lib/lp/soyuz/model/queue.py 2010-10-22 04:12:39 +0000 |
| 2290 | +++ lib/lp/soyuz/model/queue.py 2010-11-09 15:16:26 +0000 |
| 2291 | @@ -26,7 +26,6 @@ |
| 2292 | ) |
| 2293 | from storm.locals import ( |
| 2294 | Desc, |
| 2295 | - In, |
| 2296 | Join, |
| 2297 | ) |
| 2298 | from storm.store import Store |
| 2299 | @@ -103,7 +102,6 @@ |
| 2300 | # of the archivepublisher which cause circular import errors if they |
| 2301 | # are placed here. |
| 2302 | |
| 2303 | - |
| 2304 | def debug(logger, msg): |
| 2305 | """Shorthand debug notation for publish() methods.""" |
| 2306 | if logger is not None: |
| 2307 | @@ -675,23 +673,20 @@ |
| 2308 | """See `IPackageUpload`.""" |
| 2309 | return PackageUploadSource( |
| 2310 | packageupload=self, |
| 2311 | - sourcepackagerelease=spr.id |
| 2312 | - ) |
| 2313 | + sourcepackagerelease=spr.id) |
| 2314 | |
| 2315 | def addBuild(self, build): |
| 2316 | """See `IPackageUpload`.""" |
| 2317 | return PackageUploadBuild( |
| 2318 | packageupload=self, |
| 2319 | - build=build.id |
| 2320 | - ) |
| 2321 | + build=build.id) |
| 2322 | |
| 2323 | def addCustom(self, library_file, custom_type): |
| 2324 | """See `IPackageUpload`.""" |
| 2325 | return PackageUploadCustom( |
| 2326 | packageupload=self, |
| 2327 | libraryfilealias=library_file.id, |
| 2328 | - customformat=custom_type |
| 2329 | - ) |
| 2330 | + customformat=custom_type) |
| 2331 | |
| 2332 | def isPPA(self): |
| 2333 | """See `IPackageUpload`.""" |
| 2334 | @@ -767,12 +762,12 @@ |
| 2335 | # uploads. |
| 2336 | for build in self.builds: |
| 2337 | for bpr in build.build.binarypackages: |
| 2338 | - files.extend( |
| 2339 | - [(bpf.libraryfile.filename,'','') for bpf in bpr.files]) |
| 2340 | + files.extend([ |
| 2341 | + (bpf.libraryfile.filename, '', '') for bpf in bpr.files]) |
| 2342 | |
| 2343 | if self.customfiles: |
| 2344 | files.extend( |
| 2345 | - [(file.libraryfilealias.filename,'','') |
| 2346 | + [(file.libraryfilealias.filename, '', '') |
| 2347 | for file in self.customfiles]) |
| 2348 | |
| 2349 | return files |
| 2350 | @@ -1115,7 +1110,7 @@ |
| 2351 | # There can be no recipients if none of the emails are registered |
| 2352 | # in LP. |
| 2353 | if not recipients: |
| 2354 | - debug(self.logger,"No recipients on email, not sending.") |
| 2355 | + debug(self.logger, "No recipients on email, not sending.") |
| 2356 | return |
| 2357 | |
| 2358 | # Make the content of the actual changes file available to the |
| 2359 | @@ -1227,7 +1222,7 @@ |
| 2360 | :attach_changes: A flag governing whether the original changesfile |
| 2361 | content shall be attached to the email. |
| 2362 | """ |
| 2363 | - extra_headers = { 'X-Katie' : 'Launchpad actually' } |
| 2364 | + extra_headers = {'X-Katie': 'Launchpad actually'} |
| 2365 | |
| 2366 | # XXX cprov 20071212: ideally we only need to check archive.purpose, |
| 2367 | # however the current code in uploadprocessor.py (around line 259) |
| 2368 | @@ -1403,8 +1398,7 @@ |
| 2369 | |
| 2370 | packageupload = ForeignKey( |
| 2371 | dbName='packageupload', |
| 2372 | - foreignKey='PackageUpload' |
| 2373 | - ) |
| 2374 | + foreignKey='PackageUpload') |
| 2375 | |
| 2376 | build = ForeignKey(dbName='build', foreignKey='BinaryPackageBuild') |
| 2377 | |
| 2378 | @@ -1440,6 +1434,7 @@ |
| 2379 | # At this point (uploads are already processed) sections are |
| 2380 | # guaranteed to exist in the DB. We don't care if sections are |
| 2381 | # not official. |
| 2382 | + pass |
| 2383 | |
| 2384 | def publish(self, logger=None): |
| 2385 | """See `IPackageUploadBuild`.""" |
| 2386 | @@ -1499,8 +1494,7 @@ |
| 2387 | component=binary.component, |
| 2388 | section=binary.section, |
| 2389 | priority=binary.priority, |
| 2390 | - pocket=self.packageupload.pocket |
| 2391 | - ) |
| 2392 | + pocket=self.packageupload.pocket) |
| 2393 | published_binaries.append(bpph) |
| 2394 | return published_binaries |
| 2395 | |
| 2396 | @@ -1514,13 +1508,11 @@ |
| 2397 | |
| 2398 | packageupload = ForeignKey( |
| 2399 | dbName='packageupload', |
| 2400 | - foreignKey='PackageUpload' |
| 2401 | - ) |
| 2402 | + foreignKey='PackageUpload') |
| 2403 | |
| 2404 | sourcepackagerelease = ForeignKey( |
| 2405 | dbName='sourcepackagerelease', |
| 2406 | - foreignKey='SourcePackageRelease' |
| 2407 | - ) |
| 2408 | + foreignKey='SourcePackageRelease') |
| 2409 | |
| 2410 | def getSourceAncestry(self): |
| 2411 | """See `IPackageUploadSource`.""" |
| 2412 | @@ -1627,6 +1619,7 @@ |
| 2413 | # At this point (uploads are already processed) sections are |
| 2414 | # guaranteed to exist in the DB. We don't care if sections are |
| 2415 | # not official. |
| 2416 | + pass |
| 2417 | |
| 2418 | def publish(self, logger=None): |
| 2419 | """See `IPackageUploadSource`.""" |
| 2420 | @@ -1644,8 +1637,7 @@ |
| 2421 | distroseries=self.packageupload.distroseries, |
| 2422 | component=self.sourcepackagerelease.component, |
| 2423 | section=self.sourcepackagerelease.section, |
| 2424 | - pocket=self.packageupload.pocket |
| 2425 | - ) |
| 2426 | + pocket=self.packageupload.pocket) |
| 2427 | |
| 2428 | |
| 2429 | class PackageUploadCustom(SQLBase): |
| 2430 | @@ -1656,8 +1648,7 @@ |
| 2431 | |
| 2432 | packageupload = ForeignKey( |
| 2433 | dbName='packageupload', |
| 2434 | - foreignKey='PackageUpload' |
| 2435 | - ) |
| 2436 | + foreignKey='PackageUpload') |
| 2437 | |
| 2438 | customformat = EnumCol(dbName='customformat', unique=False, |
| 2439 | notNull=True, schema=PackageUploadCustomFormat) |
| 2440 | @@ -1903,11 +1894,11 @@ |
| 2441 | # method can be removed and call sites updated to use this one. |
| 2442 | store = Store.of(distroseries) |
| 2443 | |
| 2444 | - def dbitem_values_tuple(item_or_list): |
| 2445 | + def dbitem_tuple(item_or_list): |
| 2446 | if not isinstance(item_or_list, list): |
| 2447 | - return (item_or_list.value,) |
| 2448 | + return (item_or_list,) |
| 2449 | else: |
| 2450 | - return tuple(item.value for item in item_or_list) |
| 2451 | + return tuple(item_or_list) |
| 2452 | |
| 2453 | timestamp_query_clause = () |
| 2454 | if created_since_date is not None: |
| 2455 | @@ -1916,34 +1907,31 @@ |
| 2456 | |
| 2457 | status_query_clause = () |
| 2458 | if status is not None: |
| 2459 | - status = dbitem_values_tuple(status) |
| 2460 | - status_query_clause = ( |
| 2461 | - In(PackageUpload.status, status),) |
| 2462 | + status = dbitem_tuple(status) |
| 2463 | + status_query_clause = (PackageUpload.status.is_in(status),) |
| 2464 | |
| 2465 | archives = distroseries.distribution.getArchiveIDList(archive) |
| 2466 | - archive_query_clause = ( |
| 2467 | - In(PackageUpload.archiveID, archives),) |
| 2468 | + archive_query_clause = (PackageUpload.archiveID.is_in(archives),) |
| 2469 | |
| 2470 | pocket_query_clause = () |
| 2471 | if pocket is not None: |
| 2472 | - pocket = dbitem_values_tuple(pocket) |
| 2473 | - pocket_query_clause = ( |
| 2474 | - In(PackageUpload.pocket, pocket),) |
| 2475 | + pocket = dbitem_tuple(pocket) |
| 2476 | + pocket_query_clause = (PackageUpload.pocket.is_in(pocket),) |
| 2477 | |
| 2478 | custom_type_query_clause = () |
| 2479 | if custom_type is not None: |
| 2480 | - custom_type = dbitem_values_tuple(custom_type) |
| 2481 | + custom_type = dbitem_tuple(custom_type) |
| 2482 | custom_type_query_clause = ( |
| 2483 | PackageUpload.id == PackageUploadCustom.packageuploadID, |
| 2484 | - In(PackageUploadCustom.customformat, custom_type)) |
| 2485 | + PackageUploadCustom.customformat.is_in(custom_type)) |
| 2486 | |
| 2487 | return store.find( |
| 2488 | PackageUpload, |
| 2489 | PackageUpload.distroseries == distroseries, |
| 2490 | *(status_query_clause + archive_query_clause + |
| 2491 | pocket_query_clause + timestamp_query_clause + |
| 2492 | - custom_type_query_clause) |
| 2493 | - ).order_by(Desc(PackageUpload.id)).config(distinct=True) |
| 2494 | + custom_type_query_clause)).order_by( |
| 2495 | + Desc(PackageUpload.id)).config(distinct=True) |
| 2496 | |
| 2497 | def getBuildByBuildIDs(self, build_ids): |
| 2498 | """See `IPackageUploadSet`.""" |
| 2499 | |
| 2500 | === modified file 'lib/lp/soyuz/scripts/initialise_distroseries.py' |
| 2501 | --- lib/lp/soyuz/scripts/initialise_distroseries.py 2010-10-18 04:19:10 +0000 |
| 2502 | +++ lib/lp/soyuz/scripts/initialise_distroseries.py 2010-11-09 15:16:26 +0000 |
| 2503 | @@ -13,6 +13,7 @@ |
| 2504 | from zope.component import getUtility |
| 2505 | |
| 2506 | from canonical.database.sqlbase import sqlvalues |
| 2507 | +from canonical.launchpad.helpers import ensure_unicode |
| 2508 | from canonical.launchpad.interfaces.lpstorm import IMasterStore |
| 2509 | from lp.buildmaster.enums import BuildStatus |
| 2510 | from lp.registry.interfaces.pocket import PackagePublishingPocket |
| 2511 | @@ -65,7 +66,8 @@ |
| 2512 | self.distroseries = distroseries |
| 2513 | self.parent = self.distroseries.parent_series |
| 2514 | self.arches = arches |
| 2515 | - self.packagesets = packagesets |
| 2516 | + self.packagesets = [ |
| 2517 | + ensure_unicode(packageset) for packageset in packagesets] |
| 2518 | self.rebuild = rebuild |
| 2519 | self._store = IMasterStore(DistroSeries) |
| 2520 | |
| 2521 | |
| 2522 | === modified file 'lib/lp/translations/model/pofile.py' |
| 2523 | --- lib/lp/translations/model/pofile.py 2010-11-05 14:56:34 +0000 |
| 2524 | +++ lib/lp/translations/model/pofile.py 2010-11-09 15:16:26 +0000 |
| 2525 | @@ -27,7 +27,6 @@ |
| 2526 | And, |
| 2527 | Coalesce, |
| 2528 | Exists, |
| 2529 | - In, |
| 2530 | Join, |
| 2531 | LeftJoin, |
| 2532 | Not, |
| 2533 | @@ -1574,7 +1573,7 @@ |
| 2534 | TranslationTemplateItem.potemplateID == POFile.potemplateID, |
| 2535 | POTMsgSet.id == TranslationTemplateItem.potmsgsetID, |
| 2536 | POTMsgSet.msgid_singular == POMsgID.id, |
| 2537 | - In(POMsgID.msgid, POTMsgSet.credits_message_ids)] |
| 2538 | + POMsgID.msgid.is_in(POTMsgSet.credits_message_ids)] |
| 2539 | if untranslated: |
| 2540 | message_select = Select( |
| 2541 | True, |
| 2542 | |
| 2543 | === modified file 'lib/lp/translations/model/potemplate.py' |
| 2544 | --- lib/lp/translations/model/potemplate.py 2010-11-08 12:09:21 +0000 |
| 2545 | +++ lib/lp/translations/model/potemplate.py 2010-11-09 15:16:26 +0000 |
| 2546 | @@ -32,7 +32,6 @@ |
| 2547 | from storm.expr import ( |
| 2548 | And, |
| 2549 | Desc, |
| 2550 | - In, |
| 2551 | Join, |
| 2552 | LeftJoin, |
| 2553 | Or, |
| 2554 | @@ -463,7 +462,7 @@ |
| 2555 | result = store.using(POTMsgSet, origin1, origin2).find( |
| 2556 | POTMsgSet, |
| 2557 | TranslationTemplateItem.potemplate == self, |
| 2558 | - In(POMsgID.msgid, POTMsgSet.credits_message_ids)) |
| 2559 | + POMsgID.msgid.is_in(POTMsgSet.credits_message_ids)) |
| 2560 | # Filter these candidates because is_translation_credit checks for |
| 2561 | # more conditions than the special msgids. |
| 2562 | for potmsgset in result: |
| 2563 | |
| 2564 | === modified file 'lib/lp/translations/model/translationimportqueue.py' |
| 2565 | --- lib/lp/translations/model/translationimportqueue.py 2010-10-29 10:09:04 +0000 |
| 2566 | +++ lib/lp/translations/model/translationimportqueue.py 2010-11-09 15:16:26 +0000 |
| 2567 | @@ -28,7 +28,6 @@ |
| 2568 | ) |
| 2569 | from storm.expr import ( |
| 2570 | And, |
| 2571 | - Like, |
| 2572 | Or, |
| 2573 | ) |
| 2574 | from storm.locals import ( |
| 2575 | @@ -1363,7 +1362,7 @@ |
| 2576 | deletion_clauses.append(And( |
| 2577 | TranslationImportQueueEntry.distroseries_id != None, |
| 2578 | TranslationImportQueueEntry.date_status_changed < blocked_cutoff, |
| 2579 | - Like(TranslationImportQueueEntry.path, '%.po'))) |
| 2580 | + TranslationImportQueueEntry.path.like(u'%.po'))) |
| 2581 | |
| 2582 | entries = store.find( |
| 2583 | TranslationImportQueueEntry, Or(*deletion_clauses)) |
| 2584 | |
| 2585 | === modified file 'lib/lp/translations/scripts/migrate_variants.py' |
| 2586 | --- lib/lp/translations/scripts/migrate_variants.py 2010-08-20 20:31:18 +0000 |
| 2587 | +++ lib/lp/translations/scripts/migrate_variants.py 2010-11-09 15:16:26 +0000 |
| 2588 | @@ -8,7 +8,6 @@ |
| 2589 | |
| 2590 | import logging |
| 2591 | |
| 2592 | -from storm.expr import In |
| 2593 | from zope.component import getUtility |
| 2594 | from zope.interface import implements |
| 2595 | |
| 2596 | @@ -70,15 +69,16 @@ |
| 2597 | self.start_at = None |
| 2598 | else: |
| 2599 | if self.title == 'TranslationMessage': |
| 2600 | - results = self.store.find(TranslationMessage, |
| 2601 | - In(TranslationMessage.id, object_ids)) |
| 2602 | - results.set(TranslationMessage.language==self.language, |
| 2603 | - variant=None) |
| 2604 | + results = self.store.find( |
| 2605 | + TranslationMessage, |
| 2606 | + TranslationMessage.id.is_in(object_ids)) |
| 2607 | + results.set( |
| 2608 | + TranslationMessage.language==self.language, variant=None) |
| 2609 | else: |
| 2610 | - results = self.store.find(POFile, |
| 2611 | - In(POFile.id, object_ids)) |
| 2612 | - results.set(POFile.language==self.language, |
| 2613 | - variant=None) |
| 2614 | + results = self.store.find( |
| 2615 | + POFile, POFile.id.is_in(object_ids)) |
| 2616 | + results.set( |
| 2617 | + POFile.language==self.language, variant=None) |
| 2618 | |
| 2619 | self.transaction.commit() |
| 2620 | self.transaction.begin() |
| 2621 | |
| 2622 | === modified file 'setup.py' |
| 2623 | --- setup.py 2010-11-08 01:08:15 +0000 |
| 2624 | +++ setup.py 2010-11-09 15:16:26 +0000 |
| 2625 | @@ -54,6 +54,7 @@ |
| 2626 | 'mocker', |
| 2627 | 'oauth', |
| 2628 | 'paramiko', |
| 2629 | + 'psycopg2', |
| 2630 | 'python-memcached', |
| 2631 | 'pyasn1', |
| 2632 | 'pydkim', |
| 2633 | |
| 2634 | === modified file 'versions.cfg' |
| 2635 | --- versions.cfg 2010-11-09 07:13:41 +0000 |
| 2636 | +++ versions.cfg 2010-11-09 15:16:26 +0000 |
| 2637 | @@ -48,6 +48,7 @@ |
| 2638 | paramiko = 1.7.4 |
| 2639 | Paste = 1.7.2 |
| 2640 | PasteDeploy = 1.3.3 |
| 2641 | +psycopg2 = 2.2.2 |
| 2642 | pyasn1 = 0.0.9a |
| 2643 | pycrypto = 2.0.1 |
| 2644 | pydkim = 0.3-mbp-r7 |
