Merge lp:~stefanor/ibid/db-case-insensitive-401320 into lp:~ibid-core/ibid/old-trunk-1.6
- db-case-insensitive-401320
- Merge into old-trunk-1.6
Proposed by
Stefano Rivera
Status: | Merged |
---|---|
Approved by: | Michael Gorven |
Approved revision: | 812 |
Merged at revision: | 816 |
Proposed branch: | lp:~stefanor/ibid/db-case-insensitive-401320 |
Merge into: | lp:~ibid-core/ibid/old-trunk-1.6 |
Diff against target: |
2585 lines (+976/-689) 20 files modified
ibid/auth.py (+7/-6) ibid/core.py (+7/-6) ibid/db/__init__.py (+18/-0) ibid/db/models.py (+96/-478) ibid/db/types.py (+62/-0) ibid/db/versioned_schema.py (+515/-0) ibid/plugins/auth.py (+4/-6) ibid/plugins/factoid.py (+48/-29) ibid/plugins/feeds.py (+38/-26) ibid/plugins/identity.py (+23/-24) ibid/plugins/karma.py (+33/-20) ibid/plugins/memo.py (+61/-43) ibid/plugins/seen.py (+28/-18) ibid/plugins/url.py (+12/-6) ibid/source/irc.py (+5/-3) scripts/ibid-db (+5/-3) scripts/ibid-factpack (+6/-9) scripts/ibid-plugin (+1/-1) scripts/ibid-setup (+3/-2) scripts/ibid_import (+4/-9) |
To merge this branch: | bzr merge lp:~stefanor/ibid/db-case-insensitive-401320 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Michael Gorven | Approve | ||
Jonathan Hitchcock | Approve | ||
Review via email: mp+16403@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Stefano Rivera (stefanor) wrote : | # |
Revision history for this message
Stefano Rivera (stefanor) wrote : | # |
The review diff isn't very helpful because I broke models.py into db/models.py and db/versioned_schema.py
Here are some diffs for those sections:
http://
http://
- 805. By Stefano Rivera
-
SQLAlchemy 0.4 compatibility
- 806. By Stefano Rivera
-
Don't lower() in ibid_import and ibid-factpack
- 807. By Stefano Rivera
-
Unicode please, ensure we have a factpack ID before setting it everywhere
- 808. By Stefano Rivera
-
Merge from trunk
- 809. By Stefano Rivera
-
Merge from trunk
- 810. By Stefano Rivera
-
summon- uses and_
Revision history for this message
Jonathan Hitchcock (vhata) : | # |
review:
Approve
- 811. By Stefano Rivera
-
Querying the wrong table, use .filter() syntax
- 812. By Stefano Rivera
-
Merge from trunk
Revision history for this message
Michael Gorven (mgorven) wrote : | # |
Let's break stuff! ;-)
review approve
status approved
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'ibid/auth.py' |
2 | --- ibid/auth.py 2009-10-19 15:07:40 +0000 |
3 | +++ ibid/auth.py 2009-12-29 16:00:32 +0000 |
4 | @@ -8,7 +8,7 @@ |
5 | |
6 | import ibid |
7 | from ibid.compat import hashlib |
8 | -from ibid.models import Credential, Permission |
9 | +from ibid.db.models import Credential, Permission |
10 | |
11 | chars = string.letters + string.digits |
12 | permission_re = re.compile('^([+-]?)(\S+)$') |
13 | @@ -23,8 +23,8 @@ |
14 | def permission(name, account, source, session): |
15 | if account: |
16 | permission = session.query(Permission) \ |
17 | - .filter_by(account_id=account) \ |
18 | - .filter_by(name=name).first() |
19 | + .filter_by(account_id=account, name=name) \ |
20 | + .first() |
21 | |
22 | if permission: |
23 | return permission.value |
24 | @@ -123,9 +123,10 @@ |
25 | return False |
26 | |
27 | for credential in event.session.query(Credential) \ |
28 | - .filter_by(method=u'password') \ |
29 | - .filter_by(account_id=event.account) \ |
30 | - .filter(or_(Credential.source == event.source, Credential.source == None)).all(): |
31 | + .filter_by(method=u'password', account_id=event.account) \ |
32 | + .filter(or_(Credential.source == event.source, |
33 | + Credential.source == None)) \ |
34 | + .all(): |
35 | if hash(password, credential.credential) == credential.credential: |
36 | return True |
37 | |
38 | |
39 | === modified file 'ibid/core.py' |
40 | --- ibid/core.py 2009-12-21 12:43:58 +0000 |
41 | +++ ibid/core.py 2009-12-29 16:00:32 +0000 |
42 | @@ -12,6 +12,7 @@ |
43 | |
44 | import ibid |
45 | from ibid.event import Event |
46 | +from ibid.db import SchemaVersionException, schema_version_check |
47 | |
48 | import auth |
49 | |
50 | @@ -60,8 +61,8 @@ |
51 | |
52 | filtered = [] |
53 | for response in event['responses']: |
54 | - source = response['source'].lower() |
55 | - if source == event.source.lower(): |
56 | + source = response['source'] |
57 | + if source == event.source: |
58 | if (response.get('action', False) |
59 | and 'action' not in ibid.sources[source].supports): |
60 | response['reply'] = '* %s %s' % ( |
61 | @@ -238,8 +239,8 @@ |
62 | self.log.debug("Skipping Processor: %s.%s", name, klass.__name__) |
63 | |
64 | try: |
65 | - ibid.models.check_schema_versions(ibid.databases['ibid']) |
66 | - except ibid.models.SchemaVersionException, e: |
67 | + schema_version_check(ibid.databases['ibid']) |
68 | + except SchemaVersionException, e: |
69 | self.log.error(u'Tables out of date: %s. Run "ibid-db --upgrade"', e.message) |
70 | except Exception, e: |
71 | self.log.exception(u"Couldn't instantiate %s processor of %s plugin", classname, name) |
72 | @@ -306,8 +307,8 @@ |
73 | |
74 | if check_schema_versions: |
75 | try: |
76 | - ibid.models.check_schema_versions(self['ibid']) |
77 | - except ibid.models.SchemaVersionException, e: |
78 | + schema_version_check(self['ibid']) |
79 | + except SchemaVersionException, e: |
80 | self.log.error(u'Tables out of date: %s. Run "ibid-db --upgrade"', e.message) |
81 | raise |
82 | |
83 | |
84 | === added directory 'ibid/db' |
85 | === added file 'ibid/db/__init__.py' |
86 | --- ibid/db/__init__.py 1970-01-01 00:00:00 +0000 |
87 | +++ ibid/db/__init__.py 2009-12-29 16:00:32 +0000 |
88 | @@ -0,0 +1,18 @@ |
89 | +from ibid.db.types import TypeDecorator, Integer, DateTime, Boolean, \ |
90 | + IbidUnicode, IbidUnicodeText |
91 | + |
92 | +from sqlalchemy import Table, Column, ForeignKey, Index, UniqueConstraint, \ |
93 | + PassiveDefault, or_, and_, MetaData as _MetaData |
94 | +from sqlalchemy.orm import eagerload, relation, synonym |
95 | +from sqlalchemy.sql import func |
96 | +from sqlalchemy.ext.declarative import declarative_base as _declarative_base |
97 | + |
98 | +from sqlalchemy.exceptions import IntegrityError |
99 | + |
100 | +metadata = _MetaData() |
101 | +Base = _declarative_base(metadata=metadata) |
102 | + |
103 | +from ibid.db.versioned_schema import VersionedSchema, SchemaVersionException, \ |
104 | + schema_version_check, upgrade_schemas |
105 | + |
106 | +# vi: set et sta sw=4 ts=4: |
107 | |
108 | === renamed file 'ibid/models.py' => 'ibid/db/models.py' |
109 | --- ibid/models.py 2009-12-11 12:13:05 +0000 |
110 | +++ ibid/db/models.py 2009-12-29 16:00:32 +0000 |
111 | @@ -1,428 +1,29 @@ |
112 | from datetime import datetime |
113 | -import logging |
114 | -import re |
115 | |
116 | -from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \ |
117 | - ForeignKey, UniqueConstraint, MetaData, Table, Index, \ |
118 | - __version__ as sqlalchemy_version |
119 | +from ibid.db.types import IbidUnicode, IbidUnicodeText, Integer, DateTime |
120 | +from sqlalchemy import Table, Column, ForeignKey, UniqueConstraint |
121 | from sqlalchemy.orm import relation |
122 | -from sqlalchemy.ext.declarative import declarative_base |
123 | -from sqlalchemy.exceptions import InvalidRequestError, OperationalError, \ |
124 | - ProgrammingError |
125 | - |
126 | -if sqlalchemy_version < '0.5': |
127 | - NoResultFound = InvalidRequestError |
128 | -else: |
129 | - from sqlalchemy.orm.exc import NoResultFound |
130 | - |
131 | -metadata = MetaData() |
132 | -Base = declarative_base(metadata=metadata) |
133 | -log = logging.getLogger('ibid.models') |
134 | - |
135 | -class VersionedSchema(object): |
136 | - """For an initial table schema, set |
137 | - table.versioned_schema = VersionedSchema(__table__, 1) |
138 | - Table creation (upgrading to version 1) is implicitly supported. |
139 | - |
140 | - When you have upgrades to the schema, instead of using VersionedSchema |
141 | - directly, derive from it and include your own upgrade_x_to_y(self) methods, |
142 | - where y = x + 1 |
143 | - |
144 | - In the upgrade methods, you can call the helper functions: |
145 | - add_column, drop_column, rename_column, alter_column |
146 | - They try to do the correct thing in most situations, including rebuilding |
147 | - tables in SQLite, which doesn't actually support dropping/altering columns. |
148 | - For column parameters, while you can point to columns in the table |
149 | - definition, it is better style to repeat the Column() specification as the |
150 | - column might be altered in a future version. |
151 | - """ |
152 | - foreign_key_re = re.compile(r'^FOREIGN KEY\(.*?\) (REFERENCES .*)$', re.I) |
153 | - |
154 | - def __init__(self, table, version): |
155 | - self.table = table |
156 | - self.version = version |
157 | - |
158 | - def is_up_to_date(self, session): |
159 | - "Is the table in the database up to date with the schema?" |
160 | - |
161 | - if not session.bind.has_table(self.table.name): |
162 | - return False |
163 | - |
164 | - try: |
165 | - schema = session.query(Schema) \ |
166 | - .filter_by(table=unicode(self.table.name)).one() |
167 | - return schema.version == self.version |
168 | - except NoResultFound: |
169 | - return False |
170 | - |
171 | - def upgrade_schema(self, sessionmaker): |
172 | - "Upgrade the table's schema to the latest version." |
173 | - |
174 | - for fk in self.table.foreign_keys: |
175 | - dependency = fk.target_fullname.split('.')[0] |
176 | - log.debug("Upgrading table %s before %s", |
177 | - dependency, self.table.name) |
178 | - metadata.tables[dependency].versioned_schema \ |
179 | - .upgrade_schema(sessionmaker) |
180 | - |
181 | - self.upgrade_session = session = sessionmaker() |
182 | - self.upgrade_reflected_model = MetaData(session.bind, reflect=True) |
183 | - |
184 | - if self.table.name == 'schema': |
185 | - if not session.bind.has_table(self.table.name): |
186 | - metadata.bind = session.bind |
187 | - self._create_table() |
188 | - |
189 | - schema = Schema(unicode(self.table.name), self.version) |
190 | - session.save_or_update(schema) |
191 | - return |
192 | - Schema.__table__ = self._get_reflected_model() |
193 | - |
194 | - schema = session.query(Schema) \ |
195 | - .filter_by(table=unicode(self.table.name)).first() |
196 | - |
197 | - try: |
198 | - if not schema: |
199 | - log.info(u"Creating table %s", self.table.name) |
200 | - |
201 | - self._create_table() |
202 | - |
203 | - schema = Schema(unicode(self.table.name), self.version) |
204 | - session.save_or_update(schema) |
205 | - |
206 | - elif self.version > schema.version: |
207 | - for version in range(schema.version + 1, self.version + 1): |
208 | - log.info(u"Upgrading table %s to version %i", |
209 | - self.table.name, version) |
210 | - |
211 | - session.commit() |
212 | - |
213 | - getattr(self, 'upgrade_%i_to_%i' % (version - 1, version))() |
214 | - |
215 | - schema.version = version |
216 | - session.save_or_update(schema) |
217 | - |
218 | - self.upgrade_reflected_model = \ |
219 | - MetaData(session.bind, reflect=True) |
220 | - |
221 | - session.commit() |
222 | - |
223 | - except: |
224 | - session.rollback() |
225 | - raise |
226 | - |
227 | - session.close() |
228 | - del self.upgrade_session |
229 | - |
230 | - def _index_name(self, col): |
231 | - """ |
232 | - We'd like not to duplicate an existing index so try to abide by the |
233 | - local customs |
234 | - """ |
235 | - session = self.upgrade_session |
236 | - |
237 | - if session.bind.engine.name == 'sqlite': |
238 | - return 'ix_%s_%s' % (self.table.name, col.name) |
239 | - elif session.bind.engine.name == 'postgres': |
240 | - return '%s_%s_key' % (self.table.name, col.name) |
241 | - elif session.bind.engine.name == 'mysql': |
242 | - return col.name |
243 | - |
244 | - log.warning(u"Unknown database type, %s, you may end up with " |
245 | - u"duplicate indices" % session.bind.engine.name) |
246 | - return 'ix_%s_%s' % (self.table.name, col.name) |
247 | - |
248 | - def _mysql_constraint_createstring(self, constraint): |
249 | - """ |
250 | - Generate the description of a constraint for insertion into a CREATE |
251 | - string |
252 | - """ |
253 | - return ', '.join( |
254 | - (isinstance(column.type, UnicodeText) |
255 | - and '"%(name)s"(%(length)i)' |
256 | - or '"%(name)s"') % { |
257 | - 'name': column.name, |
258 | - 'length': column.info.get('ibid_mysql_index_length', 8), |
259 | - } for column in constraint.columns |
260 | - ) |
261 | - |
262 | - def _create_table(self): |
263 | - """ |
264 | - Check that the table is in a suitable form for all DBs, before |
265 | - creating. Yes, SQLAlchemy's abstractions are leaky enough that you have |
266 | - to do this |
267 | - """ |
268 | - session = self.upgrade_session |
269 | - indices = [] |
270 | - old_indexes = list(self.table.indexes) |
271 | - old_constraints = list(self.table.constraints) |
272 | - |
273 | - for column in self.table.c: |
274 | - if column.unique and not column.index: |
275 | - raise Exception(u"Column %s.%s is unique but not indexed. " |
276 | - u"SQLite doesn't like such things, " |
277 | - u"so please be nice and don't do that." |
278 | - % (self.table.name, self.column.name)) |
279 | - |
280 | - # Strip out Indexes and Constraints that SQLAlchemy can't create by |
281 | - # itself |
282 | - if session.bind.engine.name == 'mysql': |
283 | - for type, old_list in ( |
284 | - ('constraints', old_constraints), |
285 | - ('indexes', old_indexes)): |
286 | - for constraint in old_list: |
287 | - if any(True for column in constraint.columns |
288 | - if isinstance(column.type, UnicodeText)): |
289 | - indices.append(( |
290 | - isinstance(constraint, UniqueConstraint), |
291 | - self._mysql_constraint_createstring(constraint) |
292 | - )) |
293 | - |
294 | - getattr(self.table, type).remove(constraint) |
295 | - # In case the database's DEFAULT CHARSET isn't set to UTF8 |
296 | - self.table.kwargs['mysql_charset'] = 'utf8' |
297 | - |
298 | - self.table.create(bind=session.bind) |
299 | - |
300 | - if session.bind.engine.name == 'mysql': |
301 | - for constraint in old_constraints: |
302 | - if constraint not in self.table.constraints: |
303 | - self.table.constraints.add(constraint) |
304 | - |
305 | - for index in old_indexes: |
306 | - if index not in self.table.indexes: |
307 | - self.table.indexes.add(index) |
308 | - |
309 | - for unique, columnspec in indices: |
310 | - session.execute('ALTER TABLE "%s" ADD %s INDEX (%s);' % ( |
311 | - self.table.name, unique and 'UNIQUE' or '', columnspec)) |
312 | - |
313 | - def _get_reflected_model(self): |
314 | - "Get a reflected table from the current DB's schema" |
315 | - |
316 | - return self.upgrade_reflected_model.tables.get(self.table.name, None) |
317 | - |
318 | - def add_column(self, col): |
319 | - "Add column col to table" |
320 | - |
321 | - session = self.upgrade_session |
322 | - table = self._get_reflected_model() |
323 | - |
324 | - log.debug(u"Adding column %s to table %s", col.name, table.name) |
325 | - |
326 | - constraints = table.constraints.copy() |
327 | - table.append_column(col) |
328 | - constraints = table.constraints - constraints |
329 | - |
330 | - sg = session.bind.dialect.schemagenerator(session.bind.dialect, |
331 | - session.bind) |
332 | - description = sg.get_column_specification(col) |
333 | - |
334 | - for constraint in constraints: |
335 | - sg.traverse_single(constraint) |
336 | - |
337 | - constraints = [] |
338 | - for constraint in [x.strip() for x in sg.buffer.getvalue().split(',')]: |
339 | - m = self.foreign_key_re.match(constraint) |
340 | - if m: |
341 | - constraints.append(m.group(1)) |
342 | - else: |
343 | - constraints.append(constraint) |
344 | - |
345 | - session.execute('ALTER TABLE "%s" ADD COLUMN %s %s;' |
346 | - % (table.name, description, " ".join(constraints))) |
347 | - |
348 | - def add_index(self, col, unique=False): |
349 | - "Add an index to the table" |
350 | - |
351 | - engine = self.upgrade_session.bind.engine.name |
352 | - |
353 | - try: |
354 | - Index(self._index_name(col), col, unique=unique) \ |
355 | - .create(bind=self.upgrade_session.bind) |
356 | - |
357 | - # We understand that occasionally we'll duplicate an Index. |
358 | - # This is due to differences in index-creation requirements |
359 | - # between DBMS |
360 | - except OperationalError, e: |
361 | - if engine == 'sqlalchemy' and u'already exists' in unicode(e): |
362 | - return |
363 | - if engine == 'mysql' and u'Duplicate' in unicode(e): |
364 | - return |
365 | - raise |
366 | - except ProgrammingError, e: |
367 | - if engine == 'postgres' and u'already exists' in unicode(e): |
368 | - return |
369 | - raise |
370 | - |
371 | - def drop_column(self, col_name): |
372 | - "Drop column col_name from table" |
373 | - |
374 | - session = self.upgrade_session |
375 | - |
376 | - log.debug(u"Dropping column %s from table %s", |
377 | - col_name, self.table.name) |
378 | - |
379 | - if session.bind.engine.name == 'sqlite': |
380 | - self._rebuild_sqlite({col_name: None}) |
381 | - else: |
382 | - session.execute('ALTER TABLE "%s" DROP COLUMN "%s";' |
383 | - % (self.table.name, col_name)) |
384 | - |
385 | - def rename_column(self, col, old_name): |
386 | - "Rename column from old_name to Column col" |
387 | - |
388 | - session = self.upgrade_session |
389 | - table = self._get_reflected_model() |
390 | - |
391 | - log.debug(u"Rename column %s to %s in table %s", |
392 | - old_name, col.name, table.name) |
393 | - |
394 | - if session.bind.engine.name == 'sqlite': |
395 | - self._rebuild_sqlite({old_name: col}) |
396 | - elif session.bind.engine.name == 'mysql': |
397 | - self.alter_column(col, old_name) |
398 | - else: |
399 | - session.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s";' |
400 | - % (table.name, old_name, col.name)) |
401 | - |
402 | - def alter_column(self, col, old_name=None): |
403 | - """Change a column (possibly renaming from old_name) to Column col.""" |
404 | - |
405 | - session = self.upgrade_session |
406 | - table = self._get_reflected_model() |
407 | - |
408 | - log.debug(u"Altering column %s in table %s", col.name, table.name) |
409 | - |
410 | - sg = session.bind.dialect.schemagenerator(session.bind.dialect, |
411 | - session.bind) |
412 | - description = sg.get_column_specification(col) |
413 | - old_col = table.c[old_name or col.name] |
414 | - |
415 | - if session.bind.engine.name == 'sqlite': |
416 | - if (isinstance(col.type, (UnicodeText, Unicode)) |
417 | - and isinstance(old_col.type, (UnicodeText, Unicode)) |
418 | - ) or (isinstance(col.type, Integer) |
419 | - and isinstance(old_col.type, (Integer))): |
420 | - # SQLite doesn't enforce value length restrictions |
421 | - # only type changes have a real effect |
422 | - return |
423 | - |
424 | - self._rebuild_sqlite( |
425 | - {old_name is None and col.name or old_name: col}) |
426 | - |
427 | - elif session.bind.engine.name == 'mysql': |
428 | - # Special handling for columns of TEXT type, because SQLAlchemy |
429 | - # can't create indexes for them |
430 | - recreate = [] |
431 | - if isinstance(col.type, UnicodeText) \ |
432 | - or isinstance(old_col.type, UnicodeText): |
433 | - for type in (table.constraints, table.indexes): |
434 | - for constraint in list(type): |
435 | - if any(True for column in constraint.columns |
436 | - if old_col.name == column.name): |
437 | - constraint.drop() |
438 | - |
439 | - constraint.columns = [ |
440 | - (old_col.name == column.name) and col or column |
441 | - for column in constraint.columns |
442 | - ] |
443 | - recreate.append(( |
444 | - isinstance(constraint, UniqueConstraint), |
445 | - self._mysql_constraint_createstring(constraint) |
446 | - )) |
447 | - |
448 | - session.execute('ALTER TABLE "%s" CHANGE "%s" %s;' % |
449 | - (table.name, old_col.name, description)) |
450 | - |
451 | - for unique, columnspec in recreate: |
452 | - session.execute('ALTER TABLE "%s" ADD %s INDEX (%s);' % |
453 | - (self.table.name, unique and 'UNIQUE' or '', columnspec)) |
454 | - |
455 | - else: |
456 | - if old_name is not None: |
457 | - self.rename_column(col, old_name) |
458 | - |
459 | - session.execute('ALTER TABLE "%s" ALTER COLUMN "%s" TYPE %s;' % |
460 | - (table.name, col.name, description.split(" ", 3)[1])) |
461 | - |
462 | - if old_col.nullable != col.nullable: |
463 | - session.execute( |
464 | - 'ALTER TABLE "%s" ALTER COLUMN "%s" %s NOT NULL;' |
465 | - % (table.name, col.name, col.nullable and 'DROP' or 'SET') |
466 | - ) |
467 | - |
468 | - def _rebuild_sqlite(self, colmap): |
469 | - """ |
470 | - SQLite doesn't support modification of table schema - must rebuild the |
471 | - table. |
472 | - colmap maps old column names to new Columns |
473 | - (or None for column deletion). |
474 | - Only modified columns need to be listed, unchanged columns are carried |
475 | - over automatically. |
476 | - Specify table in case name has changed in a more recent version. |
477 | - """ |
478 | - |
479 | - session = self.upgrade_session |
480 | - table = self._get_reflected_model() |
481 | - |
482 | - log.debug(u"Rebuilding SQLite table %s", table.name) |
483 | - |
484 | - fullcolmap = {} |
485 | - for col in table.c: |
486 | - if col.name in colmap: |
487 | - if colmap[col.name] is not None: |
488 | - fullcolmap[col.name] = colmap[col.name].name |
489 | - else: |
490 | - fullcolmap[col.name] = col.name |
491 | - |
492 | - for old, col in colmap.iteritems(): |
493 | - del table.c[old] |
494 | - if col is not None: |
495 | - table.append_column(col) |
496 | - |
497 | - session.execute('ALTER TABLE "%s" RENAME TO "%s_old";' |
498 | - % (table.name, table.name)) |
499 | - |
500 | - # SQLAlchemy indexes aren't attached to tables, they must be dropped |
501 | - # around now or we'll get a clash |
502 | - for constraint in table.indexes: |
503 | - constraint.drop() |
504 | - |
505 | - table.create() |
506 | - |
507 | - session.execute('INSERT INTO "%s" ("%s") SELECT "%s" FROM "%s_old";' |
508 | - % ( |
509 | - table.name, |
510 | - '", "'.join(fullcolmap.values()), |
511 | - '", "'.join(fullcolmap.keys()), |
512 | - table.name |
513 | - )) |
514 | - |
515 | - session.execute('DROP TABLE "%s_old";' % table.name) |
516 | - |
517 | - # SQLAlchemy doesn't pick up all the indexes in the reflected table. |
518 | - # It's ok to use indexes that may be further in the future than this |
519 | - # upgrade because either we can already support them or we'll be |
520 | - # rebuilding again soon |
521 | - for constraint in self.table.indexes: |
522 | - try: |
523 | - constraint.create(bind=session.bind) |
524 | - except OperationalError: |
525 | - pass |
526 | +from ibid.db import Base |
527 | +from ibid.db.versioned_schema import VersionedSchema |
528 | |
529 | class Schema(Base): |
530 | __table__ = Table('schema', Base.metadata, |
531 | Column('id', Integer, primary_key=True), |
532 | - Column('table', Unicode(32), unique=True, nullable=False, index=True), |
533 | + Column('table', IbidUnicode(32), unique=True, nullable=False, |
534 | + index=True), |
535 | Column('version', Integer, nullable=False), |
536 | useexisting=True) |
537 | |
538 | class SchemaSchema(VersionedSchema): |
539 | def upgrade_1_to_2(self): |
540 | - self.add_index(self.table.c.table, unique=True) |
541 | + self.add_index(self.table.c.table) |
542 | + def upgrade_2_to_3(self): |
543 | + self.drop_index(self.table.c.table) |
544 | + self.alter_column(Column('table', IbidUnicode(32), unique=True, |
545 | + nullable=False, index=True), force_rebuild=True) |
546 | + self.add_index(self.table.c.table) |
547 | |
548 | - __table__.versioned_schema = SchemaSchema(__table__, 2) |
549 | + __table__.versioned_schema = SchemaSchema(__table__, 3) |
550 | |
551 | def __init__(self, table, version=0): |
552 | self.table = table |
553 | @@ -435,9 +36,10 @@ |
554 | __table__ = Table('identities', Base.metadata, |
555 | Column('id', Integer, primary_key=True), |
556 | Column('account_id', Integer, ForeignKey('accounts.id'), index=True), |
557 | - Column('source', Unicode(32), nullable=False, index=True), |
558 | - Column('identity', UnicodeText, nullable=False, index=True, |
559 | - info={'ibid_mysql_index_length': 32}), |
560 | + Column('source', IbidUnicode(32, case_insensitive=True), |
561 | + nullable=False, index=True), |
562 | + Column('identity', IbidUnicodeText(32, case_insensitive=True), |
563 | + nullable=False, index=True), |
564 | Column('created', DateTime), |
565 | UniqueConstraint('source', 'identity'), |
566 | useexisting=True) |
567 | @@ -449,12 +51,25 @@ |
568 | self.add_index(self.table.c.identity) |
569 | |
570 | def upgrade_2_to_3(self): |
571 | + self.alter_column(Column('source', IbidUnicode(32), |
572 | + nullable=False, index=True)) |
573 | + self.alter_column(Column('identity', IbidUnicodeText, |
574 | + nullable=False, index=True)) |
575 | + def upgrade_3_to_4(self): |
576 | + self.drop_index(self.table.c.source) |
577 | + self.drop_index(self.table.c.identity) |
578 | self.alter_column(Column('source', |
579 | - Unicode(32), nullable=False, index=True)) |
580 | + IbidUnicode(32, case_insensitive=True), |
581 | + nullable=False, index=True), |
582 | + force_rebuild=True) |
583 | self.alter_column(Column('identity', |
584 | - UnicodeText, nullable=False, index=True)) |
585 | + IbidUnicodeText(32, case_insensitive=True), |
586 | + nullable=False, index=True), |
587 | + force_rebuild=True) |
588 | + self.add_index(self.table.c.source) |
589 | + self.add_index(self.table.c.identity) |
590 | |
591 | - __table__.versioned_schema = IdentitySchema(__table__, 3) |
592 | + __table__.versioned_schema = IdentitySchema(__table__, 4) |
593 | |
594 | def __init__(self, source, identity, account_id=None): |
595 | self.source = source |
596 | @@ -469,9 +84,10 @@ |
597 | __table__ = Table('account_attributes', Base.metadata, |
598 | Column('id', Integer, primary_key=True), |
599 | Column('account_id', Integer, ForeignKey('accounts.id'), |
600 | - nullable=False, index=True), |
601 | - Column('name', Unicode(32), nullable=False, index=True), |
602 | - Column('value', UnicodeText, nullable=False), |
603 | + nullable=False, index=True), |
604 | + Column('name', IbidUnicode(32, case_insensitive=True), |
605 | + nullable=False, index=True), |
606 | + Column('value', IbidUnicodeText, nullable=False), |
607 | UniqueConstraint('account_id', 'name'), |
608 | useexisting=True) |
609 | |
610 | @@ -480,9 +96,18 @@ |
611 | self.add_index(self.table.c.account_id) |
612 | self.add_index(self.table.c.name) |
613 | def upgrade_2_to_3(self): |
614 | - self.alter_column(Column('value', UnicodeText, nullable=False)) |
615 | + self.alter_column(Column('value', IbidUnicodeText, nullable=False)) |
616 | + def upgrade_3_to_4(self): |
617 | + self.drop_index(self.table.c.name) |
618 | + self.alter_column(Column('name', |
619 | + IbidUnicode(32, case_insensitive=True), |
620 | + nullable=False, index=True), |
621 | + force_rebuild=True) |
622 | + self.alter_column(Column('value', IbidUnicodeText, nullable=False), |
623 | + force_rebuild=True) |
624 | + self.add_index(self.table.c.name) |
625 | |
626 | - __table__.versioned_schema = AttributeSchema(__table__, 3) |
627 | + __table__.versioned_schema = AttributeSchema(__table__, 4) |
628 | |
629 | def __init__(self, name, value): |
630 | self.name = name |
631 | @@ -495,10 +120,11 @@ |
632 | __table__ = Table('credentials', Base.metadata, |
633 | Column('id', Integer, primary_key=True), |
634 | Column('account_id', Integer, ForeignKey('accounts.id'), |
635 | - nullable=False, index=True), |
636 | - Column('source', Unicode(32), index=True), |
637 | - Column('method', Unicode(16), nullable=False, index=True), |
638 | - Column('credential', UnicodeText, nullable=False), |
639 | + nullable=False, index=True), |
640 | + Column('source', IbidUnicode(32, case_insensitive=True), index=True), |
641 | + Column('method', IbidUnicode(16, case_insensitive=True), |
642 | + nullable=False, index=True), |
643 | + Column('credential', IbidUnicodeText, nullable=False), |
644 | useexisting=True) |
645 | |
646 | class CredentialSchema(VersionedSchema): |
647 | @@ -507,11 +133,25 @@ |
648 | self.add_index(self.table.c.source) |
649 | self.add_index(self.table.c.method) |
650 | def upgrade_2_to_3(self): |
651 | - self.alter_column(Column('source', Unicode(32), index=True)) |
652 | + self.alter_column(Column('source', IbidUnicode(32), index=True)) |
653 | self.alter_column(Column('credential', |
654 | - UnicodeText, nullable=False)) |
655 | + IbidUnicodeText, nullable=False)) |
656 | + def upgrade_3_to_4(self): |
657 | + self.drop_index(self.table.c.source) |
658 | + self.drop_index(self.table.c.method) |
659 | + self.alter_column(Column('source', |
660 | + IbidUnicode(32, case_insensitive=True), |
661 | + index=True), force_rebuild=True) |
662 | + self.alter_column(Column('method', |
663 | + IbidUnicode(16, case_insensitive=True), |
664 | + nullable=False, index=True), |
665 | + force_rebuild=True) |
666 | + self.alter_column(Column('credential', IbidUnicodeText, |
667 | + nullable=False), force_rebuild=True) |
668 | + self.add_index(self.table.c.source) |
669 | + self.add_index(self.table.c.method) |
670 | |
671 | - __table__.versioned_schema = CredentialSchema(__table__, 3) |
672 | + __table__.versioned_schema = CredentialSchema(__table__, 4) |
673 | |
674 | def __init__(self, method, credential, source=None, account_id=None): |
675 | self.account_id = account_id |
676 | @@ -523,9 +163,10 @@ |
677 | __table__ = Table('permissions', Base.metadata, |
678 | Column('id', Integer, primary_key=True), |
679 | Column('account_id', Integer, ForeignKey('accounts.id'), |
680 | - nullable=False, index=True), |
681 | - Column('name', Unicode(16), nullable=False, index=True), |
682 | - Column('value', Unicode(4), nullable=False), |
683 | + nullable=False, index=True), |
684 | + Column('name', IbidUnicode(16, case_insensitive=True), |
685 | + nullable=False, index=True), |
686 | + Column('value', IbidUnicode(4, case_insensitive=True), nullable=False), |
687 | UniqueConstraint('account_id', 'name'), |
688 | useexisting=True) |
689 | |
690 | @@ -533,8 +174,18 @@ |
691 | def upgrade_1_to_2(self): |
692 | self.add_index(self.table.c.account_id) |
693 | self.add_index(self.table.c.name) |
694 | + def upgrade_2_to_3(self): |
695 | + self.drop_index(self.table.c.name) |
696 | + self.alter_column(Column('name', |
697 | + IbidUnicode(16, case_insensitive=True), |
698 | + index=True), force_rebuild=True) |
699 | + self.alter_column(Column('value', |
700 | + IbidUnicode(4, case_insensitive=True), |
701 | + nullable=False, index=True), |
702 | + force_rebuild=True) |
703 | + self.add_index(self.table.c.name) |
704 | |
705 | - __table__.versioned_schema = PermissionSchema(__table__, 2) |
706 | + __table__.versioned_schema = PermissionSchema(__table__, 3) |
707 | |
708 | def __init__(self, name=None, value=None): |
709 | self.name = name |
710 | @@ -543,15 +194,22 @@ |
711 | class Account(Base): |
712 | __table__ = Table('accounts', Base.metadata, |
713 | Column('id', Integer, primary_key=True), |
714 | - Column('username', Unicode(32), unique=True, nullable=False, |
715 | - index=True), |
716 | + Column('username', IbidUnicode(32, case_insensitive=True), |
717 | + unique=True, nullable=False, index=True), |
718 | useexisting=True) |
719 | |
720 | class AccountSchema(VersionedSchema): |
721 | def upgrade_1_to_2(self): |
722 | - self.add_index(self.table.c.username, unique=True) |
723 | + self.add_index(self.table.c.username) |
724 | + def upgrade_2_to_3(self): |
725 | + self.drop_index(self.table.c.username) |
726 | + self.alter_column(Column('username', |
727 | + IbidUnicode(32, case_insensitive=True), |
728 | + unique=True, nullable=False, index=True), |
729 | + force_rebuild=True) |
730 | + self.add_index(self.table.c.username) |
731 | |
732 | - __table__.versioned_schema = AccountSchema(__table__, 2) |
733 | + __table__.versioned_schema = AccountSchema(__table__, 3) |
734 | |
735 | identities = relation(Identity, backref='account') |
736 | attributes = relation(Attribute, cascade='all, delete-orphan') |
737 | @@ -564,44 +222,4 @@ |
738 | def __repr__(self): |
739 | return '<Account %s>' % self.username |
740 | |
741 | - |
742 | -class SchemaVersionException(Exception): |
743 | - """There is an out-of-date table. |
744 | - The message should be a list of out of date tables. |
745 | - """ |
746 | - pass |
747 | - |
748 | - |
749 | -def check_schema_versions(sessionmaker): |
750 | - """Pass through all tables, log out of date ones, |
751 | - and except if not all up to date""" |
752 | - |
753 | - session = sessionmaker() |
754 | - upgrades = [] |
755 | - for table in metadata.tables.itervalues(): |
756 | - if not hasattr(table, 'versioned_schema'): |
757 | - log.error("Table %s is not versioned.", table.name) |
758 | - continue |
759 | - |
760 | - if not table.versioned_schema.is_up_to_date(session): |
761 | - upgrades.append(table.name) |
762 | - |
763 | - if not upgrades: |
764 | - return |
765 | - |
766 | - raise SchemaVersionException(u", ".join(upgrades)) |
767 | - |
768 | -def upgrade_schemas(sessionmaker): |
769 | - "Pass through all tables and update schemas" |
770 | - |
771 | - # Make sure schema table is created first |
772 | - metadata.tables['schema'].versioned_schema.upgrade_schema(sessionmaker) |
773 | - |
774 | - for table in metadata.tables.itervalues(): |
775 | - if not hasattr(table, 'versioned_schema'): |
776 | - log.error("Table %s is not versioned.", table.name) |
777 | - continue |
778 | - |
779 | - table.versioned_schema.upgrade_schema(sessionmaker) |
780 | - |
781 | # vi: set et sta sw=4 ts=4: |
782 | |
783 | === added file 'ibid/db/types.py' |
784 | --- ibid/db/types.py 1970-01-01 00:00:00 +0000 |
785 | +++ ibid/db/types.py 2009-12-29 16:00:32 +0000 |
786 | @@ -0,0 +1,62 @@ |
787 | +from sqlalchemy.types import TypeDecorator, Integer, DateTime, Boolean, \ |
788 | + Unicode as _Unicode, UnicodeText as _UnicodeText |
789 | + |
790 | +class _CIDecorator(TypeDecorator): |
791 | + "Abstract class for collation aware columns" |
792 | + |
793 | + def __init__(self, length=None, case_insensitive=False): |
794 | + self.case_insensitive = case_insensitive |
795 | + super(_CIDecorator, self).__init__(length=length) |
796 | + |
797 | + def load_dialect_impl(self, dialect): |
798 | + if hasattr(dialect, 'name'): |
799 | + self.dialect = dialect.name |
800 | + # SQLAlchemy 0.4: |
801 | + else: |
802 | + self.dialect = { |
803 | + 'SQLiteDialect': 'sqlite', |
804 | + 'PGDialect': 'postgres', |
805 | + 'MySQLDialect': 'mysql', |
806 | + }[dialect.__class__.__name__] |
807 | + |
808 | + return dialect.type_descriptor(self.impl) |
809 | + |
810 | + def get_col_spec(self): |
811 | + colspec = self.impl.get_col_spec() |
812 | + if hasattr(self, 'case_insensitive'): |
813 | + collation = None |
814 | + if self.dialect == 'mysql': |
815 | + if self.case_insensitive: |
816 | + collation = 'utf8_general_ci' |
817 | + else: |
818 | + collation = 'utf8_bin' |
819 | + elif self.dialect == 'sqlite': |
820 | + if self.case_insensitive: |
821 | + collation = 'NOCASE' |
822 | + else: |
823 | + collation = 'BINARY' |
824 | + elif self.dialect == 'postgres' and self.case_insensitive: |
825 | + return 'CITEXT' |
826 | + |
827 | + if collation is not None: |
828 | + return colspec + ' COLLATE ' + collation |
829 | + return colspec |
830 | + |
831 | +class IbidUnicode(_CIDecorator): |
832 | + "Collaiton aware Unicode" |
833 | + |
834 | + impl = _Unicode |
835 | + |
836 | + def __init__(self, length, **kwargs): |
837 | + super(IbidUnicode, self).__init__(length, **kwargs) |
838 | + |
839 | +class IbidUnicodeText(_CIDecorator): |
840 | + "Collation aware UnicodeText" |
841 | + |
842 | + impl = _UnicodeText |
843 | + |
844 | + def __init__(self, index_length=8, **kwargs): |
845 | + self.index_length = index_length |
846 | + super(IbidUnicodeText, self).__init__(length=None, **kwargs) |
847 | + |
848 | +# vi: set et sta sw=4 ts=4: |
849 | |
850 | === added file 'ibid/db/versioned_schema.py' |
851 | --- ibid/db/versioned_schema.py 1970-01-01 00:00:00 +0000 |
852 | +++ ibid/db/versioned_schema.py 2009-12-29 16:00:32 +0000 |
853 | @@ -0,0 +1,515 @@ |
854 | +import logging |
855 | +import re |
856 | + |
857 | +from sqlalchemy import Column, Index, UniqueConstraint, MetaData, \ |
858 | + __version__ as _sqlalchemy_version |
859 | +from sqlalchemy.exceptions import InvalidRequestError, OperationalError, \ |
860 | + ProgrammingError, InternalError |
861 | +if _sqlalchemy_version < '0.5': |
862 | + NoResultFound = InvalidRequestError |
863 | +else: |
864 | + from sqlalchemy.orm.exc import NoResultFound |
865 | + |
866 | +from ibid.db.types import Integer, IbidUnicodeText, IbidUnicode |
867 | + |
868 | +from ibid.db import metadata |
869 | + |
870 | +log = logging.getLogger('ibid.db.versioned_schema') |
871 | + |
872 | +class VersionedSchema(object): |
873 | + """For an initial table schema, set |
874 | + table.versioned_schema = VersionedSchema(__table__, 1) |
875 | + Table creation (upgrading to version 1) is implicitly supported. |
876 | + |
877 | + When you have upgrades to the schema, instead of using VersionedSchema |
878 | + directly, derive from it and include your own upgrade_x_to_y(self) methods, |
879 | + where y = x + 1 |
880 | + |
881 | + In the upgrade methods, you can call the helper functions: |
882 | + add_column, drop_column, rename_column, alter_column |
883 | + They try to do the correct thing in most situations, including rebuilding |
884 | + tables in SQLite, which doesn't actually support dropping/altering columns. |
885 | + For column parameters, while you can point to columns in the table |
886 | + definition, it is better style to repeat the Column() specification as the |
887 | + column might be altered in a future version. |
888 | + """ |
889 | + foreign_key_re = re.compile(r'^FOREIGN KEY\(.*?\) (REFERENCES .*)$', re.I) |
890 | + |
891 | + def __init__(self, table, version): |
892 | + self.table = table |
893 | + self.version = version |
894 | + |
895 | + def is_up_to_date(self, session): |
896 | + "Is the table in the database up to date with the schema?" |
897 | + |
898 | + from ibid.db.models import Schema |
899 | + |
900 | + if not session.bind.has_table(self.table.name): |
901 | + return False |
902 | + |
903 | + try: |
904 | + schema = session.query(Schema) \ |
905 | + .filter_by(table=unicode(self.table.name)).one() |
906 | + return schema.version == self.version |
907 | + except NoResultFound: |
908 | + return False |
909 | + |
910 | + def upgrade_schema(self, sessionmaker): |
911 | + "Upgrade the table's schema to the latest version." |
912 | + |
913 | + from ibid.db.models import Schema |
914 | + |
915 | + for fk in self.table.foreign_keys: |
916 | + dependency = fk.target_fullname.split('.')[0] |
917 | + log.debug("Upgrading table %s before %s", |
918 | + dependency, self.table.name) |
919 | + metadata.tables[dependency].versioned_schema \ |
920 | + .upgrade_schema(sessionmaker) |
921 | + |
922 | + self.upgrade_session = session = sessionmaker() |
923 | + self.upgrade_reflected_model = MetaData(session.bind, reflect=True) |
924 | + |
925 | + if self.table.name == 'schema': |
926 | + if not session.bind.has_table(self.table.name): |
927 | + metadata.bind = session.bind |
928 | + self._create_table() |
929 | + |
930 | + schema = Schema(unicode(self.table.name), self.version) |
931 | + session.save_or_update(schema) |
932 | + return |
933 | + Schema.__table__ = self._get_reflected_model() |
934 | + |
935 | + schema = session.query(Schema) \ |
936 | + .filter_by(table=unicode(self.table.name)).first() |
937 | + |
938 | + try: |
939 | + if not schema: |
940 | + log.info(u"Creating table %s", self.table.name) |
941 | + |
942 | + self._create_table() |
943 | + |
944 | + schema = Schema(unicode(self.table.name), self.version) |
945 | + session.save_or_update(schema) |
946 | + |
947 | + elif self.version > schema.version: |
948 | + for version in range(schema.version + 1, self.version + 1): |
949 | + log.info(u"Upgrading table %s to version %i", |
950 | + self.table.name, version) |
951 | + |
952 | + session.commit() |
953 | + |
954 | + getattr(self, 'upgrade_%i_to_%i' % (version - 1, version))() |
955 | + |
956 | + schema.version = version |
957 | + session.save_or_update(schema) |
958 | + |
959 | + self.upgrade_reflected_model = \ |
960 | + MetaData(session.bind, reflect=True) |
961 | + |
962 | + session.commit() |
963 | + |
964 | + except: |
965 | + session.rollback() |
966 | + raise |
967 | + |
968 | + session.close() |
969 | + del self.upgrade_session |
970 | + |
971 | + def _index_name(self, col): |
972 | + """ |
973 | + We'd like not to duplicate an existing index so try to abide by the |
974 | + local customs |
975 | + """ |
976 | + session = self.upgrade_session |
977 | + |
978 | + if session.bind.engine.name == 'sqlite': |
979 | + return 'ix_%s_%s' % (self.table.name, col.name) |
980 | + elif session.bind.engine.name == 'postgres': |
981 | + return '%s_%s_key' % (self.table.name, col.name) |
982 | + elif session.bind.engine.name == 'mysql': |
983 | + return col.name |
984 | + |
985 | + log.warning(u"Unknown database type, %s, you may end up with " |
986 | + u"duplicate indices" % session.bind.engine.name) |
987 | + return 'ix_%s_%s' % (self.table.name, col.name) |
988 | + |
989 | + def _mysql_constraint_createstring(self, constraint): |
990 | + """ |
991 | + Generate the description of a constraint for insertion into a CREATE |
992 | + string |
993 | + """ |
994 | + names = [] |
995 | + for column in constraint.columns: |
996 | + if isinstance(column.type, IbidUnicodeText): |
997 | + names.append('"%s"(%i)' |
998 | + % (column.name, column.type.index_length)) |
999 | + else: |
1000 | + names.append(column.name) |
1001 | + |
1002 | + return ', '.join(names) |
1003 | + |
1004 | + def _create_table(self): |
1005 | + """ |
1006 | + Check that the table is in a suitable form for all DBs, before |
1007 | + creating. Yes, SQLAlchemy's abstractions are leaky enough that you have |
1008 | + to do this |
1009 | + """ |
1010 | + session = self.upgrade_session |
1011 | + indices = [] |
1012 | + old_indexes = list(self.table.indexes) |
1013 | + old_constraints = list(self.table.constraints) |
1014 | + |
1015 | + for column in self.table.c: |
1016 | + if column.unique and not column.index: |
1017 | + raise Exception(u"Column %s.%s is unique but not indexed. " |
1018 | + u"SQLite doesn't like such things, " |
1019 | + u"so please be nice and don't do that." |
1020 | + % (self.table.name, self.column.name)) |
1021 | + |
1022 | + # Strip out Indexes and Constraints that SQLAlchemy can't create by |
1023 | + # itself |
1024 | + if session.bind.engine.name == 'mysql': |
1025 | + for type, old_list in ( |
1026 | + ('constraints', old_constraints), |
1027 | + ('indexes', old_indexes)): |
1028 | + for constraint in old_list: |
1029 | + if any(True for column in constraint.columns |
1030 | + if isinstance(column.type, IbidUnicodeText)): |
1031 | + indices.append(( |
1032 | + isinstance(constraint, UniqueConstraint), |
1033 | + self._mysql_constraint_createstring(constraint) |
1034 | + )) |
1035 | + |
1036 | + getattr(self.table, type).remove(constraint) |
1037 | + # In case the database's DEFAULT CHARSET isn't set to UTF8 |
1038 | + self.table.kwargs['mysql_charset'] = 'utf8' |
1039 | + |
1040 | + self.table.create(bind=session.bind) |
1041 | + |
1042 | + if session.bind.engine.name == 'mysql': |
1043 | + for constraint in old_constraints: |
1044 | + if constraint not in self.table.constraints: |
1045 | + self.table.constraints.add(constraint) |
1046 | + |
1047 | + for index in old_indexes: |
1048 | + if index not in self.table.indexes: |
1049 | + self.table.indexes.add(index) |
1050 | + |
1051 | + for unique, columnspec in indices: |
1052 | + session.execute('ALTER TABLE "%s" ADD %s INDEX (%s);' % ( |
1053 | + self.table.name, unique and 'UNIQUE' or '', columnspec)) |
1054 | + |
1055 | + def _get_reflected_model(self): |
1056 | + "Get a reflected table from the current DB's schema" |
1057 | + |
1058 | + return self.upgrade_reflected_model.tables.get(self.table.name, None) |
1059 | + |
1060 | + def add_column(self, col): |
1061 | + "Add column col to table" |
1062 | + |
1063 | + session = self.upgrade_session |
1064 | + table = self._get_reflected_model() |
1065 | + |
1066 | + log.debug(u"Adding column %s to table %s", col.name, table.name) |
1067 | + |
1068 | + constraints = table.constraints.copy() |
1069 | + table.append_column(col) |
1070 | + constraints = table.constraints - constraints |
1071 | + |
1072 | + sg = session.bind.dialect.schemagenerator(session.bind.dialect, |
1073 | + session.bind) |
1074 | + description = sg.get_column_specification(col) |
1075 | + |
1076 | + for constraint in constraints: |
1077 | + sg.traverse_single(constraint) |
1078 | + |
1079 | + constraints = [] |
1080 | + for constraint in [x.strip() for x in sg.buffer.getvalue().split(',')]: |
1081 | + m = self.foreign_key_re.match(constraint) |
1082 | + if m: |
1083 | + constraints.append(m.group(1)) |
1084 | + else: |
1085 | + constraints.append(constraint) |
1086 | + |
1087 | + session.execute('ALTER TABLE "%s" ADD COLUMN %s %s;' |
1088 | + % (table.name, description, " ".join(constraints))) |
1089 | + |
1090 | + def add_index(self, col): |
1091 | + "Add an index to the table" |
1092 | + |
1093 | + engine = self.upgrade_session.bind.engine.name |
1094 | + query = None |
1095 | + |
1096 | + if engine == 'mysql' and isinstance(col.type, IbidUnicodeText): |
1097 | + query = 'ALTER TABLE "%s" ADD %s INDEX "%s" ("%s"(%i));' % ( |
1098 | + self.table.name, col.unique and 'UNIQUE' or '', |
1099 | + self._index_name(col), col.name, col.type.index_length) |
1100 | + elif engine == 'postgres': |
1101 | + # SQLAlchemy hangs if it tries to do this, because it forgets the ; |
1102 | + query = 'CREATE %s INDEX "%s" ON "%s" ("%s")' % ( |
1103 | + col.unique and 'UNIQUE' or '',self._index_name(col), |
1104 | + self.table.name, col.name) |
1105 | + |
1106 | + try: |
1107 | + if query is not None: |
1108 | + self.upgrade_session.execute(query) |
1109 | + else: |
1110 | + Index(self._index_name(col), col, unique=col.unique) \ |
1111 | + .create(bind=self.upgrade_session.bind) |
1112 | + |
1113 | + # We understand that occasionaly we'll duplicate an Index. |
1114 | + # This is due to differences in index-creation requirements |
1115 | + # between DBMS |
1116 | + except OperationalError, e: |
1117 | + if engine == 'sqlite' and u'already exists' in unicode(e): |
1118 | + return |
1119 | + if engine == 'mysql' and u'Duplicate' in unicode(e): |
1120 | + return |
1121 | + raise |
1122 | + except ProgrammingError, e: |
1123 | + if engine == 'postgres' and u'already exists' in unicode(e): |
1124 | + return |
1125 | + raise |
1126 | + |
1127 | + def drop_index(self, col): |
1128 | + "Drop an index from the table" |
1129 | + |
1130 | + engine = self.upgrade_session.bind.engine.name |
1131 | + |
1132 | + try: |
1133 | + if isinstance(col, Column): |
1134 | + Index(self._index_name(col), col, unique=col.unique) \ |
1135 | + .drop(bind=self.upgrade_session.bind) |
1136 | + else: |
1137 | + col.drop() |
1138 | + |
1139 | + except OperationalError, e: |
1140 | + if engine == 'sqlite' and u'no such index' in unicode(e): |
1141 | + return |
1142 | + if engine == 'mysql' \ |
1143 | + and u'check that column/key exists' in unicode(e): |
1144 | + return |
1145 | + raise |
1146 | + |
1147 | + except ProgrammingError, e: |
1148 | + if engine == 'postgres' and u'does not exist' in unicode(e): |
1149 | + return |
1150 | + # In SQLAlchemy 0.4, the InternalError below is a ProgrammingError |
1151 | + # and can't be executed in the upgrade transaction: |
1152 | + if engine == 'postgres' and u'requires' in unicode(e): |
1153 | + self.upgrade_session.bind.execute( |
1154 | + 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % ( |
1155 | + self.table.name, self._index_name(col))) |
1156 | + return |
1157 | + raise |
1158 | + |
1159 | + # Postgres constraints can be attached to tables and can't be dropped |
1160 | + # at DB level. |
1161 | + except InternalError, e: |
1162 | + if engine == 'postgres': |
1163 | + self.upgrade_session.execute( |
1164 | + 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % ( |
1165 | + self.table.name, self._index_name(col))) |
1166 | + |
1167 | + def drop_column(self, col_name): |
1168 | + "Drop column col_name from table" |
1169 | + |
1170 | + session = self.upgrade_session |
1171 | + |
1172 | + log.debug(u"Dropping column %s from table %s", |
1173 | + col_name, self.table.name) |
1174 | + |
1175 | + if session.bind.engine.name == 'sqlite': |
1176 | + self._rebuild_sqlite({col_name: None}) |
1177 | + else: |
1178 | + session.execute('ALTER TABLE "%s" DROP COLUMN "%s";' |
1179 | + % (self.table.name, col_name)) |
1180 | + |
1181 | + def rename_column(self, col, old_name): |
1182 | + "Rename column from old_name to Column col" |
1183 | + |
1184 | + session = self.upgrade_session |
1185 | + table = self._get_reflected_model() |
1186 | + |
1187 | + log.debug(u"Rename column %s to %s in table %s", |
1188 | + old_name, col.name, table.name) |
1189 | + |
1190 | + if session.bind.engine.name == 'sqlite': |
1191 | + self._rebuild_sqlite({old_name: col}) |
1192 | + elif session.bind.engine.name == 'mysql': |
1193 | + self.alter_column(col, old_name) |
1194 | + else: |
1195 | + session.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s";' |
1196 | + % (table.name, old_name, col.name)) |
1197 | + |
1198 | + def alter_column(self, col, old_name=None, force_rebuild=False): |
1199 | + """Change a column (possibly renaming from old_name) to Column col.""" |
1200 | + |
1201 | + session = self.upgrade_session |
1202 | + table = self._get_reflected_model() |
1203 | + |
1204 | + log.debug(u"Altering column %s in table %s", col.name, table.name) |
1205 | + |
1206 | + sg = session.bind.dialect.schemagenerator(session.bind.dialect, |
1207 | + session.bind) |
1208 | + description = sg.get_column_specification(col) |
1209 | + old_col = table.c[old_name or col.name] |
1210 | + |
1211 | + # SQLite doesn't enforce value length restrictions |
1212 | + # only type changes have a real effect |
1213 | + if session.bind.engine.name == 'sqlite': |
1214 | + if not force_rebuild and ( |
1215 | + (isinstance(col.type, (IbidUnicodeText, IbidUnicode)) |
1216 | + and isinstance(old_col.type, (IbidUnicodeText, IbidUnicode) |
1217 | + ) or (isinstance(col.type, Integer) |
1218 | + and isinstance(old_col.type, Integer)))): |
1219 | + return |
1220 | + |
1221 | + self._rebuild_sqlite( |
1222 | + {old_name is None and col.name or old_name: col}) |
1223 | + |
1224 | + elif session.bind.engine.name == 'mysql': |
1225 | + # Special handling for columns of TEXT type, because SQLAlchemy |
1226 | + # can't create indexes for them |
1227 | + recreate = [] |
1228 | + if isinstance(col.type, IbidUnicodeText) \ |
1229 | + or isinstance(old_col.type, IbidUnicodeText): |
1230 | + for type in (table.constraints, table.indexes): |
1231 | + for constraint in list(type): |
1232 | + if any(True for column in constraint.columns |
1233 | + if old_col.name == column.name): |
1234 | + |
1235 | + self.drop_index(constraint) |
1236 | + |
1237 | + constraint.columns = [ |
1238 | + (old_col.name == column.name) and col or column |
1239 | + for column in constraint.columns |
1240 | + ] |
1241 | + recreate.append(( |
1242 | + isinstance(constraint, UniqueConstraint), |
1243 | + self._mysql_constraint_createstring(constraint) |
1244 | + )) |
1245 | + |
1246 | + session.execute('ALTER TABLE "%s" CHANGE "%s" %s;' % |
1247 | + (table.name, old_col.name, description)) |
1248 | + |
1249 | + for unique, columnspec in recreate: |
1250 | + session.execute('ALTER TABLE "%s" ADD %s INDEX (%s);' % |
1251 | + (self.table.name, unique and 'UNIQUE' or '', columnspec)) |
1252 | + |
1253 | + else: |
1254 | + if old_name is not None: |
1255 | + self.rename_column(col, old_name) |
1256 | + |
1257 | + session.execute('ALTER TABLE "%s" ALTER COLUMN "%s" TYPE %s;' % |
1258 | + (table.name, col.name, description.split(" ", 3)[1])) |
1259 | + |
1260 | + if old_col.nullable != col.nullable: |
1261 | + session.execute( |
1262 | + 'ALTER TABLE "%s" ALTER COLUMN "%s" %s NOT NULL;' |
1263 | + % (table.name, col.name, col.nullable and 'DROP' or 'SET') |
1264 | + ) |
1265 | + |
1266 | + def _rebuild_sqlite(self, colmap): |
1267 | + """ |
1268 | + SQLite doesn't support modification of table schema - must rebuild the |
1269 | + table. |
1270 | + colmap maps old column names to new Columns |
1271 | + (or None for column deletion). |
1272 | + Only modified columns need to be listed, unchaged columns are carried |
1273 | + over automatically. |
1274 | + Specify table in case name has changed in a more recent version. |
1275 | + """ |
1276 | + |
1277 | + session = self.upgrade_session |
1278 | + table = self._get_reflected_model() |
1279 | + |
1280 | + log.debug(u"Rebuilding SQLite table %s", table.name) |
1281 | + |
1282 | + fullcolmap = {} |
1283 | + for col in table.c: |
1284 | + if col.name in colmap: |
1285 | + if colmap[col.name] is not None: |
1286 | + fullcolmap[col.name] = colmap[col.name].name |
1287 | + else: |
1288 | + fullcolmap[col.name] = col.name |
1289 | + |
1290 | + for old, col in colmap.iteritems(): |
1291 | + del table.c[old] |
1292 | + if col is not None: |
1293 | + table.append_column(col) |
1294 | + |
1295 | + session.execute('ALTER TABLE "%s" RENAME TO "%s_old";' |
1296 | + % (table.name, table.name)) |
1297 | + |
1298 | + # SQLAlchemy indexes aren't attached to tables, they must be dropped |
1299 | + # around now or we'll get a clash |
1300 | + for constraint in table.indexes: |
1301 | + try: |
1302 | + constraint.drop() |
1303 | + except OperationalError: |
1304 | + pass |
1305 | + |
1306 | + table.create() |
1307 | + |
1308 | + session.execute('INSERT INTO "%s" ("%s") SELECT "%s" FROM "%s_old";' |
1309 | + % ( |
1310 | + table.name, |
1311 | + '", "'.join(fullcolmap.values()), |
1312 | + '", "'.join(fullcolmap.keys()), |
1313 | + table.name |
1314 | + )) |
1315 | + |
1316 | + session.execute('DROP TABLE "%s_old";' % table.name) |
1317 | + |
1318 | + # SQLAlchemy doesn't pick up all the indexes in the reflected table. |
1319 | + # It's ok to use indexes that may be further in the future than this |
1320 | + # upgrade because either we can already support them or we'll be |
1321 | + # rebuilding again soon |
1322 | + for constraint in self.table.indexes: |
1323 | + try: |
1324 | + constraint.create(bind=session.bind) |
1325 | + except OperationalError: |
1326 | + pass |
1327 | + |
1328 | + |
1329 | +class SchemaVersionException(Exception): |
1330 | + """There is an out-of-date table. |
1331 | + The message should be a list of out of date tables. |
1332 | + """ |
1333 | + pass |
1334 | + |
1335 | + |
1336 | +def schema_version_check(sessionmaker): |
1337 | + """Pass through all tables, log out of date ones, |
1338 | + and except if not all up to date""" |
1339 | + |
1340 | + session = sessionmaker() |
1341 | + upgrades = [] |
1342 | + for table in metadata.tables.itervalues(): |
1343 | + if not hasattr(table, 'versioned_schema'): |
1344 | + log.error("Table %s is not versioned.", table.name) |
1345 | + continue |
1346 | + |
1347 | + if not table.versioned_schema.is_up_to_date(session): |
1348 | + upgrades.append(table.name) |
1349 | + |
1350 | + if not upgrades: |
1351 | + return |
1352 | + |
1353 | + raise SchemaVersionException(u", ".join(upgrades)) |
1354 | + |
1355 | +def upgrade_schemas(sessionmaker): |
1356 | + "Pass through all tables and update schemas" |
1357 | + |
1358 | + # Make sure schema table is created first |
1359 | + metadata.tables['schema'].versioned_schema.upgrade_schema(sessionmaker) |
1360 | + |
1361 | + for table in metadata.tables.itervalues(): |
1362 | + if not hasattr(table, 'versioned_schema'): |
1363 | + log.error("Table %s is not versioned.", table.name) |
1364 | + continue |
1365 | + |
1366 | + table.versioned_schema.upgrade_schema(sessionmaker) |
1367 | + |
1368 | +# vi: set et sta sw=4 ts=4: |
1369 | |
1370 | === modified file 'ibid/plugins/auth.py' |
1371 | --- ibid/plugins/auth.py 2009-10-20 15:45:46 +0000 |
1372 | +++ ibid/plugins/auth.py 2009-12-29 16:00:32 +0000 |
1373 | @@ -1,10 +1,8 @@ |
1374 | import logging |
1375 | |
1376 | -from sqlalchemy.sql import func |
1377 | - |
1378 | import ibid |
1379 | +from ibid.db.models import Credential, Permission, Account |
1380 | from ibid.plugins import Processor, match, auth_responses, authorise |
1381 | -from ibid.models import Credential, Permission, Account |
1382 | from ibid.auth import hash |
1383 | from ibid.utils import human_join |
1384 | |
1385 | @@ -79,8 +77,7 @@ |
1386 | return |
1387 | |
1388 | permission = event.session.query(Permission) \ |
1389 | - .filter_by(account_id=account.id) \ |
1390 | - .filter(func.lower(Permission.name) == name.lower()).first() |
1391 | + .filter_by(account_id=account.id, name=name).first() |
1392 | if action.lower() == 'remove': |
1393 | if permission: |
1394 | event.session.delete(permission) |
1395 | @@ -128,7 +125,8 @@ |
1396 | else: |
1397 | if not auth_responses(event, u'accounts'): |
1398 | return |
1399 | - account = event.session.query(Account).filter_by(username=username).first() |
1400 | + account = event.session.query(Account) \ |
1401 | + .filter_by(username=username).first() |
1402 | if not account: |
1403 | event.addresponse(u"I don't know who %s is", username) |
1404 | return |
1405 | |
1406 | === modified file 'ibid/plugins/factoid.py' |
1407 | --- ibid/plugins/factoid.py 2009-12-22 09:48:56 +0000 |
1408 | +++ ibid/plugins/factoid.py 2009-12-29 16:00:32 +0000 |
1409 | @@ -4,16 +4,15 @@ |
1410 | import re |
1411 | |
1412 | from dateutil.tz import tzlocal, tzutc |
1413 | -from sqlalchemy import Column, Table, ForeignKey, or_, Boolean, Integer, \ |
1414 | - Unicode, UnicodeText, DateTime, PassiveDefault |
1415 | -from sqlalchemy.orm import relation, synonym |
1416 | -from sqlalchemy.sql import func |
1417 | |
1418 | from ibid.plugins import Processor, match, handler, authorise, auth_responses, \ |
1419 | RPC |
1420 | from ibid.config import Option, IntOption, ListOption |
1421 | +from ibid.db import IbidUnicode, IbidUnicodeText, Boolean, Integer, DateTime, \ |
1422 | + Table, Column, ForeignKey, PassiveDefault, \ |
1423 | + relation, synonym, func, or_, \ |
1424 | + Base, VersionedSchema |
1425 | from ibid.plugins.identity import get_identities |
1426 | -from ibid.models import Base, VersionedSchema |
1427 | from ibid.utils import format_date |
1428 | |
1429 | help = {'factoids': u'Factoids are arbitrary pieces of information stored by a key. ' |
1430 | @@ -40,9 +39,10 @@ |
1431 | class FactoidName(Base): |
1432 | __table__ = Table('factoid_names', Base.metadata, |
1433 | Column('id', Integer, primary_key=True), |
1434 | - Column('name', UnicodeText, key='_name', nullable=False, unique=True, index=True, |
1435 | - info={'ibid_mysql_index_length': 32}), |
1436 | - Column('factoid_id', Integer, ForeignKey('factoids.id'), nullable=False, index=True), |
1437 | + Column('name', IbidUnicodeText(32, case_insensitive=True), |
1438 | + key='_name', nullable=False, unique=True, index=True), |
1439 | + Column('factoid_id', Integer, ForeignKey('factoids.id'), nullable=False, |
1440 | + index=True), |
1441 | Column('identity_id', Integer, ForeignKey('identities.id'), index=True), |
1442 | Column('time', DateTime, nullable=False), |
1443 | Column('factpack', Integer, ForeignKey('factpacks.id'), index=True), |
1444 | @@ -51,21 +51,21 @@ |
1445 | |
1446 | class FactoidNameSchema(VersionedSchema): |
1447 | def upgrade_1_to_2(self): |
1448 | - self.add_column(Column('factpack', Integer, ForeignKey('factpacks.id'))) |
1449 | + self.add_column(Column('factpack', Integer, |
1450 | + ForeignKey('factpacks.id'))) |
1451 | def upgrade_2_to_3(self): |
1452 | - self.add_index(self.table.c.name, unique=True) |
1453 | + self.add_index(self.table.c.name) |
1454 | def upgrade_3_to_4(self): |
1455 | - self.add_index(self.table.c.name, unique=True) |
1456 | + self.add_index(self.table.c.name) |
1457 | self.add_index(self.table.c.factoid_id) |
1458 | self.add_index(self.table.c.identity_id) |
1459 | self.add_index(self.table.c.factpack) |
1460 | def upgrade_4_to_5(self): |
1461 | - self.alter_column(Column('name', Unicode(64), key='_name', nullable=False, |
1462 | - unique=True, index=True)) |
1463 | + self.alter_column(Column('name', IbidUnicode(64), key='_name', |
1464 | + nullable=False, unique=True, index=True)) |
1465 | def upgrade_5_to_6(self): |
1466 | - self.alter_column(Column('name', UnicodeText, key='_name', nullable=False, |
1467 | - unique=True, index=True, |
1468 | - info={'ibid_mysql_index_length': 32})) |
1469 | + self.alter_column(Column('name', IbidUnicodeText(32), key='_name', |
1470 | + nullable=False, unique=True, index=True)) |
1471 | def upgrade_6_to_7(self): |
1472 | self.add_column(Column('wild', Boolean, PassiveDefault('0'), |
1473 | nullable=False, index=True, default=False)) |
1474 | @@ -77,8 +77,15 @@ |
1475 | .all(): |
1476 | row.wild = True |
1477 | self.upgrade_session.save_or_update(row) |
1478 | + def upgrade_7_to_8(self): |
1479 | + self.drop_index(self.table.c._name) |
1480 | + self.alter_column(Column('name', |
1481 | + IbidUnicodeText(32, case_insensitive=True), |
1482 | + key='_name', nullable=False, unique=True, |
1483 | + index=True), force_rebuild=True) |
1484 | + self.add_index(self.table.c._name) |
1485 | |
1486 | - __table__.versioned_schema = FactoidNameSchema(__table__, 7) |
1487 | + __table__.versioned_schema = FactoidNameSchema(__table__, 8) |
1488 | |
1489 | def __init__(self, name, identity_id, factoid_id=None, factpack=None): |
1490 | self.name = name |
1491 | @@ -102,8 +109,9 @@ |
1492 | class FactoidValue(Base): |
1493 | __table__ = Table('factoid_values', Base.metadata, |
1494 | Column('id', Integer, primary_key=True), |
1495 | - Column('value', UnicodeText, nullable=False), |
1496 | - Column('factoid_id', Integer, ForeignKey('factoids.id'), nullable=False, index=True), |
1497 | + Column('value', IbidUnicodeText, nullable=False), |
1498 | + Column('factoid_id', Integer, ForeignKey('factoids.id'), nullable=False, |
1499 | + index=True), |
1500 | Column('identity_id', Integer, ForeignKey('identities.id'), index=True), |
1501 | Column('time', DateTime, nullable=False), |
1502 | Column('factpack', Integer, ForeignKey('factpacks.id'), index=True), |
1503 | @@ -116,8 +124,11 @@ |
1504 | self.add_index(self.table.c.factoid_id) |
1505 | self.add_index(self.table.c.identity_id) |
1506 | self.add_index(self.table.c.factpack) |
1507 | + def upgrade_3_to_4(self): |
1508 | + self.alter_column(Column('value', IbidUnicodeText, nullable=False), |
1509 | + force_rebuild=True) |
1510 | |
1511 | - __table__.versioned_schema = FactoidValueSchema(__table__, 3) |
1512 | + __table__.versioned_schema = FactoidValueSchema(__table__, 4) |
1513 | |
1514 | def __init__(self, value, identity_id, factoid_id=None, factpack=None): |
1515 | self.value = value |
1516 | @@ -141,7 +152,8 @@ |
1517 | |
1518 | class FactoidSchema(VersionedSchema): |
1519 | def upgrade_1_to_2(self): |
1520 | - self.add_column(Column('factpack', Integer, ForeignKey('factpacks.id'))) |
1521 | + self.add_column(Column('factpack', Integer, |
1522 | + ForeignKey('factpacks.id'))) |
1523 | def upgrade_2_to_3(self): |
1524 | self.add_index(self.table.c.factpack) |
1525 | |
1526 | @@ -157,14 +169,22 @@ |
1527 | class Factpack(Base): |
1528 | __table__ = Table('factpacks', Base.metadata, |
1529 | Column('id', Integer, primary_key=True), |
1530 | - Column('name', Unicode(64), nullable=False, unique=True, index=True), |
1531 | + Column('name', IbidUnicode(64, case_insensitive=True), |
1532 | + nullable=False, unique=True, index=True), |
1533 | useexisting=True) |
1534 | |
1535 | class FactpackSchema(VersionedSchema): |
1536 | def upgrade_1_to_2(self): |
1537 | - self.add_index(self.table.c.name, unique=True) |
1538 | + self.add_index(self.table.c.name) |
1539 | + def upgrade_2_to_3(self): |
1540 | + self.drop_index(self.table.c.name) |
1541 | + self.alter_column(Column('name', |
1542 | + IbidUnicode(64, case_insensitive=True), |
1543 | + nullable=False, unique=True, index=True), |
1544 | + force_rebuild=True) |
1545 | + self.add_index(self.table.c.name) |
1546 | |
1547 | - __table__.versioned_schema = FactpackSchema(__table__, 2) |
1548 | + __table__.versioned_schema = FactpackSchema(__table__, 3) |
1549 | |
1550 | def __init__(self, name): |
1551 | self.name = name |
1552 | @@ -202,8 +222,7 @@ |
1553 | query = query.filter(':fact LIKE name ESCAPE :escape') \ |
1554 | .params(fact=name, escape='\\') |
1555 | else: |
1556 | - query = query.filter(func.lower(FactoidName.name) \ |
1557 | - == escape_name(name).lower()) |
1558 | + query = query.filter(FactoidName.name == escape_name(name)) |
1559 | # For normal matches, restrict to the subset applicable |
1560 | if not literal: |
1561 | query = query.filter(FactoidName.wild == wild) |
1562 | @@ -328,10 +347,10 @@ |
1563 | return |
1564 | |
1565 | factoid = event.session.query(Factoid).join(Factoid.names) \ |
1566 | - .filter(func.lower(FactoidName.name)==escape_name(source).lower()).first() |
1567 | + .filter(FactoidName.name==escape_name(source)).first() |
1568 | if factoid: |
1569 | target_factoid = event.session.query(FactoidName) \ |
1570 | - .filter(func.lower(FactoidName.name)==escape_name(target).lower()).first() |
1571 | + .filter(FactoidName.name==escape_name(target)).first() |
1572 | if target_factoid: |
1573 | event.addresponse(u"I already know stuff about %s", target) |
1574 | return |
1575 | @@ -510,7 +529,7 @@ |
1576 | return |
1577 | |
1578 | factoid = event.session.query(Factoid).join(Factoid.names)\ |
1579 | - .filter(func.lower(FactoidName.name)==escape_name(name).lower()).first() |
1580 | + .filter(FactoidName.name==escape_name(name)).first() |
1581 | if factoid: |
1582 | if correction: |
1583 | identities = get_identities(event) |
1584 | |
1585 | === modified file 'ibid/plugins/feeds.py' |
1586 | --- ibid/plugins/feeds.py 2009-12-28 13:34:30 +0000 |
1587 | +++ ibid/plugins/feeds.py 2009-12-29 16:00:32 +0000 |
1588 | @@ -3,15 +3,13 @@ |
1589 | import logging |
1590 | from urlparse import urljoin |
1591 | |
1592 | -from sqlalchemy import Column, Integer, Unicode, DateTime, UnicodeText, \ |
1593 | - ForeignKey, Table |
1594 | -from sqlalchemy.sql import func |
1595 | import feedparser |
1596 | from html2text import html2text_file |
1597 | |
1598 | from ibid.config import IntOption |
1599 | +from ibid.db import IbidUnicode, IbidUnicodeText, Integer, DateTime, \ |
1600 | + Table, Column, ForeignKey, Base, VersionedSchema |
1601 | from ibid.plugins import Processor, match, authorise, run_every |
1602 | -from ibid.models import Base, VersionedSchema |
1603 | from ibid.utils import cacheable_download, human_join |
1604 | from ibid.utils.html import get_html_parse_tree |
1605 | |
1606 | @@ -22,25 +20,45 @@ |
1607 | class Feed(Base): |
1608 | __table__ = Table('feeds', Base.metadata, |
1609 | Column('id', Integer, primary_key=True), |
1610 | - Column('name', Unicode(32), unique=True, nullable=False, index=True), |
1611 | - Column('url', UnicodeText, nullable=False), |
1612 | + Column('name', IbidUnicode(32, case_insensitive=True), |
1613 | + unique=True, nullable=False, index=True), |
1614 | + Column('url', IbidUnicodeText, nullable=False), |
1615 | Column('identity_id', Integer, ForeignKey('identities.id'), |
1616 | - nullable=False, index=True), |
1617 | + nullable=False, index=True), |
1618 | Column('time', DateTime, nullable=False), |
1619 | - Column('source', Unicode(32), index=True), |
1620 | - Column('target', Unicode(32), index=True), |
1621 | + Column('source', IbidUnicode(32, case_insensitive=True), index=True), |
1622 | + Column('target', IbidUnicode(32, case_insensitive=True), index=True), |
1623 | useexisting=True) |
1624 | |
1625 | class FeedSchema(VersionedSchema): |
1626 | def upgrade_1_to_2(self): |
1627 | - self.add_index(self.table.c.name, unique=True) |
1628 | + self.add_index(self.table.c.name) |
1629 | self.add_index(self.table.c.identity_id) |
1630 | def upgrade_2_to_3(self): |
1631 | - from sqlalchemy import Column, Unicode |
1632 | - self.add_column(Column('source', Unicode(32), index=True)) |
1633 | - self.add_column(Column('target', Unicode(32), index=True)) |
1634 | + from ibid.db import IbidUnicode, Column |
1635 | + self.add_column(Column('source', IbidUnicode(32), index=True)) |
1636 | + self.add_column(Column('target', IbidUnicode(32), index=True)) |
1637 | + def upgrade_3_to_4(self): |
1638 | + self.drop_index(self.table.c.name) |
1639 | + self.drop_index(self.table.c.source) |
1640 | + self.drop_index(self.table.c.target) |
1641 | + self.alter_column(Column('name', |
1642 | + IbidUnicode(32, case_insensitive=True), |
1643 | + unique=True, nullable=False, index=True), |
1644 | + force_rebuild=True) |
1645 | + self.alter_column(Column('url', IbidUnicodeText, nullable=False), |
1646 | + force_rebuild=True) |
1647 | + self.alter_column(Column('source', |
1648 | + IbidUnicode(32, case_insensitive=True), |
1649 | + index=True), force_rebuild=True) |
1650 | + self.alter_column(Column('target', |
1651 | + IbidUnicode(32, case_insensitive=True), |
1652 | + index=True), force_rebuild=True) |
1653 | + self.add_index(self.table.c.name) |
1654 | + self.add_index(self.table.c.source) |
1655 | + self.add_index(self.table.c.target) |
1656 | |
1657 | - __table__.versioned_schema = FeedSchema(__table__, 3) |
1658 | + __table__.versioned_schema = FeedSchema(__table__, 4) |
1659 | |
1660 | feed = None |
1661 | entries = None |
1662 | @@ -82,8 +100,7 @@ |
1663 | @match(r'^add\s+feed\s+(.+?)\s+as\s+(.+?)$') |
1664 | @authorise() |
1665 | def add(self, event, url, name): |
1666 | - feed = event.session.query(Feed) \ |
1667 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1668 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1669 | |
1670 | if feed: |
1671 | event.addresponse(u"I already have the %s feed", name) |
1672 | @@ -134,8 +151,7 @@ |
1673 | @match(r'^remove\s+(.+?)\s+feed$') |
1674 | @authorise() |
1675 | def remove(self, event, name): |
1676 | - feed = event.session.query(Feed) \ |
1677 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1678 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1679 | |
1680 | if not feed: |
1681 | event.addresponse(u"I don't have the %s feed anyway", name) |
1682 | @@ -150,8 +166,7 @@ |
1683 | @match(r'^(?:stop|don\'t)\s+poll(?:ing)?\s(.+)\s+feed$') |
1684 | @authorise() |
1685 | def no_poll(self, event, name): |
1686 | - feed = event.session.query(Feed) \ |
1687 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1688 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1689 | |
1690 | if not feed: |
1691 | event.addresponse(u"I don't have the %s feed anyway", name) |
1692 | @@ -167,8 +182,7 @@ |
1693 | @match(r'^poll\s(.+)\s+feed\s+(?:to|notify)\s+(.+)\s+on\s+(.+)$') |
1694 | @authorise(fallthrough=False) |
1695 | def enable_poll(self, event, name, target, source): |
1696 | - feed = event.session.query(Feed) \ |
1697 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1698 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1699 | |
1700 | if not feed: |
1701 | event.addresponse(u"I don't have the %s feed anyway", name) |
1702 | @@ -194,8 +208,7 @@ |
1703 | number = number and int(number) or 10 |
1704 | start = start and int(start) or 0 |
1705 | |
1706 | - feed = event.session.query(Feed) \ |
1707 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1708 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1709 | |
1710 | if not feed: |
1711 | event.addresponse(u"I don't know about the %s feed", name) |
1712 | @@ -214,8 +227,7 @@ |
1713 | |
1714 | @match(r'^article\s+(?:(\d+)|/(.+?)/)\s+from\s+(.+?)$') |
1715 | def article(self, event, number, pattern, name): |
1716 | - feed = event.session.query(Feed) \ |
1717 | - .filter(func.lower(Feed.name) == name.lower()).first() |
1718 | + feed = event.session.query(Feed).filter_by(name=name).first() |
1719 | |
1720 | if not feed: |
1721 | event.addresponse(u"I don't know about the %s feed", name) |
1722 | |
1723 | === modified file 'ibid/plugins/identity.py' |
1724 | --- ibid/plugins/identity.py 2009-10-16 16:31:34 +0000 |
1725 | +++ ibid/plugins/identity.py 2009-12-29 16:00:32 +0000 |
1726 | @@ -2,13 +2,10 @@ |
1727 | from random import choice |
1728 | import logging |
1729 | |
1730 | -from sqlalchemy.orm import eagerload |
1731 | -from sqlalchemy.sql import func |
1732 | -from sqlalchemy.exceptions import IntegrityError |
1733 | - |
1734 | import ibid |
1735 | +from ibid.db import eagerload, IntegrityError |
1736 | +from ibid.db.models import Account, Identity, Attribute |
1737 | from ibid.plugins import Processor, match, auth_responses |
1738 | -from ibid.models import Account, Identity, Attribute |
1739 | from ibid.utils import human_join |
1740 | |
1741 | help = {} |
1742 | @@ -54,7 +51,7 @@ |
1743 | |
1744 | if admin: |
1745 | identity = event.session.query(Identity) \ |
1746 | - .filter_by(identity=username, source=event.source.lower()).first() |
1747 | + .filter_by(identity=username, source=event.source).first() |
1748 | if identity: |
1749 | identity.account_id = account.id |
1750 | event.session.save_or_update(identity) |
1751 | @@ -158,15 +155,16 @@ |
1752 | else: |
1753 | account = event.session.query(Account) \ |
1754 | .join('identities') \ |
1755 | - .filter(func.lower(Identity.identity) == identity.lower()) \ |
1756 | - .filter(func.lower(Identity.source) == source.lower()).first() |
1757 | + .filter(Identity.identity == identity) \ |
1758 | + .filter(Identity.source == source).first() |
1759 | |
1760 | if account: |
1761 | reverse_attach = True |
1762 | else: |
1763 | username = event.sender['id'] |
1764 | |
1765 | - account = event.session.query(Account).filter_by(username=username).first() |
1766 | + account = event.session.query(Account) \ |
1767 | + .filter_by(username=username).first() |
1768 | |
1769 | if account: |
1770 | event.addresponse(u'I tried to create the account %s for you, but it already exists. ' |
1771 | @@ -176,7 +174,8 @@ |
1772 | account = Account(username) |
1773 | event.session.save_or_update(account) |
1774 | |
1775 | - currentidentity = event.session.query(Identity).get(event.identity) |
1776 | + currentidentity = event.session.query(Identity) \ |
1777 | + .get(event.identity) |
1778 | currentidentity.account_id = account.id |
1779 | event.session.save_or_update(currentidentity) |
1780 | |
1781 | @@ -194,7 +193,8 @@ |
1782 | if not auth_responses(event, 'accounts'): |
1783 | return |
1784 | admin = True |
1785 | - account = event.session.query(Account).filter_by(username=username).first() |
1786 | + account = event.session.query(Account) \ |
1787 | + .filter_by(username=username).first() |
1788 | if not account: |
1789 | event.addresponse(u"I don't know who %s is", username) |
1790 | return |
1791 | @@ -203,8 +203,7 @@ |
1792 | ident = event.session.query(Identity).get(event.identity) |
1793 | else: |
1794 | ident = event.session.query(Identity) \ |
1795 | - .filter(func.lower(Identity.identity) == identity.lower()) \ |
1796 | - .filter(func.lower(Identity.source) == source.lower()).first() |
1797 | + .filter_by(identity=identity, source=source).first() |
1798 | if ident and ident.account: |
1799 | event.addresponse(u'This identity is already attached to account %s', |
1800 | ident.account.username) |
1801 | @@ -262,8 +261,7 @@ |
1802 | return |
1803 | |
1804 | identity = event.session.query(Identity) \ |
1805 | - .filter(func.lower(Identity.identity) == user.lower()) \ |
1806 | - .filter(func.lower(Identity.source) == source.lower()).first() |
1807 | + .filter_by(identity=user, source=source).first() |
1808 | if not identity: |
1809 | identity = Identity(source, user) |
1810 | identity.account_id = account_id |
1811 | @@ -286,15 +284,15 @@ |
1812 | else: |
1813 | if not auth_responses(event, 'accounts'): |
1814 | return |
1815 | - account = event.session.query(Account).filter_by(username=username).first() |
1816 | + account = event.session.query(Account) \ |
1817 | + .filter_by(username=username).first() |
1818 | if not account: |
1819 | event.addresponse(u"I don't know who %s is", username) |
1820 | return |
1821 | |
1822 | identity = event.session.query(Identity) \ |
1823 | - .filter_by(account_id=account.id) \ |
1824 | - .filter(func.lower(Identity.identity) == user.lower()) \ |
1825 | - .filter(func.lower(Identity.source) == source.lower()).first() |
1826 | + .filter_by(account_id=account.id, identity=user, |
1827 | + source=source).first() |
1828 | if not identity: |
1829 | event.addresponse(u"I don't know about that identity") |
1830 | else: |
1831 | @@ -328,7 +326,8 @@ |
1832 | else: |
1833 | if not auth_responses(event, 'accounts'): |
1834 | return |
1835 | - account = event.session.query(Account).filter_by(username=username).first() |
1836 | + account = event.session.query(Account) \ |
1837 | + .filter_by(username=username).first() |
1838 | if not account: |
1839 | event.addresponse(u"I don't know who %s is", username) |
1840 | return |
1841 | @@ -381,8 +380,8 @@ |
1842 | |
1843 | identity = event.session.query(Identity) \ |
1844 | .options(eagerload('account')) \ |
1845 | - .filter(func.lower(Identity.source) == event.source.lower()) \ |
1846 | - .filter(func.lower(Identity.identity) == event.sender['id'].lower()) \ |
1847 | + .filter_by(source=event.source, |
1848 | + identity=event.sender['id']) \ |
1849 | .first() |
1850 | if not identity: |
1851 | identity = Identity(event.source, event.sender['id']) |
1852 | @@ -397,8 +396,8 @@ |
1853 | log.debug(u'Race encountered creating identity for %s on %s', event.sender['id'], event.source) |
1854 | identity = event.session.query(Identity) \ |
1855 | .options(eagerload('account')) \ |
1856 | - .filter(func.lower(Identity.source) == event.source.lower()) \ |
1857 | - .filter(func.lower(Identity.identity) == event.sender['id'].lower()) \ |
1858 | + .filter_by(source=event.source, |
1859 | + identity=event.sender['id']) \ |
1860 | .one() |
1861 | |
1862 | event.identity = identity.id |
1863 | |
1864 | === modified file 'ibid/plugins/karma.py' |
1865 | --- ibid/plugins/karma.py 2009-10-20 15:45:46 +0000 |
1866 | +++ ibid/plugins/karma.py 2009-12-29 16:00:32 +0000 |
1867 | @@ -2,12 +2,10 @@ |
1868 | import re |
1869 | import logging |
1870 | |
1871 | -from sqlalchemy import Column, Integer, Unicode, DateTime, Table |
1872 | -from sqlalchemy.sql import func |
1873 | - |
1874 | +from ibid.config import BoolOption, IntOption, ListOption |
1875 | +from ibid.db import IbidUnicode, DateTime, Integer, Table, Column, Base, \ |
1876 | + VersionedSchema |
1877 | from ibid.plugins import Processor, match, handler, authorise |
1878 | -from ibid.config import BoolOption, IntOption, ListOption |
1879 | -from ibid.models import Base, VersionedSchema |
1880 | |
1881 | help = {'karma': u'Keeps track of karma for people and things.'} |
1882 | |
1883 | @@ -16,7 +14,8 @@ |
1884 | class Karma(Base): |
1885 | __table__ = Table('karma', Base.metadata, |
1886 | Column('id', Integer, primary_key=True), |
1887 | - Column('subject', Unicode(64), unique=True, nullable=False, index=True), |
1888 | + Column('subject', IbidUnicode(64, case_insensitive=True), unique=True, |
1889 | + nullable=False, index=True), |
1890 | Column('changes', Integer, nullable=False), |
1891 | Column('value', Integer, nullable=False), |
1892 | Column('time', DateTime, nullable=False), |
1893 | @@ -24,11 +23,19 @@ |
1894 | |
1895 | class KarmaSchema(VersionedSchema): |
1896 | def upgrade_1_to_2(self): |
1897 | - self.add_index(self.table.c.subject, unique=True) |
1898 | + self.add_index(self.table.c.subject) |
1899 | def upgrade_2_to_3(self): |
1900 | - self.alter_column(Column('subject', Unicode(64), unique=True, nullable=False, index=True)) |
1901 | + self.alter_column(Column('subject', IbidUnicode(64), unique=True, |
1902 | + nullable=False, index=True)) |
1903 | + def upgrade_3_to_4(self): |
1904 | + self.drop_index(self.table.c.subject) |
1905 | + self.alter_column(Column('subject', |
1906 | + IbidUnicode(64, case_insensitive=True), |
1907 | + unique=True, nullable=False, index=True), |
1908 | + force_rebuild=True) |
1909 | + self.add_index(self.table.c.subject) |
1910 | |
1911 | - __table__.versioned_schema = KarmaSchema(__table__, 3) |
1912 | + __table__.versioned_schema = KarmaSchema(__table__, 4) |
1913 | |
1914 | def __init__(self, subject): |
1915 | self.subject = subject |
1916 | @@ -45,16 +52,25 @@ |
1917 | |
1918 | permission = u'karma' |
1919 | |
1920 | - increase = ListOption('increase', 'Suffixes which indicate increased karma', ('++', 'ftw')) |
1921 | - decrease = ListOption('decrease', 'Suffixes which indicate decreased karma', ('--', 'ftl')) |
1922 | - neutral = ListOption('neutral', 'Suffixes which indicate neutral karma', ('==',)) |
1923 | + increase = ListOption('increase', |
1924 | + 'Suffixes which indicate increased karma', |
1925 | + ('++', 'ftw')) |
1926 | + decrease = ListOption('decrease', 'Suffixes which indicate decreased karma', |
1927 | + ('--', 'ftl')) |
1928 | + neutral = ListOption('neutral', 'Suffixes which indicate neutral karma', |
1929 | + ('==',)) |
1930 | reply = BoolOption('reply', 'Acknowledge karma changes', False) |
1931 | public = BoolOption('public', 'Only allow karma changes in public', True) |
1932 | ignore = ListOption('ignore', 'Karma subjects to silently ignore', ()) |
1933 | - importance = IntOption('importance', "Threshold for number of changes after which a karma won't be forgotten", 0) |
1934 | + importance = IntOption('importance', 'Threshold for number of changes after' |
1935 | + " which a karma won't be forgotten", 0) |
1936 | |
1937 | def setup(self): |
1938 | - self.set.im_func.pattern = re.compile(r'^(.+?)\s*(%s)\s*(?:[[{(]+\s*(.+?)\s*[\]})]+)?$' % '|'.join([re.escape(token) for token in self.increase + self.decrease + self.neutral]), re.I) |
1939 | + self.set.im_func.pattern = re.compile( |
1940 | + r'^(.+?)\s*(%s)\s*(?:[[{(]+\s*(.+?)\s*[\]})]+)?$' % '|'.join( |
1941 | + re.escape(token) for token |
1942 | + in self.increase + self.decrease + self.neutral |
1943 | + ), re.I) |
1944 | |
1945 | @handler |
1946 | @authorise(fallthrough=False) |
1947 | @@ -66,8 +82,7 @@ |
1948 | if subject.lower() in self.ignore: |
1949 | return |
1950 | |
1951 | - karma = event.session.query(Karma) \ |
1952 | - .filter(func.lower(Karma.subject) == subject.lower()).first() |
1953 | + karma = event.session.query(Karma).filter_by(subject=subject).first() |
1954 | if not karma: |
1955 | karma = Karma(subject) |
1956 | |
1957 | @@ -109,8 +124,7 @@ |
1958 | |
1959 | @match(r'^karma\s+(?:for\s+)?(.+)$') |
1960 | def handle_karma(self, event, subject): |
1961 | - karma = event.session.query(Karma) \ |
1962 | - .filter(func.lower(Karma.subject) == subject.lower()).first() |
1963 | + karma = event.session.query(Karma).filter_by(subject=subject).first() |
1964 | if not karma: |
1965 | event.addresponse(u'nobody cares, dude') |
1966 | elif karma.value == 0: |
1967 | @@ -143,8 +157,7 @@ |
1968 | @match(r'^forget\s+karma\s+for\s+(.+?)(?:\s*[[{(]+\s*(.+?)\s*[\]})]+)?$') |
1969 | @authorise(fallthrough=False) |
1970 | def forget(self, event, subject, reason): |
1971 | - karma = event.session.query(Karma) \ |
1972 | - .filter(func.lower(Karma.subject) == subject.lower()).first() |
1973 | + karma = event.session.query(Karma).filter_by(subject=subject).first() |
1974 | if not karma: |
1975 | karma = Karma(subject) |
1976 | event.addresponse(u"I was pretty ambivalent about %s, anyway", subject) |
1977 | |
1978 | === modified file 'ibid/plugins/memo.py' |
1979 | --- ibid/plugins/memo.py 2009-12-05 18:26:45 +0000 |
1980 | +++ ibid/plugins/memo.py 2009-12-29 16:00:32 +0000 |
1981 | @@ -1,16 +1,15 @@ |
1982 | from datetime import datetime |
1983 | import logging |
1984 | |
1985 | -from sqlalchemy import Column, Integer, DateTime, ForeignKey, Boolean, UnicodeText, Table |
1986 | -from sqlalchemy.orm import relation |
1987 | -from sqlalchemy.sql import func |
1988 | - |
1989 | import ibid |
1990 | from ibid.plugins import Processor, handler, match, authorise |
1991 | +from ibid.compat import any |
1992 | from ibid.config import IntOption |
1993 | +from ibid.db import IbidUnicodeText, Boolean, Integer, DateTime, \ |
1994 | + Table, Column, ForeignKey, relation, Base, VersionedSchema |
1995 | +from ibid.db.models import Identity, Account |
1996 | from ibid.auth import permission |
1997 | from ibid.plugins.identity import get_identities |
1998 | -from ibid.models import Base, VersionedSchema, Identity, Account |
1999 | from ibid.utils import ago, format_date |
2000 | |
2001 | help = {'memo': u'Keeps messages for people.'} |
2002 | @@ -23,9 +22,11 @@ |
2003 | class Memo(Base): |
2004 | __table__ = Table('memos', Base.metadata, |
2005 | Column('id', Integer, primary_key=True), |
2006 | - Column('from_id', Integer, ForeignKey('identities.id'), nullable=False, index=True), |
2007 | - Column('to_id', Integer, ForeignKey('identities.id'), nullable=False, index=True), |
2008 | - Column('memo', UnicodeText, nullable=False), |
2009 | + Column('from_id', Integer, ForeignKey('identities.id'), |
2010 | + nullable=False, index=True), |
2011 | + Column('to_id', Integer, ForeignKey('identities.id'), |
2012 | + nullable=False, index=True), |
2013 | + Column('memo', IbidUnicodeText, nullable=False), |
2014 | Column('private', Boolean, nullable=False), |
2015 | Column('delivered', Boolean, nullable=False, index=True), |
2016 | Column('time', DateTime, nullable=False), |
2017 | @@ -36,8 +37,11 @@ |
2018 | self.add_index(self.table.c.from_id) |
2019 | self.add_index(self.table.c.to_id) |
2020 | self.add_index(self.table.c.delivered) |
2021 | + def upgrade_2_to_3(self): |
2022 | + self.alter_column(Column('memo', IbidUnicodeText, nullable=False), |
2023 | + force_rebuild=True) |
2024 | |
2025 | - __table__.versioned_schema = MemoSchema(__table__, 2) |
2026 | + __table__.versioned_schema = MemoSchema(__table__, 3) |
2027 | |
2028 | def __init__(self, from_id, to_id, memo, private=False): |
2029 | self.from_id = from_id |
2030 | @@ -47,8 +51,10 @@ |
2031 | self.delivered = False |
2032 | self.time = datetime.utcnow() |
2033 | |
2034 | -Identity.memos_sent = relation(Memo, primaryjoin=Identity.id==Memo.from_id, backref='sender') |
2035 | -Identity.memos_recvd = relation(Memo, primaryjoin=Identity.id==Memo.to_id, backref='recipient') |
2036 | +Identity.memos_sent = relation(Memo, primaryjoin=Identity.id==Memo.from_id, |
2037 | + backref='sender') |
2038 | +Identity.memos_recvd = relation(Memo, primaryjoin=Identity.id==Memo.to_id, |
2039 | + backref='recipient') |
2040 | |
2041 | class Tell(Processor): |
2042 | u"""(tell|pm|privmsg|msg|ask) <person> [on <source>] <message> |
2043 | @@ -58,8 +64,8 @@ |
2044 | permission = u'sendmemo' |
2045 | permissions = (u'recvmemo',) |
2046 | |
2047 | - @match(r'^\s*(?:please\s+)?(tell|pm|privmsg|msg|ask)\s+(\S+)\s+(?:on\s+(\S+)\s+)?(.+?)\s*$', |
2048 | - version='deaddressed') |
2049 | + @match(r'^\s*(?:please\s+)?(tell|pm|privmsg|msg|ask)' |
2050 | + r'\s+(\S+)\s+(?:on\s+(\S+)\s+)?(.+?)\s*$', version='deaddressed') |
2051 | @authorise(fallthrough=False) |
2052 | def tell(self, event, how, who, source, memo): |
2053 | source_specified = bool(source) |
2054 | @@ -69,17 +75,17 @@ |
2055 | source = source.lower() |
2056 | |
2057 | if source.lower() == event.source and \ |
2058 | - [True for name in ibid.config.plugins['core']['names'] if name.lower() == who.lower()]: |
2059 | + any(True for name in ibid.config.plugins['core']['names'] |
2060 | + if name.lower() == who.lower()): |
2061 | event.addresponse(u"I can't deliver messages to myself") |
2062 | return |
2063 | |
2064 | to = event.session.query(Identity) \ |
2065 | - .filter(func.lower(Identity.identity) == who.lower()) \ |
2066 | - .filter_by(source=source).first() |
2067 | + .filter_by(identity=who, source=source).first() |
2068 | |
2069 | if not to and not source_specified: |
2070 | account = event.session.query(Account) \ |
2071 | - .filter(func.lower(Account.username) == who.lower()).first() |
2072 | + .filter_by(username=who).first() |
2073 | if account: |
2074 | for identity in account.identities: |
2075 | if identity.source == source: |
2076 | @@ -90,7 +96,8 @@ |
2077 | if not to and not source_specified: |
2078 | event.addresponse( |
2079 | u"I don't know who %(who)s is. " |
2080 | - u"Say '%(who)s on %(source)s' and I'll take your word that %(who)s exists", { |
2081 | + u"Say '%(who)s on %(source)s' and I'll take your word " |
2082 | + u'that %(who)s exists', { |
2083 | 'who': who, |
2084 | 'source': source, |
2085 | }) |
2086 | @@ -104,20 +111,24 @@ |
2087 | event.session.save(to) |
2088 | event.session.commit() |
2089 | |
2090 | - log.info(u"Created identity %s for %s on %s", to.id, to.identity, to.source) |
2091 | + log.info(u"Created identity %s for %s on %s", to.id, to.identity, |
2092 | + to.source) |
2093 | |
2094 | - if permission(u'recvmemo', to.account and to.account.id or None, to.source, event.session) != 'yes': |
2095 | + if permission(u'recvmemo', to.account and to.account.id or None, |
2096 | + to.source, event.session) != 'yes': |
2097 | event.addresponse(u'Just tell %s yourself', who) |
2098 | return |
2099 | |
2100 | memo = u' '.join((how, who, memo)) |
2101 | |
2102 | - memo = Memo(event.identity, to.id, memo, how.lower() in (u'pm', u'privmsg', u'msg')) |
2103 | + memo = Memo(event.identity, to.id, memo, |
2104 | + how.lower() in (u'pm', u'privmsg', u'msg')) |
2105 | event.session.save_or_update(memo) |
2106 | |
2107 | event.session.commit() |
2108 | log.info(u"Stored memo %s for %s (%s) from %s (%s): %s", |
2109 | - memo.id, to.id, who, event.identity, event.sender['connection'], memo.memo) |
2110 | + memo.id, to.id, who, event.identity, event.sender['connection'], |
2111 | + memo.memo) |
2112 | event.memo = memo.id |
2113 | nomemos_cache.clear() |
2114 | notified_overlimit_cache.discard(to.id) |
2115 | @@ -151,22 +162,20 @@ |
2116 | |
2117 | # Join on column x isn't possible in SQLAlchemy 0.4: |
2118 | identities_to = event.session.query(Identity) \ |
2119 | - .filter_by(source=source) \ |
2120 | - .filter(func.lower(Identity.identity) == who.lower()) \ |
2121 | - .all() |
2122 | + .filter_by(source=source, identity=who).all() |
2123 | |
2124 | identities_to = [identity.id for identity in identities_to] |
2125 | |
2126 | memos = event.session.query(Memo) \ |
2127 | - .filter_by(delivered=False) \ |
2128 | - .filter_by(from_id=event.identity) \ |
2129 | + .filter_by(delivered=False, from_id=event.identity) \ |
2130 | .filter(Memo.to_id.in_(identities_to)) \ |
2131 | .order_by(Memo.time.asc()) |
2132 | count = memos.count() |
2133 | |
2134 | if not count: |
2135 | event.addresponse( |
2136 | - u"You don't have any outstanding messages for %(who)s on %(source)s", { |
2137 | + u"You don't have any outstanding messages for %(who)s on " |
2138 | + u'%(source)s', { |
2139 | 'who': who, |
2140 | 'source': source, |
2141 | }) |
2142 | @@ -174,7 +183,8 @@ |
2143 | |
2144 | if abs(number) > count: |
2145 | event.addresponse( |
2146 | - u"That memo does not exist, you only have %(count)i outstanding memos for %(who)s on %(source)s", { |
2147 | + u'That memo does not exist, you only have %(count)i ' |
2148 | + u'outstanding memos for %(who)s on %(source)s', { |
2149 | 'count': count, |
2150 | 'who': who, |
2151 | 'source': source, |
2152 | @@ -188,12 +198,14 @@ |
2153 | |
2154 | event.session.delete(memo) |
2155 | event.session.commit() |
2156 | - log.info(u"Cancelled memo %s for %s (%s) from %s (%s): %s", |
2157 | - memo.id, memo.to_id, who, event.identity, event.sender['connection'], memo.memo) |
2158 | + log.info(u'Cancelled memo %s for %s (%s) from %s (%s): %s', |
2159 | + memo.id, memo.to_id, who, event.identity, |
2160 | + event.sender['connection'], memo.memo) |
2161 | |
2162 | if count > 1: |
2163 | event.addresponse( |
2164 | - u"Forgotten memo %(number)i for %(who)s on %(source)s, but you still have %(count)i pending", { |
2165 | + u'Forgotten memo %(number)i for %(who)s on %(source)s, ' |
2166 | + u'but you still have %(count)i pending', { |
2167 | 'number': number, |
2168 | 'who': who, |
2169 | 'source': source, |
2170 | @@ -215,7 +227,8 @@ |
2171 | addressed = False |
2172 | processed = True |
2173 | |
2174 | - public_limit = IntOption('public_limit', 'Maximum number of memos to read out in public (flood-protection)', 2) |
2175 | + public_limit = IntOption('public_limit', 'Maximum number of memos to read ' |
2176 | + 'out in public (flood-protection)', 2) |
2177 | |
2178 | @handler |
2179 | def deliver(self, event): |
2180 | @@ -227,11 +240,13 @@ |
2181 | if len(memos) > self.public_limit and event.public: |
2182 | if event.identity not in notified_overlimit_cache: |
2183 | public = [True for memo in memos if not memo.private] |
2184 | - message = u'By the way, you have a pile of memos waiting for you, too many to read out in public. PM me' |
2185 | + message = u'By the way, you have a pile of memos waiting for ' \ |
2186 | + u'you, too many to read out in public. PM me' |
2187 | if public: |
2188 | event.addresponse(u'%s: ' + message, event.sender['nick']) |
2189 | else: |
2190 | - event.addresponse(message, target=event.sender['connection']) |
2191 | + event.addresponse(message, |
2192 | + target=event.sender['connection']) |
2193 | notified_overlimit_cache.add(event.identity) |
2194 | return |
2195 | |
2196 | @@ -241,7 +256,8 @@ |
2197 | continue |
2198 | |
2199 | if memo.private: |
2200 | - message = u'By the way, %(sender)s on %(source)s told me "%(message)s" %(ago)s ago' % { |
2201 | + message = u'By the way, %(sender)s on %(source)s told me ' \ |
2202 | + u'"%(message)s" %(ago)s ago' % { |
2203 | 'sender': memo.sender.identity, |
2204 | 'source': memo.sender.source, |
2205 | 'message': memo.memo, |
2206 | @@ -249,7 +265,8 @@ |
2207 | } |
2208 | event.addresponse(message, target=event.sender['connection']) |
2209 | else: |
2210 | - event.addresponse(u'By the way, %(sender)s on %(source)s told me "%(message)s" %(ago)s ago', { |
2211 | + event.addresponse(u'By the way, %(sender)s on %(source)s ' |
2212 | + u'told me "%(message)s" %(ago)s ago', { |
2213 | 'sender': memo.sender.identity, |
2214 | 'source': memo.sender.source, |
2215 | 'message': memo.memo, |
2216 | @@ -272,7 +289,8 @@ |
2217 | addressed = False |
2218 | processed = True |
2219 | |
2220 | - public_limit = IntOption('public_limit', 'Maximum number of memos to read out in public (flood-protection)', 2) |
2221 | + public_limit = IntOption('public_limit', 'Maximum number of memos to read ' |
2222 | + 'out in public (flood-protection)', 2) |
2223 | |
2224 | @handler |
2225 | def state(self, event): |
2226 | @@ -326,9 +344,7 @@ |
2227 | |
2228 | # Join on column x isn't possible in SQLAlchemy 0.4: |
2229 | identities_to = event.session.query(Identity) \ |
2230 | - .filter_by(source=source) \ |
2231 | - .filter(func.lower(Identity.identity) == who.lower()) \ |
2232 | - .all() |
2233 | + .filter_by(source=source, identity=who).all() |
2234 | |
2235 | identities_to = [identity.id for identity in identities_to] |
2236 | |
2237 | @@ -344,7 +360,8 @@ |
2238 | for i, memo in enumerate(memos) |
2239 | )) |
2240 | else: |
2241 | - event.addresponse(u"Sorry, all your memos to %(who)s on %(source)s are already delivered", { |
2242 | + event.addresponse(u'Sorry, all your memos to %(who)s on %(source)s ' |
2243 | + u'are already delivered', { |
2244 | 'who': who, |
2245 | 'source': source, |
2246 | }) |
2247 | @@ -360,7 +377,8 @@ |
2248 | |
2249 | memo = memos[number] |
2250 | |
2251 | - event.addresponse(u"From %(sender)s on %(source)s at %(time)s: %(message)s", { |
2252 | + event.addresponse(u'From %(sender)s on %(source)s at %(time)s: ' |
2253 | + u'%(message)s', { |
2254 | 'sender': memo.sender.identity, |
2255 | 'source': memo.sender.source, |
2256 | 'time': format_date(memo.time), |
2257 | |
2258 | === modified file 'ibid/plugins/seen.py' |
2259 | --- ibid/plugins/seen.py 2009-10-16 16:31:34 +0000 |
2260 | +++ ibid/plugins/seen.py 2009-12-29 16:00:32 +0000 |
2261 | @@ -1,13 +1,11 @@ |
2262 | from datetime import datetime |
2263 | import logging |
2264 | |
2265 | -from sqlalchemy import Column, Integer, Unicode, DateTime, ForeignKey, UnicodeText, UniqueConstraint, Table |
2266 | -from sqlalchemy.orm import relation |
2267 | -from sqlalchemy.sql import func |
2268 | -from sqlalchemy.exceptions import IntegrityError |
2269 | - |
2270 | +from ibid.db import IbidUnicode, IbidUnicodeText, Integer, DateTime, \ |
2271 | + Table, Column, ForeignKey, UniqueConstraint, \ |
2272 | + relation, IntegrityError, Base, VersionedSchema |
2273 | +from ibid.db.models import Identity, Account |
2274 | from ibid.plugins import Processor, match |
2275 | -from ibid.models import Base, VersionedSchema, Identity, Account |
2276 | from ibid.utils import ago, format_date |
2277 | |
2278 | log = logging.getLogger('plugins.seen') |
2279 | @@ -17,10 +15,11 @@ |
2280 | class Sighting(Base): |
2281 | __table__ = Table('seen', Base.metadata, |
2282 | Column('id', Integer, primary_key=True), |
2283 | - Column('identity_id', Integer, ForeignKey('identities.id'), nullable=False, index=True), |
2284 | - Column('type', Unicode(8), nullable=False, index=True), |
2285 | - Column('channel', Unicode(32)), |
2286 | - Column('value', UnicodeText), |
2287 | + Column('identity_id', Integer, ForeignKey('identities.id'), nullable=False, |
2288 | + index=True), |
2289 | + Column('type', IbidUnicode(8), nullable=False, index=True), |
2290 | + Column('channel', IbidUnicode(32)), |
2291 | + Column('value', IbidUnicodeText), |
2292 | Column('time', DateTime, nullable=False), |
2293 | Column('count', Integer, nullable=False), |
2294 | UniqueConstraint('identity_id', 'type'), |
2295 | @@ -30,12 +29,22 @@ |
2296 | def upgrade_1_to_2(self): |
2297 | self.add_index(self.table.c.identity_id) |
2298 | self.add_index(self.table.c.type) |
2299 | + def upgrade_2_to_3(self): |
2300 | + self.drop_index(self.table.c.type) |
2301 | + self.alter_column(Column('type', IbidUnicode(8), nullable=False, |
2302 | + index=True), force_rebuild=True) |
2303 | + self.alter_column(Column('channel', IbidUnicode(32)), |
2304 | + force_rebuild=True) |
2305 | + self.alter_column(Column('value', IbidUnicodeText), |
2306 | + force_rebuild=True) |
2307 | + self.add_index(self.table.c.type) |
2308 | |
2309 | - __table__.versioned_schema = SightingSchema(__table__, 2) |
2310 | + __table__.versioned_schema = SightingSchema(__table__, 3) |
2311 | |
2312 | identity = relation('Identity') |
2313 | |
2314 | - def __init__(self, identity_id=None, type='message', channel=None, value=None): |
2315 | + def __init__(self, identity_id=None, type='message', channel=None, |
2316 | + value=None): |
2317 | self.identity_id = identity_id |
2318 | self.type = type |
2319 | self.channel = channel |
2320 | @@ -44,7 +53,8 @@ |
2321 | self.count = 0 |
2322 | |
2323 | def __repr__(self): |
2324 | - return u'<Sighting %s %s in %s at %s: %s>' % (self.type, self.identity_id, self.channel, self.time, self.value) |
2325 | + return u'<Sighting %s %s in %s at %s: %s>' % ( |
2326 | + self.type, self.identity_id, self.channel, self.time, self.value) |
2327 | |
2328 | class See(Processor): |
2329 | feature = 'seen' |
2330 | @@ -56,8 +66,7 @@ |
2331 | return |
2332 | |
2333 | sighting = event.session.query(Sighting) \ |
2334 | - .filter_by(identity_id=event.identity) \ |
2335 | - .filter_by(type=event.type).first() |
2336 | + .filter_by(identity_id=event.identity, type=event.type).first() |
2337 | if not sighting: |
2338 | sighting = Sighting(event.identity, event.type) |
2339 | |
2340 | @@ -89,13 +98,14 @@ |
2341 | |
2342 | account = None |
2343 | identity = event.session.query(Identity) \ |
2344 | - .filter(func.lower(Identity.source) == (source and source or event.source).lower()) \ |
2345 | - .filter(func.lower(Identity.identity) == who.lower()).first() |
2346 | + .filter_by(source=(source or event.source), identity=who) \ |
2347 | + .first() |
2348 | if identity and identity.account and not source: |
2349 | account = identity.account |
2350 | |
2351 | if not identity and not source: |
2352 | - account = event.session.query(Account).filter_by(username=who).first() |
2353 | + account = event.session.query(Account).filter_by(username=who) \ |
2354 | + .first() |
2355 | |
2356 | if not identity and not account: |
2357 | event.addresponse(u"I don't know who %s is", who) |
2358 | |
2359 | === modified file 'ibid/plugins/url.py' |
2360 | --- ibid/plugins/url.py 2009-12-28 13:34:30 +0000 |
2361 | +++ ibid/plugins/url.py 2009-12-29 16:00:32 +0000 |
2362 | @@ -7,12 +7,12 @@ |
2363 | import re |
2364 | |
2365 | from pkg_resources import resource_exists, resource_stream |
2366 | -from sqlalchemy import Column, Integer, Unicode, DateTime, UnicodeText, ForeignKey, Table |
2367 | |
2368 | import ibid |
2369 | from ibid.plugins import Processor, match, handler |
2370 | from ibid.config import Option, ListOption |
2371 | -from ibid.models import Base, VersionedSchema |
2372 | +from ibid.db import IbidUnicode, IbidUnicodeText, Integer, DateTime, \ |
2373 | + Table, Column, ForeignKey, Base, VersionedSchema |
2374 | from ibid.utils.html import get_html_parse_tree |
2375 | |
2376 | help = {'url': u'Captures URLs seen in channel to database and/or to delicious, and shortens and lengthens URLs'} |
2377 | @@ -22,17 +22,23 @@ |
2378 | class URL(Base): |
2379 | __table__ = Table('urls', Base.metadata, |
2380 | Column('id', Integer, primary_key=True), |
2381 | - Column('url', UnicodeText, nullable=False), |
2382 | - Column('channel', Unicode(32), nullable=False), |
2383 | - Column('identity_id', Integer, ForeignKey('identities.id'), nullable=False, index=True), |
2384 | + Column('url', IbidUnicodeText, nullable=False), |
2385 | + Column('channel', IbidUnicode(32, case_insensitive=True), nullable=False), |
2386 | + Column('identity_id', Integer, ForeignKey('identities.id'), |
2387 | + nullable=False, index=True), |
2388 | Column('time', DateTime, nullable=False), |
2389 | useexisting=True) |
2390 | |
2391 | class URLSchema(VersionedSchema): |
2392 | def upgrade_1_to_2(self): |
2393 | self.add_index(self.table.c.identity_id) |
2394 | + def upgrade_2_to_3(self): |
2395 | + self.alter_column(Column('url', IbidUnicodeText, nullable=False)) |
2396 | + self.alter_column(Column('channel', |
2397 | + IbidUnicode(32, case_insensitive=True), |
2398 | + nullable=False), force_rebuild=True) |
2399 | |
2400 | - __table__.versioned_schema = URLSchema(__table__, 2) |
2401 | + __table__.versioned_schema = URLSchema(__table__, 3) |
2402 | |
2403 | def __init__(self, url, channel, identity_id): |
2404 | self.url = url |
2405 | |
2406 | === modified file 'ibid/source/irc.py' |
2407 | --- ibid/source/irc.py 2009-10-31 14:28:20 +0000 |
2408 | +++ ibid/source/irc.py 2009-12-29 16:00:32 +0000 |
2409 | @@ -10,7 +10,7 @@ |
2410 | |
2411 | import ibid |
2412 | from ibid.config import Option, IntOption, BoolOption, FloatOption, ListOption |
2413 | -from ibid.models import Credential |
2414 | +from ibid.db.models import Credential |
2415 | from ibid.source import IbidSourceFactory |
2416 | from ibid.event import Event |
2417 | from ibid.utils import ibid_version |
2418 | @@ -261,8 +261,10 @@ |
2419 | |
2420 | def auth_hostmask(self, event, credential = None): |
2421 | for credential in event.session.query(Credential) \ |
2422 | - .filter_by(method=u'hostmask').filter_by(account_id=event.account) \ |
2423 | - .filter(or_(Credential.source == event.source, Credential.source == None)).all(): |
2424 | + .filter_by(method=u'hostmask', account_id=event.account) \ |
2425 | + .filter(or_(Credential.source == event.source, |
2426 | + Credential.source == None)) \ |
2427 | + .all(): |
2428 | if fnmatch(event.sender['connection'], credential.credential): |
2429 | return True |
2430 | |
2431 | |
2432 | === modified file 'scripts/ibid-db' |
2433 | --- scripts/ibid-db 2009-12-10 09:40:08 +0000 |
2434 | +++ scripts/ibid-db 2009-12-29 16:00:33 +0000 |
2435 | @@ -7,14 +7,15 @@ |
2436 | from os.path import exists |
2437 | from sys import stdin, stdout, stderr, exit |
2438 | |
2439 | -from sqlalchemy import select, DateTime, Unicode, UnicodeText |
2440 | +from sqlalchemy import select, DateTime |
2441 | from twisted.python.modules import getModule |
2442 | |
2443 | import ibid |
2444 | from ibid.compat import json |
2445 | from ibid.config import FileConfig |
2446 | from ibid.core import DatabaseManager |
2447 | -from ibid.models import metadata, upgrade_schemas |
2448 | +from ibid.db import metadata, upgrade_schemas |
2449 | +from ibid.db.types import IbidUnicode, IbidUnicodeText |
2450 | from ibid.utils import ibid_version |
2451 | |
2452 | parser = OptionParser(usage='%prog [options...]', description= |
2453 | @@ -163,7 +164,8 @@ |
2454 | if isinstance(dbtable.c[field].type, DateTime): |
2455 | row[field] = datetime.strptime(row[field], |
2456 | '%Y-%m-%dT%H:%M:%SZ') |
2457 | - elif isinstance(dbtable.c[field].type, (Unicode, UnicodeText)): |
2458 | + elif isinstance(dbtable.c[field].type, |
2459 | + (IbidUnicode, IbidUnicodeText)): |
2460 | row[field] = unicode(row[field]) |
2461 | sql = dbtable.insert().values(**row) |
2462 | db.execute(sql) |
2463 | |
2464 | === modified file 'scripts/ibid-factpack' |
2465 | --- scripts/ibid-factpack 2009-12-12 12:36:34 +0000 |
2466 | +++ scripts/ibid-factpack 2009-12-29 16:00:33 +0000 |
2467 | @@ -1,11 +1,9 @@ |
2468 | #!/usr/bin/env python |
2469 | |
2470 | -from sys import argv, exit, stderr |
2471 | +from sys import exit, stderr |
2472 | from os.path import basename, exists |
2473 | from optparse import OptionParser |
2474 | |
2475 | -from sqlalchemy.sql import func |
2476 | - |
2477 | import ibid |
2478 | from ibid.compat import json |
2479 | from ibid.config import FileConfig |
2480 | @@ -69,7 +67,7 @@ |
2481 | print >> stderr, u"Invalid factpack" |
2482 | exit(4) |
2483 | |
2484 | -name = basename(filename).replace('.json', '') |
2485 | +name = unicode(basename(filename).replace('.json', '')) |
2486 | factpack = session.query(Factpack).filter_by(name=name).first() |
2487 | if factpack: |
2488 | print >> stderr, u'Factpack is already imported' |
2489 | @@ -77,18 +75,17 @@ |
2490 | |
2491 | factpack = Factpack(name) |
2492 | session.save(factpack) |
2493 | +session.flush() |
2494 | |
2495 | existing = [] |
2496 | for names, values in facts: |
2497 | factoid = Factoid(factpack.id) |
2498 | for name in names: |
2499 | - if session.query(FactoidName) \ |
2500 | - .filter(func.lower(FactoidName.name) |
2501 | - == escape_name(name).lower()) \ |
2502 | - .first(): |
2503 | + name = unicode(name) |
2504 | + if session.query(FactoidName).filter_by(name=escape_name(name)).first(): |
2505 | existing.append(name) |
2506 | continue |
2507 | - fname = FactoidName(unicode(name), None, factpack=factpack.id) |
2508 | + fname = FactoidName(name, None, factpack=factpack.id) |
2509 | factoid.names.append(fname) |
2510 | for value in values: |
2511 | fvalue = FactoidValue(unicode(value), None, factpack=factpack.id) |
2512 | |
2513 | === modified file 'scripts/ibid-plugin' |
2514 | --- scripts/ibid-plugin 2009-12-21 12:43:58 +0000 |
2515 | +++ scripts/ibid-plugin 2009-12-29 16:00:33 +0000 |
2516 | @@ -17,8 +17,8 @@ |
2517 | import ibid |
2518 | import ibid.plugins |
2519 | from ibid.config import FileConfig |
2520 | +from ibid.db.models import Identity |
2521 | from ibid.event import Event |
2522 | -from ibid.models import Identity |
2523 | |
2524 | parser = OptionParser(usage="""%prog [options...] [plugins...] |
2525 | plugins is the list of plugins to load. A plugin name followed by a - will be disabled rather than loaded.""") |
2526 | |
2527 | === modified file 'scripts/ibid-setup' |
2528 | --- scripts/ibid-setup 2009-12-13 13:21:53 +0000 |
2529 | +++ scripts/ibid-setup 2009-12-29 16:00:33 +0000 |
2530 | @@ -13,10 +13,11 @@ |
2531 | from twisted.python.modules import getModule |
2532 | |
2533 | import ibid |
2534 | -from ibid.plugins.auth import hash |
2535 | from ibid.config import FileConfig |
2536 | from ibid.core import DatabaseManager |
2537 | -from ibid.models import Account, Identity, Permission, Credential, metadata, upgrade_schemas |
2538 | +from ibid.db import metadata, upgrade_schemas |
2539 | +from ibid.db.models import Account, Identity, Permission, Credential |
2540 | +from ibid.plugins.auth import hash |
2541 | |
2542 | logging.basicConfig(level=logging.DEBUG) |
2543 | |
2544 | |
2545 | === modified file 'scripts/ibid_import' |
2546 | --- scripts/ibid_import 2009-12-11 08:29:47 +0000 |
2547 | +++ scripts/ibid_import 2009-12-29 16:00:33 +0000 |
2548 | @@ -4,14 +4,12 @@ |
2549 | |
2550 | from chardet import detect |
2551 | from dateutil.tz import tzlocal, tzutc |
2552 | -from sqlalchemy import create_engine, Column, Integer, String, DateTime, \ |
2553 | - or_, ForeignKey, Boolean, Text |
2554 | +from sqlalchemy import create_engine, Column, Integer, String, DateTime, Text |
2555 | from sqlalchemy.ext.declarative import declarative_base |
2556 | from sqlalchemy.orm import sessionmaker |
2557 | -from sqlalchemy.sql import func |
2558 | |
2559 | from ibid.config import FileConfig |
2560 | -from ibid.models import Identity |
2561 | +from ibid.db.models import Identity |
2562 | from ibid.plugins.seen import Sighting |
2563 | from ibid.plugins.factoid import Factoid, FactoidName, FactoidValue |
2564 | from ibid.plugins.karma import Karma |
2565 | @@ -116,8 +114,7 @@ |
2566 | user = decode(user) |
2567 | |
2568 | identity = session.query(Identity) \ |
2569 | - .filter(func.lower(Identity.identity) == user.lower()) \ |
2570 | - .filter(func.lower(Identity.source) == source.lower()).first() |
2571 | + .filter_by(identity=user, source=source).first() |
2572 | if not identity: |
2573 | identity = Identity(source, user) |
2574 | identity.created = created |
2575 | @@ -153,9 +150,7 @@ |
2576 | stdout.flush() |
2577 | |
2578 | fname = ibid.query(FactoidName) \ |
2579 | - .filter(func.lower(FactoidName.name) |
2580 | - == kfactoid.decoded_fact.lower()) \ |
2581 | - .first() |
2582 | + .filter_by(name=kfactoid.decoded_fact).first() |
2583 | if not fname: |
2584 | factoid = Factoid() |
2585 | try: |
You'll love this.
There's a good chance that there are still some bugs in here, so back up first.
But it seems to work.