Merge lp:~exarkun/divmod.org/remove-axiom-1325288 into lp:divmod.org

Proposed by Jean-Paul Calderone on 2014-06-30
Status: Merged
Approved by: Tristan Seligmann on 2014-06-30
Approved revision: 2751
Merged at revision: 2751
Proposed branch: lp:~exarkun/divmod.org/remove-axiom-1325288
Merge into: lp:divmod.org
Diff against target: 24641 lines (+1/-23880)
144 files modified
Axiom/.coveragerc (+0/-9)
Axiom/LICENSE (+0/-20)
Axiom/MANIFEST.in (+0/-5)
Axiom/NAME.txt (+0/-15)
Axiom/NEWS.txt (+0/-313)
Axiom/README.txt (+0/-23)
Axiom/axiom/__init__.py (+0/-8)
Axiom/axiom/_fincache.py (+0/-167)
Axiom/axiom/_pysqlite2.py (+0/-162)
Axiom/axiom/_schema.py (+0/-71)
Axiom/axiom/_version.py (+0/-1)
Axiom/axiom/attributes.py (+0/-1326)
Axiom/axiom/batch.py (+0/-1230)
Axiom/axiom/batch.tac (+0/-20)
Axiom/axiom/benchmarks/benchmark_batchitemcreation.py (+0/-25)
Axiom/axiom/benchmarks/benchmark_batchitemdeletion.py (+0/-27)
Axiom/axiom/benchmarks/benchmark_itemcreation.py (+0/-28)
Axiom/axiom/benchmarks/benchmark_itemdeletion.py (+0/-29)
Axiom/axiom/benchmarks/benchmark_tagnames.py (+0/-43)
Axiom/axiom/benchmarks/benchmark_tagsof.py (+0/-48)
Axiom/axiom/benchmarks/testbase.py (+0/-5)
Axiom/axiom/benchmarks/testindex.py (+0/-5)
Axiom/axiom/benchmarks/testinit.py (+0/-10)
Axiom/axiom/benchmarks/testreader.py (+0/-10)
Axiom/axiom/benchmarks/testwriter.py (+0/-13)
Axiom/axiom/dependency.py (+0/-289)
Axiom/axiom/errors.py (+0/-193)
Axiom/axiom/examples/bucket.py (+0/-52)
Axiom/axiom/examples/library.py (+0/-114)
Axiom/axiom/iaxiom.py (+0/-363)
Axiom/axiom/item.py (+0/-1137)
Axiom/axiom/listversions.py (+0/-142)
Axiom/axiom/plugins/__init__.py (+0/-12)
Axiom/axiom/plugins/axiom_plugins.py (+0/-299)
Axiom/axiom/queryutil.py (+0/-154)
Axiom/axiom/scheduler.py (+0/-557)
Axiom/axiom/scripts/axiomatic.py (+0/-197)
Axiom/axiom/scripts/pysql.py (+0/-19)
Axiom/axiom/sequence.py (+0/-175)
Axiom/axiom/slotmachine.py (+0/-181)
Axiom/axiom/store.py (+0/-2376)
Axiom/axiom/substore.py (+0/-122)
Axiom/axiom/tags.py (+0/-125)
Axiom/axiom/test/brokenapp.py (+0/-59)
Axiom/axiom/test/cursortest.py (+0/-164)
Axiom/axiom/test/deleteswordapp.py (+0/-17)
Axiom/axiom/test/historic/__init__.py (+0/-1)
Axiom/axiom/test/historic/stub_account1to2.py (+0/-17)
Axiom/axiom/test/historic/stub_catalog1to2.py (+0/-28)
Axiom/axiom/test/historic/stub_loginMethod1to2.py (+0/-17)
Axiom/axiom/test/historic/stub_manhole1to2.py (+0/-12)
Axiom/axiom/test/historic/stub_parentHook2to3.py (+0/-23)
Axiom/axiom/test/historic/stub_parentHook3to4.py (+0/-24)
Axiom/axiom/test/historic/stub_processor1to2.py (+0/-29)
Axiom/axiom/test/historic/stub_scheduler1to2.py (+0/-19)
Axiom/axiom/test/historic/stub_subStoreStartupService1to2.py (+0/-51)
Axiom/axiom/test/historic/stub_subscheduler1to2.py (+0/-21)
Axiom/axiom/test/historic/stub_textlist.py (+0/-25)
Axiom/axiom/test/historic/stubloader.py (+0/-74)
Axiom/axiom/test/historic/test_account1to2.py (+0/-25)
Axiom/axiom/test/historic/test_catalog1to2.py (+0/-14)
Axiom/axiom/test/historic/test_loginMethod1to2.py (+0/-25)
Axiom/axiom/test/historic/test_manhole1to2.py (+0/-16)
Axiom/axiom/test/historic/test_parentHook2to3.py (+0/-44)
Axiom/axiom/test/historic/test_parentHook3to4.py (+0/-11)
Axiom/axiom/test/historic/test_processor1to2.py (+0/-40)
Axiom/axiom/test/historic/test_scheduler1to2.py (+0/-21)
Axiom/axiom/test/historic/test_subStoreStartupService1to2.py (+0/-22)
Axiom/axiom/test/historic/test_subscheduler1to2.py (+0/-28)
Axiom/axiom/test/historic/test_textlist.py (+0/-11)
Axiom/axiom/test/itemtest.py (+0/-8)
Axiom/axiom/test/itemtestmain.py (+0/-15)
Axiom/axiom/test/morenewapp.py (+0/-104)
Axiom/axiom/test/newapp.py (+0/-59)
Axiom/axiom/test/newcirc.py (+0/-32)
Axiom/axiom/test/newobsolete.py (+0/-23)
Axiom/axiom/test/newpath.py (+0/-14)
Axiom/axiom/test/oldapp.py (+0/-18)
Axiom/axiom/test/oldcirc.py (+0/-14)
Axiom/axiom/test/oldobsolete.py (+0/-11)
Axiom/axiom/test/oldpath.py (+0/-10)
Axiom/axiom/test/onestepapp.py (+0/-104)
Axiom/axiom/test/openthenload.py (+0/-28)
Axiom/axiom/test/path_postcopy.py (+0/-24)
Axiom/axiom/test/reactorimporthelper.py (+0/-17)
Axiom/axiom/test/test_attributes.py (+0/-458)
Axiom/axiom/test/test_axiomatic.py (+0/-410)
Axiom/axiom/test/test_batch.py (+0/-733)
Axiom/axiom/test/test_count.py (+0/-69)
Axiom/axiom/test/test_crossstore.py (+0/-68)
Axiom/axiom/test/test_dependency.py (+0/-598)
Axiom/axiom/test/test_files.py (+0/-89)
Axiom/axiom/test/test_inheritance.py (+0/-28)
Axiom/axiom/test/test_item.py (+0/-586)
Axiom/axiom/test/test_listversions.py (+0/-116)
Axiom/axiom/test/test_mixin.py (+0/-56)
Axiom/axiom/test/test_paginate.py (+0/-171)
Axiom/axiom/test/test_powerup.py (+0/-356)
Axiom/axiom/test/test_pysqlite2.py (+0/-31)
Axiom/axiom/test/test_query.py (+0/-1798)
Axiom/axiom/test/test_queryutil.py (+0/-146)
Axiom/axiom/test/test_reference.py (+0/-217)
Axiom/axiom/test/test_reprs.py (+0/-110)
Axiom/axiom/test/test_scheduler.py (+0/-843)
Axiom/axiom/test/test_sequence.py (+0/-580)
Axiom/axiom/test/test_slotmachine.py (+0/-141)
Axiom/axiom/test/test_substore.py (+0/-201)
Axiom/axiom/test/test_tablecreate.py (+0/-103)
Axiom/axiom/test/test_tags.py (+0/-36)
Axiom/axiom/test/test_unavailable_type.py (+0/-27)
Axiom/axiom/test/test_upgrading.py (+0/-916)
Axiom/axiom/test/test_userbase.py (+0/-659)
Axiom/axiom/test/test_xatop.py (+0/-1013)
Axiom/axiom/test/toonewapp.py (+0/-64)
Axiom/axiom/test/upgrade_fixtures/__init__.py (+0/-2)
Axiom/axiom/test/upgrade_fixtures/override_init_new.py (+0/-24)
Axiom/axiom/test/upgrade_fixtures/override_init_old.py (+0/-10)
Axiom/axiom/test/upgrade_fixtures/reentrant_new.py (+0/-23)
Axiom/axiom/test/upgrade_fixtures/reentrant_old.py (+0/-10)
Axiom/axiom/test/upgrade_fixtures/replace_attribute_new.py (+0/-43)
Axiom/axiom/test/upgrade_fixtures/replace_attribute_old.py (+0/-20)
Axiom/axiom/test/upgrade_fixtures/replace_delete_new.py (+0/-42)
Axiom/axiom/test/upgrade_fixtures/replace_delete_old.py (+0/-20)
Axiom/axiom/test/upgrade_fixtures/two_upgrades_new.py (+0/-32)
Axiom/axiom/test/upgrade_fixtures/two_upgrades_old.py (+0/-10)
Axiom/axiom/test/util.py (+0/-158)
Axiom/axiom/upgrade.py (+0/-258)
Axiom/axiom/userbase.py (+0/-587)
Axiom/benchmark/batch-deletion (+0/-45)
Axiom/benchmark/benchlib.py (+0/-30)
Axiom/benchmark/inmemory-setting (+0/-46)
Axiom/benchmark/integer-setting (+0/-47)
Axiom/benchmark/item-creation (+0/-39)
Axiom/benchmark/item-loading (+0/-39)
Axiom/benchmark/multiquery-creation (+0/-40)
Axiom/benchmark/powerup-loading (+0/-58)
Axiom/benchmark/query-creation (+0/-38)
Axiom/benchmark/store-opening (+0/-41)
Axiom/bin/axiomatic (+0/-3)
Axiom/requirements-testing.txt (+0/-2)
Axiom/setup.py (+0/-61)
Axiom/tox.ini (+0/-28)
Axiom/twisted/plugins/axiom_plugins.py (+0/-60)
Divmod.pth (+1/-2)
To merge this branch: bzr merge lp:~exarkun/divmod.org/remove-axiom-1325288
Reviewer Review Type Date Requested Status
Tristan Seligmann 2014-06-30 Approve on 2014-06-30
Review via email: mp+224944@code.launchpad.net

Description of the change

To post a comment you must log in.
review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== removed directory 'Axiom'
2=== removed file 'Axiom/.coveragerc'
3--- Axiom/.coveragerc 2014-01-22 19:09:17 +0000
4+++ Axiom/.coveragerc 1970-01-01 00:00:00 +0000
5@@ -1,9 +0,0 @@
6-[run]
7-branch = True
8-source =
9- axiom
10-
11-[report]
12-exclude_lines =
13- pragma: no cover
14-show_missing = True
15
16=== removed file 'Axiom/LICENSE'
17--- Axiom/LICENSE 2005-12-10 22:31:51 +0000
18+++ Axiom/LICENSE 1970-01-01 00:00:00 +0000
19@@ -1,20 +0,0 @@
20-Copyright (c) 2005 Divmod Inc.
21-
22-Permission is hereby granted, free of charge, to any person obtaining
23-a copy of this software and associated documentation files (the
24-"Software"), to deal in the Software without restriction, including
25-without limitation the rights to use, copy, modify, merge, publish,
26-distribute, sublicense, and/or sell copies of the Software, and to
27-permit persons to whom the Software is furnished to do so, subject to
28-the following conditions:
29-
30-The above copyright notice and this permission notice shall be
31-included in all copies or substantial portions of the Software.
32-
33-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
34-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
35-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
36-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
37-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
38-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
39-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40\ No newline at end of file
41
42=== removed file 'Axiom/MANIFEST.in'
43--- Axiom/MANIFEST.in 2014-01-15 22:31:13 +0000
44+++ Axiom/MANIFEST.in 1970-01-01 00:00:00 +0000
45@@ -1,5 +0,0 @@
46-include LICENSE
47-include NAME.txt
48-recursive-include axiom/test/historic *.tbz2
49-include axiom/batch.tac
50-graft axiom/examples
51
52=== removed file 'Axiom/NAME.txt'
53--- Axiom/NAME.txt 2005-08-27 23:09:07 +0000
54+++ Axiom/NAME.txt 1970-01-01 00:00:00 +0000
55@@ -1,15 +0,0 @@
56-
57-See: http://mathworld.wolfram.com/Axiom.html
58-
59-An axiom is a statement taken as true without proof or supporting arguments.
60-
61-Divmod Axiom is so named because it is a database, and a database is where you
62-put assertions about the world. In particular a database is where you put
63-values which you do not wish to re-calculate; the data that your computation is
64-based upon. In this way axiom items are similar to axioms, since (for example)
65-euclidean geometry can be derived from the set axioms known as "euclid's
66-postulates", but those axioms need to be stored independently; they cannot be
67-derived from anything.
68-
69-Plus it has an X in it, which sounds neat.
70-
71
72=== removed file 'Axiom/NEWS.txt'
73--- Axiom/NEWS.txt 2014-03-22 19:16:37 +0000
74+++ Axiom/NEWS.txt 1970-01-01 00:00:00 +0000
75@@ -1,313 +0,0 @@
76-0.7.1 (2014-03-22):
77- Major:
78-
79- - Fix some packaging issues that led to some important files being missing
80- from the 0.7.0 release.
81-
82- Minor:
83-
84- - Uses of the deprecated unsignedID and isWinNT Twisted APIs have been
85- removed.
86-
87-
88-0.7.0 (2014-01-11):
89- Major:
90-
91- - Only Python 2.6 and 2.7 are supported now. 2.4 and 2.5 are deprecated.
92- - setup.py now uses setuptools, and stores its dependencies. This
93- means you no longer need to manually install dependencies.
94- - setup.py no longer requires Epsilon for egg_info, making it easier
95- to install Axiom using pip.
96- - Significant improvements to PyPy support. PyPy is now a supported
97- platform, with CI support.
98- - Axiom now uses the stdlib sqlite3 if pysqlite2 is not available.
99- Since all supported versions have this, installing pysqlite2 is
100- now no longer necessary, and is only an (optional) performance
101- improvement on CPython. This is a huge improvement for PyPy, where
102- the stdlib version is reportedly much faster.
103-
104- Minor:
105-
106- - Passing a string to SubStore.createNew now raises an exception
107- instead of silently almost certainly doing the wrong thing.
108- - Setting an integer value that is too negative will now raise an
109- exception.
110- - __conform__ (interface adaptation) now also works for items that
111- are not in a store.
112- - Starting the store service now automatically activates the
113- scheduler service as well.
114- - Batch processing can now be triggered by adding remote work.
115- - Startup performance for stores with many legacy type declarations
116- is improved.
117- - Several benchmarks were added.
118- - Many internal cleanups.
119-
120-0.6.0 (2009-11-25):
121- - Speed up creation, insertion, and various other operations on Item by
122- optimizing Item.getSchema.
123- - Improve error reporting from the batch upgrade system.
124- - Speed up setting attributes on Item instances.
125- - Remove the batch process manhole service.
126- - Improve the reliability of some unit tests.
127- - Fix `axiomatic --reactor ...`.
128- - Remove invalid SQL normalization code which would occasionally corrupt
129- certain obscure but valid SQL statements.
130- - Add an in-memory `IScheduler` powerup for stores and substores.
131-
132-0.5.31 (2008-12-09):
133- - An IStatEvent is now logged when a store is opened.
134- - Different schema versions of the same item type now no longer
135- compare equal, which fixes some breakage in the upgrade system,
136- among other things.
137- - Significantly reduce the runtime cost of opening a store by
138- reducing the amount of work spent to verify schema correctness.
139-
140-0.5.30 (2008-10-15):
141- - Fixed a _SubSchedulerParentHook bug where a transient run failure
142- would cause future event scheduling in the relevant substore to
143- fail/traceback.
144-
145-0.5.29 (2008-10-02):
146- - Added 'requiresFromSite' to axiom.dependency, expressing a
147- requirement on the site store for the successful installation of
148- an item in a user store.
149- - Made errors from duplicate item type definition friendlier.
150-
151-0.5.28 (2008-08-12):
152- - Upgraders can now safely upgrade reference attributes.
153- - The batch process is no longer started unless it's needed.
154- - Removed use of private Decimal APIs that changed in Python 2.5.2.
155-
156- - "axiomatic start" changed to use the public interface to twistd's
157- behaviour instead of relying on internal details.
158- - Store now uses FilePaths to refer to its database or files directory.
159- - Automatic powerup discovery is now a feature of powerups rather
160- than of axiom.dependency.
161- - Stores now record the released versions of code used to open them.
162- - "axiomatic upgrade" added, a command for completely upgrading a store.
163- - Removed no-longer-working APSW support code.
164-
165-0.5.27 (2007-11-27):
166- - Substores and file storage for in-memory stores are now supported.
167-
168-0.5.26 (2007-09-05):
169- - A bug where exceptions were raised when tables were created concurrently is
170- now fixed.
171-
172-0.5.25 (2007-08-01):
173- - Added the beginnings of a query introspection API.
174-
175-0.5.24 (2007-07-06):
176- - Added a 'postCopy' argument to
177- upgrade.registerAttributeCopyingUpgrader, a callable run
178- with the new item after upgrading.
179-
180-0.5.23 (2007-06-06):
181- - Fixed a bug where user store insertion/extraction failed if a
182- SubScheduler was installed but no TimedEvents existed.
183-
184-0.5.22 (2007-05-24):
185- - Fixed docstrings in axiom.dependency.
186- - Scheduler and SubScheduler now declared to implement IScheduler.
187-
188-0.5.21 (2007-04-27):
189- - Multi-version upgraders are now supported: an upgrader function
190- can upgrade items more than a single version at a time.
191- - Multi-item-class queries now supported: Store.query takes a tuple
192- as its first argument, similar to a comma-separated column clause
193- for a SELECT statement in SQL.
194- - Empty textlists are now properly distinguished from a textlist
195- containing a single empty string.
196- - Handling of items scheduled to run with axiom.scheduler being
197- deleted before they run has been fixed.
198-
199-0.5.20 (2007-02-23):
200- - AxiomaticCommand is no longer itself an axiom plugin.
201- - axiom.test.historic.stubloader.StubbedTest now has an
202- 'openLegacyStore' method, for opening the unupgraded store
203- multiple times.
204- - The default argument to Store.getItemByID is now respected in the
205- case where an attempt is made to load an item which was created
206- and deleted within a single transaction.
207-
208-0.5.19 (2007-01-11):
209- - A new method, axiom.store.ItemQuery.paginate, has been added, which splits
210- a query's result-gathering work into multiple "pages" so that we can deal
211- with extremely large result sets.
212- - A dependency management system for Items has been added in
213- axiom.dependency. InstallableMixin has been removed;
214- axiom.dependency.installOn is now used to install Items and connect powerups.
215- Items can declare their dependence on another item by declaring attributes
216- with axiom.dependency.dependsOn. When items are installed, their dependencies
217- will be created and installed as well. Installation is no longer tracked by
218- 'installedOn' attributes but by _DependencyConnector items.
219- - A bug preventing 'axiomatic userbase list' from working on a fresh
220- mantissa database has been fixed.
221-
222-0.5.18 (2006-12-08):
223- - Change ItemQuery.deleteFromStore so that it will call deleteFromStore on an
224- Item subclass if it has overridden that method.
225-
226-0.5.17 (2006-11-20):
227- - Added fullyQualifiedName to IColumn, _StoreIDComparer, and _PlaceholderColumn.
228- - Added support for distinct Item queries and for counting distinct attribute
229- queries.
230- - Exceptions raised by Axiom upgrade methods are logged instead of silently
231- swallowing them.
232-
233-0.5.16 (2006-11-17):
234- - Updated axiomatic to work with Twisted trunk.
235-
236-0.5.15 (2006-10-31):
237-
238- - Raise a more informative exception when accessing Item references pointing
239- to nonexistent items.
240- - Enforce prevention of deletion of items referred to by references set to
241- reference.DISALLOW.
242- - Tables in the FROM clause of SQL generated by queries are now ordered by the
243- order of the Item subclasses in the comparisons used to generate them.
244- - A new IComparison implementation has been added to allow application-level
245- code to explicitly specify the order of types in the join.
246-
247-0.5.14 (2006-10-17):
248- - Added a 'batchInsert' method to Store, allowing insertion of items without
249- loading them into memory.
250- - Change ItemQuery.deleteFromStore to delete items without loading them if
251- possible.
252-
253-0.5.13 (2006-10-05):
254- - Changed userbase.getLoginMethods to return LoginMethods rather than
255- (localpart, domain) tuples.
256-
257-0.5.12 (2006-09-29):
258- - Fixed a scheduler bug that would cause tasks scheduled in a substore to be
259- removed from the scheduler.
260-
261-0.5.11 (2006-09-20):
262- - dependency.dependsOn now takes similar arguments to attributes.reference.
263-
264-0.5.10 (2006-09-12):
265- - The axiomatic commands "insert-user" and "extract-user" now interact with
266- the scheduler properly.
267-
268-0.5.9 (2006-08-30):
269- - A new dependency-management system has been added, in axiom.dependency.
270-
271-0.5.8 (2006-08-17):
272- - The upgrader added in the previous release has been fixed.
273-
274-0.5.7 (2006-08-14):
275- - item.Item has a new method, stored, which will be called the first time an
276- item is added to a store, in the same transaction as it is added.
277- - A new class, item.Placeholder, has been added to assist in self-join
278- queries.
279-
280-0.5.6 (2006-07-18):
281- - userbase.LoginSystem now raises a new exception type when login is attempted
282- using a username with no domain part.
283-
284-0.5.5 (2006-07-08):
285- - SubStoreStartupService was removed; user stores' services are no longer
286- incorrectly started when the Mantissa administrative powerup is installed.
287- - IPowerupIndirector was added, allowing for installation of SubStore items
288- as powerups on other items.
289-
290-0.5.4 (2006-07-05):
291- - Items with attributes.path attributes can now be upgraded.
292- - axiom.scheduler has been improved to make clock-related tests easier to write.
293- - Improved test coverage and various bugfixes.
294-
295-0.5.3 (2006-06-27):
296- - A bug causing the table name cache to grow too large was fixed.
297-
298-0.5.2 (2006-06-26):
299- - Type names are now determined on a per-store basis, rather than cached
300- globally on the Item.
301-
302-0.5.1 (2006-06-16):
303- - axiom.slotmachine._structlike removed in favor of the implementation in
304- Epsilon, epsilon.structlike.record.
305- - The batch process has been adjusted to do more work per iteration.
306-
307-0.5.0 (2006-06-12):
308- Highlights:
309- - Fixed several bugs, including several potential data-corruption issues.
310- All users are recommended to upgrade, but back up your data and test your
311- upgrade first!
312- - There is now a 'money' attribute type which uses fixed-precision math in
313- the database specifically designed for dealing with the types of issues
314- associated with database-persistent financial data.
315- - Some simple relational constraints (the equivalent of ON DELETE CASCADE)
316- have been implemented using the 'whenDeleted' keyword argument.
317- - Indexes which are created in your code will now automatically be added to
318- opened databases without requiring an upgrader or a change to your Item's
319- schemaVersion.
320- - You can now use 'declareLegacyItem' to declare legacy schemas to record the
321- schema of older versions of your software -- this enables upgrading of more
322- than one step per release of your application code.
323- - You can now create multi-column indexes using attributes.compoundIndex.
324- ---
325- - Made Item.typeName and Item.schemaVersion optional in most cases.
326- - Added axiom.batch for reliably operating on large groups of items.
327- - Removed all usages of util.wait from tests
328- - added 'queryutil.contains' utility query method, for testing when a value
329- is between two attributes.
330- - Added 'negate' argument to oneOf, allowing for issuing SQL 'NOT IN' queries.
331- - Improved reliability of the scheduler. Errors are now logged in a
332- structured manner.
333- - Added helper classes for writing axiomatic plug-in commands; see
334- documentation for axiomatic.scripts.axiomatic.AxiomaticCommand and
335- AxiomaticSubCommand.
336- - AttributeQuery now provides .min() and .max() methods which return the
337- obvious thing.
338- - Transactions are managed more conservatively; BEGIN IMMEDIATE
339- TRANSACTION is used at the beginning of each transact() call, to guarantee
340- that concurrent access is safe, if sometimes slightly slower.
341- - SQL generation has been deferred to query time, which means that there is a
342- more complete API for manipulating Query objects.
343- - repr() of various objects has been improved for easier debugging.
344- - Axiom now emits various log events which you can observe if you wish to
345- analyze query statistics in real-time. These events don't go to the text log by
346- default: Mantissa, for example, uses them to display a pie chart of the
347- most expensive queries on a running system.
348-
349-0.4.0 (2005-12-20):
350- - Fixed sum() in the case of a table with no rows.
351- - LoginAccount no longer contains authentication information, but may be
352- referred to by one or more LoginMethods, which do.
353- - Added an attribute type for floats: ieee754_double.
354- - Enhanced functionality in axiom.sequence.List.
355- - Added support for SQL DISTINCT queries.
356- - On the command line, axiomatic will attempt to automatically discover
357- the correct database to use, if one is not specified.
358- - PID and logfiles are now kept in a subdirectory of the database
359- directory.
360- - The "start" axiomatic subcommand now works on Windows.
361- - Two new axiomatic subcommands have been added related to running servers
362- from Axiom database: "stop" and "status".
363- - Two new axiomatic subcommands have been added related to user
364- management: "extract-user" and "insert-user" for removing users from and
365- adding users to an existing credentials database, along with all of
366- their data.
367- - Axiom queries can now be sorted by a tuple of columns.
368-
369-0.3.0 (2005-11-02):
370- - Removed Axiom/axiom/examples/axiom.tac
371- - Added 'axiomatic start'
372- - added 'hyper', a 'super' capable of working with Item mixins
373- - added check to make sure Unicode strings won't be misleadingly persisted as
374- bytes(), like so:
375- >>> str(buffer(u'hello'))
376- 'h\x00\x00\x00e\x00\x00\x00l\x00\x00\x00l\x00\x00\x00o\x00\x00\x00'
377- - formalized and improved query result to be an object with its own interface
378- rather than a generator
379- - correctly call activate() on items after they have been upgraded
380-
381-0.2.0 (2005-10-27):
382- - Removed accidental Mantissa dependency
383- - Automatic upgrade service added
384- - Lots of new docstrings
385- - Query utility module added, with a function for finding overlapping
386- ranges
387- - Added formal interface for the `where' argument to Store.query()
388- - Added 'oneOf' attribute
389
390=== removed file 'Axiom/README.txt'
391--- Axiom/README.txt 2006-06-14 11:54:41 +0000
392+++ Axiom/README.txt 1970-01-01 00:00:00 +0000
393@@ -1,23 +0,0 @@
394-
395-Divmod Axiom
396-============
397-
398-Divmod Axiom is an object database, or alternatively, an object-relational
399-mapper, implemented on top of Python.
400-
401- Note: Axiom currently supports only SQLite and does NOT have any features
402- for dealing with concurrency. We do plan to add some later, and perhaps
403- also support other databases in the future.
404-
405-Its primary goal is to provide an object-oriented layer with what we consider
406-to be the key aspects of OO, i.e. polymorphism and message dispatch, without
407-hindering the power of an RDBMS.
408-
409-Axiom is a live database, not only an SQL generation tool: it includes an
410-implementation of a scheduler service, external file references, automatic
411-upgraders, robust failure handling, and Twisted integration.
412-
413-Axiom is tightly integrated with Twisted, and can store, start, and stop
414-Twisted services directly from the database using the included 'axiomatic'
415-command-line tool.
416-
417
418=== removed directory 'Axiom/axiom'
419=== removed file 'Axiom/axiom/__init__.py'
420--- Axiom/axiom/__init__.py 2014-01-15 10:48:55 +0000
421+++ Axiom/axiom/__init__.py 1970-01-01 00:00:00 +0000
422@@ -1,8 +0,0 @@
423-# -*- test-case-name: axiom.test -*-
424-from axiom._version import __version__
425-from twisted.python import versions
426-
427-def asTwistedVersion(packageName, versionString):
428- return versions.Version(packageName, *map(int, versionString.split(".")))
429-
430-version = asTwistedVersion("axiom", __version__)
431
432=== removed file 'Axiom/axiom/_fincache.py'
433--- Axiom/axiom/_fincache.py 2013-08-02 19:10:21 +0000
434+++ Axiom/axiom/_fincache.py 1970-01-01 00:00:00 +0000
435@@ -1,167 +0,0 @@
436-from weakref import ref
437-from traceback import print_exc
438-
439-from twisted.python import log
440-
441-from axiom import iaxiom
442-
443-class CacheFault(KeyError):
444- """
445- An item has fallen out of cache, but the weakref callback has not yet run.
446- """
447-
448-
449-
450-class CacheInconsistency(RuntimeError):
451- """
452- A key being cached is already present in the cache.
453- """
454-
455-
456-
457-def logErrorNoMatterWhat():
458- try:
459- log.msg("Exception in finalizer cannot be propagated")
460- log.err()
461- except:
462- try:
463- emergLog = file("WEAKREF_EMERGENCY_ERROR.log", 'a')
464- print_exc(file=emergLog)
465- emergLog.flush()
466- emergLog.close()
467- except:
468- # Nothing can be done. We can't get an emergency log file to write
469- # to. Don't bother.
470- return
471-
472-
473-
474-def createCacheRemoveCallback(cacheRef, key, finalizer):
475- """
476- Construct a callable to be used as a weakref callback for cache entries.
477-
478- The callable will invoke the provided finalizer, as well as removing the
479- cache entry if the cache still exists and contains an entry for the given
480- key.
481-
482- @type cacheRef: L{weakref.ref} to L{FinalizingCache}
483- @param cacheRef: A weakref to the cache in which the corresponding cache
484- item was stored.
485-
486- @param key: The key for which this value is cached.
487-
488- @type finalizer: callable taking 0 arguments
489- @param finalizer: A user-provided callable that will be called when the
490- weakref callback runs.
491- """
492- def remove(self):
493- # Weakref callbacks cannot raise exceptions or DOOM ensues
494- try:
495- finalizer()
496- except:
497- logErrorNoMatterWhat()
498- try:
499- self = cacheRef()
500- if self is not None:
501- try:
502- del self.data[key]
503- except KeyError:
504- # FinalizingCache.get may have already removed the cache
505- # item from the dictionary; see the comment in that method
506- # for an explanation of why.
507- pass
508- except:
509- logErrorNoMatterWhat()
510- return remove
511-
512-
513-
514-class FinalizingCache:
515- """
516- A cache that stores values by weakref.
517-
518- A finalizer is invoked when the weakref to a cached value is broken.
519-
520- @type data: L{dict}
521- @ivar data: The cached values.
522- """
523- def __init__(self):
524- self.data = {}
525-
526-
527- def cache(self, key, value):
528- """
529- Add an entry to the cache.
530-
531- A weakref to the value is stored, rather than a direct reference. The
532- value must have a C{__finalizer__} method that returns a callable which
533- will be invoked when the weakref is broken.
534-
535- @param key: The key identifying the cache entry.
536-
537- @param value: The value for the cache entry.
538- """
539- fin = value.__finalizer__()
540- try:
541- # It's okay if there's already a cache entry for this key as long
542- # as the weakref has already been broken. See the comment in
543- # get() for an explanation of why this might happen.
544- if self.data[key]() is not None:
545- raise CacheInconsistency(
546- "Duplicate cache key: %r %r %r" % (
547- key, value, self.data[key]))
548- except KeyError:
549- pass
550- self.data[key] = ref(value, createCacheRemoveCallback(
551- ref(self), key, fin))
552- return value
553-
554-
555- def uncache(self, key, value):
556- """
557- Remove a key from the cache.
558-
559- As a sanity check, if the specified key is present in the cache, it
560- must have the given value.
561-
562- @param key: The key to remove.
563-
564- @param value: The expected value for the key.
565- """
566- try:
567- assert self.get(key) is value
568- del self.data[key]
569- except KeyError:
570- # If the entry has already been removed from the cache, this will
571- # result in KeyError which we ignore. If the entry is still in the
572- # cache, but the weakref has been broken, this will result in
573- # CacheFault (a KeyError subclass) which we also ignore. See the
574- # comment in get() for an explanation of why this might happen.
575- pass
576-
577-
578- def get(self, key):
579- """
580- Get an entry from the cache by key.
581-
582- @raise KeyError: if the given key is not present in the cache.
583-
584- @raise CacheFault: (a L{KeyError} subclass) if the given key is present
585- in the cache, but the value it points to is gone.
586- """
587- o = self.data[key]()
588- if o is None:
589- # On CPython, the weakref callback will always(?) run before any
590- # other code has a chance to observe that the weakref is broken;
591- # and since the callback removes the item from the dict, this
592- # branch of code should never run. However, on PyPy (and possibly
593- # other Python implementations), the weakref callback does not run
594- # immediately, thus we may be able to observe this intermediate
595- # state. Should this occur, we remove the dict item ourselves,
596- # and raise CacheFault (which is a KeyError subclass).
597- del self.data[key]
598- raise CacheFault(
599- "FinalizingCache has %r but its value is no more." % (key,))
600- log.msg(interface=iaxiom.IStatEvent, stat_cache_hits=1, key=key)
601- return o
602-
603
604=== removed file 'Axiom/axiom/_pysqlite2.py'
605--- Axiom/axiom/_pysqlite2.py 2010-04-03 12:38:34 +0000
606+++ Axiom/axiom/_pysqlite2.py 1970-01-01 00:00:00 +0000
607@@ -1,162 +0,0 @@
608-# -*- test-case-name: axiom.test.test_pysqlite2 -*-
609-
610-"""
611-PySQLite2 Connection and Cursor wrappers.
612-
613-These provide a uniform interface on top of PySQLite2 for Axiom, particularly
614-including error handling behavior and exception types.
615-"""
616-
617-import time, sys
618-
619-try:
620- # Prefer the third-party module, as it is easier to update, and so may
621- # be newer or otherwise better.
622- from pysqlite2 import dbapi2
623-except ImportError:
624- # But fall back to the stdlib module if we're on Python 2.6 or newer,
625- # because it should work too. Don't do this for Python 2.5 because
626- # there are critical, data-destroying bugs in that version.
627- if sys.version_info >= (2, 6):
628- import sqlite3 as dbapi2
629- else:
630- raise
631-
632-from twisted.python import log
633-
634-from axiom import errors, iaxiom
635-
636-class Connection(object):
637- def __init__(self, connection, timeout=None):
638- self._connection = connection
639- self._timeout = timeout
640-
641-
642- def fromDatabaseName(cls, dbFilename, timeout=None, isolationLevel=None):
643- return cls(dbapi2.connect(dbFilename, timeout=0,
644- isolation_level=isolationLevel))
645- fromDatabaseName = classmethod(fromDatabaseName)
646-
647-
648- def cursor(self):
649- return Cursor(self, self._timeout)
650-
651-
652- def identifySQLError(self, sql, args, e):
653- """
654- Identify an appropriate SQL error object for the given message for the
655- supported versions of sqlite.
656-
657- @return: an SQLError
658- """
659- message = e.args[0]
660- if message.startswith("table") and message.endswith("already exists"):
661- return errors.TableAlreadyExists(sql, args, e)
662- return errors.SQLError(sql, args, e)
663-
664-
665-
666-class Cursor(object):
667- def __init__(self, connection, timeout):
668- self._connection = connection
669- self._cursor = connection._connection.cursor()
670- self.timeout = timeout
671-
672-
673- def __iter__(self):
674- return iter(self._cursor)
675-
676-
677- def time(self):
678- """
679- Return the current wallclock time as a float representing seconds
680- from an fixed but arbitrary point.
681- """
682- return time.time()
683-
684-
685- def sleep(self, seconds):
686- """
687- Block for the given number of seconds.
688-
689- @type seconds: C{float}
690- """
691- time.sleep(seconds)
692-
693-
694- def execute(self, sql, args=()):
695- try:
696- try:
697- blockedTime = 0.0
698- t = self.time()
699- try:
700- # SQLite3 uses something like exponential backoff when
701- # trying to acquire a database lock. This means that even
702- # for very long timeouts, it may only attempt to acquire
703- # the lock a handful of times. Another process which is
704- # executing frequent, short-lived transactions may acquire
705- # and release the lock many times between any two attempts
706- # by this one to acquire it. If this process gets unlucky
707- # just a few times, this execute may fail to acquire the
708- # lock within the specified timeout.
709-
710- # Since attempting to acquire the lock is a fairly cheap
711- # operation, we take another route. SQLite3 is always told
712- # to use a timeout of 0 - ie, acquire it on the first try
713- # or fail instantly. We will keep doing this, ten times a
714- # second, until the actual timeout expires.
715-
716- # What would be really fantastic is a notification
717- # mechanism for information about the state of the lock
718- # changing. Of course this clearly insane, no one has ever
719- # managed to invent a tool for communicating one bit of
720- # information between multiple processes.
721- while 1:
722- try:
723- return self._cursor.execute(sql, args)
724- except dbapi2.OperationalError, e:
725- if e.args[0] == 'database is locked':
726- now = self.time()
727- if self.timeout is not None:
728- if (now - t) > self.timeout:
729- raise errors.TimeoutError(sql, self.timeout, e)
730- self.sleep(0.1)
731- blockedTime = self.time() - t
732- else:
733- raise
734- finally:
735- txntime = self.time() - t
736- if txntime - blockedTime > 2.0:
737- log.msg('Extremely long execute: %s' % (txntime - blockedTime,))
738- log.msg(sql)
739- # import traceback; traceback.print_stack()
740- log.msg(interface=iaxiom.IStatEvent,
741- stat_cursor_execute_time=txntime,
742- stat_cursor_blocked_time=blockedTime)
743- except dbapi2.OperationalError, e:
744- if e.args[0] == 'database schema has changed':
745- return self._cursor.execute(sql, args)
746- raise
747- except (dbapi2.ProgrammingError,
748- dbapi2.InterfaceError,
749- dbapi2.OperationalError), e:
750- raise self._connection.identifySQLError(sql, args, e)
751-
752-
753- def lastRowID(self):
754- return self._cursor.lastrowid
755-
756-
757- def close(self):
758- self._cursor.close()
759-
760-
761-# Export some names from the underlying module.
762-sqlite_version_info = dbapi2.sqlite_version_info
763-OperationalError = dbapi2.OperationalError
764-
765-__all__ = [
766- 'OperationalError',
767- 'Connection',
768- 'sqlite_version_info',
769- ]
770
771=== removed file 'Axiom/axiom/_schema.py'
772--- Axiom/axiom/_schema.py 2006-03-30 01:22:40 +0000
773+++ Axiom/axiom/_schema.py 1970-01-01 00:00:00 +0000
774@@ -1,71 +0,0 @@
775-
776-# DELETE_OBJECT = 'DELETE FROM axiom_objects WHERE oid = ?'
777-CREATE_OBJECT = 'INSERT INTO *DATABASE*.axiom_objects (type_id) VALUES (?)'
778-CREATE_TYPE = 'INSERT INTO *DATABASE*.axiom_types (typename, module, version) VALUES (?, ?, ?)'
779-
780-
781-BASE_SCHEMA = ["""
782-CREATE TABLE *DATABASE*.axiom_objects (
783- type_id INTEGER NOT NULL
784- CONSTRAINT fk_type_id REFERENCES axiom_types(oid)
785-)
786-""",
787-
788-"""
789-CREATE INDEX *DATABASE*.axiom_objects_type_idx
790- ON axiom_objects(type_id);
791-""",
792-
793-"""
794-CREATE TABLE *DATABASE*.axiom_types (
795- typename VARCHAR,
796- module VARCHAR,
797- version INTEGER
798-)
799-""",
800-
801-"""
802-CREATE TABLE *DATABASE*.axiom_attributes (
803- type_id INTEGER,
804- row_offset INTEGER,
805- indexed BOOLEAN,
806- sqltype VARCHAR,
807- allow_none BOOLEAN,
808- pythontype VARCHAR,
809- attribute VARCHAR,
810- docstring TEXT
811-)
812-"""]
813-
814-TYPEOF_QUERY = """
815-SELECT *DATABASE*.axiom_types.typename, *DATABASE*.axiom_types.module, *DATABASE*.axiom_types.version
816- FROM *DATABASE*.axiom_types, *DATABASE*.axiom_objects
817- WHERE *DATABASE*.axiom_objects.oid = ?
818- AND *DATABASE*.axiom_types.oid = *DATABASE*.axiom_objects.type_id
819-"""
820-
821-HAS_SCHEMA_FEATURE = ("SELECT COUNT(oid) FROM *DATABASE*.sqlite_master "
822- "WHERE type = ? AND name = ?")
823-
824-IDENTIFYING_SCHEMA = ('SELECT indexed, sqltype, allow_none, attribute '
825- 'FROM *DATABASE*.axiom_attributes WHERE type_id = ? '
826- 'ORDER BY row_offset')
827-
828-ADD_SCHEMA_ATTRIBUTE = (
829- 'INSERT INTO *DATABASE*.axiom_attributes '
830- '(type_id, row_offset, indexed, sqltype, allow_none, attribute, docstring, pythontype) '
831- 'VALUES (?, ?, ?, ?, ?, ?, ?, ?)')
832-
833-ALL_TYPES = 'SELECT oid, module, typename, version FROM *DATABASE*.axiom_types'
834-
835-GET_GREATER_VERSIONS_OF_TYPE = ('SELECT version FROM *DATABASE*.axiom_types '
836- 'WHERE typename = ? AND version > ?')
837-
838-SCHEMA_FOR_TYPE = ('SELECT indexed, pythontype, attribute, docstring '
839- 'FROM *DATABASE*.axiom_attributes '
840- 'WHERE type_id = ?')
841-
842-CHANGE_TYPE = 'UPDATE *DATABASE*.axiom_objects SET type_id = ? WHERE oid = ?'
843-
844-APP_VACUUM = 'DELETE FROM *DATABASE*.axiom_objects WHERE (type_id == -1) AND (oid != (SELECT MAX(oid) from *DATABASE*.axiom_objects))'
845-
846
847=== removed file 'Axiom/axiom/_version.py'
848--- Axiom/axiom/_version.py 2014-03-22 19:16:37 +0000
849+++ Axiom/axiom/_version.py 1970-01-01 00:00:00 +0000
850@@ -1,1 +0,0 @@
851-__version__ = "0.7.1"
852
853=== removed file 'Axiom/axiom/attributes.py'
854--- Axiom/axiom/attributes.py 2010-07-14 21:44:42 +0000
855+++ Axiom/axiom/attributes.py 1970-01-01 00:00:00 +0000
856@@ -1,1326 +0,0 @@
857-# -*- test-case-name: axiom.test.test_attributes,axiom.test.test_reference -*-
858-
859-import os
860-
861-from decimal import Decimal
862-
863-from epsilon import hotfix
864-hotfix.require('twisted', 'filepath_copyTo')
865-
866-from zope.interface import implements
867-
868-from twisted.python import filepath
869-from twisted.python.components import registerAdapter
870-
871-from epsilon.extime import Time
872-
873-from axiom.slotmachine import Attribute as inmemory
874-
875-from axiom.errors import NoCrossStoreReferences, BrokenReference
876-
877-from axiom.iaxiom import IComparison, IOrdering, IColumn, IQuery
878-
879-_NEEDS_FETCH = object() # token indicating that a value was not found
880-
881-__metaclass__ = type
882-
883-
884-class _ComparisonOperatorMuxer:
885- """
886- Collapse comparison operations into calls to a single method with varying
887- arguments.
888- """
889- def compare(self, other, op):
890- """
891- Override this in a subclass.
892- """
893- raise NotImplementedError()
894-
895-
896- def __eq__(self, other):
897- return self.compare(other, '=')
898-
899-
900- def __ne__(self, other):
901- return self.compare(other, '!=')
902-
903-
904- def __gt__(self, other):
905- return self.compare(other, '>')
906-
907-
908- def __lt__(self, other):
909- return self.compare(other, '<')
910-
911-
912- def __ge__(self, other):
913- return self.compare(other, '>=')
914-
915-
916- def __le__(self, other):
917- return self.compare(other, '<=')
918-
919-
920-def compare(left, right, op):
921- # interim: maybe we want objects later? right now strings should be fine
922- if IColumn.providedBy(right):
923- return TwoAttributeComparison(left, op, right)
924- elif right is None:
925- if op == '=':
926- negate = False
927- elif op == '!=':
928- negate = True
929- else:
930- raise TypeError(
931- "None/NULL does not work with %s comparison" % (op,))
932- return NullComparison(left, negate)
933- else:
934- # convert to constant usable in the database
935- return AttributeValueComparison(left, op, right)
936-
937-
938-
939-class _MatchingOperationMuxer:
940- """
941- Collapse string matching operations into calls to a single method with
942- varying arguments.
943- """
944- def _like(self, negate, firstOther, *others):
945- others = (firstOther,) + others
946- likeParts = []
947-
948- allValues = True
949- for other in others:
950- if IColumn.providedBy(other):
951- likeParts.append(LikeColumn(other))
952- allValues = False
953- elif other is None:
954- # LIKE NULL is a silly condition, but it's allowed.
955- likeParts.append(LikeNull())
956- allValues = False
957- else:
958- likeParts.append(LikeValue(other))
959-
960- if allValues:
961- likeParts = [LikeValue(''.join(others))]
962-
963- return LikeComparison(self, negate, likeParts)
964-
965-
966- def like(self, *others):
967- return self._like(False, *others)
968-
969-
970- def notLike(self, *others):
971- return self._like(True, *others)
972-
973-
974- def startswith(self, other):
975- return self._like(False, other, '%')
976-
977-
978- def endswith(self, other):
979- return self._like(False, '%', other)
980-
981-
982-
983-_ASC = 'ASC'
984-_DESC = 'DESC'
985-
986-class _OrderingMixin:
987- """
988- Provide the C{ascending} and C{descending} attributes to specify sort
989- direction.
990- """
991- def _asc(self):
992- return SimpleOrdering(self, _ASC)
993-
994- def _desc(self):
995- return SimpleOrdering(self, _DESC)
996-
997- desc = descending = property(_desc)
998- asc = ascending = property(_asc)
999-
1000-
1001-
1002-class _ContainableMixin:
1003- def oneOf(self, seq, negate=False):
1004- """
1005- Choose items whose attributes are in a fixed set.
1006-
1007- X.oneOf([1, 2, 3])
1008-
1009- Implemented with the SQL 'in' statement.
1010- """
1011- return SequenceComparison(self, seq, negate)
1012-
1013-
1014- def notOneOf(self, seq):
1015- return self.oneOf(seq, negate=True)
1016-
1017-
1018-
1019-class Comparable(_ContainableMixin, _ComparisonOperatorMuxer,
1020- _MatchingOperationMuxer, _OrderingMixin):
1021- """
1022- Helper for a thing that can be compared like an SQLAttribute (or is in fact
1023- an SQLAttribute). Requires that 'self' have 'type' (Item-subclass) and
1024- 'columnName' (str) attributes, as well as an 'infilter' method in the
1025- spirit of SQLAttribute, documented below.
1026- """
1027-
1028- # XXX TODO: improve error reporting
1029-
1030- def compare(self, other, sqlop):
1031- return compare(self, other, sqlop)
1032-
1033-
1034-
1035-class SimpleOrdering:
1036- """
1037- Currently this class is mostly internal. More documentation will follow as
1038- its interface is finalized.
1039- """
1040- implements(IOrdering)
1041-
1042- # maybe this will be a useful public API, for the query something
1043- # something.
1044-
1045- isDescending = property(lambda self: self.direction == _DESC)
1046- isAscending = property(lambda self: self.direction == _ASC)
1047-
1048- def __init__(self, attribute, direction=''):
1049- self.attribute = attribute
1050- self.direction = direction
1051-
1052-
1053- def orderColumns(self):
1054- return [(self.attribute, self.direction)]
1055-
1056-
1057- def __repr__(self):
1058- return repr(self.attribute) + self.direction
1059-
1060-
1061- def __add__(self, other):
1062- if isinstance(other, SimpleOrdering):
1063- return CompoundOrdering([self, other])
1064- elif isinstance(other, (list, tuple)):
1065- return CompoundOrdering([self] + list(other))
1066- else:
1067- return NotImplemented
1068-
1069-
1070- def __radd__(self, other):
1071- if isinstance(other, SimpleOrdering):
1072- return CompoundOrdering([other, self])
1073- elif isinstance(other, (list, tuple)):
1074- return CompoundOrdering(list(other) + [self])
1075- else:
1076- return NotImplemented
1077-
1078-
1079-class CompoundOrdering:
1080- """
1081- List of SimpleOrdering instances.
1082- """
1083- implements(IOrdering)
1084-
1085- def __init__(self, seq):
1086- self.simpleOrderings = list(seq)
1087-
1088-
1089- def __repr__(self):
1090- return self.__class__.__name__ + '(' + repr(self.simpleOrderings) + ')'
1091-
1092-
1093- def __add__(self, other):
1094- """
1095- Just thinking about what might be useful from the perspective of
1096- introspecting on query objects... don't document this *too* thoroughly
1097- yet.
1098- """
1099- if isinstance(other, CompoundOrdering):
1100- return CompoundOrdering(self.simpleOrderings + other.simpleOrderings)
1101- elif isinstance(other, SimpleOrdering):
1102- return CompoundOrdering(self.simpleOrderings + [other])
1103- elif isinstance(other, (list, tuple)):
1104- return CompoundOrdering(self.simpleOrderings + list(other))
1105- else:
1106- return NotImplemented
1107-
1108-
1109- def __radd__(self, other):
1110- """
1111- Just thinking about what might be useful from the perspective of
1112- introspecting on query objects... don't document this *too* thoroughly
1113- yet.
1114- """
1115- if isinstance(other, CompoundOrdering):
1116- return CompoundOrdering(other.simpleOrderings + self.simpleOrderings)
1117- elif isinstance(other, SimpleOrdering):
1118- return CompoundOrdering([other] + self.simpleOrderings)
1119- elif isinstance(other, (list, tuple)):
1120- return CompoundOrdering(list(other) + self.simpleOrderings)
1121- else:
1122- return NotImplemented
1123-
1124-
1125- def orderColumns(self):
1126- x = []
1127- for o in self.simpleOrderings:
1128- x.extend(o.orderColumns())
1129- return x
1130-
1131-
1132-
1133-class UnspecifiedOrdering:
1134- implements(IOrdering)
1135-
1136- def __init__(self, null):
1137- pass
1138-
1139- def __add__(self, other):
1140- return IOrdering(other, NotImplemented)
1141-
1142- __radd__ = __add__
1143-
1144-
1145- def orderColumns(self):
1146- return []
1147-
1148-
1149-registerAdapter(CompoundOrdering, list, IOrdering)
1150-registerAdapter(CompoundOrdering, tuple, IOrdering)
1151-registerAdapter(UnspecifiedOrdering, type(None), IOrdering)
1152-registerAdapter(SimpleOrdering, Comparable, IOrdering)
1153-
1154-def compoundIndex(*columns):
1155- for column in columns:
1156- column.compoundIndexes.append(columns)
1157-
1158-class SQLAttribute(inmemory, Comparable):
1159- """
1160- Abstract superclass of all attributes.
1161-
1162- _Not_ an attribute itself.
1163-
1164- @ivar indexed: A C{bool} indicating whether this attribute will be indexed
1165- in the database.
1166-
1167- @ivar default: The value used for this attribute, if no value is specified.
1168- """
1169- implements(IColumn)
1170-
1171- sqltype = None
1172-
1173- def __init__(self, doc='', indexed=False, default=None, allowNone=True, defaultFactory=None):
1174- inmemory.__init__(self, doc)
1175- self.indexed = indexed
1176- self.compoundIndexes = []
1177- self.allowNone = allowNone
1178- self.default = default
1179- self.defaultFactory = defaultFactory
1180- if default is not None and defaultFactory is not None:
1181- raise ValueError("You may specify only one of default "
1182- "or defaultFactory, not both")
1183-
1184- def computeDefault(self):
1185- if self.defaultFactory is not None:
1186- return self.defaultFactory()
1187- return self.default
1188-
1189-
1190- def reprFor(self, oself):
1191- return repr(self.__get__(oself))
1192-
1193-
1194- def getShortColumnName(self, store):
1195- return store.getShortColumnName(self)
1196-
1197-
1198- def getColumnName(self, store):
1199- return store.getColumnName(self)
1200-
1201-
1202- def prepareInsert(self, oself, store):
1203- """
1204- Override this method to do something to an item to prepare for its
1205- insertion into a database.
1206- """
1207-
1208- def coercer(self, value):
1209- """
1210- must return a value equivalent to the data being passed in for it to be
1211- considered valid for a value of this attribute. for example, 'int' or
1212- 'str'.
1213- """
1214-
1215- raise NotImplementedError()
1216-
1217-
1218- def infilter(self, pyval, oself, store):
1219- """
1220- used to convert a Python value to something that lives in the database;
1221- so called because it is called when objects go in to the database. It
1222- takes a Python value and returns an SQL value.
1223- """
1224- raise NotImplementedError()
1225-
1226- def outfilter(self, dbval, oself):
1227- """
1228- used to convert an SQL value to something that lives in memory; so
1229- called because it is called when objects come out of the database. It
1230- takes an SQL value and returns a Python value.
1231- """
1232- return dbval
1233-
1234- # requiredSlots must be called before it's run
1235-
1236- prefix = "_axiom_memory_"
1237- dbprefix = "_axiom_store_"
1238-
1239- def requiredSlots(self, modname, classname, attrname):
1240- self.modname = modname
1241- self.classname = classname
1242- self.attrname = attrname
1243- self.underlying = self.prefix + attrname
1244- self.dbunderlying = self.dbprefix + attrname
1245- yield self.underlying
1246- yield self.dbunderlying
1247-
1248-
1249- def fullyQualifiedName(self):
1250- return '.'.join([self.modname,
1251- self.classname,
1252- self.attrname])
1253-
1254- def __repr__(self):
1255- return '<%s %s>' % ( self.__class__.__name__, self.fullyQualifiedName())
1256-
1257- def type():
1258- def get(self):
1259- if self._type is None:
1260- from twisted.python.reflect import namedAny
1261- self._type = namedAny(self.modname+'.'+self.classname)
1262- return self._type
1263- return get,
1264- _type = None
1265- type = property(*type())
1266-
1267- def __get__(self, oself, cls=None):
1268- if cls is not None and oself is None:
1269- if self._type is not None:
1270- assert self._type == cls
1271- else:
1272- self._type = cls
1273- return self
1274-
1275- pyval = getattr(oself, self.underlying, _NEEDS_FETCH)
1276- if pyval is _NEEDS_FETCH:
1277- dbval = getattr(oself, self.dbunderlying, _NEEDS_FETCH)
1278- if dbval is _NEEDS_FETCH:
1279- # here is what *is* happening here:
1280-
1281- # SQL attributes are always loaded when an Item is created by
1282- # loading from the database, either via a query, a getItemByID
1283- # or an attribute access. If an attribute is left un-set, that
1284- # means that the item it is on was just created, and we fill in
1285- # the default value.
1286-
1287- # Here is what *should be*, but *is not* happening here:
1288-
1289- # this condition ought to indicate that a value may exist in
1290- # the database, but it is not currently available in memory.
1291- # It would then query the database immediately, loading all
1292- # SQL-resident attributes related to this item to minimize the
1293- # number of queries run (e.g. rather than one per attribute)
1294-
1295- # this is a more desireable condition because it means that you
1296- # can create items "for free", so doing, for example,
1297- # self.bar.storeID is a much cheaper operation than doing
1298- # self.bar.baz. This particular idiom is frequently used in
1299- # queries and so speeding it up to avoid having to do a
1300- # database hit unless you actually need an item's attributes
1301- # would be worthwhile.
1302-
1303- return self.default
1304- pyval = self.outfilter(dbval, oself)
1305- # An upgrader may have changed the value of this attribute. If so,
1306- # return the new value, not the old one.
1307- if dbval != getattr(oself, self.dbunderlying):
1308- return self.__get__(oself, cls)
1309- # cache python value
1310- setattr(oself, self.underlying, pyval)
1311- return pyval
1312-
1313- def loaded(self, oself, dbval):
1314- """
1315- This method is invoked when the item is loaded from the database, and
1316- when a transaction is reverted which restores this attribute's value.
1317-
1318- @param oself: an instance of an item which has this attribute.
1319-
1320- @param dbval: the underlying database value which was retrieved.
1321- """
1322- setattr(oself, self.dbunderlying, dbval)
1323- delattr(oself, self.underlying) # member_descriptors don't raise
1324- # attribute errors; what gives? good
1325- # for us, I guess.
1326-
1327- def _convertPyval(self, oself, pyval):
1328- """
1329- Convert a Python value to a value suitable for inserting into the
1330- database.
1331-
1332- @param oself: The object on which this descriptor is an attribute.
1333- @param pyval: The value to be converted.
1334- @return: A value legal for this column in the database.
1335- """
1336- # convert to dbval later, I guess?
1337- if pyval is None and not self.allowNone:
1338- raise TypeError("attribute [%s.%s = %s()] must not be None" % (
1339- self.classname, self.attrname, self.__class__.__name__))
1340-
1341- return self.infilter(pyval, oself, oself.store)
1342-
1343- def __set__(self, oself, pyval):
1344- st = oself.store
1345-
1346- dbval = self._convertPyval(oself, pyval)
1347- oself.__dirty__[self.attrname] = self, dbval
1348- oself.touch()
1349- setattr(oself, self.underlying, pyval)
1350- setattr(oself, self.dbunderlying, dbval)
1351- if st is not None and st.autocommit:
1352- st._rejectChanges += 1
1353- try:
1354- oself.checkpoint()
1355- finally:
1356- st._rejectChanges -= 1
1357-
1358-
1359-class TwoAttributeComparison:
1360- implements(IComparison)
1361- def __init__(self, leftAttribute, operationString, rightAttribute):
1362- self.leftAttribute = leftAttribute
1363- self.operationString = operationString
1364- self.rightAttribute = rightAttribute
1365-
1366- def getQuery(self, store):
1367- sql = ('(%s %s %s)' % (self.leftAttribute.getColumnName(store),
1368- self.operationString,
1369- self.rightAttribute.getColumnName(store)) )
1370- return sql
1371-
1372- def getInvolvedTables(self):
1373- tables = [self.leftAttribute.type]
1374- if self.leftAttribute.type is not self.rightAttribute.type:
1375- tables.append(self.rightAttribute.type)
1376- return tables
1377-
1378-
1379- def getArgs(self, store):
1380- return []
1381-
1382-
1383- def __repr__(self):
1384- return ' '.join((self.leftAttribute.fullyQualifiedName(),
1385- self.operationString,
1386- self.rightAttribute.fullyQualifiedName()))
1387-
1388-
1389-class AttributeValueComparison:
1390- implements(IComparison)
1391- def __init__(self, attribute, operationString, value):
1392- self.attribute = attribute
1393- self.operationString = operationString
1394- self.value = value
1395-
1396- def getQuery(self, store):
1397- return ('(%s %s ?)' % (self.attribute.getColumnName(store),
1398- self.operationString))
1399-
1400- def getArgs(self, store):
1401- return [self.attribute.infilter(self.value, None, store)]
1402-
1403- def getInvolvedTables(self):
1404- return [self.attribute.type]
1405-
1406- def __repr__(self):
1407- return ' '.join((self.attribute.fullyQualifiedName(),
1408- self.operationString,
1409- repr(self.value)))
1410-
1411-class NullComparison:
1412- implements(IComparison)
1413- def __init__(self, attribute, negate=False):
1414- self.attribute = attribute
1415- self.negate = negate
1416-
1417- def getQuery(self, store):
1418- if self.negate:
1419- op = 'NOT'
1420- else:
1421- op = 'IS'
1422- return ('(%s %s NULL)' % (self.attribute.getColumnName(store),
1423- op))
1424-
1425- def getArgs(self, store):
1426- return []
1427-
1428- def getInvolvedTables(self):
1429- return [self.attribute.type]
1430-
1431-class LikeFragment:
1432- def getLikeArgs(self):
1433- return []
1434-
1435- def getLikeQuery(self, st):
1436- raise NotImplementedError()
1437-
1438- def getLikeTables(self):
1439- return []
1440-
1441-class LikeNull(LikeFragment):
1442- def getLikeQuery(self, st):
1443- return "NULL"
1444-
1445-class LikeValue(LikeFragment):
1446- def __init__(self, value):
1447- self.value = value
1448-
1449- def getLikeQuery(self, st):
1450- return "?"
1451-
1452- def getLikeArgs(self):
1453- return [self.value]
1454-
1455-class LikeColumn(LikeFragment):
1456- def __init__(self, attribute):
1457- self.attribute = attribute
1458-
1459- def getLikeQuery(self, st):
1460- return self.attribute.getColumnName(st)
1461-
1462- def getLikeTables(self):
1463- return [self.attribute.type]
1464-
1465-
1466-class LikeComparison:
1467- implements(IComparison)
1468- # Not AggregateComparison or AttributeValueComparison because there is a
1469- # different, optimized syntax for 'or'. WTF is wrong with you, SQL??
1470-
1471- def __init__(self, attribute, negate, likeParts):
1472- self.negate = negate
1473- self.attribute = attribute
1474- self.likeParts = likeParts
1475-
1476- def getInvolvedTables(self):
1477- tables = [self.attribute.type]
1478- for lf in self.likeParts:
1479- tables.extend([
1480- t for t in lf.getLikeTables() if t not in tables])
1481- return tables
1482-
1483- def getQuery(self, store):
1484- if self.negate:
1485- op = 'NOT LIKE'
1486- else:
1487- op = 'LIKE'
1488- sqlParts = [lf.getLikeQuery(store) for lf in self.likeParts]
1489- sql = '(%s %s (%s))' % (self.attribute.getColumnName(store),
1490- op, ' || '.join(sqlParts))
1491- return sql
1492-
1493- def getArgs(self, store):
1494- l = []
1495- for lf in self.likeParts:
1496- for pyval in lf.getLikeArgs():
1497- l.append(
1498- self.attribute.infilter(
1499- pyval, None, store))
1500- return l
1501-
1502-
1503-
1504-class AggregateComparison:
1505- """
1506- Abstract base class for compound comparisons that aggregate other
1507- comparisons - currently only used for AND and OR comparisons.
1508- """
1509-
1510- implements(IComparison)
1511- operator = None
1512-
1513- def __init__(self, *conditions):
1514- self.conditions = conditions
1515- if self.operator is None:
1516- raise NotImplementedError, ('%s cannot be used; you want AND or OR.'
1517- % self.__class__.__name__)
1518- if not conditions:
1519- raise ValueError, ('%s condition requires at least one argument'
1520- % self.operator)
1521-
1522- def getQuery(self, store):
1523- oper = ' %s ' % self.operator
1524- return '(%s)' % oper.join(
1525- [condition.getQuery(store) for condition in self.conditions])
1526-
1527- def getArgs(self, store):
1528- args = []
1529- for cond in self.conditions:
1530- args += cond.getArgs(store)
1531- return args
1532-
1533- def getInvolvedTables(self):
1534- tables = []
1535- for cond in self.conditions:
1536- tables.extend([
1537- t for t in cond.getInvolvedTables() if t not in tables])
1538- return tables
1539-
1540- def __repr__(self):
1541- return '%s(%s)' % (self.__class__.__name__,
1542- ', '.join(map(repr, self.conditions)))
1543-
1544-
1545-
1546-class SequenceComparison:
1547- implements(IComparison)
1548-
1549- def __init__(self, attribute, container, negate):
1550- self.attribute = attribute
1551- self.container = container
1552- self.negate = negate
1553-
1554- if IColumn.providedBy(container):
1555- self.containerClause = self._columnContainer
1556- self.getArgs = self._columnArgs
1557- elif IQuery.providedBy(container):
1558- self.containerClause = self._queryContainer
1559- self.getArgs = self._queryArgs
1560- else:
1561- self.containerClause = self._sequenceContainer
1562- self.getArgs = self._sequenceArgs
1563-
1564-
1565- def _columnContainer(self, store):
1566- """
1567- Return the fully qualified name of the column being examined so as
1568- to push all of the containment testing into the database.
1569- """
1570- return self.container.getColumnName(store)
1571-
1572-
1573- def _columnArgs(self, store):
1574- """
1575- The IColumn form of this has no arguments, just a column name
1576- specified in the SQL, specified by _columnContainer.
1577- """
1578- return []
1579-
1580-
1581- _subselectSQL = None
1582- _subselectArgs = None
1583- def _queryContainer(self, store):
1584- """
1585- Generate and cache the subselect SQL and its arguments. Return the
1586- subselect SQL.
1587- """
1588- if self._subselectSQL is None:
1589- sql, args = self.container._sqlAndArgs('SELECT',
1590- self.container._queryTarget)
1591- self._subselectSQL, self._subselectArgs = sql, args
1592- return self._subselectSQL
1593-
1594-
1595- def _queryArgs(self, store):
1596- """
1597- Make sure subselect arguments have been generated and then return
1598- them.
1599- """
1600- self._queryContainer(store)
1601- return self._subselectArgs
1602-
1603-
1604- _sequence = None
1605- def _sequenceContainer(self, store):
1606- """
1607- Smash whatever we got into a list and save the result in case we are
1608- executed multiple times. This keeps us from tripping up over
1609- generators and the like.
1610- """
1611- if self._sequence is None:
1612- self._sequence = list(self.container)
1613- self._clause = ', '.join(['?'] * len(self._sequence))
1614- return self._clause
1615-
1616-
1617- def _sequenceArgs(self, store):
1618- """
1619- Filter each element of the data using the attribute type being
1620- tested for containment and hand back the resulting list.
1621- """
1622- self._sequenceContainer(store) # Force _sequence to be valid
1623- return [self.attribute.infilter(pyval, None, store) for pyval in self._sequence]
1624-
1625-
1626- # IComparison - getArgs is assigned as an instance attribute
1627- def getQuery(self, store):
1628- return '%s %sIN (%s)' % (
1629- self.attribute.getColumnName(store),
1630- self.negate and 'NOT ' or '',
1631- self.containerClause(store))
1632-
1633-
1634- def getInvolvedTables(self):
1635- return [self.attribute.type]
1636-
1637-
1638-
1639-class AND(AggregateComparison):
1640- """
1641- Combine 2 L{IComparison}s such that this is true when both are true.
1642- """
1643- operator = 'AND'
1644-
1645-class OR(AggregateComparison):
1646- """
1647- Combine 2 L{IComparison}s such that this is true when either is true.
1648- """
1649- operator = 'OR'
1650-
1651-
1652-class TableOrderComparisonWrapper(object):
1653- """
1654- Wrap any other L{IComparison} and override its L{getInvolvedTables} method
1655- to specify the same tables but in an explicitly specified order.
1656- """
1657- implements(IComparison)
1658-
1659- tables = None
1660- comparison = None
1661-
1662- def __init__(self, tables, comparison):
1663- assert set(tables) == set(comparison.getInvolvedTables())
1664-
1665- self.tables = tables
1666- self.comparison = comparison
1667-
1668-
1669- def getInvolvedTables(self):
1670- return self.tables
1671-
1672-
1673- def getQuery(self, store):
1674- return self.comparison.getQuery(store)
1675-
1676-
1677- def getArgs(self, store):
1678- return self.comparison.getArgs(store)
1679-
1680-
1681-
class boolean(SQLAttribute):
    """
    Attribute storing C{True}/C{False} as the SQLite integers 1/0.
    """
    sqltype = 'BOOLEAN'

    def infilter(self, pyval, oself, store):
        # Identity checks on the singletons are deliberate: the integers 1
        # and 0 themselves are rejected, only real booleans (or None) pass.
        if pyval is None:
            return None
        if pyval is True:
            return 1
        if pyval is False:
            return 0
        raise TypeError("attribute [%s.%s = boolean()] must be True or False; not %r" %
                        (self.classname, self.attrname, type(pyval).__name__,))

    def outfilter(self, dbval, oself):
        # Map the stored 1/0 back to the boolean singletons; NULL is only
        # acceptable when the attribute allows None.
        if dbval == 1:
            return True
        if dbval == 0:
            return False
        if self.allowNone and dbval is None:
            return None
        raise ValueError(
            "attribute [%s.%s = boolean()] "
            "must have a database value of 1 or 0; not %r" %
            (self.classname, self.attrname, dbval))
1708-
1709-
1710-
# Bounds of a signed 64-bit integer, the native range of an SQLite INTEGER
# column.
LARGEST_POSITIVE = (2 ** 63)-1
LARGEST_NEGATIVE = -(2 ** 63)
1713-
class ConstraintError(TypeError):
    """
    A value violated the type constraint imposed by an attribute.

    @ivar attributeObj: the attribute whose constraint was violated.
    @ivar requiredTypes: a description of what would have been acceptable.
    @ivar providedValue: the offending value.
    """

    def __init__(self, attributeObj, requiredTypes, providedValue):
        self.attributeObj = attributeObj
        self.requiredTypes = requiredTypes
        self.providedValue = providedValue
        # Build the message once, then defer to TypeError for storage.
        message = ("attribute [%s.%s = %s()] must be "
                   "(%s); not %r" %
                   (attributeObj.classname,
                    attributeObj.attrname,
                    attributeObj.__class__.__name__,
                    requiredTypes,
                    type(providedValue).__name__))
        TypeError.__init__(self, message)
1734-
1735-
def requireType(attributeObj, value, typerepr, *types):
    """
    Raise L{ConstraintError} unless C{value} is an instance of one of
    C{types}; C{typerepr} is the human-readable description used in the
    error.
    """
    if isinstance(value, types):
        return
    raise ConstraintError(attributeObj, typerepr, value)
1741-
1742-
1743-
# Human-readable description of the 64-bit integer constraint, used in the
# error messages raised by the integer attribute type.
inttyperepr = "integer between %r and %r" % (LARGEST_NEGATIVE, LARGEST_POSITIVE)
1745-
class integer(SQLAttribute):
    """
    Attribute storing a signed 64-bit integer in an SQLite INTEGER column.
    """
    sqltype = 'INTEGER'

    def infilter(self, pyval, oself, store):
        if pyval is None:
            return None
        requireType(self, pyval, inttyperepr, int, long)
        # SQLite integers are 64-bit; reject anything that will not fit.
        if pyval > LARGEST_POSITIVE or pyval < LARGEST_NEGATIVE:
            raise ConstraintError(self, inttyperepr, pyval)
        return pyval
1756-
1757-
1758-
class bytes(SQLAttribute):
    """
    Attribute type for raw byte strings, stored in a BLOB column and
    represented in memory as a Python C{str}.
    """

    sqltype = 'BLOB'

    def infilter(self, pyval, oself, store):
        if pyval is None:
            return None
        # Text must be explicitly encoded by the caller; refusing unicode
        # here avoids accidental implicit encodings.
        if isinstance(pyval, unicode):
            raise ConstraintError(self, "str or other byte buffer", pyval)
        # Wrapping in a buffer makes sqlite store the value as a BLOB.
        return buffer(pyval)

    def outfilter(self, dbval, oself):
        if dbval is None:
            return None
        # Convert the buffer handed back by the database into a plain str.
        return str(dbval)
1778-
class InvalidPathError(ValueError):
    """
    An attempt was made to use a path which cannot be stored in the database
    (for example, a supposedly store-relative path which lies outside the
    store's files directory).
    """
1784-
class text(SQLAttribute):
    """
    Attribute representing a sequence of characters; in memory this is a
    Python C{unicode} string.
    """

    def __init__(self, caseSensitive=False, **kw):
        SQLAttribute.__init__(self, **kw)
        # Case-insensitive columns rely on SQLite's NOCASE collation.
        self.sqltype = 'TEXT' if caseSensitive else 'TEXT COLLATE NOCASE'
        self.caseSensitive = caseSensitive

    def infilter(self, pyval, oself, store):
        if pyval is None:
            return None
        if isinstance(pyval, unicode) and u'\0' not in pyval:
            return pyval
        # Reject non-unicode values and embedded NULs alike.
        raise ConstraintError(
            self, "unicode string without NULL bytes", pyval)

    def outfilter(self, dbval, oself):
        # The database value is already the in-memory representation.
        return dbval
1809-
1810-
1811-
class textlist(text):
    """
    A list of unicode strings, stored as a single delimited TEXT value.
    """
    # Elements are joined with the ASCII "unit separator" control character.
    delimiter = u'\u001f'

    # Once upon a time, textlist encoded the list in such a way that caused []
    # to be indistinguishable from [u'']. This value is now used as a
    # placeholder at the head of the list, to avoid this problem in a way that
    # is almost completely backwards-compatible with older databases.
    guard = u'\u0002'

    def outfilter(self, dbval, oself):
        """
        Split the stored string back into a list, dropping the leading guard
        element if present.
        """
        joined = super(textlist, self).outfilter(dbval, oself)
        if joined is None:
            return None
        elements = joined.split(self.delimiter)
        if elements[:1] == [self.guard]:
            del elements[:1]
        return elements

    def infilter(self, pyval, oself, store):
        """
        Join the list into one delimited string, prefixed with the guard
        element, and store it as text.
        """
        if pyval is None:
            return None
        for element in pyval:
            # A delimiter or guard character inside an element would corrupt
            # the encoding.
            assert self.delimiter not in element and self.guard not in element
        joined = self.delimiter.join([self.guard] + list(pyval))
        return super(textlist, self).infilter(joined, oself, store)
1837-
class path(text):
    """
    Attribute representing a pathname in the filesystem. If 'relative=True',
    the default, the representative pathname object must be somewhere inside
    the store, and will migrate with the store.

    I expect L{twisted.python.filepath.FilePath} or compatible objects as my
    values.
    """

    def __init__(self, relative=True, **kw):
        text.__init__(self, **kw)
        # Bug fix: this used to be 'self.relative = True', silently ignoring
        # the caller's argument and making absolute-path attributes
        # impossible (the absolute branches in infilter/outfilter below were
        # dead code). Honor the caller's choice.
        self.relative = relative

    def prepareInsert(self, oself, store):
        """
        Prepare for insertion into the database by making the dbunderlying
        attribute of the item a relative pathname with respect to the store
        rather than an absolute pathname.
        """
        if self.relative:
            fspath = self.__get__(oself)
            oself.__dirty__[self.attrname] = self, self.infilter(fspath, oself, store)

    def infilter(self, pyval, oself, store):
        """
        Convert a FilePath-like object into the text stored in the database:
        a '/'-separated store-relative path when C{self.relative}, otherwise
        the absolute path.

        @raise InvalidPathError: if a relative path lies outside the store's
            files directory.
        """
        if pyval is None:
            return None
        mypath = unicode(pyval.path)
        if store is None:
            store = oself.store
        if store is None:
            return None
        if self.relative:
            # XXX add some more filepath APIs to make this kind of checking easier.
            storepath = os.path.normpath(store.filesdir.path)
            mysegs = mypath.split(os.sep)
            storesegs = storepath.split(os.sep)
            if len(mysegs) <= len(storesegs) or mysegs[:len(storesegs)] != storesegs:
                raise InvalidPathError('%s not in %s' % (mypath, storepath))
            # In the database we use '/' to separate paths for portability.
            # This database could have relative paths created on Windows, then
            # be moved to Linux for deployment, and what *was* the native
            # os.sep (backslash) will not be friendly to Linux's filesystem.
            # However, this is only for relative paths, since absolute or UNC
            # pathnames on a Windows system are inherently unportable and it's
            # not reasonable to calculate relative paths outside the store.
            p = '/'.join(mysegs[len(storesegs):])
        else:
            p = mypath # we already know it's absolute, it came from a
                       # filepath.
        return super(path, self).infilter(p, oself, store)

    def outfilter(self, dbval, oself):
        """
        Convert the stored text back into a FilePath: anchored at the store's
        files directory when relative, otherwise an absolute FilePath.
        """
        if dbval is None:
            return None
        if self.relative:
            fp = oself.store.filesdir
            for segment in dbval.split('/'):
                fp = fp.child(segment)
        else:
            fp = filepath.FilePath(dbval)
        return fp
1900-
1901-
# Microseconds per second, as a float so that division by it yields
# fractional seconds.
MICRO = 1000000.
1903-
class timestamp(integer):
    """
    An in-database representation of date and time.

    To make formatting as easy as possible, this is represented in Python as
    an instance of L{epsilon.extime.Time}; see its documentation for more
    details.
    """
    def infilter(self, pyval, oself, store):
        if pyval is None:
            return None
        # Stored as integral microseconds since the POSIX epoch.
        micros = int(pyval.asPOSIXTimestamp() * MICRO)
        return integer.infilter(self, micros, oself, store)

    def outfilter(self, dbval, oself):
        if dbval is None:
            return None
        # MICRO is a float, so this division preserves sub-second precision.
        return Time.fromPOSIXTimestamp(dbval / MICRO)
1922-
# Registries mapping a referenced Item type to the reference attributes
# declared with whenDeleted=CASCADE / whenDeleted=DISALLOW respectively;
# populated by reference.__init__ and consulted by the deletion machinery.
_cascadingDeletes = {}
_disallows = {}
1925-
class reference(integer):
    """
    An attribute which refers to another Item, stored in the database as the
    referent's integer storeID.

    The C{whenDeleted} constructor argument names the policy for when the
    referent is deleted; CASCADE and DISALLOW references are recorded in the
    module-level registries consulted by the deletion machinery.
    """
    # Sentinel values for the whenDeleted constructor argument.
    NULLIFY = object()
    DISALLOW = object()
    CASCADE = object()

    def __init__(self, doc='', indexed=True, allowNone=True, reftype=None,
                 whenDeleted=NULLIFY):
        integer.__init__(self, doc, indexed, None, allowNone)
        assert whenDeleted in (reference.NULLIFY,
                               reference.CASCADE,
                               reference.DISALLOW),(
            "whenDeleted must be one of: "
            "reference.NULLIFY, reference.CASCADE, reference.DISALLOW")
        self.reftype = reftype
        self.whenDeleted = whenDeleted
        if whenDeleted is reference.CASCADE:
            # Note; this list is technically in a slightly inconsistent state
            # as things are being built.
            _cascadingDeletes.setdefault(reftype, []).append(self)
        if whenDeleted is reference.DISALLOW:
            _disallows.setdefault(reftype, []).append(self)

    def reprFor(self, oself):
        """
        Render a debugging representation of this reference: the referent's
        storeID if available, otherwise the cached database value.
        """
        obj = getattr(oself, self.underlying, None)
        if obj is not None:
            if obj.storeID is not None:
                return 'reference(%d)' % (obj.storeID,)
            else:
                # The referent exists in memory but has not been stored yet.
                return 'reference(unstored@%d)' % (id(obj),)
        sid = getattr(oself, self.dbunderlying, None)
        if sid is None:
            return 'None'
        return 'reference(%d)' % (sid,)


    def __get__(self, oself, cls=None):
        """
        Override L{integer.__get__} to verify that the value to be returned is
        currently a valid item in the same store, and to make sure that legacy
        items are upgraded if they happen to have been cached.
        """
        rv = super(reference, self).__get__(oself, cls)
        if rv is self:
            # If it's an attr lookup on the class, just do that.
            return self
        if rv is None:
            return rv
        if not rv._currentlyValidAsReferentFor(oself.store):
            # Make sure it's currently valid, i.e. it's not going to be deleted
            # this transaction or it hasn't been deleted.

            # XXX TODO: drop cached in-memory referent if it's been deleted /
            # no longer valid.
            assert self.whenDeleted is reference.NULLIFY, (
                "not sure what to do if not...")
            return None
        if rv.__legacy__:
            # Discard the cached legacy item and look it up again, which
            # upgrades it.
            delattr(oself, self.underlying)
            return super(reference, self).__get__(oself, cls)
        return rv

    def prepareInsert(self, oself, store):
        """
        Verify that the referent (if any) lives in the same store the item is
        being inserted into; cross-store references are forbidden.
        """
        oitem = super(reference, self).__get__(oself) # bypass NULLIFY
        if oitem is not None and oitem.store is not store:
            raise NoCrossStoreReferences(
                "Trying to insert item: %r into store: %r, "
                "but it has a reference to other item: .%s=%r "
                "in another store: %r" % (
                    oself, store,
                    self.attrname, oitem,
                    oitem.store))

    def infilter(self, pyval, oself, store):
        """
        Convert the referent item into its storeID for storage, refusing
        references to items in other stores.
        """
        if pyval is None:
            return None
        if oself is None:
            return pyval.storeID
        if oself.store is None:
            return pyval.storeID
        if oself.store != pyval.store:
            raise NoCrossStoreReferences(
                "You can't establish references to items in other stores.")

        return integer.infilter(self, pyval.storeID, oself, store)

    def outfilter(self, dbval, oself):
        """
        Load the referent item from its stored storeID, raising
        L{BrokenReference} when the referent is gone and this reference does
        not nullify.
        """
        if dbval is None:
            return None

        referee = oself.store.getItemByID(dbval, default=None, autoUpgrade=not oself.__legacy__)
        if referee is None and self.whenDeleted is not reference.NULLIFY:

            # If referee merely changed to another valid referent,
            # SQLAttribute.__get__ will notice that what we returned is
            # inconsistent and try again. However, it doesn't know about the
            # BrokenReference that is raised if the old referee is no longer a
            # valid referent. Check to see if the dbunderlying is still the
            # same as the dbval passed in. If it's different, we should try to
            # load the value again. Only if it is unchanged will we raise the
            # BrokenReference. It would be better if all of this
            # change-detection logic were in one place, but I can't figure out
            # how to do that. -exarkun
            if dbval != getattr(oself, self.dbunderlying):
                return self.__get__(oself, None)

            raise BrokenReference('Reference to storeID %r is broken' % (dbval,))
        return referee
2033-
class ieee754_double(SQLAttribute):
    """
    From the SQLite documentation::

        Each value stored in an SQLite database (or manipulated by the
        database engine) has one of the following storage classes: (...)
        REAL. The value is a floating point value, stored as an 8-byte IEEE
        floating point number.

    This attribute type implements IEEE754 double-precision binary
    floating-point storage. Some people call this 'float', and think it is
    somehow related to numbers. This assumption can be misleading when working
    with certain types of data.

    This attribute has an unwieldy name on purpose. You should be aware
    of the caveats related to binary floating point math before using this
    type. It is particularly ill-advised to use it to store values
    representing large amounts of currency as rounding errors may be
    significant enough to introduce accounting discrepancies.

    Certain edge-cases are not handled properly. For example, INF and NAN are
    considered by SQLite to be equal to everything, rather than the Python
    interpretation where INF is equal only to itself and greater than
    everything, and NAN is equal to nothing, not even itself.
    """

    sqltype = 'REAL'

    def infilter(self, pyval, oself, store):
        # Only genuine floats (or None) are accepted; the caller must
        # convert ints explicitly.
        if pyval is None:
            return None
        requireType(self, pyval, 'float', float)
        return pyval

    def outfilter(self, dbval, oself):
        # REAL values round-trip without conversion.
        return dbval
2070-
2071-
2072-
class AbstractFixedPointDecimal(integer):
    """
    Attribute representing a number with a specified number of decimal
    places.

    This is stored in SQLite as a binary integer multiplied by M{10**N}
    where C{N} is the number of decimal places required by Python.
    Therefore, in-database multiplication, division, or queries which
    compare to integers or fixedpointdecimals with a different number of
    decimal places, will not work. Also, you cannot store, or sum to, fixed
    point decimals greater than M{(2**63)/(10**N)}.

    While L{ieee754_double} is handy for representing various floating-point
    numbers, such as scientific measurements, this class (and the associated
    Python decimal class) is more appropriate for arithmetic on sums of money.

    See Python's U{Decimal
    class<http://www.python.org/doc/current/lib/module-decimal.html>}
    documentation and the U{General Decimal Arithmetic
    specification<http://www2.hursley.ibm.com/decimal/decarith.html>} for
    more information on computerized decimal math.

    This is currently a private helper superclass because we cannot store
    additional metadata about column types; maybe we should fix that.

    @cvar decimalPlaces: the number of points of decimal precision allowed by
        the storage and retrieval of this class. *Points beyond this number
        will be silently truncated to values passed into the database*, so be
        sure to select a value appropriate to your application!
    """

    def __init__(self, **kw):
        # Keyword-only on purpose; positional arguments would be ambiguous
        # for the concrete pointNdecimal subclasses.
        integer.__init__(self, **kw)


    def infilter(self, pyval, oself, store):
        """
        Scale a Decimal (ints are promoted to Decimal first) by
        M{10**decimalPlaces} and store the result as an integer.
        """
        if pyval is None:
            return None
        if isinstance(pyval, (int, long)):
            pyval = Decimal(pyval)
        if isinstance(pyval, Decimal):
            # Python < 2.5.2 compatibility:
            # Use to_integral instead of to_integral_value.
            dbval = int((pyval * 10**self.decimalPlaces).to_integral())
            return super(AbstractFixedPointDecimal, self).infilter(
                dbval, oself, store)
        else:
            raise TypeError(
                "attribute [%s.%s = AbstractFixedPointDecimal(...)] must be "
                "Decimal instance; not %r" % (
                    self.classname, self.attrname, type(pyval).__name__))


    def outfilter(self, dbval, oself):
        """
        Convert the stored scaled integer back into a Decimal.
        """
        if dbval is None:
            return None
        return Decimal(dbval) / 10**self.decimalPlaces


    def compare(self, other, sqlop):
        """
        Refuse in-database comparison against any other column type: the
        stored value is a scaled integer, so comparing against a column of a
        different precision (or any non-decimal column) would compare raw
        scaled integers and produce nonsense. Non-column operands fall
        through to the superclass, which filters them through this attribute.
        """
        if isinstance(other, Comparable):
            if isinstance(other, AbstractFixedPointDecimal):
                if other.decimalPlaces == self.decimalPlaces:
                    # fall through to default behavior at bottom
                    pass
                else:
                    raise TypeError(
                        "Can't compare Decimals of varying precisions: "
                        "(%s.%s %s %s.%s)" % (
                            self.classname, self.attrname,
                            sqlop,
                            other.classname, other.attrname
                            ))
            else:
                raise TypeError(
                    "Can't compare Decimals to other things: "
                    "(%s.%s %s %s.%s)" % (
                        self.classname, self.attrname,
                        sqlop,
                        other.classname, other.attrname
                        ))
        return super(AbstractFixedPointDecimal, self).compare(other, sqlop)
2154-
# Concrete fixed-point decimal column types, one per supported number of
# decimal places (see AbstractFixedPointDecimal for storage details).
class point1decimal(AbstractFixedPointDecimal):
    decimalPlaces = 1
class point2decimal(AbstractFixedPointDecimal):
    decimalPlaces = 2
class point3decimal(AbstractFixedPointDecimal):
    decimalPlaces = 3
class point4decimal(AbstractFixedPointDecimal):
    decimalPlaces = 4
class point5decimal(AbstractFixedPointDecimal):
    decimalPlaces = 5
class point6decimal(AbstractFixedPointDecimal):
    decimalPlaces = 6
class point7decimal(AbstractFixedPointDecimal):
    decimalPlaces = 7
class point8decimal(AbstractFixedPointDecimal):
    decimalPlaces = 8
class point9decimal(AbstractFixedPointDecimal):
    decimalPlaces = 9
class point10decimal(AbstractFixedPointDecimal):
    decimalPlaces = 10
2175-
class money(point4decimal):
    """
    I am a 4-point precision fixed-point decimal number column type; suggested
    for representing a quantity of money.

    (This does not, however, include features such as currency.)
    """
    # decimalPlaces = 4 is inherited from point4decimal.
2183
2184=== removed file 'Axiom/axiom/batch.py'
2185--- Axiom/axiom/batch.py 2012-07-05 13:37:40 +0000
2186+++ Axiom/axiom/batch.py 1970-01-01 00:00:00 +0000
2187@@ -1,1230 +0,0 @@
2188-# -*- test-case-name: axiom.test.test_batch -*-
2189-
2190-"""
2191-Utilities for performing repetitive tasks over potentially large sets
2192-of data over an extended period of time.
2193-"""
2194-
2195-import weakref, datetime, os, sys
2196-
2197-from zope.interface import implements
2198-
2199-from twisted.python import reflect, failure, log, procutils, util, runtime
2200-from twisted.internet import task, defer, reactor, error, protocol
2201-from twisted.application import service
2202-
2203-from epsilon import extime, process, cooperator, modal, juice
2204-
2205-from axiom import iaxiom, errors as eaxiom, item, attributes
2206-from axiom.scheduler import Scheduler, SubScheduler
2207-from axiom.upgrade import registerUpgrader, registerDeletionUpgrader
2208-from axiom.dependency import installOn
2209-
# When True, emit detailed log messages describing batch-processing progress.
VERBOSE = False

# Cache of generated batch-processor Item types, keyed by the Item type they
# process; weak values let unused processor types be collected.
_processors = weakref.WeakValueDictionary()
2213-
2214-
class _NoWorkUnits(Exception):
    """
    Raised by L{_ReliableListener.step} to indicate that it found no work
    units at all and therefore did nothing.
    """
2220-
2221-
2222-
2223-class _ProcessingFailure(Exception):
2224- """
2225- Raised when processItem raises any exception. This is never raised
2226- directly, but instances of the three subclasses are.
2227- """
2228- def __init__(self, reliableListener, workUnit, failure):
2229- Exception.__init__(self)
2230- self.reliableListener = reliableListener
2231- self.workUnit = workUnit
2232- self.failure = failure
2233-
2234- # Get rid of all references this failure is holding so that it doesn't
2235- # cause any crazy object leaks. See also the comment in
2236- # BatchProcessingService.step's except suite.
2237- self.failure.cleanFailure()
2238-
2239-
2240- def mark(self):
2241- """
2242- Mark the unit of work as failed in the database and update the listener
2243- so as to skip it next time.
2244- """
2245- self.reliableListener.lastRun = extime.Time()
2246- BatchProcessingError(
2247- store=self.reliableListener.store,
2248- processor=self.reliableListener.processor,
2249- listener=self.reliableListener.listener,
2250- item=self.workUnit,
2251- error=self.failure.getErrorMessage())
2252-
2253-
2254-
class _ForwardProcessingFailure(_ProcessingFailure):
    """
    An error occurred in a reliable listener while processing items forward
    from the mark.
    """

    def mark(self):
        # In addition to recording the failure, advance the forward mark past
        # the failed unit so it is skipped on the next pass.
        _ProcessingFailure.mark(self)
        self.reliableListener.forwardMark = self.workUnit.storeID
2264-
2265-
2266-
class _BackwardProcessingFailure(_ProcessingFailure):
    """
    An error occurred in a reliable listener while processing items backwards
    from the mark.
    """
    def mark(self):
        # In addition to recording the failure, move the backward mark past
        # the failed unit so it is skipped on the next pass.
        _ProcessingFailure.mark(self)
        self.reliableListener.backwardMark = self.workUnit.storeID
2275-
2276-
2277-
class _TrackedProcessingFailure(_ProcessingFailure):
    """
    An error occurred in a reliable listener while processing an
    out-of-sequence item specially added to the batch run; no mark needs to
    move, so the base behavior suffices.
    """
2283-
2284-
2285-
class BatchProcessingError(item.Item):
    """
    Persistent record of a single failure by a reliable listener to process
    a work unit; created by L{_ProcessingFailure.mark}.
    """
    processor = attributes.reference(doc="""
    The batch processor which owns this failure.
    """)

    listener = attributes.reference(doc="""
    The listener which caused this error.
    """)

    item = attributes.reference(doc="""
    The item which actually failed to be processed.
    """)

    error = attributes.bytes(doc="""
    The error message which was associated with this failure.
    """)
2302-
2303-
2304-
class _ReliableTracker(item.Item):
    """
    A tracking item for an out-of-sequence item which a reliable listener
    should be given to process.

    These are created when L{_ReliableListener.addItem} is called and the
    specified item is in the range of items which have already been processed.
    Trackers are consumed (deleted) by L{_ReliableListener.step}.
    """

    processor = attributes.reference(doc="""
    The batch processor which owns this tracker.
    """)

    listener = attributes.reference(doc="""
    The listener which is responsible for this tracker's item.
    """)

    item = attributes.reference(doc="""
    The item which this is tracking.
    """)
2325-
2326-
2327-
class _ReliableListener(item.Item):
    """
    Persistent tracking state for one listener registered with a batch
    processor: which items it has already seen (the forward and backward
    marks) and when it last ran.
    """
    processor = attributes.reference(doc="""
    The batch processor which owns this listener.
    """)

    listener = attributes.reference(doc="""
    The item which is actually the listener.
    """)

    backwardMark = attributes.integer(doc="""
    Store ID of the first Item after the next Item to be processed in
    the backwards direction. Usually, the Store ID of the Item
    previously processed in the backwards direction.
    """)

    forwardMark = attributes.integer(doc="""
    Store ID of the first Item before the next Item to be processed in
    the forwards direction. Usually, the Store ID of the Item
    previously processed in the forwards direction.
    """)

    lastRun = attributes.timestamp(doc="""
    Time indicating the last chance given to this listener to do some
    work.
    """)

    style = attributes.integer(doc="""
    Either L{iaxiom.LOCAL} or L{iaxiom.REMOTE}. Indicates where the
    batch processing should occur, in the main process or a
    subprocess.
    """)

    def __repr__(self):
        return '<ReliableListener %s %r #%r>' % ({iaxiom.REMOTE: 'remote',
                                                  iaxiom.LOCAL: 'local'}[self.style],
                                                 self.listener,
                                                 self.storeID)


    def addItem(self, item):
        """
        Add an out-of-sequence work unit. Only items inside the
        already-processed range need a tracker; anything outside it will be
        found by the normal forward/backward scans.
        """
        assert type(item) is self.processor.workUnitType, \
               "Adding work unit of type %r to listener for type %r" % (
            type(item), self.processor.workUnitType)
        if item.storeID >= self.backwardMark and item.storeID <= self.forwardMark:
            _ReliableTracker(store=self.store,
                             listener=self,
                             item=item)


    def _forwardWork(self, workUnitType):
        """
        Query for up to two unprocessed work units above the forward mark
        (two, so step() can tell whether more work remains after one unit).
        """
        if VERBOSE:
            log.msg("%r looking forward from %r" % (self, self.forwardMark,))
        return self.store.query(
            workUnitType,
            workUnitType.storeID > self.forwardMark,
            sort=workUnitType.storeID.ascending,
            limit=2)


    def _backwardWork(self, workUnitType):
        """
        Query for up to two unprocessed work units below the backward mark;
        a mark of 0 means there is nothing further back.
        """
        if VERBOSE:
            log.msg("%r looking backward from %r" % (self, self.backwardMark,))
        if self.backwardMark == 0:
            return []
        return self.store.query(
            workUnitType,
            workUnitType.storeID < self.backwardMark,
            sort=workUnitType.storeID.descending,
            limit=2)


    def _extraWork(self):
        """
        Query for up to two out-of-sequence trackers targeted at this
        listener.
        """
        return self.store.query(_ReliableTracker,
                                _ReliableTracker.listener == self,
                                limit=2)


    def _doOneWork(self, workUnit, failureType):
        """
        Hand one work unit to the listener, wrapping any exception it raises
        in the given L{_ProcessingFailure} subclass.
        """
        if VERBOSE:
            log.msg("Processing a unit of work: %r" % (workUnit,))
        try:
            self.listener.processItem(workUnit)
        except:
            f = failure.Failure()
            if VERBOSE:
                log.msg("Processing failed: %s" % (f.getErrorMessage(),))
                log.err(f)
            raise failureType(self, workUnit, f)


    def step(self):
        """
        Process at most one unit of work: tracked items first, then the
        forward scan, then the backward scan. Return True if more work is
        known to remain, False if the processed unit was the last one, and
        raise L{_NoWorkUnits} if there was nothing to do at all.
        """
        # 'first' is True until a unit has been processed; encountering a
        # second candidate unit afterwards means more work remains.
        first = True
        for workTracker in self._extraWork():
            if first:
                first = False
            else:
                return True
            item = workTracker.item
            # Consume the tracker before processing so a failure is not
            # retried through the tracker path.
            workTracker.deleteFromStore()
            self._doOneWork(item, _TrackedProcessingFailure)

        for workUnit in self._forwardWork(self.processor.workUnitType):
            if first:
                first = False
            else:
                return True
            # Advance the mark before processing; on failure the
            # _ForwardProcessingFailure re-records it anyway.
            self.forwardMark = workUnit.storeID
            self._doOneWork(workUnit, _ForwardProcessingFailure)

        for workUnit in self._backwardWork(self.processor.workUnitType):
            if first:
                first = False
            else:
                return True
            self.backwardMark = workUnit.storeID
            self._doOneWork(workUnit, _BackwardProcessingFailure)

        if first:
            raise _NoWorkUnits()
        if VERBOSE:
            log.msg("%r.step() returning False" % (self,))
        return False
2450-
2451-
2452-
class _BatchProcessorMixin:
    """
    Core batch-processing behavior shared by the generated batch-processor
    item types: stepping registered L{_ReliableListener}s, scheduler
    integration, and listener management.
    """

    def step(self, style=iaxiom.LOCAL, skip=()):
        """
        Give one listener of the given style (and not in C{skip}) a chance to
        process one unit of work; listeners are tried least-recently-run
        first. Return True if more work is known to remain, False otherwise.
        """
        now = extime.Time()
        # 'first' is True until some listener has actually done work; finding
        # another eligible listener afterwards means more work remains.
        first = True

        for listener in self.store.query(_ReliableListener,
                                         attributes.AND(_ReliableListener.processor == self,
                                                        _ReliableListener.style == style,
                                                        _ReliableListener.listener.notOneOf(skip)),
                                         sort=_ReliableListener.lastRun.ascending):
            if not first:
                if VERBOSE:
                    log.msg("Found more work to do, returning True from %r.step()" % (self,))
                return True
            listener.lastRun = now
            try:
                if listener.step():
                    if VERBOSE:
                        log.msg("%r.step() reported more work to do, returning True from %r.step()" % (listener, self))
                    return True
            except _NoWorkUnits:
                # This listener had nothing to do; keep 'first' set so the
                # next listener gets a chance.
                if VERBOSE:
                    log.msg("%r.step() reported no work units" % (listener,))
            else:
                first = False
        if VERBOSE:
            log.msg("No listeners left with work, returning False from %r.step()" % (self,))
        return False


    def run(self):
        """
        Try to run one unit of work through one listener. If there are more
        listeners or more work, reschedule this item to be run again in
        C{self.busyInterval} milliseconds, otherwise unschedule it.

        @rtype: L{extime.Time} or C{None}
        @return: The next time at which to run this item, used by the scheduler
        for automatically rescheduling, or None if there is no more work to do.
        """
        now = extime.Time()
        if self.step():
            self.scheduled = now + datetime.timedelta(milliseconds=self.busyInterval)
        else:
            self.scheduled = None
        return self.scheduled


    def timedEventErrorHandler(self, timedEvent, failureObj):
        """
        Scheduler error hook: handle a L{_ProcessingFailure} raised by
        L{run} by logging it, recording it with C{mark()}, and rescheduling
        after C{busyInterval} milliseconds.
        """
        failureObj.trap(_ProcessingFailure)
        log.msg("Batch processing failure")
        log.err(failureObj.value.failure)
        failureObj.value.mark()
        return extime.Time() + datetime.timedelta(milliseconds=self.busyInterval)


    def addReliableListener(self, listener, style=iaxiom.LOCAL):
        """
        Add the given Item to the set which will be notified of Items
        available for processing.

        Note: Each Item is processed synchronously. Adding too many
        listeners to a single batch processor will cause the L{step}
        method to block while it sends notification to each listener.

        @param listener: An Item instance which provides a
        C{processItem} method.

        @return: An Item representing L{listener}'s persistent tracking state.
        """
        existing = self.store.findUnique(_ReliableListener,
                                         attributes.AND(_ReliableListener.processor == self,
                                                        _ReliableListener.listener == listener),
                                         default=None)
        if existing is not None:
            return existing

        # Start the new listener's marks at the newest existing work unit so
        # it processes new items forward and history backward from there.
        for work in self.store.query(self.workUnitType,
                                     sort=self.workUnitType.storeID.descending,
                                     limit=1):
            forwardMark = work.storeID
            backwardMark = work.storeID + 1
            break
        else:
            forwardMark = 0
            backwardMark = 0

        if self.scheduled is None:
            self.scheduled = extime.Time()
            iaxiom.IScheduler(self.store).schedule(self, self.scheduled)

        return _ReliableListener(store=self.store,
                                 processor=self,
                                 listener=listener,
                                 forwardMark=forwardMark,
                                 backwardMark=backwardMark,
                                 style=style)


    def removeReliableListener(self, listener):
        """
        Remove a previously added listener.
        """
        self.store.query(_ReliableListener,
                         attributes.AND(_ReliableListener.processor == self,
                                        _ReliableListener.listener == listener)).deleteFromStore()
        self.store.query(BatchProcessingError,
                         attributes.AND(BatchProcessingError.processor == self,
                                        BatchProcessingError.listener == listener)).deleteFromStore()


    def getReliableListeners(self):
        """
        Return an iterable of the listeners which have been added to
        this batch processor.
        """
        for rellist in self.store.query(_ReliableListener, _ReliableListener.processor == self):
            yield rellist.listener


    def getFailedItems(self):
        """
        Return an iterable of two-tuples of listeners which raised an
        exception from C{processItem} and the item which was passed as
        the argument to that method.
        """
        for failed in self.store.query(BatchProcessingError, BatchProcessingError.processor == self):
            yield (failed.listener, failed.item)


    def itemAdded(self):
        """
        Called to indicate that a new item of the type monitored by this batch
        processor is being added to the database.

        If this processor is not already scheduled to run, this will schedule
        it. It will also start the batch process if it is not yet running and
        there are any registered remote listeners.
        """
        # limit=1 queries: only existence matters, not the actual count.
        localCount = self.store.query(
            _ReliableListener,
            attributes.AND(_ReliableListener.processor == self,
                           _ReliableListener.style == iaxiom.LOCAL),
            limit=1).count()

        remoteCount = self.store.query(
            _ReliableListener,
            attributes.AND(_ReliableListener.processor == self,
                           _ReliableListener.style == iaxiom.REMOTE),
            limit=1).count()

        if localCount and self.scheduled is None:
            self.scheduled = extime.Time()
            iaxiom.IScheduler(self.store).schedule(self, self.scheduled)
        if remoteCount:
            batchService = iaxiom.IBatchService(self.store, None)
            if batchService is not None:
                batchService.start()
2612-
2613-
2614-
def upgradeProcessor1to2(oldProcessor):
    """
    Batch processors stopped polling at version 2, so they no longer needed the
    idleInterval attribute. They also gained a scheduled attribute which
    tracks their interaction with the scheduler. Since they stopped polling,
    we also set them up as a timed event here to make sure that they don't
    silently disappear, never to be seen again: running them with the scheduler
    gives them a chance to figure out what's up and set up whatever other state
    they need to continue to run.

    Since this introduces a new dependency of all batch processors on a powerup
    for the IScheduler, install a Scheduler or a SubScheduler if one is not
    already present.
    """
    newProcessor = oldProcessor.upgradeVersion(
        oldProcessor.typeName, 1, 2,
        busyInterval=oldProcessor.busyInterval)
    # Schedule the upgraded processor to run immediately.
    newProcessor.scheduled = extime.Time()

    s = newProcessor.store
    sch = iaxiom.IScheduler(s, None)
    if sch is None:
        if s.parent is None:
            # Only site stores have no parents.
            sch = Scheduler(store=s)
        else:
            # Substores get subschedulers.
            sch = SubScheduler(store=s)
        installOn(sch, s)

    # And set it up to run.
    sch.schedule(newProcessor, newProcessor.scheduled)
    return newProcessor
2648-
2649-def processor(forType):
2650- """
2651- Create an Axiom Item type which is suitable to use as a batch processor for
2652- the given Axiom Item type.
2653-
2654- Processors created this way depend on a L{iaxiom.IScheduler} powerup on the
2655- on which store they are installed.
2656-
2657- @type forType: L{item.MetaItem}
2658- @param forType: The Axiom Item type for which to create a batch processor
2659- type.
2660-
2661- @rtype: L{item.MetaItem}
2662-
2663- @return: An Axiom Item type suitable for use as a batch processor. If such
2664- a type previously existed, it will be returned. Otherwise, a new type is
2665- created.
2666- """
2667- MILLI = 1000
2668- if forType not in _processors:
2669- def __init__(self, *a, **kw):
2670- item.Item.__init__(self, *a, **kw)
2671- self.store.powerUp(self, iaxiom.IBatchProcessor)
2672-
2673- attrs = {
2674- '__name__': 'Batch_' + forType.__name__,
2675-
2676- '__module__': forType.__module__,
2677-
2678- '__init__': __init__,
2679-
2680- '__repr__': lambda self: '<Batch of %s #%d>' % (reflect.qual(self.workUnitType), self.storeID),
2681-
2682- 'schemaVersion': 2,
2683-
2684- 'workUnitType': forType,
2685-
2686- 'scheduled': attributes.timestamp(doc="""
2687- The next time at which this processor is scheduled to run.
2688- """, default=None),
2689-
2690- # MAGIC NUMBERS AREN'T THEY WONDERFUL?
2691- 'busyInterval': attributes.integer(doc="", default=MILLI / 10),
2692- }
2693- _processors[forType] = item.MetaItem(
2694- attrs['__name__'],
2695- (item.Item, _BatchProcessorMixin),
2696- attrs)
2697-
2698- registerUpgrader(
2699- upgradeProcessor1to2,
2700- _processors[forType].typeName,
2701- 1, 2)
2702-
2703- return _processors[forType]
2704-
2705-
2706-
2707-class ProcessUnavailable(Exception):
2708- """Indicates the process is not available to perform tasks.
2709-
2710- This is a transient error. Calling code should handle it by
2711- arranging to do the work they planned on doing at a later time.
2712- """
2713-
2714-
2715-
2716-class Shutdown(juice.Command):
2717- """
2718- Abandon, belay, cancel, cease, close, conclude, cut it out, desist,
2719- determine, discontinue, drop it, end, finish, finish up, give over, go
2720- amiss, go astray, go wrong, halt, have done with, hold, knock it off, lay
2721- off, leave off, miscarry, perorate, quit, refrain, relinquish, renounce,
2722- resolve, scrap, scratch, scrub, stay, stop, terminate, wind up.
2723- """
2724- commandName = "Shutdown"
2725- responseType = juice.QuitBox
2726-
2727-
2728-def _childProcTerminated(self, err):
2729- self.mode = 'stopped'
2730- err = ProcessUnavailable(err)
2731- for d in self.waitingForProcess:
2732- d.errback(err)
2733- del self.waitingForProcess
2734-
2735-
2736-class ProcessController(object):
2737- """
2738- Stateful class which tracks a Juice connection to a child process.
2739-
2740- Communication occurs over stdin and stdout of the child process. The
2741- process is launched and restarted as necessary. Failures due to the child
2742- process terminating, either unilaterally of by request, are represented as
2743- a transient exception class,
2744-
2745- Mode is one of::
2746-
2747- - 'stopped' (no process running or starting)
2748- - 'starting' (process begun but not ready for requests)
2749- - 'ready' (process ready for requests)
2750- - 'stopping' (process being torn down)
2751- - 'waiting_ready' (process beginning but will be shut down
2752- as soon as it starts up)
2753-
2754- Transitions are as follows::
2755-
2756- getProcess:
2757- stopped -> starting:
2758- launch process
2759- create/save in waitingForStartup/return Deferred
2760- starting -> starting:
2761- create/save/return Deferred
2762- ready -> ready:
2763- return saved process
2764- stopping:
2765- return failing Deferred indicating transient failure
2766- waiting_ready:
2767- return failing Deferred indicating transient failure
2768-
2769- stopProcess:
2770- stopped -> stopped:
2771- return succeeding Deferred
2772- starting -> waiting_ready:
2773- create Deferred, add transient failure errback handler, return
2774- ready -> stopping:
2775- call shutdown on process
2776- return Deferred which fires when shutdown is done
2777-
2778- childProcessCreated:
2779- starting -> ready:
2780- callback saved Deferreds
2781- clear saved Deferreds
2782- waiting_ready:
2783- errback saved Deferred indicating transient failure
2784- return _shutdownIndexerProcess()
2785-
2786- childProcessTerminated:
2787- starting -> stopped:
2788- errback saved Deferreds indicating transient failure
2789- waiting_ready -> stopped:
2790- errback saved Deferreds indicating transient failure
2791- ready -> stopped:
2792- drop reference to process object
2793- stopping -> stopped:
2794- Callback saved shutdown deferred
2795-
2796- @ivar process: A reference to the process object. Set in every non-stopped
2797- mode.
2798-
2799- @ivar juice: A reference to the juice protocol. Set in all modes.
2800-
2801- @ivar connector: A reference to the process protocol. Set in every
2802- non-stopped mode.
2803-
2804- @ivar onProcessStartup: None or a no-argument callable which will
2805- be invoked whenever the connection is first established to a newly
2806- spawned child process.
2807-
2808- @ivar onProcessTermination: None or a no-argument callable which
2809- will be invoked whenever a Juice connection is lost, except in the
2810- case where process shutdown was explicitly requested via
2811- stopProcess().
2812- """
2813-
2814- __metaclass__ = modal.ModalType
2815-
2816- initialMode = 'stopped'
2817- modeAttribute = 'mode'
2818-
2819- # A reference to the Twisted process object which corresponds to
2820- # the child process we have spawned. Set to a non-None value in
2821- # every state except stopped.
2822- process = None
2823-
2824- # A reference to the process protocol object via which we
2825- # communicate with the process's stdin and stdout. Set to a
2826- # non-None value in every state except stopped.
2827- connector = None
2828-
2829- def __init__(self, name, juice, tacPath,
2830- onProcessStartup=None,
2831- onProcessTermination=None,
2832- logPath=None,
2833- pidPath=None):
2834- self.name = name
2835- self.juice = juice
2836- self.tacPath = tacPath
2837- self.onProcessStartup = onProcessStartup
2838- self.onProcessTermination = onProcessTermination
2839- if logPath is None:
2840- logPath = name + '.log'
2841- if pidPath is None:
2842- pidPath = name + '.pid'
2843- self.logPath = logPath
2844- self.pidPath = pidPath
2845-
2846- def _startProcess(self):
2847- executable = sys.executable
2848- env = os.environ
2849-
2850- twistdBinaries = procutils.which("twistd2.4") + procutils.which("twistd")
2851- if not twistdBinaries:
2852- return defer.fail(RuntimeError("Couldn't find twistd to start subprocess"))
2853- twistd = twistdBinaries[0]
2854-
2855- setsid = procutils.which("setsid")
2856-
2857- self.connector = JuiceConnector(self.juice, self)
2858-
2859- args = [
2860- sys.executable,
2861- twistd,
2862- '--logfile=%s' % (self.logPath,)]
2863-
2864- if not runtime.platform.isWindows():
2865- args.append('--pidfile=%s' % (self.pidPath,))
2866-
2867- args.extend(['-noy',
2868- self.tacPath])
2869-
2870- if setsid:
2871- args = ['setsid'] + args
2872- executable = setsid[0]
2873-
2874- self.process = process.spawnProcess(
2875- self.connector, executable, tuple(args), env=env)
2876-
2877- class stopped(modal.mode):
2878- def getProcess(self):
2879- self.mode = 'starting'
2880- self.waitingForProcess = []
2881-
2882- self._startProcess()
2883-
2884- # Mode has changed, this will call some other
2885- # implementation of getProcess.
2886- return self.getProcess()
2887-
2888- def stopProcess(self):
2889- return defer.succeed(None)
2890-
2891- class starting(modal.mode):
2892- def getProcess(self):
2893- d = defer.Deferred()
2894- self.waitingForProcess.append(d)
2895- return d
2896-
2897- def stopProcess(self):
2898- def eb(err):
2899- err.trap(ProcessUnavailable)
2900-
2901- d = defer.Deferred().addErrback(eb)
2902- self.waitingForProcess.append(d)
2903-
2904- self.mode = 'waiting_ready'
2905- return d
2906-
2907- def childProcessCreated(self):
2908- self.mode = 'ready'
2909-
2910- if self.onProcessStartup is not None:
2911- self.onProcessStartup()
2912-
2913- for d in self.waitingForProcess:
2914- d.callback(self.juice)
2915- del self.waitingForProcess
2916-
2917- def childProcessTerminated(self, reason):
2918- _childProcTerminated(self, reason)
2919- if self.onProcessTermination is not None:
2920- self.onProcessTermination()
2921-
2922-
2923- class ready(modal.mode):
2924- def getProcess(self):
2925- return defer.succeed(self.juice)
2926-
2927- def stopProcess(self):
2928- self.mode = 'stopping'
2929- self.onShutdown = defer.Deferred()
2930- Shutdown().do(self.juice)
2931- return self.onShutdown
2932-
2933- def childProcessTerminated(self, reason):
2934- self.mode = 'stopped'
2935- self.process = self.connector = None
2936- if self.onProcessTermination is not None:
2937- self.onProcessTermination()
2938-
2939-
2940- class stopping(modal.mode):
2941- def getProcess(self):
2942- return defer.fail(ProcessUnavailable("Shutting down"))
2943-
2944- def stopProcess(self):
2945- return self.onShutdown
2946-
2947- def childProcessTerminated(self, reason):
2948- self.mode = 'stopped'
2949- self.process = self.connector = None
2950- self.onShutdown.callback(None)
2951-
2952-
2953- class waiting_ready(modal.mode):
2954- def getProcess(self):
2955- return defer.fail(ProcessUnavailable("Shutting down"))
2956-
2957- def childProcessCreated(self):
2958- # This will put us into the stopped state - no big deal,
2959- # we are going into the ready state as soon as it returns.
2960- _childProcTerminated(self, RuntimeError("Shutting down"))
2961-
2962- # Dip into the ready mode for ever so brief an instant so
2963- # that we can shut ourselves down.
2964- self.mode = 'ready'
2965- return self.stopProcess()
2966-
2967- def childProcessTerminated(self, reason):
2968- _childProcTerminated(self, reason)
2969- if self.onProcessTermination is not None:
2970- self.onProcessTermination()
2971-
2972-
2973-
2974-class JuiceConnector(protocol.ProcessProtocol):
2975-
2976- def __init__(self, proto, controller):
2977- self.juice = proto
2978- self.controller = controller
2979-
2980- def connectionMade(self):
2981- log.msg("Subprocess started.")
2982- self.juice.makeConnection(self)
2983- self.controller.childProcessCreated()
2984-
2985- # Transport
2986- disconnecting = False
2987-
2988- def write(self, data):
2989- self.transport.write(data)
2990-
2991- def writeSequence(self, data):
2992- self.transport.writeSequence(data)
2993-
2994- def loseConnection(self):
2995- self.transport.loseConnection()
2996-
2997- def getPeer(self):
2998- return ('omfg what are you talking about',)
2999-
3000- def getHost(self):
3001- return ('seriously it is a process this makes no sense',)
3002-
3003- def inConnectionLost(self):
3004- log.msg("Standard in closed")
3005- protocol.ProcessProtocol.inConnectionLost(self)
3006-
3007- def outConnectionLost(self):
3008- log.msg("Standard out closed")
3009- protocol.ProcessProtocol.outConnectionLost(self)
3010-
3011- def errConnectionLost(self):
3012- log.msg("Standard err closed")
3013- protocol.ProcessProtocol.errConnectionLost(self)
3014-
3015- def outReceived(self, data):
3016- self.juice.dataReceived(data)
3017-
3018- def errReceived(self, data):
3019- log.msg("Received stderr from subprocess: " + repr(data))
3020-
3021- def processEnded(self, status):
3022- log.msg("Process ended")
3023- self.juice.connectionLost(status)
3024- self.controller.childProcessTerminated(status)
3025-
3026-
3027-
3028-class JuiceChild(juice.Juice):
3029- """
3030- Protocol class which runs in the child process
3031-
3032- This just defines one behavior on top of a regular juice protocol: the
3033- shutdown command, which drops the connection and stops the reactor.
3034- """
3035- shutdown = False
3036-
3037- def connectionLost(self, reason):
3038- juice.Juice.connectionLost(self, reason)
3039- if self.shutdown:
3040- reactor.stop()
3041-
3042- def command_SHUTDOWN(self):
3043- log.msg("Shutdown message received, goodbye.")
3044- self.shutdown = True
3045- return {}
3046- command_SHUTDOWN.command = Shutdown
3047-
3048-
3049-
3050-class SetStore(juice.Command):
3051- """
3052- Specify the location of the site store.
3053- """
3054- commandName = 'Set-Store'
3055- arguments = [('storepath', juice.Path())]
3056-
3057-
3058-class SuspendProcessor(juice.Command):
3059- """
3060- Prevent a particular reliable listener from receiving any notifications
3061- until a L{ResumeProcessor} command is sent or the batch process is
3062- restarted.
3063- """
3064- commandName = 'Suspend-Processor'
3065- arguments = [('storepath', juice.Path()),
3066- ('storeid', juice.Integer())]
3067-
3068-
3069-
3070-class ResumeProcessor(juice.Command):
3071- """
3072- Cause a particular reliable listener to begin receiving notifications
3073- again.
3074- """
3075- commandName = 'Resume-Processor'
3076- arguments = [('storepath', juice.Path()),
3077- ('storeid', juice.Integer())]
3078-
3079-
3080-
3081-class CallItemMethod(juice.Command):
3082- """
3083- Invoke a particular method of a particular item.
3084- """
3085- commandName = 'Call-Item-Method'
3086- arguments = [('storepath', juice.Path()),
3087- ('storeid', juice.Integer()),
3088- ('method', juice.String())]
3089-
3090-
3091-class BatchProcessingControllerService(service.Service):
3092- """
3093- Controls starting, stopping, and passing messages to the system process in
3094- charge of remote batch processing.
3095-
3096- @ivar batchController: A reference to the L{ProcessController} for
3097- interacting with the batch process, if one exists. Otherwise C{None}.
3098- """
3099- implements(iaxiom.IBatchService)
3100-
3101- batchController = None
3102-
3103- def __init__(self, store):
3104- self.store = store
3105- self.setName("Batch Processing Controller")
3106-
3107-
3108- def startService(self):
3109- service.Service.startService(self)
3110- tacPath = util.sibpath(__file__, "batch.tac")
3111- proto = BatchProcessingProtocol()
3112- rundir = self.store.dbdir.child("run")
3113- logdir = rundir.child("logs")
3114- for d in rundir, logdir:
3115- try:
3116- d.createDirectory()
3117- except OSError:
3118- pass
3119- self.batchController = ProcessController(
3120- "batch", proto, tacPath,
3121- self._setStore, self._restartProcess,
3122- logdir.child("batch.log").path,
3123- rundir.child("batch.pid").path)
3124-
3125-
3126- def _setStore(self):
3127- return SetStore(storepath=self.store.dbdir).do(self.batchController.juice)
3128-
3129-
3130- def _restartProcess(self):
3131- reactor.callLater(1.0, self.batchController.getProcess)
3132-
3133-
3134- def stopService(self):
3135- service.Service.stopService(self)
3136- d = self.batchController.stopProcess()
3137- d.addErrback(lambda err: err.trap(error.ProcessDone))
3138- return d
3139-
3140-
3141- def call(self, itemMethod):
3142- """
3143- Invoke the given bound item method in the batch process.
3144-
3145- Return a Deferred which fires when the method has been invoked.
3146- """
3147- item = itemMethod.im_self
3148- method = itemMethod.im_func.func_name
3149- return self.batchController.getProcess().addCallback(
3150- CallItemMethod(storepath=item.store.dbdir,
3151- storeid=item.storeID,
3152- method=method).do)
3153-
3154-
3155- def start(self):
3156- if self.batchController is not None:
3157- self.batchController.getProcess()
3158-
3159-
3160- def suspend(self, storepath, storeID):
3161- return self.batchController.getProcess().addCallback(
3162- SuspendProcessor(storepath=storepath, storeid=storeID).do)
3163-
3164-
3165- def resume(self, storepath, storeID):
3166- return self.batchController.getProcess().addCallback(
3167- ResumeProcessor(storepath=storepath, storeid=storeID).do)
3168-
3169-
3170-
3171-class _SubStoreBatchChannel(object):
3172- """
3173- SubStore adapter for passing messages to the batch processing system
3174- process.
3175-
3176- SubStores are adaptable to L{iaxiom.IBatchService} via this adapter.
3177- """
3178- implements(iaxiom.IBatchService)
3179-
3180- def __init__(self, substore):
3181- self.storepath = substore.dbdir
3182- self.service = iaxiom.IBatchService(substore.parent)
3183-
3184-
3185- def call(self, itemMethod):
3186- return self.service.call(itemMethod)
3187-
3188-
3189- def start(self):
3190- self.service.start()
3191-
3192-
3193- def suspend(self, storeID):
3194- return self.service.suspend(self.storepath, storeID)
3195-
3196-
3197- def resume(self, storeID):
3198- return self.service.resume(self.storepath, storeID)
3199-
3200-
3201-
3202-def storeBatchServiceSpecialCase(st, pups):
3203- """
3204- Adapt a L{Store} to L{IBatchService}.
3205-
3206- If C{st} is a substore, return a simple wrapper that delegates to the site
3207- store's L{IBatchService} powerup. Return C{None} if C{st} has no
3208- L{BatchProcessingControllerService}.
3209- """
3210- if st.parent is not None:
3211- try:
3212- return _SubStoreBatchChannel(st)
3213- except TypeError:
3214- return None
3215- storeService = service.IService(st)
3216- try:
3217- return storeService.getServiceNamed("Batch Processing Controller")
3218- except KeyError:
3219- return None
3220-
3221-
3222-
3223-class BatchProcessingProtocol(JuiceChild):
3224- siteStore = None
3225-
3226- def __init__(self, service=None, issueGreeting=False):
3227- juice.Juice.__init__(self, issueGreeting)
3228- self.storepaths = []
3229- if service is not None:
3230- service.cooperator = cooperator.Cooperator()
3231- self.service = service
3232-
3233-
3234- def connectionLost(self, reason):
3235- # In the child process, we are a server. In the child process, we
3236- # don't want to keep running after we can't talk to the client anymore.
3237- if self.isServer:
3238- reactor.stop()
3239-
3240-
3241- def command_SET_STORE(self, storepath):
3242- from axiom import store
3243-
3244- assert self.siteStore is None
3245-
3246- self.siteStore = store.Store(storepath, debug=False)
3247- self.subStores = {}
3248- self.pollCall = task.LoopingCall(self._pollSubStores)
3249- self.pollCall.start(10.0)
3250-
3251- return {}
3252-
3253- command_SET_STORE.command = SetStore
3254-
3255-
3256- def command_SUSPEND_PROCESSOR(self, storepath, storeid):
3257- return self.subStores[storepath.path].suspend(storeid).addCallback(lambda ign: {})
3258- command_SUSPEND_PROCESSOR.command = SuspendProcessor
3259-
3260-
3261- def command_RESUME_PROCESSOR(self, storepath, storeid):
3262- return self.subStores[storepath.path].resume(storeid).addCallback(lambda ign: {})
3263- command_RESUME_PROCESSOR.command = ResumeProcessor
3264-
3265-
3266- def command_CALL_ITEM_METHOD(self, storepath, storeid, method):
3267- return self.subStores[storepath.path].call(storeid, method).addCallback(lambda ign: {})
3268- command_CALL_ITEM_METHOD.command = CallItemMethod
3269-
3270-
3271- def _pollSubStores(self):
3272- from axiom import store, substore
3273-
3274- # Any service which has encountered an error will have logged it and
3275- # then stopped. Prune those here, so that they are noticed as missing
3276- # below and re-added.
3277- for path, svc in self.subStores.items():
3278- if not svc.running:
3279- del self.subStores[path]
3280-
3281- try:
3282- paths = set([p.path for p in self.siteStore.query(substore.SubStore).getColumn("storepath")])
3283- except eaxiom.SQLError, e:
3284- # Generally, database is locked.
3285- log.msg("SubStore query failed with SQLError: %r" % (e,))
3286- except:
3287- # WTF?
3288- log.msg("SubStore query failed with bad error:")
3289- log.err()
3290- else:
3291- for removed in set(self.subStores) - paths:
3292- self.subStores[removed].disownServiceParent()
3293- del self.subStores[removed]
3294- if VERBOSE:
3295- log.msg("Removed SubStore " + removed)
3296- for added in paths - set(self.subStores):
3297- try:
3298- s = store.Store(added, debug=False)
3299- except eaxiom.SQLError, e:
3300- # Generally, database is locked.
3301- log.msg("Opening sub-Store failed with SQLError: %r" % (e,))
3302- except:
3303- log.msg("Opening sub-Store failed with bad error:")
3304- log.err()
3305- else:
3306- self.subStores[added] = BatchProcessingService(s, style=iaxiom.REMOTE)
3307- self.subStores[added].setServiceParent(self.service)
3308- if VERBOSE:
3309- log.msg("Added SubStore " + added)
3310-
3311-
3312-
3313-class BatchProcessingService(service.Service):
3314- """
3315- Steps over the L{iaxiom.IBatchProcessor} powerups for a single L{axiom.store.Store}.
3316- """
3317- def __init__(self, store, style=iaxiom.LOCAL):
3318- self.store = store
3319- self.style = style
3320- self.suspended = []
3321-
3322-
3323- def suspend(self, storeID):
3324- item = self.store.getItemByID(storeID)
3325- self.suspended.append(item)
3326- return item.suspend()
3327-
3328-
3329- def resume(self, storeID):
3330- item = self.store.getItemByID(storeID)
3331- self.suspended.remove(item)
3332- return item.resume()
3333-
3334-
3335- def call(self, storeID, methodName):
3336- return defer.maybeDeferred(getattr(self.store.getItemByID(storeID), methodName))
3337-
3338-
3339- def items(self):
3340- return self.store.powerupsFor(iaxiom.IBatchProcessor)
3341-
3342-
3343- def processWhileRunning(self):
3344- """
3345- Run tasks until stopService is called.
3346- """
3347- work = self.step()
3348- for result, more in work:
3349- yield result
3350- if not self.running:
3351- break
3352- if more:
3353- delay = 0.1
3354- else:
3355- delay = 10.0
3356- yield task.deferLater(reactor, delay, lambda: None)
3357-
3358-
3359- def step(self):
3360- while True:
3361- items = list(self.items())
3362-
3363- if VERBOSE:
3364- log.msg("Found %d processors for %s" % (len(items), self.store))
3365-
3366- ran = False
3367- more = False
3368- while items:
3369- ran = True
3370- item = items.pop()
3371- if VERBOSE:
3372- log.msg("Stepping processor %r (suspended is %r)" % (item, self.suspended))
3373- try:
3374- itemHasMore = item.store.transact(item.step, style=self.style, skip=self.suspended)
3375- except _ProcessingFailure, e:
3376- log.msg("%r failed while processing %r:" % (e.reliableListener, e.workUnit))
3377- log.err(e.failure)
3378- e.mark()
3379-
3380- # _Fuck_. /Fuck/. If user-code in or below (*fuck*)
3381- # item.step creates a Failure on any future iteration
3382- # (-Fuck-) of this loop, it will get a reference to this
3383- # exception instance, since it's in locals and Failures
3384- # extract and save locals (Aaarrrrggg). Get rid of this so
3385- # that doesn't happen. See also the definition of
3386- # _ProcessingFailure.__init__.
3387- e = None
3388- else:
3389- if itemHasMore:
3390- more = True
3391- yield None, bool(more or items)
3392- if not ran:
3393- yield None, more
3394-
3395-
3396- def startService(self):
3397- service.Service.startService(self)
3398- self.parent.cooperator.coiterate(self.processWhileRunning())
3399-
3400-
3401- def stopService(self):
3402- service.Service.stopService(self)
3403- self.store.close()
3404-
3405-
3406-
3407-class BatchManholePowerup(item.Item):
3408- """
3409- Previously, an L{IConchUser} powerup. This class is only still defined for
3410- schema compatibility. Any instances of it will be deleted by an upgrader.
3411- See #1001.
3412- """
3413- schemaVersion = 2
3414- unused = attributes.integer(
3415- doc="Satisfy Axiom requirement for at least one attribute")
3416-
3417-registerDeletionUpgrader(BatchManholePowerup, 1, 2)
3418
3419=== removed file 'Axiom/axiom/batch.tac'
3420--- Axiom/axiom/batch.tac 2006-04-27 14:22:50 +0000
3421+++ Axiom/axiom/batch.tac 1970-01-01 00:00:00 +0000
3422@@ -1,20 +0,0 @@
3423-# -*- test-case-name: axiom.test.test_batch -*-
3424-
3425-"""
3426-Application configuration for the batch sub-process.
3427-
3428-This process reads commands and sends responses via stdio using the JUICE
3429-protocol. When it's not doing that, it queries various databases for work to
3430-do, and then does it. The databases which it queries can be controlled by
3431-sending it messages.
3432-"""
3433-
3434-from twisted.application import service
3435-from twisted.internet import stdio
3436-
3437-from axiom import batch
3438-
3439-application = service.Application("Batch Processing App")
3440-svc = service.MultiService()
3441-svc.setServiceParent(application)
3442-stdio.StandardIO(batch.BatchProcessingProtocol(svc, True))
3443
3444=== removed directory 'Axiom/axiom/benchmarks'
3445=== removed file 'Axiom/axiom/benchmarks/benchmark_batchitemcreation.py'
3446--- Axiom/axiom/benchmarks/benchmark_batchitemcreation.py 2006-10-11 21:52:50 +0000
3447+++ Axiom/axiom/benchmarks/benchmark_batchitemcreation.py 1970-01-01 00:00:00 +0000
3448@@ -1,25 +0,0 @@
3449-
3450-"""
3451-Benchmark batch creation of a large number of simple Items in a transaction.
3452-"""
3453-
3454-from epsilon.scripts import benchmark
3455-
3456-from axiom.store import Store
3457-from axiom.item import Item
3458-from axiom.attributes import integer, text
3459-
3460-class AB(Item):
3461- a = integer()
3462- b = text()
3463-
3464-def main():
3465- s = Store("TEMPORARY.axiom")
3466- benchmark.start()
3467- rows = [(x, unicode(x)) for x in xrange(10000)]
3468- s.transact(lambda: s.batchInsert(AB, (AB.a, AB.b), rows))
3469- benchmark.stop()
3470-
3471-
3472-if __name__ == '__main__':
3473- main()
3474
3475=== removed file 'Axiom/axiom/benchmarks/benchmark_batchitemdeletion.py'
3476--- Axiom/axiom/benchmarks/benchmark_batchitemdeletion.py 2006-10-11 21:52:50 +0000
3477+++ Axiom/axiom/benchmarks/benchmark_batchitemdeletion.py 1970-01-01 00:00:00 +0000
3478@@ -1,27 +0,0 @@
3479-
3480-"""
3481-Benchmark batch creation of a large number of simple Items in a transaction.
3482-"""
3483-
3484-from epsilon.scripts import benchmark
3485-
3486-from axiom.store import Store
3487-from axiom.item import Item
3488-from axiom.attributes import integer, text
3489-
3490-class AB(Item):
3491- a = integer()
3492- b = text()
3493-
3494-def main():
3495- s = Store("TEMPORARY.axiom")
3496- rows = [(x, unicode(x)) for x in xrange(10000)]
3497- s.transact(lambda: s.batchInsert(AB, (AB.a, AB.b), rows))
3498-
3499- benchmark.start()
3500- s.transact(s.query(AB).deleteFromStore)
3501- benchmark.stop()
3502-
3503-
3504-if __name__ == '__main__':
3505- main()
3506
3507=== removed file 'Axiom/axiom/benchmarks/benchmark_itemcreation.py'
3508--- Axiom/axiom/benchmarks/benchmark_itemcreation.py 2006-06-01 15:53:37 +0000
3509+++ Axiom/axiom/benchmarks/benchmark_itemcreation.py 1970-01-01 00:00:00 +0000
3510@@ -1,28 +0,0 @@
3511-
3512-"""
3513-Benchmark creation of a large number of simple Items in a transaction.
3514-"""
3515-
3516-from epsilon.scripts import benchmark
3517-
3518-from axiom.store import Store
3519-from axiom.item import Item
3520-from axiom.attributes import integer, text
3521-
3522-class AB(Item):
3523- a = integer()
3524- b = text()
3525-
3526-def main():
3527- s = Store("TEMPORARY.axiom")
3528- def txn():
3529- for x in range(10000):
3530- AB(a=x, b=unicode(x), store=s)
3531-
3532- benchmark.start()
3533- s.transact(txn)
3534- benchmark.stop()
3535-
3536-
3537-if __name__ == '__main__':
3538- main()
3539
3540=== removed file 'Axiom/axiom/benchmarks/benchmark_itemdeletion.py'
3541--- Axiom/axiom/benchmarks/benchmark_itemdeletion.py 2006-10-11 21:52:50 +0000
3542+++ Axiom/axiom/benchmarks/benchmark_itemdeletion.py 1970-01-01 00:00:00 +0000
3543@@ -1,29 +0,0 @@
3544-
3545-"""
3546-Benchmark batch creation of a large number of simple Items in a transaction.
3547-"""
3548-
3549-from epsilon.scripts import benchmark
3550-
3551-from axiom.store import Store
3552-from axiom.item import Item
3553-from axiom.attributes import integer, text
3554-
3555-class AB(Item):
3556- a = integer()
3557- b = text()
3558-
3559-def main():
3560- s = Store("TEMPORARY.axiom")
3561- rows = [(x, unicode(x)) for x in xrange(10000)]
3562- s.transact(lambda: s.batchInsert(AB, (AB.a, AB.b), rows))
3563- def deleteStuff():
3564- for it in s.query(AB):
3565- it.deleteFromStore()
3566- benchmark.start()
3567- s.transact(deleteStuff)
3568- benchmark.stop()
3569-
3570-
3571-if __name__ == '__main__':
3572- main()
3573
3574=== removed file 'Axiom/axiom/benchmarks/benchmark_tagnames.py'
3575--- Axiom/axiom/benchmarks/benchmark_tagnames.py 2006-06-01 15:53:37 +0000
3576+++ Axiom/axiom/benchmarks/benchmark_tagnames.py 1970-01-01 00:00:00 +0000
3577@@ -1,43 +0,0 @@
3578-
3579-"""
3580-Benchmark the tagNames method of L{axiom.tags.Catalog}
3581-"""
3582-
3583-import time, sys
3584-
3585-from epsilon.scripts import benchmark
3586-
3587-from axiom import store, item, attributes, tags
3588-
3589-N_TAGS = 20
3590-N_COPIES = 5000
3591-N_LOOPS = 1000
3592-
3593-class TaggedObject(item.Item):
3594- name = attributes.text()
3595-
3596-
3597-
3598-def main():
3599- s = store.Store("tags.axiom")
3600- c = tags.Catalog(store=s)
3601- o = TaggedObject(store=s)
3602-
3603- def tagObjects(tag, copies):
3604- for x in xrange(copies):
3605- c.tag(o, tag)
3606- for i in xrange(N_TAGS):
3607- s.transact(tagObjects, unicode(i), N_COPIES)
3608-
3609- def getTags():
3610- for i in xrange(N_LOOPS):
3611- list(c.tagNames())
3612-
3613- benchmark.start()
3614- s.transact(getTags)
3615- benchmark.stop()
3616-
3617-
3618-
3619-if __name__ == '__main__':
3620- main()
3621
3622=== removed file 'Axiom/axiom/benchmarks/benchmark_tagsof.py'
3623--- Axiom/axiom/benchmarks/benchmark_tagsof.py 2006-06-01 15:53:37 +0000
3624+++ Axiom/axiom/benchmarks/benchmark_tagsof.py 1970-01-01 00:00:00 +0000
3625@@ -1,48 +0,0 @@
3626-
3627-"""
3628-Benchmark the tagsOf method of L{axiom.tags.Catalog}
3629-"""
3630-
3631-import time, sys
3632-
3633-from epsilon.scripts import benchmark
3634-
3635-from axiom import store, item, attributes, tags
3636-
3637-N = 30
3638-
3639-class TaggedObject(item.Item):
3640- name = attributes.text()
3641-
3642-
3643-
3644-def main():
3645- s = store.Store("tags.axiom")
3646- c = tags.Catalog(store=s)
3647-
3648- objects = []
3649- def createObjects():
3650- for x in xrange(N):
3651- objects.append(TaggedObject(store=s))
3652- s.transact(createObjects)
3653-
3654- def tagObjects():
3655- for o in objects:
3656- for x in xrange(N):
3657- c.tag(o, unicode(x))
3658- s.transact(tagObjects)
3659-
3660- def getTags():
3661- for i in xrange(N):
3662- for o in objects:
3663- for t in c.tagsOf(o):
3664- pass
3665-
3666- benchmark.start()
3667- s.transact(getTags)
3668- benchmark.stop()
3669-
3670-
3671-
3672-if __name__ == '__main__':
3673- main()
3674
3675=== removed file 'Axiom/axiom/benchmarks/testbase.py'
3676--- Axiom/axiom/benchmarks/testbase.py 2010-04-03 12:38:34 +0000
3677+++ Axiom/axiom/benchmarks/testbase.py 1970-01-01 00:00:00 +0000
3678@@ -1,5 +0,0 @@
3679-
3680-from axiom._pysqlite2 import Connection
3681-
3682-con = Connection.fromDatabaseName("test.sqlite")
3683-cur = con.cursor()
3684
3685=== removed file 'Axiom/axiom/benchmarks/testindex.py'
3686--- Axiom/axiom/benchmarks/testindex.py 2005-10-28 22:06:23 +0000
3687+++ Axiom/axiom/benchmarks/testindex.py 1970-01-01 00:00:00 +0000
3688@@ -1,5 +0,0 @@
3689-
3690-from testbase import cur
3691-
3692-cur.execute('create index foo_bar_idx on foo(bar)')
3693-cur.commit()
3694
3695=== removed file 'Axiom/axiom/benchmarks/testinit.py'
3696--- Axiom/axiom/benchmarks/testinit.py 2005-07-28 22:09:16 +0000
3697+++ Axiom/axiom/benchmarks/testinit.py 1970-01-01 00:00:00 +0000
3698@@ -1,10 +0,0 @@
3699-
3700-from testbase import con, cur
3701-
3702-cur.execute("create table foo (bar int, baz varchar)")
3703-
3704-for x in range(500):
3705- cur.execute("insert into foo values (?, ?)",
3706- (x, "string-value-of-"+str(x)))
3707-
3708-con.commit()
3709
3710=== removed file 'Axiom/axiom/benchmarks/testreader.py'
3711--- Axiom/axiom/benchmarks/testreader.py 2005-10-28 22:06:23 +0000
3712+++ Axiom/axiom/benchmarks/testreader.py 1970-01-01 00:00:00 +0000
3713@@ -1,10 +0,0 @@
3714-
3715-import itertools
3716-import time
3717-
3718-from testbase import cur
3719-
3720-for num in itertools.count():
3721- cur.execute("select * from foo")
3722- foovals = cur.fetchall()
3723- print num, 'I fetched', len(foovals), 'values.', time.ctime()
3724
3725=== removed file 'Axiom/axiom/benchmarks/testwriter.py'
3726--- Axiom/axiom/benchmarks/testwriter.py 2005-07-28 22:09:16 +0000
3727+++ Axiom/axiom/benchmarks/testwriter.py 1970-01-01 00:00:00 +0000
3728@@ -1,13 +0,0 @@
3729-
3730-import time
3731-import itertools
3732-
3733-from testbase import con, cur
3734-BATCH = 500
3735-for num in itertools.count():
3736- for x in range(BATCH):
3737- n = (num * BATCH) + x
3738- cur.execute("insert into foo values (?, ?)",
3739- (n, "string-value-of-"+str(n)))
3740- con.commit()
3741- print num, 'write pass complete', time.ctime()
3742
3743=== removed file 'Axiom/axiom/dependency.py'
3744--- Axiom/axiom/dependency.py 2008-08-13 02:55:58 +0000
3745+++ Axiom/axiom/dependency.py 1970-01-01 00:00:00 +0000
3746@@ -1,289 +0,0 @@
3747-# Copright 2008 Divmod, Inc. See LICENSE file for details.
3748-# -*- test-case-name: axiom.test.test_dependency -*-
3749-"""
3750-A dependency management system for items.
3751-"""
3752-
3753-import sys, itertools
3754-
3755-from zope.interface.advice import addClassAdvisor
3756-
3757-from epsilon.structlike import record
3758-
3759-from axiom.item import Item
3760-from axiom.attributes import reference, boolean, AND
3761-from axiom.errors import ItemNotFound, DependencyError, UnsatisfiedRequirement
3762-
3763-#There is probably a cleaner way to do this.
3764-_globalDependencyMap = {}
3765-
3766-def dependentsOf(cls):
3767- deps = _globalDependencyMap.get(cls, None)
3768- if deps is None:
3769- return []
3770- else:
3771- return [d[0] for d in deps]
3772-
3773-##Totally ripping off z.i
3774-
3775-def dependsOn(itemType, itemCustomizer=None, doc='',
3776- indexed=True, whenDeleted=reference.NULLIFY):
3777- """
3778- This function behaves like L{axiom.attributes.reference} but with
3779- an extra behaviour: when this item is installed (via
3780- L{axiom.dependency.installOn} on a target item, the
3781- type named here will be instantiated and installed on the target
3782- as well.
3783-
3784- For example::
3785-
3786- class Foo(Item):
3787- counter = integer()
3788- thingIDependOn = dependsOn(Baz, lambda baz: baz.setup())
3789-
3790- @param itemType: The Item class to instantiate and install.
3791- @param itemCustomizer: A callable that accepts the item installed
3792- as a dependency as its first argument. It will be called only if
3793- an item is created to satisfy this dependency.
3794-
3795- @return: An L{axiom.attributes.reference} instance.
3796- """
3797-
3798- frame = sys._getframe(1)
3799- locals = frame.f_locals
3800-
3801- # Try to make sure we were called from a class def.
3802- if (locals is frame.f_globals) or ('__module__' not in locals):
3803- raise TypeError("dependsOn can be used only from a class definition.")
3804- ref = reference(reftype=itemType, doc=doc, indexed=indexed, allowNone=True,
3805- whenDeleted=whenDeleted)
3806- if "__dependsOn_advice_data__" not in locals:
3807- addClassAdvisor(_dependsOn_advice)
3808- locals.setdefault('__dependsOn_advice_data__', []).append(
3809- (itemType, itemCustomizer, ref))
3810- return ref
3811-
3812-def _dependsOn_advice(cls):
3813- if cls in _globalDependencyMap:
3814- print "Double advising of %s. dependency map from first time: %s" % (
3815- cls, _globalDependencyMap[cls])
3816- #bail if we end up here twice, somehow
3817- return cls
3818- for itemType, itemCustomizer, ref in cls.__dict__[
3819- '__dependsOn_advice_data__']:
3820- classDependsOn(cls, itemType, itemCustomizer, ref)
3821- del cls.__dependsOn_advice_data__
3822- return cls
3823-
3824-def classDependsOn(cls, itemType, itemCustomizer, ref):
3825- _globalDependencyMap.setdefault(cls, []).append(
3826- (itemType, itemCustomizer, ref))
3827-
3828-class _DependencyConnector(Item):
3829- """
3830- I am a connector between installed items and their targets.
3831- """
3832- installee = reference(doc="The item installed.")
3833- target = reference(doc="The item installed upon.")
3834- explicitlyInstalled = boolean(doc="Whether this item was installed"
3835- "explicitly (and thus whether or not it"
3836- "should be automatically uninstalled when"
3837- "nothing depends on it)")
3838-
3839-
3840-def installOn(self, target):
3841- """
3842- Install this object on the target along with any powerup
3843- interfaces it declares. Also track that the object now depends on
3844- the target, and the object was explicitly installed (and therefore
3845- should not be uninstalled by subsequent uninstallation operations
3846- unless it is explicitly removed).
3847- """
3848- _installOn(self, target, True)
3849-
3850-
3851-def _installOn(self, target, __explicitlyInstalled=False):
3852- depBlob = _globalDependencyMap.get(self.__class__, [])
3853- dependencies, itemCustomizers, refs = (map(list, zip(*depBlob))
3854- or ([], [], []))
3855- #See if any of our dependencies have been installed already
3856- for dc in self.store.query(_DependencyConnector,
3857- _DependencyConnector.target == target):
3858- if dc.installee.__class__ in dependencies:
3859- i = dependencies.index(dc.installee.__class__)
3860- refs[i].__set__(self, dc.installee)
3861- del dependencies[i], itemCustomizers[i], refs[i]
3862- if (dc.installee.__class__ == self.__class__
3863- and self.__class__ in set(
3864- itertools.chain([blob[0][0] for blob in
3865- _globalDependencyMap.values()]))):
3866- #Somebody got here before we did... let's punt
3867- raise DependencyError("An instance of %r is already "
3868- "installed on %r." % (self.__class__,
3869- target))
3870- #The rest we'll install
3871- for i, cls in enumerate(dependencies):
3872- it = cls(store=self.store)
3873- if itemCustomizers[i] is not None:
3874- itemCustomizers[i](it)
3875- _installOn(it, target, False)
3876- refs[i].__set__(self, it)
3877- #And now the connector for our own dependency.
3878-
3879- dc = self.store.findUnique(
3880- _DependencyConnector,
3881- AND(_DependencyConnector.target==target,
3882- _DependencyConnector.installee==self,
3883- _DependencyConnector.explicitlyInstalled==__explicitlyInstalled),
3884- None)
3885- assert dc is None, "Dependency connector already exists, wtf are you doing?"
3886- _DependencyConnector(store=self.store, target=target,
3887- installee=self,
3888- explicitlyInstalled=__explicitlyInstalled)
3889-
3890- target.powerUp(self)
3891-
3892- callback = getattr(self, "installed", None)
3893- if callback is not None:
3894- callback()
3895-
3896-def uninstallFrom(self, target):
3897- """
3898- Remove this object from the target, as well as any dependencies
3899- that it automatically installed which were not explicitly
3900- "pinned" by calling "install", and raising an exception if
3901- anything still depends on this.
3902- """
3903-
3904- #did this class powerup on any interfaces? powerdown if so.
3905- target.powerDown(self)
3906-
3907-
3908- for dc in self.store.query(_DependencyConnector,
3909- _DependencyConnector.target==target):
3910- if dc.installee is self:
3911- dc.deleteFromStore()
3912-
3913- for item in installedUniqueRequirements(self, target):
3914- uninstallFrom(item, target)
3915-
3916- callback = getattr(self, "uninstalled", None)
3917- if callback is not None:
3918- callback()
3919-
3920-def installedOn(self):
3921- """
3922- If this item is installed on another item, return the install
3923- target. Otherwise return None.
3924- """
3925- try:
3926- return self.store.findUnique(_DependencyConnector,
3927- _DependencyConnector.installee == self
3928- ).target
3929- except ItemNotFound:
3930- return None
3931-
3932-
3933-def installedDependents(self, target):
3934- """
3935- Return an iterable of things installed on the target that
3936- require this item.
3937- """
3938- for dc in self.store.query(_DependencyConnector,
3939- _DependencyConnector.target == target):
3940- depends = dependentsOf(dc.installee.__class__)
3941- if self.__class__ in depends:
3942- yield dc.installee
3943-
3944-def installedUniqueRequirements(self, target):
3945- """
3946- Return an iterable of things installed on the target that this item
3947- requires and are not required by anything else.
3948- """
3949-
3950- myDepends = dependentsOf(self.__class__)
3951- #XXX optimize?
3952- for dc in self.store.query(_DependencyConnector,
3953- _DependencyConnector.target==target):
3954- if dc.installee is self:
3955- #we're checking all the others not ourself
3956- continue
3957- depends = dependentsOf(dc.installee.__class__)
3958- if self.__class__ in depends:
3959- raise DependencyError(
3960- "%r cannot be uninstalled from %r, "
3961- "%r still depends on it" % (self, target, dc.installee))
3962-
3963- for cls in myDepends[:]:
3964- #If one of my dependencies is required by somebody
3965- #else, leave it alone
3966- if cls in depends:
3967- myDepends.remove(cls)
3968-
3969- for dc in self.store.query(_DependencyConnector,
3970- _DependencyConnector.target==target):
3971- if (dc.installee.__class__ in myDepends
3972- and not dc.explicitlyInstalled):
3973- yield dc.installee
3974-
3975-def installedRequirements(self, target):
3976- """
3977- Return an iterable of things installed on the target that this
3978- item requires.
3979- """
3980- myDepends = dependentsOf(self.__class__)
3981- for dc in self.store.query(_DependencyConnector,
3982- _DependencyConnector.target == target):
3983- if dc.installee.__class__ in myDepends:
3984- yield dc.installee
3985-
3986-
3987-
3988-def onlyInstallPowerups(self, target):
3989- """
3990- Deprecated - L{Item.powerUp} now has this functionality.
3991- """
3992- target.powerUp(self)
3993-
3994-
3995-
3996-class requiresFromSite(
3997- record('powerupInterface defaultFactory siteDefaultFactory',
3998- defaultFactory=None,
3999- siteDefaultFactory=None)):
4000- """
4001- A read-only descriptor that will return the site store's powerup for a
4002- given item.
4003-
4004- @ivar powerupInterface: an L{Interface} describing the powerup that the
4005- site store should be adapted to.
4006-
4007- @ivar defaultFactory: a 1-argument callable that takes the site store and
4008- returns a value for this descriptor. This is invoked in cases where the
4009- site store does not provide a default factory of its own, and this
4010- descriptor is retrieved from an item in a store with a parent.
4011-
4012- @ivar siteDefaultFactory: a 1-argument callable that takes the site store
4013- and returns a value for this descriptor. This is invoked in cases where
4014- this descriptor is retrieved from an item in a store without a parent.
4015- """
4016-
4017- def _invokeFactory(self, defaultFactory, siteStore):
4018- if defaultFactory is None:
4019- raise UnsatisfiedRequirement()
4020- return defaultFactory(siteStore)
4021-
4022-
4023- def __get__(self, oself, type=None):
4024- """
4025- Retrieve the value of this dependency from the site store.
4026- """
4027- siteStore = oself.store.parent
4028- if siteStore is not None:
4029- pi = self.powerupInterface(siteStore, None)
4030- if pi is None:
4031- pi = self._invokeFactory(self.defaultFactory, siteStore)
4032- else:
4033- pi = self._invokeFactory(self.siteDefaultFactory, oself.store)
4034- return pi
4035-
4036
4037=== removed file 'Axiom/axiom/errors.py'
4038--- Axiom/axiom/errors.py 2009-01-02 14:21:43 +0000
4039+++ Axiom/axiom/errors.py 1970-01-01 00:00:00 +0000
4040@@ -1,193 +0,0 @@
4041-# -*- test-case-name: axiom.test -*-
4042-
4043-from twisted.cred.error import UnauthorizedLogin
4044-
4045-
4046-class TimeoutError(Exception):
4047- """
4048- A low-level SQL operation timed out.
4049-
4050- @ivar statement: The SQL statement which timed out.
4051- @ivar timeout: The timeout, in seconds, which was exceeded.
4052- @ivar underlying: The backend exception which signaled this, or None.
4053- """
4054- def __init__(self, statement, timeout, underlying):
4055- Exception.__init__(self, statement, timeout, underlying)
4056- self.statement = statement
4057- self.timeout = timeout
4058- self.underlying = underlying
4059-
4060-
4061-
4062-class BadCredentials(UnauthorizedLogin):
4063- pass
4064-
4065-
4066-
4067-class NoSuchUser(UnauthorizedLogin):
4068- pass
4069-
4070-
4071-
4072-class MissingDomainPart(NoSuchUser):
4073- """
4074- Raised when a login is attempted with a username which consists of only
4075- a local part. For example, "testuser" instead of "testuser@example.com".
4076- """
4077-
4078-
4079-class DuplicateUser(Exception):
4080- pass
4081-
4082-
4083-
4084-class CannotOpenStore(RuntimeError):
4085- """
4086- There is a problem such that the store cannot be opened.
4087- """
4088-
4089-
4090-
4091-class NoUpgradePathAvailable(CannotOpenStore):
4092- """
4093- No upgrade path is available, so the store cannot be opened.
4094- """
4095-
4096-
4097-
4098-class NoCrossStoreReferences(AttributeError):
4099- """
4100- References are not allowed between items within different Stores.
4101- """
4102-
4103-
4104-
4105-class SQLError(RuntimeError):
4106- """
4107- Axiom internally generated some bad SQL.
4108- """
4109- def __init__(self, sql, args, underlying):
4110- RuntimeError.__init__(self, sql, args, underlying)
4111- self.sql, self.args, self.underlying = self.args
4112-
4113- def __str__(self):
4114- return "<SQLError: %r(%r) caused %s: %s>" % (
4115- self.sql, self.args,
4116- self.underlying.__class__, self.underlying)
4117-
4118-
4119-class TableAlreadyExists(SQLError):
4120- """
4121- Axiom internally created a table at the same time as another database.
4122- (User code should not need to catch this exception.)
4123- """
4124-
4125-
4126-
4127-class UnknownItemType(Exception):
4128- """
4129- Can't load an item: it's of a type that I don't see anywhere in Python.
4130- """
4131-
4132-
4133-
4134-class SQLWarning(Warning):
4135- """
4136- Axiom internally generated some CREATE TABLE SQL that ... probably wasn't bad
4137- """
4138-
4139-
4140-
4141-class TableCreationConcurrencyError(RuntimeError):
4142- """
4143- Woah, this is really bad. If you can get this please tell us how.
4144- """
4145-
4146-
4147-
4148-class DuplicateUniqueItem(KeyError):
4149- """
4150- Found 2 or more of an item which is supposed to be unique.
4151- """
4152-
4153-
4154-
4155-class ItemNotFound(KeyError):
4156- """
4157- Did not find even 1 of an item which was supposed to exist.
4158- """
4159-
4160-
4161-
4162-class ItemClassesOnly(TypeError):
4163- """
4164- An object was passed to a method that wasn't a subclass of Item.
4165- """
4166-
4167-
4168-class ChangeRejected(Exception):
4169- """
4170- Raised when an attempt is made to change the database at a time when
4171- database changes are disallowed for reasons of consistency.
4172-
4173- This is raised when an application-level callback (for example, committed)
4174- attempts to change database state.
4175- """
4176-
4177-class DependencyError(Exception):
4178- """
4179- Raised when an item can't be installed or uninstalled.
4180- """
4181-
4182-class DeletionDisallowed(ValueError):
4183- """
4184- Raised when an attempt is made to delete an item that is referred to by
4185- reference attributes with whenDeleted == DISALLOW.
4186- """
4187-
4188-class DataIntegrityError(RuntimeError):
4189- """
4190- Data integrity seems to have been lost.
4191- """
4192-
4193-
4194-
4195-class BrokenReference(DataIntegrityError):
4196- """
4197- A reference to a nonexistent item was detected when this should be
4198- impossible.
4199- """
4200-
4201-
4202-
4203-class UpgraderRecursion(RuntimeError):
4204- """
4205- Upgraders are not allowed to recurse.
4206- """
4207-
4208-
4209-
4210-class ItemUpgradeError(RuntimeError):
4211- """
4212- Attempting to upgrade an Item resulted in an error.
4213-
4214- @ivar originalFailure: The failure that caused the item upgrade to fail
4215- @ivar storeID: Store ID of the item that failed to upgrade
4216- @ivar oldType: The type of the item being upgraded
4217- @ivar newType: The type the item should've been upgraded to
4218- """
4219- def __init__(self, originalFailure, storeID, oldType, newType):
4220- RuntimeError.__init__(self, originalFailure, storeID, oldType, newType)
4221- self.originalFailure = originalFailure
4222- self.storeID = storeID
4223- self.oldType = oldType
4224- self.newType = newType
4225-
4226-
4227-
4228-class UnsatisfiedRequirement(AttributeError):
4229- """
4230- A requirement described by a L{axiom.dependency.requiresFromSite} was not
4231- satisfied by the database, and could not be satisfied automatically at
4232- runtime by a default factory.
4233- """
4234
4235=== removed directory 'Axiom/axiom/examples'
4236=== removed file 'Axiom/axiom/examples/bucket.py'
4237--- Axiom/axiom/examples/bucket.py 2005-07-28 22:09:16 +0000
4238+++ Axiom/axiom/examples/bucket.py 1970-01-01 00:00:00 +0000
4239@@ -1,52 +0,0 @@
4240-from axiom import item, attributes
4241-
4242-class Bucket(item.Item):
4243- typeName = 'bucket'
4244- schemaVersion = 1
4245-
4246- name = attributes.text()
4247-
4248- def getstuff(self):
4249- for food in self.store.query(FoodItem,
4250- FoodItem.bucket == self,
4251- sort=FoodItem.deliciousness.descending):
4252- food.extra.what()
4253-
4254-
4255-class FoodItem(item.Item):
4256- typeName = 'food'
4257- schemaVersion = 1
4258-
4259- bucket = attributes.reference()
4260- extra = attributes.reference()
4261- deliciousness = attributes.integer(indexed=True)
4262-
4263-class Chicken(item.Item):
4264- typeName = 'chicken'
4265- schemaVersion = 1
4266-
4267- epistemologicalBasisForCrossingTheRoad = attributes.text()
4268- def what(self):
4269- print 'chicken!'
4270-
4271-class Biscuit(item.Item):
4272- typeName = 'biscuit'
4273- schemaVersion = 1
4274-
4275- fluffiness = attributes.integer()
4276- def what(self):
4277- print 'biscuits!'
4278-
4279-
4280-from axiom.store import Store
4281-
4282-s = Store()
4283-
4284-u = Bucket(name=u'whatever', store=s)
4285-c = Chicken(epistemologicalBasisForCrossingTheRoad=u'extropian', store=s)
4286-b = Biscuit(fluffiness=100, store=s)
4287-
4288-FoodItem(store=s, deliciousness=3, extra=c, bucket=u)
4289-FoodItem(store=s, deliciousness=4, extra=b, bucket=u)
4290-
4291-u.getstuff()
4292
4293=== removed file 'Axiom/axiom/examples/library.py'
4294--- Axiom/axiom/examples/library.py 2005-09-10 21:18:46 +0000
4295+++ Axiom/axiom/examples/library.py 1970-01-01 00:00:00 +0000
4296@@ -1,114 +0,0 @@
4297-
4298-import random
4299-
4300-from axiom.item import Item
4301-from axiom.attributes import text, timestamp, reference, integer, AND, OR
4302-from axiom.store import Store
4303-from epsilon import extime
4304-
4305-_d = extime.Time.fromISO8601TimeAndDate
4306-
4307-_books = [
4308- (u'Heart of Darkness', u'Joseph Conrad', u'0486264645', 80, _d('1990-07-01T00:00:00.000001')),
4309- (u'The Dark Tower, Book 7', u'Stephen King', u'1880418622', 864, _d('2004-11-21T00:00:00.000001')),
4310- (u'Guns, Germs, and Steel: The Fates of Human Societies', u'Jared Diamond', u'0393317552', 480, _d('1999-04-01T00:00:00.000001')),
4311- (u'The Lions of al-Rassan', u'Guy Gavriel Kay', u'0060733497', 528, _d('2005-06-28T00:00:00.000001')),
4312- ]
4313-
4314-_borrowers = [u'Anne', u'Bob', u'Carol', u'Dave']
4315-
4316-
4317-class Borrower(Item):
4318- typeName = 'borrower'
4319- schemaVersion = 1
4320- name = text(indexed=True)
4321-
4322-class Book(Item):
4323- typeName = 'book'
4324- schemaVersion = 1
4325-
4326- title = text()
4327- author = text()
4328- isbn = text()
4329- pages = integer()
4330- datePublished = timestamp()
4331-
4332- lentTo = reference()
4333- library = reference()
4334-
4335-class LendingLibrary(Item):
4336- typeName = 'lending_library'
4337- schemaVersion = 1
4338-
4339- name = text()
4340-
4341- def books(self):
4342- return self.store.query(Book,
4343- Book.library == self)
4344-
4345- def getBorrower(self, name):
4346- for b in self.store.query(Borrower,
4347- Borrower.name == name):
4348- return b
4349- b = Borrower(name=name,
4350- store=self.store)
4351- return b
4352-
4353- def initialize(self):
4354- for title, author, isbn, pages, published in _books:
4355- b = Book(
4356- title=title,
4357- author=author,
4358- isbn=isbn,
4359- pages=pages,
4360- datePublished=published,
4361- library=self,
4362- store=self.store)
4363-
4364-
4365- def displayBooks(self):
4366- for book in self.books():
4367- print book.title,
4368- if book.lentTo is not None:
4369- print 'lent to', '['+book.lentTo.name+']'
4370- else:
4371- print 'in library'
4372-
4373- def shuffleLending(self):
4374- for book in self.books():
4375- if book.lentTo is not None:
4376- print book.lentTo.name, 'returned', book.title
4377- book.lentTo = None
4378- for book in self.books():
4379- if random.choice([True, False]):
4380- borrower = random.choice(_borrowers)
4381- print 'Lending', book.title, 'to', borrower
4382- book.lentTo = self.getBorrower(borrower)
4383-
4384-def main(s):
4385- for ll in s.query(LendingLibrary):
4386- print 'found existing library'
4387- break
4388- else:
4389- print 'creating new library'
4390- ll = LendingLibrary(store=s)
4391- ll.initialize()
4392- ll.displayBooks()
4393- print '***'
4394- ll.shuffleLending()
4395- print '---'
4396- ll.displayBooks()
4397- print '***'
4398- ll.shuffleLending()
4399- print '---'
4400-
4401- print s.count(Book, AND (Book.author == u'Stephen King',
4402- Book.title == u'The Lions of al-Rassan'))
4403- print s.count(Book, OR (Book.author == u'Stephen King',
4404- Book.title == u'The Lions of al-Rassan'))
4405-
4406-
4407-if __name__ == '__main__':
4408- s = Store('testdb')
4409- s.transact(main, s)
4410- s.close()
4411
4412=== removed file 'Axiom/axiom/iaxiom.py'
4413--- Axiom/axiom/iaxiom.py 2010-07-18 17:44:38 +0000
4414+++ Axiom/axiom/iaxiom.py 1970-01-01 00:00:00 +0000
4415@@ -1,363 +0,0 @@
4416-
4417-from zope.interface import Interface, Attribute
4418-
4419-
4420-class IStatEvent(Interface):
4421- """
4422- Marker for a log message that is useful as a statistic.
4423-
4424- Log messages with 'interface' set to this class will be made available to
4425- external observers. This is useful for tracking the rate of events such as
4426- page views.
4427- """
4428-
4429-
4430-class IAtomicFile(Interface):
4431- def __init__(tempname, destdir):
4432- """Create a new atomic file.
4433-
4434- The file will exist temporarily at C{tempname} and be relocated to
4435- C{destdir} when it is closed.
4436- """
4437-
4438- def tell():
4439- """Return the current offset into the file, in bytes.
4440- """
4441-
4442- def write(bytes):
4443- """Write some bytes to this file.
4444- """
4445-
4446- def close(callback):
4447- """Close this file. Move it to its final location.
4448-
4449- @param callback: A no-argument callable which will be invoked
4450- when this file is ready to be moved to its final location. It
4451- must return the segment of the path relative to per-user
4452- storage of the owner of this file. Alternatively, a string
4453- with semantics the same as those previously described for the
4454- return value of the callable.
4455-
4456- @rtype: C{axiom.store.StoreRelativePath}
4457- @return: A Deferred which fires with the full path to the file
4458- when it has been closed, or which fails if there is some error
4459- closing the file.
4460- """
4461-
4462- def abort():
4463- """Give up on this file. Discard its contents.
4464- """
4465-
4466-
4467-class IAxiomaticCommand(Interface):
4468- """
4469- Subcommand for 'axiomatic' and 'tell-axiom' command line programs.
4470-
4471- Should subclass twisted.python.usage.Options and provide a command to run.
4472-
4473- '.parent' attribute will be set to an object with a getStore method.
4474- """
4475-
4476- name = Attribute("""
4477- """)
4478-
4479- description = Attribute("""
4480- """)
4481-
4482-
4483-
4484-class IBeneficiary(Interface):
4485- """
4486- Interface to adapt to when looking for an appropriate application-level
4487- object to install powerups on.
4488- """
4489-
4490- def powerUp(implementor, interface):
4491- """ Install a powerup on this object. There is not necessarily any inverse
4492- powerupsFor on a beneficiary, although there may be; installations may
4493- be forwarded to a different implementation object, or deferred.
4494- """
4495-
4496-class IPowerupIndirector(Interface):
4497- """
4498- Implement this interface if you want to change what is returned from
4499- powerupsFor for a particular interface.
4500- """
4501-
4502- def indirect(interface):
4503- """
4504- When an item which implements IPowerupIndirector is returned from a
4505- powerupsFor query, this method will be called on it to give it the
4506- opportunity to return something other than itself from powerupsFor.
4507-
4508- @param interface: the interface passed to powerupsFor
4509- @type interface: L{zope.interface.Interface}
4510- """
4511-
4512-
4513-
4514-class IScheduler(Interface):
4515- """
4516- An interface for scheduling tasks. Quite often the store will be adaptable
4517- to this; in any Mantissa application, for example; so it is reasonable to
4518- assume that it is if your application needs to schedule timed events or
4519- queue tasks.
4520- """
4521- def schedule(runnable, when):
4522- """
4523- @param runnable: any Item with a 'run' method.
4524-
4525- @param when: a Time instance describing when the runnable's run()
4526- method will be called. See extime.Time's documentation for more
4527- details.
4528- """
4529-
4530-
4531-
4532-class IQuery(Interface):
4533- """
4534- An object that represents a query that can be performed against a database.
4535- """
4536-
4537- limit = Attribute(
4538- """
4539- An integer representing the maximum number of rows to be returned from
4540- this query, or None, if the query is unlimited.
4541- """)
4542-
4543- store = Attribute(
4544- """
4545- The Axiom store that this query will return results from.
4546- """)
4547-
4548- def __iter__():
4549- """
4550- Retrieve an iterator for the results of this query.
4551-
4552- The query is performed whenever this is called.
4553- """
4554-
4555-
4556- def count():
4557- """
4558- Return the number of results in this query.
4559-
4560- NOTE: In most cases, this will have to load all of the rows in this
4561- query. It is therefore very slow and should generally be considered
4562- discouraged. Call with caution!
4563- """
4564-
4565-
4566- def cloneQuery(limit):
4567- """
4568- Create a similar-but-not-identical copy of this query with certain
4569- attributes changed.
4570-
4571- (Currently this only supports the manipulation of the "limit"
4572- parameter, but it is the intent that with a richer query-introspection
4573- interface, this signature could be expanded to support many different
4574- attributes.)
4575-
4576- @param limit: an integer, representing the maximum number of rows that
4577- this query should return.
4578-
4579- @return: an L{IQuery} provider with the new limit.
4580- """
4581-
4582-
4583-
4584-class IColumn(Interface):
4585- """
4586- An object that represents a column in the database.
4587- """
4588- def getShortColumnName(store):
4589- """
4590- @rtype: C{str}
4591- @return: Just the name of this column.
4592- """
4593-
4594-
4595- def getColumnName(store):
4596- """
4597- @rtype: C{str}
4598-
4599- @return: The fully qualified name of this object as a column within the
4600- database, eg, C{"main_database.some_table.[this_column]"}.
4601- """
4602-
4603- def fullyQualifiedName():
4604- """
4605- @rtype: C{str}
4606-
4607- @return: The fully qualfied name of this object as an attribute in
4608- Python code, eg, C{myproject.mymodule.MyClass.myAttribute}. If this
4609- attribute is represented by an actual Python code object, it will be a
4610- dot-separated sequence of Python identifiers; otherwise, it will
4611- contain invalid identifier characters other than '.'.
4612- """
4613-
4614- def __get__(row):
4615- """
4616- @param row: an item that has this column.
4617- @type row: L{axiom.item.Item}
4618-
4619- @return: The value of the column described by this object, for the given
4620- row.
4621-
4622- @rtype: depends on the underlying type of the column.
4623- """
4624-
4625-
4626-
4627-class IOrdering(Interface):
4628- """
4629- An object suitable for passing to the 'sort' argument of a query method.
4630- """
4631- def orderColumns():
4632- """
4633- Return a list of two-tuples of IColumn providers and either C{'ASC'} or
4634- C{'DESC'} defining this ordering.
4635- """
4636-
4637-
4638-
4639-class IComparison(Interface):
4640- """
4641- An object that represents an in-database comparison. A predicate that may
4642- apply to certain items in a store. Passed as an argument to
4643- attributes.AND, .OR, and Store.query(...)
4644- """
4645- def getInvolvedTables():
4646- """
4647- Return a sequence of L{Item} subclasses which are referenced by this
4648- comparison. A class may appear at most once.
4649- """
4650-
4651-
4652- def getQuery(store):
4653- """
4654- Return an SQL string with ?-style bind parameter syntax thingies.
4655- """
4656-
4657-
4658- def getArgs(store):
4659- """
4660- Return a sequence of arguments suitable for use to satisfy the bind
4661- parameters in the result of L{getQuery}.
4662- """
4663-
4664-
4665-
4666-class IReliableListener(Interface):
4667- """
4668- Receives notification of the existence of Items of a particular type.
4669-
4670- {IReliableListener} providers are given to
4671- L{IBatchProcessor.addReliableListener} and will then have L{processItem}
4672- called with items handled by that processor.
4673- """
4674-
4675- def processItem(item):
4676- """
4677- Callback notifying this listener of the existence of the given item.
4678- """
4679-
4680- def suspend():
4681- """
4682- Invoked when notification for this listener is being temporarily
4683- suspended.
4684-
4685- This should clean up any ephemeral resources held by this listener and
4686- generally prepare to not do anything for a while.
4687- """
4688-
4689- def resume():
4690- """
4691- Invoked when notification for this listener is being resumed.
4692-
4693- Any actions taken by L{suspend} may be reversed by this method.
4694- """
4695-
4696-
4697-LOCAL, REMOTE = range(2)
4698-class IBatchProcessor(Interface):
4699- def addReliableListener(listener, style=LOCAL):
4700- """
4701- Add the given Item to the set which will be notified of Items
4702- available for processing.
4703-
4704- Note: Each Item is processed synchronously. Adding too many
4705- listeners to a single batch processor will cause the L{step}
4706- method to block while it sends notification to each listener.
4707-
4708- @type listener: L{IReliableListener}
4709- @param listener: The item to which listened-for items will be passed
4710- for processing.
4711- """
4712-
4713-
4714- def removeReliableListener(listener):
4715- """
4716- Remove a previously added listener.
4717- """
4718-
4719-
4720- def getReliableListeners():
4721- """
4722- Return an iterable of the listeners which have been added to
4723- this batch processor.
4724- """
4725-
4726-
4727-
4728-class IBatchService(Interface):
4729- """
4730- Object which allows minimal communication with L{IReliableListener}
4731- providers which are running remotely (that is, with the L{REMOTE} style).
4732- """
4733- def start():
4734- """
4735- Start the remote batch process if it has not yet been started, otherwise
4736- do nothing.
4737- """
4738-
4739-
4740- def suspend(storeID):
4741- """
4742- @type storeID: C{int}
4743- @param storeID: The storeID of the listener to suspend.
4744-
4745- @rtype: L{twisted.internet.defer.Deferred}
4746- @return: A Deferred which fires when the listener has been suspended.
4747- """
4748-
4749-
4750- def resume(storeID):
4751- """
4752- @type storeID: C{int}
4753- @param storeID: The storeID of the listener to resume.
4754-
4755- @rtype: L{twisted.internet.defer.Deferred}
4756- @return: A Deferred which fires when the listener has been resumed.
4757- """
4758-
4759-
4760-
4761-class IVersion(Interface):
4762- """
4763- Object with version information for a package that creates Axiom
4764- items, most likely a L{twisted.python.versions.Version}. Used to
4765- track which versions of a package have been used to load a store.
4766- """
4767- package = Attribute("""
4768- Name of a Python package.
4769- """)
4770- major = Attribute("""
4771- Major version number.
4772- """)
4773- minor = Attribute("""
4774- Minor version number.
4775- """)
4776- micro = Attribute("""
4777- Micro version number.
4778- """)
4779
4780=== removed file 'Axiom/axiom/item.py'
4781--- Axiom/axiom/item.py 2014-04-08 22:47:38 +0000
4782+++ Axiom/axiom/item.py 1970-01-01 00:00:00 +0000
4783@@ -1,1137 +0,0 @@
4784-# -*- test-case-name: axiom.test -*-
4785-
4786-__metaclass__ = type
4787-
4788-import gc
4789-from zope.interface import implements, Interface
4790-
4791-from inspect import getabsfile
4792-from weakref import WeakValueDictionary
4793-
4794-from twisted.python import log
4795-from twisted.python.reflect import qual, namedAny
4796-from twisted.python.util import mergeFunctionMetadata
4797-from twisted.application.service import IService, IServiceCollection, MultiService
4798-
4799-from axiom import slotmachine, _schema, iaxiom
4800-from axiom.errors import ChangeRejected, DeletionDisallowed
4801-from axiom.iaxiom import IColumn, IPowerupIndirector
4802-
4803-from axiom.attributes import (
4804- SQLAttribute, _ComparisonOperatorMuxer, _MatchingOperationMuxer,
4805- _OrderingMixin, _ContainableMixin, Comparable, compare, inmemory,
4806- reference, text, integer, AND, _cascadingDeletes, _disallows)
4807-
4808-_typeNameToMostRecentClass = WeakValueDictionary()
4809-
4810-def normalize(qualName):
4811- """
4812- Turn a fully-qualified Python name into a string usable as part of a
4813- table name.
4814- """
4815- return qualName.lower().replace('.', '_')
4816-
4817-class NoInheritance(RuntimeError):
4818- """
4819- Inheritance is as-yet unsupported by XAtop.
4820- """
4821-
4822-class NotInStore(RuntimeError):
4823- """
4824- """
4825-
4826-class CantInstantiateItem(RuntimeError):
4827- """You can't instantiate Item directly. Make a subclass.
4828- """
4829-
4830-class MetaItem(slotmachine.SchemaMetaMachine):
4831- """Simple metaclass for Item that adds Item (and its subclasses) to
4832- _typeNameToMostRecentClass mapping.
4833- """
4834-
4835- def __new__(meta, name, bases, dictionary):
4836- T = slotmachine.SchemaMetaMachine.__new__(meta, name, bases, dictionary)
4837- if T.__name__ == 'Item' and T.__module__ == __name__:
4838- return T
4839- T.__already_inherited__ += 1
4840- if T.__already_inherited__ >= 2:
4841- raise NoInheritance("already inherited from item once: "
4842- "in-database inheritance not yet supported")
4843- if T.typeName is None:
4844- T.typeName = normalize(qual(T))
4845- if T.schemaVersion is None:
4846- T.schemaVersion = 1
4847- if not T.__legacy__ and T.typeName in _typeNameToMostRecentClass:
4848- # Let's try not to gc.collect() every time.
4849- gc.collect()
4850- if T.typeName in _typeNameToMostRecentClass:
4851- if T.__legacy__:
4852- return T
4853- otherT = _typeNameToMostRecentClass[T.typeName]
4854-
4855- if (otherT.__name__ == T.__name__
4856- and getabsfile(T) == getabsfile(otherT)
4857- and T.__module__ != otherT.__module__):
4858-
4859- if len(T.__module__) < len(otherT.__module__):
4860- relmod = T.__module__
4861- else:
4862- relmod = otherT.__module__
4863-
4864- raise RuntimeError(
4865- "Use absolute imports; relative import"
4866- " detected for type %r (imported from %r)" % (
4867- T.typeName, relmod))
4868-
4869- raise RuntimeError("2 definitions of axiom typename %r: %r %r" % (
4870- T.typeName, T, _typeNameToMostRecentClass[T.typeName]))
4871- _typeNameToMostRecentClass[T.typeName] = T
4872- return T
4873-
4874-
4875- def __cmp__(self, other):
4876- """
4877- Ensure stable sorting between Item classes. This provides determinism
4878- in SQL generation, which is beneficial for debugging and performance
4879- purposes.
4880- """
4881- if isinstance(other, MetaItem):
4882- return cmp((self.typeName, self.schemaVersion),
4883- (other.typeName, other.schemaVersion))
4884- return NotImplemented
4885-
4886-
4887-def noop():
4888- pass
4889-
4890-class _StoreIDComparer(Comparable):
4891- """
4892- See Comparable's docstring for the explanation of the requirements of my implementation.
4893- """
4894- implements(IColumn)
4895-
4896- def __init__(self, type):
4897- self.type = type
4898-
4899- def __repr__(self):
4900- return '<storeID ' + qual(self.type) + '.storeID>'
4901-
4902- def fullyQualifiedName(self):
4903- # XXX: this is an example of silly redundancy, this really ought to be
4904- # refactored to work like any other attribute (including being
4905- # explicitly covered in the schema, which has other good qualities like
4906- # allowing tables to be VACUUM'd without destroying oid stability and
4907- # every storeID reference ever. --glyph
4908- return qual(self.type)+'.storeID'
4909-
4910- # attributes required by ColumnComparer
4911- def infilter(self, pyval, oself, store):
4912- return pyval
4913-
4914- def outfilter(self, dbval, oself):
4915- return dbval
4916-
4917- def getShortColumnName(self, store):
4918- return store.getShortColumnName(self)
4919-
4920- def getColumnName(self, store):
4921- return store.getColumnName(self)
4922-
4923- def __get__(self, item, type=None):
4924- if item is None:
4925- return self
4926- else:
4927- return getattr(item, 'storeID')
4928-
4929-
4930-class _SpecialStoreIDAttribute(slotmachine.SetOnce):
4931- """
4932- Because storeID is special (it's unique, it determines a row's cache
4933- identity, it's immutable, etc) we don't use a regular SQLAttribute to
4934- represent it - but it still needs to be compared with other SQL attributes,
4935- as it is in fact represented by the 'oid' database column.
4936-
4937- I implement set-once semantics to enforce immutability, but delegate
4938- comparison operations to _StoreIDComparer.
4939- """
4940- def __get__(self, oself, type=None):
4941- if type is not None and oself is None:
4942- if type._storeIDComparer is None:
4943- # Reuse the same instance so that the store can use it
4944- # as a key for various caching, like any other attributes.
4945- type._storeIDComparer = _StoreIDComparer(type)
4946- return type._storeIDComparer
4947- return super(_SpecialStoreIDAttribute, self).__get__(oself, type)
4948-
4949-
4950-def serviceSpecialCase(item, pups):
4951- if item._axiom_service is not None:
4952- return item._axiom_service
4953- svc = MultiService()
4954- for subsvc in pups:
4955- subsvc.setServiceParent(svc)
4956- item._axiom_service = svc
4957- return svc
4958-
4959-
4960-
4961-
4962-class Empowered(object):
4963- """
4964- An object which can have powerups.
4965-
4966- @type store: L{axiom.store.Store}
4967- @ivar store: Persistence object to which powerups can be added for later
4968- retrieval.
4969-
4970- @type aggregateInterfaces: C{dict}
4971- @ivar aggregateInterfaces: Mapping from interface classes to callables
4972- which will be used to produce corresponding powerups. The callables
4973- will be invoked with two arguments, the L{Empowered} for which powerups
4974- are being loaded and with a list of powerups found in C{store}. The
4975- return value is the powerup. These are used only by the callable
4976- interface adaption API, not C{powerupsFor}.
4977- """
4978- aggregateInterfaces = {
4979- IService: serviceSpecialCase,
4980- IServiceCollection: serviceSpecialCase}
4981-
4982- def inMemoryPowerUp(self, powerup, interface):
4983- """
4984- Install an arbitrary object as a powerup on an item or store.
4985-
4986- Powerups installed using this method will only exist as long as this
4987- object remains in memory. They will also take precedence over powerups
4988- installed with L{powerUp}.
4989-
4990- @param interface: a zope interface
4991- """
4992- self._inMemoryPowerups[interface] = powerup
4993-
4994-
4995- def powerUp(self, powerup, interface=None, priority=0):
4996- """
4997- Installs a powerup (e.g. plugin) on an item or store.
4998-
4999- Powerups will be returned in an iterator when queried for using the
5000- 'powerupsFor' method. Normally they will be returned in order of
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches