Merge lp:~nataliabidart/magicicada-server/only-one-store-name into lp:magicicada-server

Proposed by Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 16
Merged at revision: 15
Proposed branch: lp:~nataliabidart/magicicada-server/only-one-store-name
Merge into: lp:magicicada-server
Diff against target: 1278 lines (+136/-149)
21 files modified
README.txt (+1/-1)
src/backends/db/scripts/schema (+6/-10)
src/backends/db/store.py (+2/-2)
src/backends/db/tests/test_dbtransaction.py (+5/-5)
src/backends/db/tests/test_store.py (+1/-1)
src/backends/filesync/data/__init__.py (+1/-1)
src/backends/filesync/data/adminservices.py (+2/-2)
src/backends/filesync/data/dbmanager.py (+1/-9)
src/backends/filesync/data/gateway.py (+31/-31)
src/backends/filesync/data/testing/ormtestcase.py (+2/-2)
src/backends/filesync/data/testing/testcase.py (+2/-2)
src/backends/filesync/data/tests/test_dao.py (+2/-2)
src/backends/filesync/data/tests/test_gateway.py (+52/-52)
src/backends/testing/resources.py (+4/-6)
src/backends/txlog/model.py (+6/-5)
src/backends/txlog/tests/test_model.py (+4/-4)
src/backends/txlog/tests/test_utils.py (+1/-1)
src/backends/txlog/utils.py (+8/-8)
src/server/tests/test_account.py (+1/-1)
src/server/tests/test_sharing.py (+1/-1)
src/server/tests/test_throttling.py (+3/-3)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/only-one-store-name
Reviewer Review Type Date Requested Status
Natalia Bidart Approve
Review via email: mp+270191@code.launchpad.net

Commit message

- Make sure only one Storm store is used in the project. Pure syntactic renames.

To post a comment you must log in.
Revision history for this message
Natalia Bidart (nataliabidart) wrote :

Pure syntactic renames, also testing lander script.

review: Approve
Revision history for this message
Magicicada Bot (magicicada) wrote :

The `tree_dir` option for the target branch is not a lightweight checkout. Please ask a project administrator to resolve the issue, and try again.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'README.txt'
--- README.txt 2015-09-03 17:59:20 +0000
+++ README.txt 2015-09-05 01:43:14 +0000
@@ -63,7 +63,7 @@
63 sudo apt-get install bzr make python-transaction protobuf-compiler \63 sudo apt-get install bzr make python-transaction protobuf-compiler \
64 python-setuptools gcc python-dev python-twisted-web postgresql-9.1 \64 python-setuptools gcc python-dev python-twisted-web postgresql-9.1 \
65 python-yaml python-psycopg2 postgresql-contrib supervisor \65 python-yaml python-psycopg2 postgresql-contrib supervisor \
66 postgresql-plpython-9.1 python-boto squid \66 postgresql-plpython-9.1 python-boto squid python-virtualenv \
67 python-protobuf python-psutil python-testresources \67 python-protobuf python-psutil python-testresources \
68 python-tz python-bson python-iso8601 python-openid python-meliae68 python-tz python-bson python-iso8601 python-openid python-meliae
6969
7070
=== modified file 'src/backends/db/scripts/schema'
--- src/backends/db/scripts/schema 2015-08-17 04:24:23 +0000
+++ src/backends/db/scripts/schema 2015-09-05 01:43:14 +0000
@@ -25,7 +25,7 @@
25import backends.db.schemas.storage as storage_schema25import backends.db.schemas.storage as storage_schema
26import backends.db.schemas.txlog as txlog_schema26import backends.db.schemas.txlog as txlog_schema
2727
28from backends.db.store import get_store28from backends.db.store import get_filesync_store
29from optparse import OptionParser29from optparse import OptionParser
3030
3131
@@ -44,9 +44,6 @@
44 parser.add_option("-a", "--all",44 parser.add_option("-a", "--all",
45 dest="all", default=False, action="store_true",45 dest="all", default=False, action="store_true",
46 help="Create all schemas known")46 help="Create all schemas known")
47 parser.add_option("--store", dest="store_name",
48 help="Used in conjunction with schema to upgrade one "
49 "schema.")
50 parser.add_option("--schema", dest="schema_name",47 parser.add_option("--schema", dest="schema_name",
51 help="Used in conjunction with store to upgrade "48 help="Used in conjunction with store to upgrade "
52 "one schema")49 "one schema")
@@ -56,18 +53,17 @@
5653
57 (options, args) = parser.parse_args()54 (options, args) = parser.parse_args()
5855
59 def create(schema_name, store_name='filesync'):56 def create(schema_name):
60 """Create the schema using the store."""57 """Create the schema using the store."""
61 schema = schemas[schema_name]58 schema = schemas[schema_name]
62 if options.dryrun:59 if options.dryrun:
63 print '*' * 8060 print '*' * 80
64 print 'Dry run of %s schema in %s store' % (61 print 'Dry run of %s schema' % schema_name
65 schema_name, store_name)
66 print '*' * 8062 print '*' * 80
67 ddl = schema.create_schema().preview()63 ddl = schema.create_schema().preview()
68 print ddl64 print ddl
69 else:65 else:
70 target_store = get_store(store_name)66 target_store = get_filesync_store()
71 schema.create_schema().upgrade(target_store)67 schema.create_schema().upgrade(target_store)
7268
73 if options.all:69 if options.all:
@@ -79,8 +75,8 @@
79 create('storage')75 create('storage')
80 create('txlog')76 create('txlog')
8177
82 elif options.store_name and options.schema_name:78 elif options.schema_name:
83 create(options.schema_name, options.store_name)79 create(options.schema_name)
84 else:80 else:
85 print "Must be called with --all or --schema and --store"81 print "Must be called with --all or --schema and --store"
8682
8783
=== modified file 'src/backends/db/store.py'
--- src/backends/db/store.py 2015-08-17 00:09:45 +0000
+++ src/backends/db/store.py 2015-09-05 01:43:14 +0000
@@ -88,9 +88,9 @@
88 return zstorm.get(store_name, default_uri=uri)88 return zstorm.get(store_name, default_uri=uri)
8989
9090
91def get_filesync_store(store_name):91def get_filesync_store():
92 """Get a store using the filesync_tm."""92 """Get a store using the filesync_tm."""
93 return get_store(store_name, zstorm=filesync_zstorm)93 return get_store('filesync', zstorm=filesync_zstorm)
9494
9595
96@contextlib.contextmanager96@contextlib.contextmanager
9797
=== modified file 'src/backends/db/tests/test_dbtransaction.py'
--- src/backends/db/tests/test_dbtransaction.py 2015-08-17 15:05:00 +0000
+++ src/backends/db/tests/test_dbtransaction.py 2015-09-05 01:43:14 +0000
@@ -39,7 +39,7 @@
3939
40from backends.db import errors40from backends.db import errors
4141
42from backends.db.store import get_store42from backends.db.store import get_filesync_store
43from backends.db.dbtransaction import (43from backends.db.dbtransaction import (
44 _check_stores_and_invalidate,44 _check_stores_and_invalidate,
45 retryable_transaction,45 retryable_transaction,
@@ -767,11 +767,11 @@
767 logger.addHandler(h)767 logger.addHandler(h)
768768
769 make_storage_user(1, u'foo', u'foo', 10000)769 make_storage_user(1, u'foo', u'foo', 10000)
770 sto = get_store('filesync', filesync_zstorm)770 store = get_filesync_store()
771 self._sto = sto # for later cleanup771 self._sto = store # for later cleanup
772 obj = StorageObject(1, u'foo', u'File')772 obj = StorageObject(1, u'foo', u'File')
773 sto.add(obj)773 store.add(obj)
774 sto.flush()774 store.flush()
775 self.assertFalse(obj.__storm_object_info__.get("invalidated", False))775 self.assertFalse(obj.__storm_object_info__.get("invalidated", False))
776 _check_stores_and_invalidate(filesync_zstorm)776 _check_stores_and_invalidate(filesync_zstorm)
777 self.assertTrue(obj.__storm_object_info__.get("invalidated", False))777 self.assertTrue(obj.__storm_object_info__.get("invalidated", False))
778778
=== modified file 'src/backends/db/tests/test_store.py'
--- src/backends/db/tests/test_store.py 2015-08-17 00:09:45 +0000
+++ src/backends/db/tests/test_store.py 2015-09-05 01:43:14 +0000
@@ -28,7 +28,7 @@
2828
29 def test_get_filesync_store(self):29 def test_get_filesync_store(self):
30 """Test get_filesync_store returns the expected store."""30 """Test get_filesync_store returns the expected store."""
31 db = store.get_filesync_store('filesync').get_database()31 db = store.get_filesync_store().get_database()
32 self.assertTrue(isinstance(db, store.FilesyncDatabase))32 self.assertTrue(isinstance(db, store.FilesyncDatabase))
33 self.assertEqual('filesync', db.name)33 self.assertEqual('filesync', db.name)
3434
3535
=== modified file 'src/backends/filesync/data/__init__.py'
--- src/backends/filesync/data/__init__.py 2015-08-17 00:09:45 +0000
+++ src/backends/filesync/data/__init__.py 2015-09-05 01:43:14 +0000
@@ -85,7 +85,7 @@
8585
86from ubuntuone.storageprotocol.content_hash import content_hash_factory86from ubuntuone.storageprotocol.content_hash import content_hash_factory
8787
88from backends.filesync.data.dbmanager import get_storage_store # NOQA88from backends.filesync.data.dbmanager import get_filesync_store # NOQA
89from backends.filesync.data.dbmanager import filesync_tm # NOQA89from backends.filesync.data.dbmanager import filesync_tm # NOQA
9090
91EMPTY_CONTENT_HASH = content_hash_factory().content_hash()91EMPTY_CONTENT_HASH = content_hash_factory().content_hash()
9292
=== modified file 'src/backends/filesync/data/adminservices.py'
--- src/backends/filesync/data/adminservices.py 2015-08-16 19:22:32 +0000
+++ src/backends/filesync/data/adminservices.py 2015-09-05 01:43:14 +0000
@@ -17,7 +17,7 @@
1717
18"""Services provided for administrative access to storage data."""18"""Services provided for administrative access to storage data."""
1919
20from backends.filesync.data import get_storage_store, model, dao20from backends.filesync.data import get_filesync_store, model, dao
21from backends.filesync.data.gateway import StorageUserGateway21from backends.filesync.data.gateway import StorageUserGateway
22from backends.filesync.data.dbmanager import fsync_readonly22from backends.filesync.data.dbmanager import fsync_readonly
2323
@@ -34,7 +34,7 @@
3434
35 def _find_users(self):35 def _find_users(self):
36 """Perform storm query based on current filter."""36 """Perform storm query based on current filter."""
37 store = get_storage_store()37 store = get_filesync_store()
38 conditions = []38 conditions = []
39 if self.filter is not None:39 if self.filter is not None:
40 filter = unicode("%" + self.filter + "%")40 filter = unicode("%" + self.filter + "%")
4141
=== modified file 'src/backends/filesync/data/dbmanager.py'
--- src/backends/filesync/data/dbmanager.py 2015-08-17 00:09:45 +0000
+++ src/backends/filesync/data/dbmanager.py 2015-09-05 01:43:14 +0000
@@ -17,7 +17,7 @@
1717
18"""Manage database connections and stores to the storage database."""18"""Manage database connections and stores to the storage database."""
1919
20from backends.db.store import get_filesync_store20from backends.db.store import get_filesync_store # NOQA
21from backends.db.dbtransaction import (21from backends.db.dbtransaction import (
22 get_storm_commit,22 get_storm_commit,
23 get_storm_readonly,23 get_storm_readonly,
@@ -28,11 +28,3 @@
28fsync_commit = get_storm_commit(filesync_tm)28fsync_commit = get_storm_commit(filesync_tm)
29fsync_readonly = get_storm_readonly(filesync_tm)29fsync_readonly = get_storm_readonly(filesync_tm)
30fsync_readonly_slave = get_storm_readonly(filesync_tm, use_ro_store=True)30fsync_readonly_slave = get_storm_readonly(filesync_tm, use_ro_store=True)
31
32
33def get_storage_store():
34 """Return the default storage store.
35
36 This is primarily for legacy tests while transaction handling is migrated
37 """
38 return get_filesync_store('filesync')
3931
=== modified file 'src/backends/filesync/data/gateway.py'
--- src/backends/filesync/data/gateway.py 2015-08-29 00:03:11 +0000
+++ src/backends/filesync/data/gateway.py 2015-09-05 01:43:14 +0000
@@ -38,7 +38,7 @@
38from backends.db.dbtransaction import db_timeout, TRANSACTION_MAX_TIME38from backends.db.dbtransaction import db_timeout, TRANSACTION_MAX_TIME
39from backends.filesync.data import model, errors, dao, utils39from backends.filesync.data import model, errors, dao, utils
40from backends.filesync.notifier.notifier import get_notifier40from backends.filesync.notifier.notifier import get_notifier
41from backends.filesync.data.dbmanager import get_storage_store41from backends.filesync.data.dbmanager import get_filesync_store
42from config import config42from config import config
4343
4444
@@ -162,7 +162,7 @@
162 @property162 @property
163 def store(self):163 def store(self):
164 """The main storage store."""164 """The main storage store."""
165 return get_storage_store()165 return get_filesync_store()
166166
167167
168class SystemGateway(GatewayBase):168class SystemGateway(GatewayBase):
@@ -185,7 +185,7 @@
185 user.status = 'Live'185 user.status = 'Live'
186 user.subscription_status = 'Live'186 user.subscription_status = 'Live'
187 # initialize the user's data187 # initialize the user's data
188 store = get_storage_store()188 store = get_filesync_store()
189 # create or update the user info table189 # create or update the user info table
190 user_info = store.get(model.StorageUserInfo, user_id)190 user_info = store.get(model.StorageUserInfo, user_id)
191 if user_info is None:191 if user_info is None:
@@ -306,7 +306,7 @@
306 download_key=None):306 download_key=None):
307 """Make a new download object."""307 """Make a new download object."""
308 self.get_user(user_id)308 self.get_user(user_id)
309 store = get_storage_store()309 store = get_filesync_store()
310 download = model.Download(310 download = model.Download(
311 user_id, volume_id, file_path, download_url, download_key)311 user_id, volume_id, file_path, download_url, download_key)
312 store.add(download)312 store.add(download)
@@ -315,7 +315,7 @@
315 def _get_download(self, user_id, download_id):315 def _get_download(self, user_id, download_id):
316 """Internal function to get the download and owner."""316 """Internal function to get the download and owner."""
317 user = self.get_user(user_id)317 user = self.get_user(user_id)
318 store = get_storage_store()318 store = get_filesync_store()
319 download = store.get(model.Download, download_id)319 download = store.get(model.Download, download_id)
320 return user, download320 return user, download
321321
@@ -323,7 +323,7 @@
323 download_key=None):323 download_key=None):
324 """Get a download by its UDF, file path and download key."""324 """Get a download by its UDF, file path and download key."""
325 self.get_user(user_id)325 self.get_user(user_id)
326 store = get_storage_store()326 store = get_filesync_store()
327 download = store.find(327 download = store.find(
328 model.Download,328 model.Download,
329 model.Download.owner_id == user_id,329 model.Download.owner_id == user_id,
@@ -382,7 +382,7 @@
382382
383 def get_failed_downloads(self, start_date, end_date):383 def get_failed_downloads(self, start_date, end_date):
384 """Get failed downloads."""384 """Get failed downloads."""
385 store = get_storage_store()385 store = get_filesync_store()
386 result = store.find(386 result = store.find(
387 model.Download,387 model.Download,
388 model.Download._status == model.DOWNLOAD_STATUS_ERROR,388 model.Download._status == model.DOWNLOAD_STATUS_ERROR,
@@ -393,7 +393,7 @@
393393
394 def get_node(self, node_id):394 def get_node(self, node_id):
395 """Get a node for the specified node_id."""395 """Get a node for the specified node_id."""
396 store = get_storage_store()396 store = get_filesync_store()
397 node = store.find(397 node = store.find(
398 model.StorageObject,398 model.StorageObject,
399 model.StorageObject.status == model.STATUS_LIVE,399 model.StorageObject.status == model.STATUS_LIVE,
@@ -404,7 +404,7 @@
404404
405 def get_user_info(self, user_id):405 def get_user_info(self, user_id):
406 """Get the UserInfo DAO for user_id"""406 """Get the UserInfo DAO for user_id"""
407 store = get_storage_store()407 store = get_filesync_store()
408 user_info = store.get(model.StorageUserInfo, user_id)408 user_info = store.get(model.StorageUserInfo, user_id)
409 if user_info is None:409 if user_info is None:
410 raise errors.DoesNotExist(self.user_dne_error)410 raise errors.DoesNotExist(self.user_dne_error)
@@ -413,13 +413,13 @@
413 def cleanup_uploadjobs(self, uploadjobs):413 def cleanup_uploadjobs(self, uploadjobs):
414 """Delete uploadjobs."""414 """Delete uploadjobs."""
415 uploadjob_ids = [job.id for job in uploadjobs]415 uploadjob_ids = [job.id for job in uploadjobs]
416 store = get_storage_store()416 store = get_filesync_store()
417 store.find(model.UploadJob,417 store.find(model.UploadJob,
418 model.UploadJob.uploadjob_id.is_in(uploadjob_ids)).remove()418 model.UploadJob.uploadjob_id.is_in(uploadjob_ids)).remove()
419419
420 def get_abandoned_uploadjobs(self, last_active, limit=1000):420 def get_abandoned_uploadjobs(self, last_active, limit=1000):
421 """Get uploadjobs that are older than last_active."""421 """Get uploadjobs that are older than last_active."""
422 store = get_storage_store()422 store = get_filesync_store()
423 jobs = store.find(423 jobs = store.find(
424 model.UploadJob,424 model.UploadJob,
425 model.UploadJob.when_last_active < last_active)[:limit]425 model.UploadJob.when_last_active < last_active)[:limit]
@@ -431,7 +431,7 @@
431 query = """SELECT id FROM StorageUser431 query = """SELECT id FROM StorageUser
432 ORDER BY RANDOM()432 ORDER BY RANDOM()
433 LIMIT 1"""433 LIMIT 1"""
434 store = get_storage_store()434 store = get_filesync_store()
435 result = store.execute(SQL(query)).get_one()435 result = store.execute(SQL(query)).get_one()
436 return result[0]436 return result[0]
437437
@@ -455,7 +455,7 @@
455 This typically only happens when a user's subscription changes.455 This typically only happens when a user's subscription changes.
456 """456 """
457 user = self.store.get(model.StorageUser, self.user.id)457 user = self.store.get(model.StorageUser, self.user.id)
458 store = get_storage_store()458 store = get_filesync_store()
459459
460 # update the subscription in the user460 # update the subscription in the user
461 if subscription is not None:461 if subscription is not None:
@@ -482,14 +482,14 @@
482 @timing_metric482 @timing_metric
483 def get_quota(self):483 def get_quota(self):
484 """Get the user's quota information."""484 """Get the user's quota information."""
485 store = get_storage_store()485 store = get_filesync_store()
486 info = store.get(model.StorageUserInfo, self.user.id)486 info = store.get(model.StorageUserInfo, self.user.id)
487 return dao.UserInfo(info, gateway=self)487 return dao.UserInfo(info, gateway=self)
488488
489 @timing_metric489 @timing_metric
490 def recalculate_quota(self):490 def recalculate_quota(self):
491 """Recalculate a user's quota."""491 """Recalculate a user's quota."""
492 store = get_storage_store()492 store = get_filesync_store()
493 info = store.get(model.StorageUserInfo, self.user.id)493 info = store.get(model.StorageUserInfo, self.user.id)
494 info.recalculate_used_bytes()494 info.recalculate_used_bytes()
495 return dao.UserInfo(info, gateway=self)495 return dao.UserInfo(info, gateway=self)
@@ -504,7 +504,7 @@
504 if not self.user.is_active:504 if not self.user.is_active:
505 raise errors.NoPermission(self.inactive_user_error)505 raise errors.NoPermission(self.inactive_user_error)
506 # sanity check506 # sanity check
507 store = get_storage_store()507 store = get_filesync_store()
508 udf = store.find(508 udf = store.find(
509 model.UserVolume,509 model.UserVolume,
510 model.UserVolume.owner_id == self.user.id,510 model.UserVolume.owner_id == self.user.id,
@@ -743,7 +743,7 @@
743 """Create a UDF."""743 """Create a UDF."""
744 if not self.user.is_active:744 if not self.user.is_active:
745 raise errors.NoPermission(self.inactive_user_error)745 raise errors.NoPermission(self.inactive_user_error)
746 store = get_storage_store()746 store = get_filesync_store()
747 # need a lock here.747 # need a lock here.
748 info = store.get(model.StorageUserInfo, self.user.id)748 info = store.get(model.StorageUserInfo, self.user.id)
749 info.lock_for_update()749 info.lock_for_update()
@@ -770,7 +770,7 @@
770 """Get a UDF by the path parts."""770 """Get a UDF by the path parts."""
771 if not self.user.is_active:771 if not self.user.is_active:
772 raise errors.NoPermission(self.inactive_user_error)772 raise errors.NoPermission(self.inactive_user_error)
773 store = get_storage_store()773 store = get_filesync_store()
774 path = path.rstrip('/')774 path = path.rstrip('/')
775 if from_full_path:775 if from_full_path:
776 udfs = store.find(776 udfs = store.find(
@@ -796,7 +796,7 @@
796 """Delete a UDF."""796 """Delete a UDF."""
797 if not self.user.is_active:797 if not self.user.is_active:
798 raise errors.NoPermission(self.inactive_user_error)798 raise errors.NoPermission(self.inactive_user_error)
799 store = get_storage_store()799 store = get_filesync_store()
800 udf = store.find(800 udf = store.find(
801 model.UserVolume,801 model.UserVolume,
802 model.UserVolume.id == udf_id,802 model.UserVolume.id == udf_id,
@@ -819,7 +819,7 @@
819 """Get a UDF."""819 """Get a UDF."""
820 if not self.user.is_active:820 if not self.user.is_active:
821 raise errors.NoPermission(self.inactive_user_error)821 raise errors.NoPermission(self.inactive_user_error)
822 store = get_storage_store()822 store = get_filesync_store()
823 udf = store.find(823 udf = store.find(
824 model.UserVolume,824 model.UserVolume,
825 model.UserVolume.id == udf_id,825 model.UserVolume.id == udf_id,
@@ -835,7 +835,7 @@
835 """Return Live UDFs."""835 """Return Live UDFs."""
836 if not self.user.is_active:836 if not self.user.is_active:
837 raise errors.NoPermission(self.inactive_user_error)837 raise errors.NoPermission(self.inactive_user_error)
838 store = get_storage_store()838 store = get_filesync_store()
839 udfs = store.find(839 udfs = store.find(
840 model.UserVolume,840 model.UserVolume,
841 model.UserVolume.owner_id == self.user.id,841 model.UserVolume.owner_id == self.user.id,
@@ -848,7 +848,7 @@
848 @timing_metric848 @timing_metric
849 def get_downloads(self):849 def get_downloads(self):
850 """Get all downloads for a user."""850 """Get all downloads for a user."""
851 store = get_storage_store()851 store = get_filesync_store()
852 return [dao.Download(download)852 return [dao.Download(download)
853 for download in store.find(853 for download in store.find(
854 model.Download,854 model.Download,
@@ -857,7 +857,7 @@
857 @timing_metric857 @timing_metric
858 def get_public_files(self):858 def get_public_files(self):
859 """Get all public files for a user."""859 """Get all public files for a user."""
860 store = get_storage_store()860 store = get_filesync_store()
861 nodes = store.find(861 nodes = store.find(
862 model.StorageObject,862 model.StorageObject,
863 model.StorageObject.status == model.STATUS_LIVE,863 model.StorageObject.status == model.STATUS_LIVE,
@@ -871,7 +871,7 @@
871 @timing_metric871 @timing_metric
872 def get_public_folders(self):872 def get_public_folders(self):
873 """Get all public folders for a user."""873 """Get all public folders for a user."""
874 store = get_storage_store()874 store = get_filesync_store()
875 nodes = store.find(875 nodes = store.find(
876 model.StorageObject,876 model.StorageObject,
877 model.StorageObject.status == model.STATUS_LIVE,877 model.StorageObject.status == model.STATUS_LIVE,
@@ -899,7 +899,7 @@
899 @timing_metric899 @timing_metric
900 def get_share_generation(self, share):900 def get_share_generation(self, share):
901 """Get the generation of the specified share."""901 """Get the generation of the specified share."""
902 store = get_storage_store()902 store = get_filesync_store()
903 vol = store.find(903 vol = store.find(
904 model.UserVolume,904 model.UserVolume,
905 model.UserVolume.id == model.StorageObject.volume_id,905 model.UserVolume.id == model.StorageObject.volume_id,
@@ -939,7 +939,7 @@
939 WHERE o.id = t.parent_id::UUID AND939 WHERE o.id = t.parent_id::UUID AND
940 o.volume_id=u.id AND u.status = E'Live' ;940 o.volume_id=u.id AND u.status = E'Live' ;
941 """ % dict(owner_id=self.user.id)941 """ % dict(owner_id=self.user.id)
942 store = get_storage_store()942 store = get_filesync_store()
943 nodes = store.execute(SQL(sql))943 nodes = store.execute(SQL(sql))
944 gws = {}944 gws = {}
945 for n in nodes:945 for n in nodes:
@@ -978,7 +978,7 @@
978978
979 def _get_reusable_content(self, hash_value, magic_hash):979 def _get_reusable_content(self, hash_value, magic_hash):
980 """Get a contentblob for reusable content."""980 """Get a contentblob for reusable content."""
981 store = get_storage_store()981 store = get_filesync_store()
982982
983 # check to see if we have the content blob for that hash983 # check to see if we have the content blob for that hash
984 contentblob = store.find(984 contentblob = store.find(
@@ -1104,7 +1104,7 @@
1104 @property1104 @property
1105 def store(self):1105 def store(self):
1106 """The storm store to use."""1106 """The storm store to use."""
1107 return get_storage_store()1107 return get_filesync_store()
11081108
1109 def _get_root_node(self):1109 def _get_root_node(self):
1110 """Get the root node for this volume."""1110 """Get the root node for this volume."""
@@ -1142,7 +1142,7 @@
1142 """Make sure the share is still good."""1142 """Make sure the share is still good."""
1143 if self.share:1143 if self.share:
1144 # if this is a share, make sure it's still valid1144 # if this is a share, make sure it's still valid
1145 store = get_storage_store()1145 store = get_filesync_store()
1146 share = store.find(1146 share = store.find(
1147 model.Share,1147 model.Share,
1148 model.Share.id == self.share.id,1148 model.Share.id == self.share.id,
@@ -2214,10 +2214,10 @@
22142214
2215def fix_all_udfs_with_generation_out_of_sync(2215def fix_all_udfs_with_generation_out_of_sync(
2216 logger, sleep=0, dry_run=False, batch_size=500):2216 logger, sleep=0, dry_run=False, batch_size=500):
2217 from backends.filesync.data.dbmanager import get_storage_store2217 from backends.filesync.data.dbmanager import get_filesync_store
2218 if dry_run:2218 if dry_run:
2219 logger.info("Dry-run enabled; not committing any changes.")2219 logger.info("Dry-run enabled; not committing any changes.")
2220 store = get_storage_store()2220 store = get_filesync_store()
2221 query = "SELECT id FROM StorageUser"2221 query = "SELECT id FROM StorageUser"
2222 user_ids = [row[0] for row in store.execute(query)]2222 user_ids = [row[0] for row in store.execute(query)]
2223 start = time.time()2223 start = time.time()
22242224
=== modified file 'src/backends/filesync/data/testing/ormtestcase.py'
--- src/backends/filesync/data/testing/ormtestcase.py 2015-08-29 00:03:11 +0000
+++ src/backends/filesync/data/testing/ormtestcase.py 2015-09-05 01:43:14 +0000
@@ -20,7 +20,7 @@
20import uuid20import uuid
2121
22from backends.filesync.data import model22from backends.filesync.data import model
23from backends.filesync.data.dbmanager import get_storage_store, filesync_tm23from backends.filesync.data.dbmanager import get_filesync_store, filesync_tm
24from backends.filesync.data.testing.testcase import DAOObjectFactory24from backends.filesync.data.testing.testcase import DAOObjectFactory
25from backends.filesync.data.testing.testdata import get_fake_hash25from backends.filesync.data.testing.testdata import get_fake_hash
2626
@@ -178,4 +178,4 @@
178 @property178 @property
179 def store(self):179 def store(self):
180 """Get the store, dont cache, threading issues may arise"""180 """Get the store, dont cache, threading issues may arise"""
181 return get_storage_store()181 return get_filesync_store()
182182
=== modified file 'src/backends/filesync/data/testing/testcase.py'
--- src/backends/filesync/data/testing/testcase.py 2015-08-17 00:09:45 +0000
+++ src/backends/filesync/data/testing/testcase.py 2015-09-05 01:43:14 +0000
@@ -23,7 +23,7 @@
2323
24from backends.filesync.data import utils, filesync_tm24from backends.filesync.data import utils, filesync_tm
25from backends.filesync.data.gateway import SystemGateway25from backends.filesync.data.gateway import SystemGateway
26from backends.filesync.data.dbmanager import get_storage_store26from backends.filesync.data.dbmanager import get_filesync_store
27from backends.filesync.data.testing.testdata import get_fake_hash27from backends.filesync.data.testing.testdata import get_fake_hash
28from backends.testing.testcase import DatabaseResourceTestCase28from backends.testing.testcase import DatabaseResourceTestCase
2929
@@ -35,7 +35,7 @@
35 """Set up."""35 """Set up."""
36 super(StorageDALTestCase, self).setUp()36 super(StorageDALTestCase, self).setUp()
37 self.obj_factory = DAOObjectFactory()37 self.obj_factory = DAOObjectFactory()
38 self.store = get_storage_store()38 self.store = get_filesync_store()
39 self.save_utils_set_public_uuid = utils.set_public_uuid39 self.save_utils_set_public_uuid = utils.set_public_uuid
4040
41 def tearDown(self):41 def tearDown(self):
4242
=== modified file 'src/backends/filesync/data/tests/test_dao.py'
--- src/backends/filesync/data/tests/test_dao.py 2015-08-29 00:03:11 +0000
+++ src/backends/filesync/data/tests/test_dao.py 2015-09-05 01:43:14 +0000
@@ -33,7 +33,7 @@
33from backends.filesync.data.testing.testdata import (33from backends.filesync.data.testing.testdata import (
34 get_test_contentblob, get_fake_hash)34 get_test_contentblob, get_fake_hash)
35from backends.filesync.data import model, dao, errors, services, utils35from backends.filesync.data import model, dao, errors, services, utils
36from backends.filesync.data.dbmanager import get_storage_store36from backends.filesync.data.dbmanager import get_filesync_store
3737
3838
39class DAOInitTestCase(TestCase):39class DAOInitTestCase(TestCase):
@@ -1460,7 +1460,7 @@
14601460
1461 def _flush_store(self):1461 def _flush_store(self):
1462 """Flushes the store used in tests."""1462 """Flushes the store used in tests."""
1463 get_storage_store().flush()1463 get_filesync_store().flush()
14641464
1465 def _create_directory_with_five_files(self):1465 def _create_directory_with_five_files(self):
1466 """Creates a DirectoryNode with 5 files inside it."""1466 """Creates a DirectoryNode with 5 files inside it."""
14671467
=== modified file 'src/backends/filesync/data/tests/test_gateway.py'
--- src/backends/filesync/data/tests/test_gateway.py 2015-08-29 00:03:11 +0000
+++ src/backends/filesync/data/tests/test_gateway.py 2015-09-05 01:43:14 +0000
@@ -42,7 +42,7 @@
42 timing_metric,42 timing_metric,
43)43)
44from backends.filesync.data.dbmanager import (44from backends.filesync.data.dbmanager import (
45 get_storage_store, filesync_tm as transaction)45 get_filesync_store, filesync_tm as transaction)
46from backends.filesync.data import dao, errors, model, utils46from backends.filesync.data import dao, errors, model, utils
47from backends.filesync.data.testing.testdata import (47from backends.filesync.data.testing.testdata import (
48 get_fake_hash, get_test_contentblob)48 get_fake_hash, get_test_contentblob)
@@ -263,7 +263,7 @@
263 def test_handle_node_change_with_shares(self):263 def test_handle_node_change_with_shares(self):
264 """Test the handle_node_change."""264 """Test the handle_node_change."""
265 self.setup_shares()265 self.setup_shares()
266 node = get_storage_store().get(model.StorageObject, self.d3.id)266 node = get_filesync_store().get(model.StorageObject, self.d3.id)
267 self.vgw.handle_node_change(node)267 self.vgw.handle_node_change(node)
268 transaction.commit()268 transaction.commit()
269 self.assertIn(VolumeNewGeneration(self.user.id, None, node.269 self.assertIn(VolumeNewGeneration(self.user.id, None, node.
@@ -282,7 +282,7 @@
282 def test_handle_node_change_from_share(self):282 def test_handle_node_change_from_share(self):
283 """Test the handle_node_change."""283 """Test the handle_node_change."""
284 self.setup_shares()284 self.setup_shares()
285 node = get_storage_store().get(model.StorageObject, self.d3.id)285 node = get_filesync_store().get(model.StorageObject, self.d3.id)
286 share = self.user1.get_share(self.share1.id)286 share = self.user1.get_share(self.share1.id)
287 vgw = ReadWriteVolumeGateway(self.user1, share=share)287 vgw = ReadWriteVolumeGateway(self.user1, share=share)
288 vgw.handle_node_change(node)288 vgw.handle_node_change(node)
@@ -315,7 +315,7 @@
315 """Make sure make_file with magic content sends a notification."""315 """Make sure make_file with magic content sends a notification."""
316 cb = get_test_contentblob("FakeContent")316 cb = get_test_contentblob("FakeContent")
317 cb.magic_hash = 'magic'317 cb.magic_hash = 'magic'
318 get_storage_store().add(cb)318 get_filesync_store().add(cb)
319 f = self.vgw.make_file(self.root.id, u"filename", hash=cb.hash,319 f = self.vgw.make_file(self.root.id, u"filename", hash=cb.hash,
320 magic_hash='magic')320 magic_hash='magic')
321 transaction.commit()321 transaction.commit()
@@ -548,7 +548,7 @@
548 self.assertEqual(user.username, u"username")548 self.assertEqual(user.username, u"username")
549 self.assertEqual(user.visible_name, u"Visible Name")549 self.assertEqual(user.visible_name, u"Visible Name")
550 self.assertEqual(user._subscription_status, model.STATUS_LIVE)550 self.assertEqual(user._subscription_status, model.STATUS_LIVE)
551 store = get_storage_store()551 store = get_filesync_store()
552 info = store.get(model.StorageUserInfo, 1)552 info = store.get(model.StorageUserInfo, 1)
553 self.assertEqual(info.max_storage_bytes, 1)553 self.assertEqual(info.max_storage_bytes, 1)
554 root = model.StorageObject.get_root(store, user.id)554 root = model.StorageObject.get_root(store, user.id)
@@ -561,7 +561,7 @@
561 self.gw.create_or_update_user(561 self.gw.create_or_update_user(
562 1, u"username", u"Visible Name", 1)562 1, u"username", u"Visible Name", 1)
563 # update the user info.563 # update the user info.
564 usr = get_storage_store().get(model.StorageUser, 1)564 usr = get_filesync_store().get(model.StorageUser, 1)
565 usr.status = model.STATUS_DEAD565 usr.status = model.STATUS_DEAD
566 usr.subscription_status = model.STATUS_DEAD566 usr.subscription_status = model.STATUS_DEAD
567 transaction.commit()567 transaction.commit()
@@ -579,7 +579,7 @@
579 def test_get_shareoffer(self):579 def test_get_shareoffer(self):
580 """Test get_shareoffer."""580 """Test get_shareoffer."""
581 user1 = self.create_user(id=1, username=u"sharer")581 user1 = self.create_user(id=1, username=u"sharer")
582 store = get_storage_store()582 store = get_filesync_store()
583 root = model.StorageObject.get_root(store, user1.id)583 root = model.StorageObject.get_root(store, user1.id)
584 share = model.Share(user1.id, root.id, None, u"Share", "View",584 share = model.Share(user1.id, root.id, None, u"Share", "View",
585 email="fake@example.com")585 email="fake@example.com")
@@ -614,7 +614,7 @@
614 """614 """
615 # setup the share_offer615 # setup the share_offer
616 user1 = self.create_user(id=1, username=u"sharer")616 user1 = self.create_user(id=1, username=u"sharer")
617 store = get_storage_store()617 store = get_filesync_store()
618 root = model.StorageObject.get_root(store, user1.id)618 root = model.StorageObject.get_root(store, user1.id)
619 share = model.Share(user1.id, root.id, None, u"Share", "View",619 share = model.Share(user1.id, root.id, None, u"Share", "View",
620 email="fake@example.com")620 email="fake@example.com")
@@ -660,7 +660,7 @@
660 """Test that the claim_shareoffer function works properly."""660 """Test that the claim_shareoffer function works properly."""
661 # setup the share_offer661 # setup the share_offer
662 user1 = self.create_user(id=1, username=u"sharer")662 user1 = self.create_user(id=1, username=u"sharer")
663 store = get_storage_store()663 store = get_filesync_store()
664 root = model.StorageObject.get_root(store, user1.id)664 root = model.StorageObject.get_root(store, user1.id)
665 share = model.Share(user1.id, root.id, None, u"Share", "View",665 share = model.Share(user1.id, root.id, None, u"Share", "View",
666 email="fake@example.com")666 email="fake@example.com")
@@ -669,7 +669,7 @@
669 # user 2 does not exist669 # user 2 does not exist
670 self.gw.claim_shareoffer(2, u"sharee", u"Sharee", share.id)670 self.gw.claim_shareoffer(2, u"sharee", u"Sharee", share.id)
671 user2 = self.gw.get_user(2)671 user2 = self.gw.get_user(2)
672 store = get_storage_store()672 store = get_filesync_store()
673 root2 = model.StorageObject.get_root(store, user2.id)673 root2 = model.StorageObject.get_root(store, user2.id)
674 self.assertTrue(root2 is not None)674 self.assertTrue(root2 is not None)
675 self.assertEqual(user2.is_active, False)675 self.assertEqual(user2.is_active, False)
@@ -682,7 +682,7 @@
682 user = self.gw.create_or_update_user(682 user = self.gw.create_or_update_user(
683 1, u"username", u"Visible Name", 1)683 1, u"username", u"Visible Name", 1)
684 udf = model.UserVolume.create(684 udf = model.UserVolume.create(
685 get_storage_store(), user.id, u"~/path/name")685 get_filesync_store(), user.id, u"~/path/name")
686 dl_url = u"http://download/url"686 dl_url = u"http://download/url"
687 download = self.gw.make_download(687 download = self.gw.make_download(
688 user.id, udf.id, u"path", dl_url)688 user.id, udf.id, u"path", dl_url)
@@ -698,7 +698,7 @@
698 user = self.gw.create_or_update_user(698 user = self.gw.create_or_update_user(
699 1, u"username", u"Visible Name", 1)699 1, u"username", u"Visible Name", 1)
700 udf = model.UserVolume.create(700 udf = model.UserVolume.create(
701 get_storage_store(), user.id, u"~/path/name")701 get_filesync_store(), user.id, u"~/path/name")
702 download = self.gw.make_download(702 download = self.gw.make_download(
703 user.id, udf.id, u"path", u"http://download/url", ["key"])703 user.id, udf.id, u"path", u"http://download/url", ["key"])
704 self.assertTrue(isinstance(download, dao.Download))704 self.assertTrue(isinstance(download, dao.Download))
@@ -715,7 +715,7 @@
715 user = self.gw.create_or_update_user(715 user = self.gw.create_or_update_user(
716 1, u"username", u"Visible Name", 1)716 1, u"username", u"Visible Name", 1)
717 udf = model.UserVolume.create(717 udf = model.UserVolume.create(
718 get_storage_store(), user.id, u"~/path/name")718 get_filesync_store(), user.id, u"~/path/name")
719 download = self.gw.make_download(719 download = self.gw.make_download(
720 user.id, udf.id, u"path", u"http://download/url")720 user.id, udf.id, u"path", u"http://download/url")
721721
@@ -728,7 +728,7 @@
728 user = self.gw.create_or_update_user(728 user = self.gw.create_or_update_user(
729 1, u"username", u"Visible Name", 1)729 1, u"username", u"Visible Name", 1)
730 udf = model.UserVolume.create(730 udf = model.UserVolume.create(
731 get_storage_store(), user.id, u"~/path/name")731 get_filesync_store(), user.id, u"~/path/name")
732 download_url = u"http://download/url"732 download_url = u"http://download/url"
733 file_path = u"path"733 file_path = u"path"
734 download_id = uuid.uuid4()734 download_id = uuid.uuid4()
@@ -738,7 +738,7 @@
738 SQL = """INSERT INTO Download (id, owner_id, file_path, download_url,738 SQL = """INSERT INTO Download (id, owner_id, file_path, download_url,
739 volume_id, status, status_change_date)739 volume_id, status, status_change_date)
740 VALUES (?, ?, ?, ?, ?, 'Complete', now())"""740 VALUES (?, ?, ?, ?, ?, 'Complete', now())"""
741 get_storage_store().execute(741 get_filesync_store().execute(
742 SQL, (download_id, user.id, file_path, download_url, udf.id))742 SQL, (download_id, user.id, file_path, download_url, udf.id))
743743
744 download = self.gw.get_download(744 download = self.gw.get_download(
@@ -751,7 +751,7 @@
751 user = self.gw.create_or_update_user(751 user = self.gw.create_or_update_user(
752 1, u"username", u"Visible Name", 1)752 1, u"username", u"Visible Name", 1)
753 udf = model.UserVolume.create(753 udf = model.UserVolume.create(
754 get_storage_store(), user.id, u"~/path/name")754 get_filesync_store(), user.id, u"~/path/name")
755755
756 file_path = u"path"756 file_path = u"path"
757 download_key = u"mydownloadkey"757 download_key = u"mydownloadkey"
@@ -775,7 +775,7 @@
775 user = self.gw.create_or_update_user(775 user = self.gw.create_or_update_user(
776 1, u"username", u"Visible Name", 1)776 1, u"username", u"Visible Name", 1)
777 udf = model.UserVolume.create(777 udf = model.UserVolume.create(
778 get_storage_store(), user.id, u"~/path/name")778 get_filesync_store(), user.id, u"~/path/name")
779 key = ["some", "key"]779 key = ["some", "key"]
780 download = self.gw.make_download(780 download = self.gw.make_download(
781 user.id, udf.id, u"path", u"http://download/url", key)781 user.id, udf.id, u"path", u"http://download/url", key)
@@ -791,7 +791,7 @@
791 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",791 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
792 1)792 1)
793 udf = model.UserVolume.create(793 udf = model.UserVolume.create(
794 get_storage_store(), user.id, u"~/path/name")794 get_filesync_store(), user.id, u"~/path/name")
795 key = ["some", "key"]795 key = ["some", "key"]
796 download = self.gw.make_download(796 download = self.gw.make_download(
797 user.id, udf.id, u"path", u"http://download/url/1", key)797 user.id, udf.id, u"path", u"http://download/url/1", key)
@@ -808,7 +808,7 @@
808 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",808 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
809 1)809 1)
810 udf = model.UserVolume.create(810 udf = model.UserVolume.create(
811 get_storage_store(), user.id, u"~/path/name")811 get_filesync_store(), user.id, u"~/path/name")
812 download = self.gw.make_download(812 download = self.gw.make_download(
813 user.id, udf.id, u"path", u"http://download/url")813 user.id, udf.id, u"path", u"http://download/url")
814814
@@ -820,7 +820,7 @@
820 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",820 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
821 1)821 1)
822 udf = model.UserVolume.create(822 udf = model.UserVolume.create(
823 get_storage_store(), user.id, u"~/spath/name")823 get_filesync_store(), user.id, u"~/spath/name")
824 download = self.gw.make_download(824 download = self.gw.make_download(
825 user.id, udf.id, u"path", u"http://download/url")825 user.id, udf.id, u"path", u"http://download/url")
826 new_download = self.gw.update_download(826 new_download = self.gw.update_download(
@@ -834,7 +834,7 @@
834 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",834 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
835 1)835 1)
836 udf = model.UserVolume.create(836 udf = model.UserVolume.create(
837 get_storage_store(), user.id, u"~/path/name")837 get_filesync_store(), user.id, u"~/path/name")
838 download = self.gw.make_download(838 download = self.gw.make_download(
839 user.id, udf.id, u"path", u"http://download/url")839 user.id, udf.id, u"path", u"http://download/url")
840 a_file = udf.root_node.make_file(u"TheName")840 a_file = udf.root_node.make_file(u"TheName")
@@ -849,7 +849,7 @@
849 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",849 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
850 1)850 1)
851 udf = model.UserVolume.create(851 udf = model.UserVolume.create(
852 get_storage_store(), user.id, u"~/path/name")852 get_filesync_store(), user.id, u"~/path/name")
853 download = self.gw.make_download(853 download = self.gw.make_download(
854 user.id, udf.id, u"path", u"http://download/url")854 user.id, udf.id, u"path", u"http://download/url")
855 new_download = self.gw.update_download(855 new_download = self.gw.update_download(
@@ -878,7 +878,7 @@
878 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",878 user = self.gw.create_or_update_user(1, u"username", u"Visible Name",
879 1)879 1)
880 sgw = SystemGateway()880 sgw = SystemGateway()
881 storage_store = get_storage_store()881 storage_store = get_filesync_store()
882 root = model.StorageObject.get_root(storage_store, user.id)882 root = model.StorageObject.get_root(storage_store, user.id)
883 node = root.make_file(u"TheName")883 node = root.make_file(u"TheName")
884 node._content_hash = model.EMPTY_CONTENT_HASH884 node._content_hash = model.EMPTY_CONTENT_HASH
@@ -925,7 +925,7 @@
925 multipart_id=str(uuid.uuid4()),925 multipart_id=str(uuid.uuid4()),
926 multipart_key=uuid.uuid4())926 multipart_key=uuid.uuid4())
927 # change the when_started date for the test.927 # change the when_started date for the test.
928 store = get_storage_store()928 store = get_filesync_store()
929 uploadjob = store.get(model.UploadJob, up1.id)929 uploadjob = store.get(model.UploadJob, up1.id)
930 uploadjob.when_last_active = (930 uploadjob.when_last_active = (
931 datetime.datetime.now() - datetime.timedelta(uid))931 datetime.datetime.now() - datetime.timedelta(uid))
@@ -954,7 +954,7 @@
954 multipart_id=str(uuid.uuid4()),954 multipart_id=str(uuid.uuid4()),
955 multipart_key=uuid.uuid4())955 multipart_key=uuid.uuid4())
956 # change the when_started date for the test.956 # change the when_started date for the test.
957 store = get_storage_store()957 store = get_filesync_store()
958 uploadjob = store.get(model.UploadJob, up1.id)958 uploadjob = store.get(model.UploadJob, up1.id)
959 uploadjob.when_last_active = (959 uploadjob.when_last_active = (
960 datetime.datetime.now() - datetime.timedelta(10))960 datetime.datetime.now() - datetime.timedelta(10))
@@ -1193,7 +1193,7 @@
1193 self.assertEqual(quota.max_storage_bytes, 2)1193 self.assertEqual(quota.max_storage_bytes, 2)
1194 self.assertEqual(user2._subscription_status, model.STATUS_LIVE)1194 self.assertEqual(user2._subscription_status, model.STATUS_LIVE)
1195 # make sure the StorageUserInfo is updated as well1195 # make sure the StorageUserInfo is updated as well
1196 store = get_storage_store()1196 store = get_filesync_store()
1197 info = store.get(model.StorageUserInfo, user2.id)1197 info = store.get(model.StorageUserInfo, user2.id)
1198 self.assertEqual(info.max_storage_bytes, 2)1198 self.assertEqual(info.max_storage_bytes, 2)
11991199
@@ -1218,7 +1218,7 @@
1218 def test_accept_share(self):1218 def test_accept_share(self):
1219 """Test accepting a direct share."""1219 """Test accepting a direct share."""
1220 user1 = self.create_user(id=2, username=u"sharer")1220 user1 = self.create_user(id=2, username=u"sharer")
1221 store = get_storage_store()1221 store = get_filesync_store()
1222 root = model.StorageObject.get_root(store, user1.id)1222 root = model.StorageObject.get_root(store, user1.id)
1223 share = model.Share(user1.id, root.id, self.user.id, u"Share", "View")1223 share = model.Share(user1.id, root.id, self.user.id, u"Share", "View")
1224 self.store.add(share)1224 self.store.add(share)
@@ -1238,7 +1238,7 @@
1238 def test_decline_share(self):1238 def test_decline_share(self):
1239 """Test declinet a direct share."""1239 """Test declinet a direct share."""
1240 user1 = self.create_user(id=2, username=u"sharer")1240 user1 = self.create_user(id=2, username=u"sharer")
1241 store = get_storage_store()1241 store = get_filesync_store()
1242 root = model.StorageObject.get_root(store, user1.id)1242 root = model.StorageObject.get_root(store, user1.id)
1243 share = model.Share(user1.id, root.id, self.user.id, u"Share", "View")1243 share = model.Share(user1.id, root.id, self.user.id, u"Share", "View")
1244 self.store.add(share)1244 self.store.add(share)
@@ -1260,7 +1260,7 @@
1260 def test_delete_share(self):1260 def test_delete_share(self):
1261 """Test delete shares from share-er and share-ee"""1261 """Test delete shares from share-er and share-ee"""
1262 user1 = self.create_user(id=2, username=u"sharer")1262 user1 = self.create_user(id=2, username=u"sharer")
1263 store = get_storage_store()1263 store = get_filesync_store()
1264 root = model.StorageObject.get_root(store, user1.id)1264 root = model.StorageObject.get_root(store, user1.id)
1265 share = model.Share(self.user.id, root.id, user1.id,1265 share = model.Share(self.user.id, root.id, user1.id,
1266 u"Share", "View")1266 u"Share", "View")
@@ -1425,7 +1425,7 @@
1425 usera = self.create_user(id=2, username=u"sharee1")1425 usera = self.create_user(id=2, username=u"sharee1")
1426 userb = self.create_user(id=3, username=u"sharee2")1426 userb = self.create_user(id=3, username=u"sharee2")
1427 userc = self.create_user(id=4, username=u"sharee3")1427 userc = self.create_user(id=4, username=u"sharee3")
1428 store = get_storage_store()1428 store = get_filesync_store()
1429 vgw = self.gw.get_root_gateway()1429 vgw = self.gw.get_root_gateway()
1430 dir1 = vgw.make_subdirectory(vgw.get_root().id, u"shared1")1430 dir1 = vgw.make_subdirectory(vgw.get_root().id, u"shared1")
1431 dir2 = vgw.make_subdirectory(dir1.id, u"shared2")1431 dir2 = vgw.make_subdirectory(dir1.id, u"shared2")
@@ -1460,7 +1460,7 @@
1460 usera = self.create_user(id=2, username=u"sharee1")1460 usera = self.create_user(id=2, username=u"sharee1")
1461 sharea = vgw.make_share(dir1.id, u"sharea", user_id=usera.id)1461 sharea = vgw.make_share(dir1.id, u"sharea", user_id=usera.id)
1462 usera._gateway.accept_share(sharea.id)1462 usera._gateway.accept_share(sharea.id)
1463 store = get_storage_store()1463 store = get_filesync_store()
1464 dir1 = store.get(model.StorageObject, dir1.id)1464 dir1 = store.get(model.StorageObject, dir1.id)
1465 self.user._gateway.delete_related_shares(dir1)1465 self.user._gateway.delete_related_shares(dir1)
1466 self.assertRaises(1466 self.assertRaises(
@@ -1473,7 +1473,7 @@
1473 self.assertEqual(dls, [])1473 self.assertEqual(dls, [])
1474 sysgw = SystemGateway()1474 sysgw = SystemGateway()
1475 udf = model.UserVolume.create(1475 udf = model.UserVolume.create(
1476 get_storage_store(),1476 get_filesync_store(),
1477 self.user.id, u"~/path/name")1477 self.user.id, u"~/path/name")
1478 dl_url = u"http://download/url"1478 dl_url = u"http://download/url"
1479 found_urls = {}1479 found_urls = {}
@@ -1492,7 +1492,7 @@
1492 def test_get_public_files(self):1492 def test_get_public_files(self):
1493 """Test get_public_files method."""1493 """Test get_public_files method."""
1494 vgw = self.gw.get_root_gateway()1494 vgw = self.gw.get_root_gateway()
1495 storage_store = get_storage_store()1495 storage_store = get_filesync_store()
1496 root = model.StorageObject.get_root(storage_store, self.user.id)1496 root = model.StorageObject.get_root(storage_store, self.user.id)
1497 node = root.make_file(u"TheName")1497 node = root.make_file(u"TheName")
1498 node._content_hash = model.EMPTY_CONTENT_HASH1498 node._content_hash = model.EMPTY_CONTENT_HASH
@@ -1533,7 +1533,7 @@
1533 def test_get_public_folders(self):1533 def test_get_public_folders(self):
1534 """Test get_public_folders method."""1534 """Test get_public_folders method."""
1535 vgw = self.gw.get_root_gateway()1535 vgw = self.gw.get_root_gateway()
1536 storage_store = get_storage_store()1536 storage_store = get_filesync_store()
1537 root = model.StorageObject.get_root(storage_store, self.user.id)1537 root = model.StorageObject.get_root(storage_store, self.user.id)
1538 node = root.make_subdirectory(u'test_dir')1538 node = root.make_subdirectory(u'test_dir')
1539 vgw.change_public_access(node.id, True, allow_directory=True)1539 vgw.change_public_access(node.id, True, allow_directory=True)
@@ -1552,7 +1552,7 @@
1552 def test_get_share_generation(self):1552 def test_get_share_generation(self):
1553 """Test the get_share_generation method."""1553 """Test the get_share_generation method."""
1554 user1 = self.create_user(id=2, username=u"sharer")1554 user1 = self.create_user(id=2, username=u"sharer")
1555 store = get_storage_store()1555 store = get_filesync_store()
1556 root = model.StorageObject.get_root(store, user1.id)1556 root = model.StorageObject.get_root(store, user1.id)
1557 share = model.Share(self.user.id, root.id, user1.id,1557 share = model.Share(self.user.id, root.id, user1.id,
1558 u"Share", "View")1558 u"Share", "View")
@@ -1568,7 +1568,7 @@
1568 def test_get_share_generation_None(self):1568 def test_get_share_generation_None(self):
1569 """Test the get_share_generation method."""1569 """Test the get_share_generation method."""
1570 user1 = self.create_user(id=2, username=u"sharer")1570 user1 = self.create_user(id=2, username=u"sharer")
1571 store = get_storage_store()1571 store = get_filesync_store()
1572 root = model.StorageObject.get_root(store, user1.id)1572 root = model.StorageObject.get_root(store, user1.id)
1573 share = model.Share(self.user.id, root.id, user1.id,1573 share = model.Share(self.user.id, root.id, user1.id,
1574 u"Share", "View")1574 u"Share", "View")
@@ -1621,7 +1621,7 @@
1621 """Test update_content will reuse owned content."""1621 """Test update_content will reuse owned content."""
1622 hash_value = get_fake_hash()1622 hash_value = get_fake_hash()
1623 node = self._make_file_with_content(hash_value)1623 node = self._make_file_with_content(hash_value)
1624 get_storage_store().find(1624 get_filesync_store().find(
1625 model.ContentBlob,1625 model.ContentBlob,
1626 model.ContentBlob.hash == node.content_hash1626 model.ContentBlob.hash == node.content_hash
1627 ).set(magic_hash='magic')1627 ).set(magic_hash='magic')
@@ -1650,7 +1650,7 @@
16501650
1651 hash_value = get_fake_hash()1651 hash_value = get_fake_hash()
1652 node = self._make_file_with_content(hash_value, gw=user2._gateway)1652 node = self._make_file_with_content(hash_value, gw=user2._gateway)
1653 get_storage_store().find(1653 get_filesync_store().find(
1654 model.ContentBlob,1654 model.ContentBlob,
1655 model.ContentBlob.hash == node.content_hash1655 model.ContentBlob.hash == node.content_hash
1656 ).set(magic_hash='magic')1656 ).set(magic_hash='magic')
@@ -1678,7 +1678,7 @@
1678 """Test update_content will reuse owned content."""1678 """Test update_content will reuse owned content."""
1679 hash_value = get_fake_hash()1679 hash_value = get_fake_hash()
1680 node = self._make_file_with_content(hash_value)1680 node = self._make_file_with_content(hash_value)
1681 get_storage_store().find(1681 get_filesync_store().find(
1682 model.ContentBlob,1682 model.ContentBlob,
1683 model.ContentBlob.hash == node.content_hash1683 model.ContentBlob.hash == node.content_hash
1684 ).set(magic_hash='magic')1684 ).set(magic_hash='magic')
@@ -1706,7 +1706,7 @@
17061706
1707 hash_value = get_fake_hash()1707 hash_value = get_fake_hash()
1708 node = self._make_file_with_content(hash_value, gw=user2._gateway)1708 node = self._make_file_with_content(hash_value, gw=user2._gateway)
1709 get_storage_store().find(1709 get_filesync_store().find(
1710 model.ContentBlob,1710 model.ContentBlob,
1711 model.ContentBlob.hash == node.content_hash1711 model.ContentBlob.hash == node.content_hash
1712 ).set(magic_hash='magic')1712 ).set(magic_hash='magic')
@@ -1767,7 +1767,7 @@
1767 max_storage_bytes=200)1767 max_storage_bytes=200)
1768 self.user = self.gw.get_user(user.id)1768 self.user = self.gw.get_user(user.id)
1769 self.user_quota = self.user._gateway.get_quota()1769 self.user_quota = self.user._gateway.get_quota()
1770 self.storage_store = get_storage_store()1770 self.storage_store = get_filesync_store()
1771 self.vgw = self.user._gateway.get_root_gateway()1771 self.vgw = self.user._gateway.get_root_gateway()
1772 self.root = self.vgw.get_root()1772 self.root = self.vgw.get_root()
17731773
@@ -2413,7 +2413,7 @@
2413 max_storage_bytes=200)2413 max_storage_bytes=200)
2414 self.user = self.gw.get_user(user.id)2414 self.user = self.gw.get_user(user.id)
2415 self.user_quota = self.user._gateway.get_quota()2415 self.user_quota = self.user._gateway.get_quota()
2416 self.storage_store = get_storage_store()2416 self.storage_store = get_filesync_store()
2417 self.setup_volume()2417 self.setup_volume()
24182418
2419 def setup_volume(self):2419 def setup_volume(self):
@@ -2431,7 +2431,7 @@
2431 def tweak_users_quota(self, user_id, max_bytes, used_bytes=0):2431 def tweak_users_quota(self, user_id, max_bytes, used_bytes=0):
2432 """Utility to toy with the user's quota."""2432 """Utility to toy with the user's quota."""
2433 self.gw.get_user(user_id)2433 self.gw.get_user(user_id)
2434 store = get_storage_store()2434 store = get_filesync_store()
2435 store.find(2435 store.find(
2436 model.StorageUserInfo,2436 model.StorageUserInfo,
2437 model.StorageUserInfo.id == user_id2437 model.StorageUserInfo.id == user_id
@@ -2669,7 +2669,7 @@
2669 """Test make_file method."""2669 """Test make_file method."""
2670 cb = get_test_contentblob("FakeContent")2670 cb = get_test_contentblob("FakeContent")
2671 cb.magic_hash = 'magic'2671 cb.magic_hash = 'magic'
2672 get_storage_store().add(cb)2672 get_filesync_store().add(cb)
2673 # make enough room2673 # make enough room
2674 self.tweak_users_quota(self.owner.id, cb.deflated_size)2674 self.tweak_users_quota(self.owner.id, cb.deflated_size)
2675 node = self.vgw.make_file(self.root.id, u"the file name",2675 node = self.vgw.make_file(self.root.id, u"the file name",
@@ -2681,7 +2681,7 @@
2681 # make a content blob with a magic hash2681 # make a content blob with a magic hash
2682 cb = get_test_contentblob("FakeContent")2682 cb = get_test_contentblob("FakeContent")
2683 cb.magic_hash = 'magic'2683 cb.magic_hash = 'magic'
2684 get_storage_store().add(cb)2684 get_filesync_store().add(cb)
2685 self.assertRaises(errors.HashMismatch,2685 self.assertRaises(errors.HashMismatch,
2686 self.vgw.make_file, self.root.id, u"name.txt",2686 self.vgw.make_file, self.root.id, u"name.txt",
2687 hash="wronghash")2687 hash="wronghash")
@@ -3414,7 +3414,7 @@
3414 self.user_quota = self.user._gateway.get_quota()3414 self.user_quota = self.user._gateway.get_quota()
3415 self.owner = self.user3415 self.owner = self.user
3416 self.owner_quota = self.user_quota3416 self.owner_quota = self.user_quota
3417 self.storage_store = get_storage_store()3417 self.storage_store = get_filesync_store()
3418 # make a test file using storm3418 # make a test file using storm
3419 udf = model.UserVolume.create(3419 udf = model.UserVolume.create(
3420 self.storage_store, self.user.id, u"~/thepath/thename")3420 self.storage_store, self.user.id, u"~/thepath/thename")
@@ -3440,7 +3440,7 @@
3440 id=2, username=u"sharer", max_storage_bytes=200)3440 id=2, username=u"sharer", max_storage_bytes=200)
3441 self.owner = sharer3441 self.owner = sharer
3442 self.owner_quota = sharer._gateway.get_quota()3442 self.owner_quota = sharer._gateway.get_quota()
3443 self.storage_store = get_storage_store()3443 self.storage_store = get_filesync_store()
3444 root = model.StorageObject.get_root(self.storage_store, sharer.id)3444 root = model.StorageObject.get_root(self.storage_store, sharer.id)
3445 rw_node = root.make_subdirectory(u"WriteMe")3445 rw_node = root.make_subdirectory(u"WriteMe")
3446 transaction.commit()3446 transaction.commit()
@@ -3476,7 +3476,7 @@
3476 self.gw = SystemGateway()3476 self.gw = SystemGateway()
3477 user = self.create_user(username=u"testuser")3477 user = self.create_user(username=u"testuser")
3478 self.user = self.gw.get_user(user.id, session_id="QWERTY")3478 self.user = self.gw.get_user(user.id, session_id="QWERTY")
3479 self.storage_store = get_storage_store()3479 self.storage_store = get_filesync_store()
3480 # make a test file3480 # make a test file
3481 vgw = self.user._gateway.get_root_gateway()3481 vgw = self.user._gateway.get_root_gateway()
3482 root = self.storage_store.get(model.StorageObject, vgw.get_root().id)3482 root = self.storage_store.get(model.StorageObject, vgw.get_root().id)
@@ -3568,7 +3568,7 @@
3568 self.gw = SystemGateway()3568 self.gw = SystemGateway()
3569 user = self.create_user(username=u"testuser")3569 user = self.create_user(username=u"testuser")
3570 self.user = self.gw.get_user(user.id, session_id="QWERTY")3570 self.user = self.gw.get_user(user.id, session_id="QWERTY")
3571 self.storage_store = get_storage_store()3571 self.storage_store = get_filesync_store()
3572 # make a test file using storm3572 # make a test file using storm
3573 self.udf = model.UserVolume.create(3573 self.udf = model.UserVolume.create(
3574 self.storage_store, self.user.id, u"~/thepath/thename")3574 self.storage_store, self.user.id, u"~/thepath/thename")
@@ -3692,10 +3692,10 @@
3692 self.gw = SystemGateway()3692 self.gw = SystemGateway()
3693 user = self.create_user(username=u"testuser")3693 user = self.create_user(username=u"testuser")
3694 self.user = self.gw.get_user(user.id, session_id="QWERTY")3694 self.user = self.gw.get_user(user.id, session_id="QWERTY")
3695 self.storage_store = get_storage_store()3695 self.storage_store = get_filesync_store()
3696 self.sharer = self.create_user(id=2, username=u"sharer")3696 self.sharer = self.create_user(id=2, username=u"sharer")
3697 self.othersharee = self.create_user(id=3, username=u"sharee")3697 self.othersharee = self.create_user(id=3, username=u"sharee")
3698 store = get_storage_store()3698 store = get_filesync_store()
3699 root = model.StorageObject.get_root(store, self.sharer.id)3699 root = model.StorageObject.get_root(store, self.sharer.id)
3700 self.r_node = root.make_subdirectory(u"NoWrite")3700 self.r_node = root.make_subdirectory(u"NoWrite")
3701 self.file = self.r_node.make_file(u"A File for uploads")3701 self.file = self.r_node.make_file(u"A File for uploads")
@@ -4190,7 +4190,7 @@
4190 def setUp(self):4190 def setUp(self):
4191 super(GenerationsTestCase, self).setUp()4191 super(GenerationsTestCase, self).setUp()
4192 self.user = self.create_user(username=u"testuser")4192 self.user = self.create_user(username=u"testuser")
4193 self.storage_store = get_storage_store()4193 self.storage_store = get_filesync_store()
4194 # make a test file4194 # make a test file
4195 self.ugw = StorageUserGateway(self.user)4195 self.ugw = StorageUserGateway(self.user)
4196 self.vgw = self.ugw.get_root_gateway()4196 self.vgw = self.ugw.get_root_gateway()
41974197
=== modified file 'src/backends/testing/resources.py'
--- src/backends/testing/resources.py 2015-08-29 00:03:11 +0000
+++ src/backends/testing/resources.py 2015-09-05 01:43:14 +0000
@@ -31,7 +31,7 @@
31from backends.db.schemas import account as account_schema31from backends.db.schemas import account as account_schema
32from backends.db.schemas import storage as storage_schema32from backends.db.schemas import storage as storage_schema
33from backends.db.dbwatcher import DatabaseWatcher33from backends.db.dbwatcher import DatabaseWatcher
34from backends.db.db_admin_store import get_admin_store34from backends.db.store import get_filesync_store
35from backends.filesync.data.dbmanager import filesync_tm35from backends.filesync.data.dbmanager import filesync_tm
3636
37DEBUG_RESOURCES = bool(os.environ.get("DEBUG_RESOURCES"))37DEBUG_RESOURCES = bool(os.environ.get("DEBUG_RESOURCES"))
@@ -41,12 +41,11 @@
41 """A resource that resets a database to a known state for each test."""41 """A resource that resets a database to a known state for each test."""
42 _watcher = None42 _watcher = None
4343
44 def __init__(self, dbname, schema_modules, store_name, autocommit=False,44 def __init__(self, dbname, schema_modules, autocommit=False,
45 tx_manager=transaction):45 tx_manager=transaction):
46 super(DatabaseResource, self).__init__()46 super(DatabaseResource, self).__init__()
47 self.dbname = dbname47 self.dbname = dbname
48 self.schema_modules = schema_modules48 self.schema_modules = schema_modules
49 self.store_name = store_name
50 self.autocommit = autocommit49 self.autocommit = autocommit
51 self.saw_commit = False50 self.saw_commit = False
52 self.schemas = None51 self.schemas = None
@@ -72,7 +71,7 @@
72 watcher.enable(self.dbname)71 watcher.enable(self.dbname)
73 if self.schemas is None:72 if self.schemas is None:
74 self.schemas = [s.create_schema() for s in self.schema_modules]73 self.schemas = [s.create_schema() for s in self.schema_modules]
75 store = get_admin_store(self.store_name)74 store = get_filesync_store()
76 transaction.abort()75 transaction.abort()
77 for s in self.schemas:76 for s in self.schemas:
78 s.upgrade(store)77 s.upgrade(store)
@@ -94,7 +93,7 @@
94 self.tx_manager.abort()93 self.tx_manager.abort()
95 # Someone committed to the database: clean it up.94 # Someone committed to the database: clean it up.
96 if self.saw_commit:95 if self.saw_commit:
97 store = get_admin_store(self.store_name)96 store = get_filesync_store()
98 for s in reversed(self.schemas):97 for s in reversed(self.schemas):
99 s.delete(store)98 s.delete(store)
100 transaction.commit()99 transaction.commit()
@@ -116,5 +115,4 @@
116FilesyncDatabaseResource = DatabaseResource(115FilesyncDatabaseResource = DatabaseResource(
117 dbname='filesync',116 dbname='filesync',
118 schema_modules=[account_schema, storage_schema],117 schema_modules=[account_schema, storage_schema],
119 store_name='filesync',
120 tx_manager=filesync_tm)118 tx_manager=filesync_tm)
121119
=== modified file 'src/backends/txlog/model.py'
--- src/backends/txlog/model.py 2015-09-03 14:23:04 +0000
+++ src/backends/txlog/model.py 2015-09-05 01:43:14 +0000
@@ -25,7 +25,7 @@
25from storm.locals import Int, DateTime, Enum, Store, Unicode25from storm.locals import Int, DateTime, Enum, Store, Unicode
26from storm.store import AutoReload26from storm.store import AutoReload
2727
28from backends.filesync.data.dbmanager import get_storage_store28from backends.filesync.data.dbmanager import get_filesync_store
29from backends.filesync.data.model import (29from backends.filesync.data.model import (
30 STATUS_LIVE,30 STATUS_LIVE,
31 Share,31 Share,
@@ -112,7 +112,7 @@
112112
113 @classmethod113 @classmethod
114 def bootstrap(cls, user):114 def bootstrap(cls, user):
115 store = get_storage_store()115 store = get_filesync_store()
116 cls.record_user_created(user)116 cls.record_user_created(user)
117 # Number of TransactionLog rows we inserted.117 # Number of TransactionLog rows we inserted.
118 rows = 1118 rows = 1
@@ -177,7 +177,8 @@
177 conditions = [Share.shared_by == user.id,177 conditions = [Share.shared_by == user.id,
178 Share.status == STATUS_LIVE,178 Share.status == STATUS_LIVE,
179 Share.accepted == True] # NOQA179 Share.accepted == True] # NOQA
180 shares = get_storage_store().using(share_join).find(Share, *conditions)180 shares = get_filesync_store().using(share_join).find(
181 Share, *conditions)
181 for share in shares:182 for share in shares:
182 cls.record_share_accepted(share)183 cls.record_share_accepted(share)
183 rows += 1184 rows += 1
@@ -233,7 +234,7 @@
233 txlog = cls(234 txlog = cls(
234 None, user.id, None, cls.OP_USER_CREATED, None, None,235 None, user.id, None, cls.OP_USER_CREATED, None, None,
235 extra_data=extra_data.decode('ascii'))236 extra_data=extra_data.decode('ascii'))
236 store = get_storage_store()237 store = get_filesync_store()
237 return store.add(txlog)238 return store.add(txlog)
238239
239 @classmethod240 @classmethod
@@ -329,7 +330,7 @@
329330
330 @classmethod331 @classmethod
331 def _record_share_accepted_or_deleted(cls, share, op_type):332 def _record_share_accepted_or_deleted(cls, share, op_type):
332 store = get_storage_store()333 store = get_filesync_store()
333 node = store.get(StorageObject, share.subtree)334 node = store.get(StorageObject, share.subtree)
334 when_last_changed = share.when_last_changed335 when_last_changed = share.when_last_changed
335 extra_data = dict(336 extra_data = dict(
336337
=== modified file 'src/backends/txlog/tests/test_model.py'
--- src/backends/txlog/tests/test_model.py 2015-09-03 14:23:04 +0000
+++ src/backends/txlog/tests/test_model.py 2015-09-05 01:43:14 +0000
@@ -19,7 +19,7 @@
1919
20from mock import patch20from mock import patch
2121
22from backends.filesync.data.dbmanager import get_storage_store22from backends.filesync.data.dbmanager import get_filesync_store
23from backends.filesync.data.gateway import SystemGateway23from backends.filesync.data.gateway import SystemGateway
24from backends.filesync.data.model import (24from backends.filesync.data.model import (
25 PublicNode, STATUS_DEAD, StorageObject, StorageUser, UserVolume)25 PublicNode, STATUS_DEAD, StorageObject, StorageUser, UserVolume)
@@ -441,7 +441,7 @@
441441
442 user = StorageUser.new(self.store, user_id, name, visible_name)442 user = StorageUser.new(self.store, user_id, name, visible_name)
443443
444 store = get_storage_store()444 store = get_filesync_store()
445 txlog = store.find(TransactionLog, owner_id=user.id).one()445 txlog = store.find(TransactionLog, owner_id=user.id).one()
446 self.assertTxLogDetailsMatchesUserDetails(user, txlog)446 self.assertTxLogDetailsMatchesUserDetails(user, txlog)
447447
@@ -528,7 +528,7 @@
528528
529 TransactionLog.bootstrap(user)529 TransactionLog.bootstrap(user)
530530
531 txlog = get_storage_store().find(531 txlog = get_filesync_store().find(
532 TransactionLog, op_type=TransactionLog.OP_USER_CREATED).one()532 TransactionLog, op_type=TransactionLog.OP_USER_CREATED).one()
533 self.assertTxLogDetailsMatchesUserDetails(user, txlog)533 self.assertTxLogDetailsMatchesUserDetails(user, txlog)
534534
@@ -540,7 +540,7 @@
540540
541 TransactionLog.bootstrap(user)541 TransactionLog.bootstrap(user)
542542
543 txlog = get_storage_store().find(543 txlog = get_filesync_store().find(
544 TransactionLog, op_type=TransactionLog.OP_SHARE_ACCEPTED).one()544 TransactionLog, op_type=TransactionLog.OP_SHARE_ACCEPTED).one()
545 expected_attrs = self._get_dict_with_txlog_attrs_from_share(545 expected_attrs = self._get_dict_with_txlog_attrs_from_share(
546 share, directory, TransactionLog.OP_SHARE_ACCEPTED)546 share, directory, TransactionLog.OP_SHARE_ACCEPTED)
547547
=== modified file 'src/backends/txlog/tests/test_utils.py'
--- src/backends/txlog/tests/test_utils.py 2015-09-03 14:23:04 +0000
+++ src/backends/txlog/tests/test_utils.py 2015-09-05 01:43:14 +0000
@@ -133,7 +133,7 @@
133 """133 """
134 return type('DummyResultSet', (object,), dict(rowcount=0))134 return type('DummyResultSet', (object,), dict(rowcount=0))
135135
136 with patch.object(dbmanager, 'get_storage_store') as mock_get:136 with patch.object(dbmanager, 'get_filesync_store') as mock_get:
137 mock_get.return_value = DummyStore()137 mock_get.return_value = DummyStore()
138138
139 self.assertRaises(139 self.assertRaises(
140140
=== modified file 'src/backends/txlog/utils.py'
--- src/backends/txlog/utils.py 2015-08-16 19:22:32 +0000
+++ src/backends/txlog/utils.py 2015-09-05 01:43:14 +0000
@@ -46,7 +46,7 @@
46 fsync_commit.46 fsync_commit.
47 """47 """
48 worker_name = unicode(worker_name)48 worker_name = unicode(worker_name)
49 store = dbmanager.get_storage_store()49 store = dbmanager.get_filesync_store()
5050
51 last_row = store.execute(u"""SELECT row_id, timestamp51 last_row = store.execute(u"""SELECT row_id, timestamp
52 FROM txlog_db_worker_last_row52 FROM txlog_db_worker_last_row
@@ -73,7 +73,7 @@
73 decorated with fsync_commit.73 decorated with fsync_commit.
74 """74 """
75 worker_name = unicode(worker_name)75 worker_name = unicode(worker_name)
76 store = dbmanager.get_storage_store()76 store = dbmanager.get_filesync_store()
77 result = store.execute(u"""UPDATE txlog_db_worker_last_row77 result = store.execute(u"""UPDATE txlog_db_worker_last_row
78 SET row_id=?, timestamp=?78 SET row_id=?, timestamp=?
79 WHERE worker_id=?""", (row_id, timestamp, worker_name))79 WHERE worker_id=?""", (row_id, timestamp, worker_name))
@@ -111,7 +111,7 @@
111 """111 """
112 if expire_secs is None:112 if expire_secs is None:
113 expire_secs = UNSEEN_EXPIRES113 expire_secs = UNSEEN_EXPIRES
114 store = dbmanager.get_storage_store()114 store = dbmanager.get_filesync_store()
115 parameters = (last_id, )115 parameters = (last_id, )
116 select = u"""116 select = u"""
117 SELECT txlog.id, owner_id, node_id, volume_id, op_type, path,117 SELECT txlog.id, owner_id, node_id, volume_id, op_type, path,
@@ -195,7 +195,7 @@
195 if expire_secs is None:195 if expire_secs is None:
196 expire_secs = UNSEEN_EXPIRES196 expire_secs = UNSEEN_EXPIRES
197 worker_id = unicode(worker_id)197 worker_id = unicode(worker_id)
198 store = dbmanager.get_storage_store()198 store = dbmanager.get_filesync_store()
199 deleted = 0199 deleted = 0
200 condition = (u"created < TIMEZONE('UTC'::text, NOW()) "200 condition = (u"created < TIMEZONE('UTC'::text, NOW()) "
201 " - INTERVAL '{} seconds'".format(expire_secs))201 " - INTERVAL '{} seconds'".format(expire_secs))
@@ -234,7 +234,7 @@
234 be deleted.234 be deleted.
235 """235 """
236236
237 store = dbmanager.get_storage_store()237 store = dbmanager.get_filesync_store()
238 parameters = [timestamp_limit]238 parameters = [timestamp_limit]
239 inner_select = "SELECT id FROM txlog_transaction_log WHERE timestamp <= ?"239 inner_select = "SELECT id FROM txlog_transaction_log WHERE timestamp <= ?"
240240
@@ -257,7 +257,7 @@
257 precisely from the provided date (a datetime.date object). Also, the257 precisely from the provided date (a datetime.date object). Also, the
258 quantity_limit parameter is mandatory."""258 quantity_limit parameter is mandatory."""
259259
260 store = dbmanager.get_storage_store()260 store = dbmanager.get_filesync_store()
261 parameters = [date, quantity_limit]261 parameters = [date, quantity_limit]
262 inner_select = ("SELECT id FROM txlog_transaction_log "262 inner_select = ("SELECT id FROM txlog_transaction_log "
263 "WHERE timestamp::date = ? LIMIT ?")263 "WHERE timestamp::date = ? LIMIT ?")
@@ -271,7 +271,7 @@
271271
272def get_row_by_time(timestamp):272def get_row_by_time(timestamp):
273 """Return the smaller txlog row id in that timestamp (or greater)."""273 """Return the smaller txlog row id in that timestamp (or greater)."""
274 store = dbmanager.get_storage_store()274 store = dbmanager.get_filesync_store()
275 query = """275 query = """
276 SELECT id, timestamp FROM txlog_transaction_log276 SELECT id, timestamp FROM txlog_transaction_log
277 WHERE timestamp >= ? ORDER BY id LIMIT 1;277 WHERE timestamp >= ? ORDER BY id LIMIT 1;
@@ -287,7 +287,7 @@
287def keep_last_rows_for_worker_names(worker_names):287def keep_last_rows_for_worker_names(worker_names):
288 """Clean rows from txlog_db_worker_last_row that don't match the given288 """Clean rows from txlog_db_worker_last_row that don't match the given
289 worker names."""289 worker names."""
290 store = dbmanager.get_storage_store()290 store = dbmanager.get_filesync_store()
291 query = ("DELETE FROM txlog_db_worker_last_row "291 query = ("DELETE FROM txlog_db_worker_last_row "
292 "WHERE worker_id NOT IN ?;")292 "WHERE worker_id NOT IN ?;")
293 store.execute(query, (tuple(worker_names), ))293 store.execute(query, (tuple(worker_names), ))
294294
=== modified file 'src/server/tests/test_account.py'
--- src/server/tests/test_account.py 2015-08-29 00:03:11 +0000
+++ src/server/tests/test_account.py 2015-09-05 01:43:14 +0000
@@ -55,7 +55,7 @@
55 when over quota."""55 when over quota."""
56 self.usr0.update(max_storage_bytes=2 ** 16)56 self.usr0.update(max_storage_bytes=2 ** 16)
57 # need to do something that just can't happen normally57 # need to do something that just can't happen normally
58 store = dbmanager.get_storage_store()58 store = dbmanager.get_filesync_store()
59 info = store.get(model.StorageUserInfo, 0)59 info = store.get(model.StorageUserInfo, 0)
60 info.used_storage_bytes = 2 ** 1760 info.used_storage_bytes = 2 ** 17
61 store.commit()61 store.commit()
6262
=== modified file 'src/server/tests/test_sharing.py'
--- src/server/tests/test_sharing.py 2015-08-29 00:03:11 +0000
+++ src/server/tests/test_sharing.py 2015-09-05 01:43:14 +0000
@@ -778,7 +778,7 @@
778 subfile = subdir.make_file(u"subfile")778 subfile = subdir.make_file(u"subfile")
779 subsubdir = subdir.make_subdirectory(u"subsubdir")779 subsubdir = subdir.make_subdirectory(u"subsubdir")
780 subsubfile = subsubdir.make_file(u"subsubfile")780 subsubfile = subsubdir.make_file(u"subsubfile")
781 store = dbmanager.get_storage_store()781 store = dbmanager.get_filesync_store()
782 # set all files with an empty hash782 # set all files with an empty hash
783 store.find(783 store.find(
784 model.StorageObject, model.StorageObject.kind == 'File').set(784 model.StorageObject, model.StorageObject.kind == 'File').set(
785785
=== modified file 'src/server/tests/test_throttling.py'
--- src/server/tests/test_throttling.py 2015-08-17 00:09:45 +0000
+++ src/server/tests/test_throttling.py 2015-09-05 01:43:14 +0000
@@ -26,7 +26,7 @@
26from twisted.internet.protocol import connectionDone26from twisted.internet.protocol import connectionDone
2727
2828
29from backends.filesync.data import get_storage_store, model, filesync_tm29from backends.filesync.data import get_filesync_store, model, filesync_tm
3030
31from ubuntuone.storageprotocol import request, client31from ubuntuone.storageprotocol import request, client
32from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc3232from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32
@@ -152,7 +152,7 @@
152 def _check_file():152 def _check_file():
153 filesync_tm.begin()153 filesync_tm.begin()
154 try:154 try:
155 store = get_storage_store()155 store = get_filesync_store()
156 content_blob = store.get(model.ContentBlob, hash_value)156 content_blob = store.get(model.ContentBlob, hash_value)
157 if not content_blob:157 if not content_blob:
158 raise ValueError("content blob is not there")158 raise ValueError("content blob is not there")
@@ -205,7 +205,7 @@
205 def _check_file():205 def _check_file():
206 filesync_tm.begin()206 filesync_tm.begin()
207 try:207 try:
208 store = get_storage_store()208 store = get_filesync_store()
209 content_blob = store.get(model.ContentBlob, hash_value)209 content_blob = store.get(model.ContentBlob, hash_value)
210 if not content_blob:210 if not content_blob:
211 raise ValueError("content blob is not there")211 raise ValueError("content blob is not there")

Subscribers

People subscribed via source and target branches

to all changes: