Merge lp:~nataliabidart/magicicada-server/only-one-store-name into lp:magicicada-server
Proposed by: Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 16
Merged at revision: 15
Proposed branch: lp:~nataliabidart/magicicada-server/only-one-store-name
Merge into: lp:magicicada-server
Diff against target: 1278 lines (+136/-149), 21 files modified
  README.txt (+1/-1)
  src/backends/db/scripts/schema (+6/-10)
  src/backends/db/store.py (+2/-2)
  src/backends/db/tests/test_dbtransaction.py (+5/-5)
  src/backends/db/tests/test_store.py (+1/-1)
  src/backends/filesync/data/__init__.py (+1/-1)
  src/backends/filesync/data/adminservices.py (+2/-2)
  src/backends/filesync/data/dbmanager.py (+1/-9)
  src/backends/filesync/data/gateway.py (+31/-31)
  src/backends/filesync/data/testing/ormtestcase.py (+2/-2)
  src/backends/filesync/data/testing/testcase.py (+2/-2)
  src/backends/filesync/data/tests/test_dao.py (+2/-2)
  src/backends/filesync/data/tests/test_gateway.py (+52/-52)
  src/backends/testing/resources.py (+4/-6)
  src/backends/txlog/model.py (+6/-5)
  src/backends/txlog/tests/test_model.py (+4/-4)
  src/backends/txlog/tests/test_utils.py (+1/-1)
  src/backends/txlog/utils.py (+8/-8)
  src/server/tests/test_account.py (+1/-1)
  src/server/tests/test_sharing.py (+1/-1)
  src/server/tests/test_throttling.py (+3/-3)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/only-one-store-name
Related bugs: none
Reviewer: Natalia Bidart (Approve)
Review via email: mp+270191@code.launchpad.net
Commit message
- Make sure only one Storm store is used in the project. Pure syntactic renames.
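
In practice the branch collapses the two existing spellings for the same Storm store, `get_store('filesync')` and the legacy `get_storage_store()`, into the single `get_filesync_store()` accessor. A minimal before/after sketch, with import paths taken from the preview diff below (illustrative only; it assumes the project tree, and the "before" names exist only on trunk prior to this branch):

```python
# Before this branch: two ways to reach the same Storm store.
from backends.db.store import get_store
from backends.filesync.data.dbmanager import get_storage_store

store = get_store('filesync')   # caller names the (only) store explicitly
store = get_storage_store()     # legacy wrapper that hard-codes 'filesync'

# After this branch: one accessor, no store name to pass around.
from backends.db.store import get_filesync_store

store = get_filesync_store()
```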
Description of the change
Revision history for this message
Magicicada Bot (magicicada) wrote:
The `tree_dir` option for the target branch is not a lightweight checkout. Please ask a project administrator to resolve the issue, and try again.
Preview Diff
1 | === modified file 'README.txt' | |||
2 | --- README.txt 2015-09-03 17:59:20 +0000 | |||
3 | +++ README.txt 2015-09-05 01:43:14 +0000 | |||
4 | @@ -63,7 +63,7 @@ | |||
5 | 63 | sudo apt-get install bzr make python-transaction protobuf-compiler \ | 63 | sudo apt-get install bzr make python-transaction protobuf-compiler \ |
6 | 64 | python-setuptools gcc python-dev python-twisted-web postgresql-9.1 \ | 64 | python-setuptools gcc python-dev python-twisted-web postgresql-9.1 \ |
7 | 65 | python-yaml python-psycopg2 postgresql-contrib supervisor \ | 65 | python-yaml python-psycopg2 postgresql-contrib supervisor \ |
9 | 66 | postgresql-plpython-9.1 python-boto squid \ | 66 | postgresql-plpython-9.1 python-boto squid python-virtualenv \ |
10 | 67 | python-protobuf python-psutil python-testresources \ | 67 | python-protobuf python-psutil python-testresources \ |
11 | 68 | python-tz python-bson python-iso8601 python-openid python-meliae | 68 | python-tz python-bson python-iso8601 python-openid python-meliae |
12 | 69 | 69 | ||
13 | 70 | 70 | ||
14 | === modified file 'src/backends/db/scripts/schema' | |||
15 | --- src/backends/db/scripts/schema 2015-08-17 04:24:23 +0000 | |||
16 | +++ src/backends/db/scripts/schema 2015-09-05 01:43:14 +0000 | |||
17 | @@ -25,7 +25,7 @@ | |||
18 | 25 | import backends.db.schemas.storage as storage_schema | 25 | import backends.db.schemas.storage as storage_schema |
19 | 26 | import backends.db.schemas.txlog as txlog_schema | 26 | import backends.db.schemas.txlog as txlog_schema |
20 | 27 | 27 | ||
22 | 28 | from backends.db.store import get_store | 28 | from backends.db.store import get_filesync_store |
23 | 29 | from optparse import OptionParser | 29 | from optparse import OptionParser |
24 | 30 | 30 | ||
25 | 31 | 31 | ||
26 | @@ -44,9 +44,6 @@ | |||
27 | 44 | parser.add_option("-a", "--all", | 44 | parser.add_option("-a", "--all", |
28 | 45 | dest="all", default=False, action="store_true", | 45 | dest="all", default=False, action="store_true", |
29 | 46 | help="Create all schemas known") | 46 | help="Create all schemas known") |
30 | 47 | parser.add_option("--store", dest="store_name", | ||
31 | 48 | help="Used in conjunction with schema to upgrade one " | ||
32 | 49 | "schema.") | ||
33 | 50 | parser.add_option("--schema", dest="schema_name", | 47 | parser.add_option("--schema", dest="schema_name", |
34 | 51 | help="Used in conjunction with store to upgrade " | 48 | help="Used in conjunction with store to upgrade " |
35 | 52 | "one schema") | 49 | "one schema") |
36 | @@ -56,18 +53,17 @@ | |||
37 | 56 | 53 | ||
38 | 57 | (options, args) = parser.parse_args() | 54 | (options, args) = parser.parse_args() |
39 | 58 | 55 | ||
41 | 59 | def create(schema_name, store_name='filesync'): | 56 | def create(schema_name): |
42 | 60 | """Create the schema using the store.""" | 57 | """Create the schema using the store.""" |
43 | 61 | schema = schemas[schema_name] | 58 | schema = schemas[schema_name] |
44 | 62 | if options.dryrun: | 59 | if options.dryrun: |
45 | 63 | print '*' * 80 | 60 | print '*' * 80 |
48 | 64 | print 'Dry run of %s schema in %s store' % ( | 61 | print 'Dry run of %s schema' % schema_name |
47 | 65 | schema_name, store_name) | ||
49 | 66 | print '*' * 80 | 62 | print '*' * 80 |
50 | 67 | ddl = schema.create_schema().preview() | 63 | ddl = schema.create_schema().preview() |
51 | 68 | print ddl | 64 | print ddl |
52 | 69 | else: | 65 | else: |
54 | 70 | target_store = get_store(store_name) | 66 | target_store = get_filesync_store() |
55 | 71 | schema.create_schema().upgrade(target_store) | 67 | schema.create_schema().upgrade(target_store) |
56 | 72 | 68 | ||
57 | 73 | if options.all: | 69 | if options.all: |
58 | @@ -79,8 +75,8 @@ | |||
59 | 79 | create('storage') | 75 | create('storage') |
60 | 80 | create('txlog') | 76 | create('txlog') |
61 | 81 | 77 | ||
64 | 82 | elif options.store_name and options.schema_name: | 78 | elif options.schema_name: |
65 | 83 | create(options.schema_name, options.store_name) | 79 | create(options.schema_name) |
66 | 84 | else: | 80 | else: |
67 | 85 | print "Must be called with --all or --schema and --store" | 81 | print "Must be called with --all or --schema and --store" |
68 | 86 | 82 | ||
69 | 87 | 83 | ||
70 | === modified file 'src/backends/db/store.py' | |||
71 | --- src/backends/db/store.py 2015-08-17 00:09:45 +0000 | |||
72 | +++ src/backends/db/store.py 2015-09-05 01:43:14 +0000 | |||
73 | @@ -88,9 +88,9 @@ | |||
74 | 88 | return zstorm.get(store_name, default_uri=uri) | 88 | return zstorm.get(store_name, default_uri=uri) |
75 | 89 | 89 | ||
76 | 90 | 90 | ||
78 | 91 | def get_filesync_store(store_name): | 91 | def get_filesync_store(): |
79 | 92 | """Get a store using the filesync_tm.""" | 92 | """Get a store using the filesync_tm.""" |
81 | 93 | return get_store(store_name, zstorm=filesync_zstorm) | 93 | return get_store('filesync', zstorm=filesync_zstorm) |
82 | 94 | 94 | ||
83 | 95 | 95 | ||
84 | 96 | @contextlib.contextmanager | 96 | @contextlib.contextmanager |
85 | 97 | 97 | ||
86 | === modified file 'src/backends/db/tests/test_dbtransaction.py' | |||
87 | --- src/backends/db/tests/test_dbtransaction.py 2015-08-17 15:05:00 +0000 | |||
88 | +++ src/backends/db/tests/test_dbtransaction.py 2015-09-05 01:43:14 +0000 | |||
89 | @@ -39,7 +39,7 @@ | |||
90 | 39 | 39 | ||
91 | 40 | from backends.db import errors | 40 | from backends.db import errors |
92 | 41 | 41 | ||
94 | 42 | from backends.db.store import get_store | 42 | from backends.db.store import get_filesync_store |
95 | 43 | from backends.db.dbtransaction import ( | 43 | from backends.db.dbtransaction import ( |
96 | 44 | _check_stores_and_invalidate, | 44 | _check_stores_and_invalidate, |
97 | 45 | retryable_transaction, | 45 | retryable_transaction, |
98 | @@ -767,11 +767,11 @@ | |||
99 | 767 | logger.addHandler(h) | 767 | logger.addHandler(h) |
100 | 768 | 768 | ||
101 | 769 | make_storage_user(1, u'foo', u'foo', 10000) | 769 | make_storage_user(1, u'foo', u'foo', 10000) |
104 | 770 | sto = get_store('filesync', filesync_zstorm) | 770 | store = get_filesync_store() |
105 | 771 | self._sto = sto # for later cleanup | 771 | self._sto = store # for later cleanup |
106 | 772 | obj = StorageObject(1, u'foo', u'File') | 772 | obj = StorageObject(1, u'foo', u'File') |
109 | 773 | sto.add(obj) | 773 | store.add(obj) |
110 | 774 | sto.flush() | 774 | store.flush() |
111 | 775 | self.assertFalse(obj.__storm_object_info__.get("invalidated", False)) | 775 | self.assertFalse(obj.__storm_object_info__.get("invalidated", False)) |
112 | 776 | _check_stores_and_invalidate(filesync_zstorm) | 776 | _check_stores_and_invalidate(filesync_zstorm) |
113 | 777 | self.assertTrue(obj.__storm_object_info__.get("invalidated", False)) | 777 | self.assertTrue(obj.__storm_object_info__.get("invalidated", False)) |
114 | 778 | 778 | ||
115 | === modified file 'src/backends/db/tests/test_store.py' | |||
116 | --- src/backends/db/tests/test_store.py 2015-08-17 00:09:45 +0000 | |||
117 | +++ src/backends/db/tests/test_store.py 2015-09-05 01:43:14 +0000 | |||
118 | @@ -28,7 +28,7 @@ | |||
119 | 28 | 28 | ||
120 | 29 | def test_get_filesync_store(self): | 29 | def test_get_filesync_store(self): |
121 | 30 | """Test get_filesync_store returns the expected store.""" | 30 | """Test get_filesync_store returns the expected store.""" |
123 | 31 | db = store.get_filesync_store('filesync').get_database() | 31 | db = store.get_filesync_store().get_database() |
124 | 32 | self.assertTrue(isinstance(db, store.FilesyncDatabase)) | 32 | self.assertTrue(isinstance(db, store.FilesyncDatabase)) |
125 | 33 | self.assertEqual('filesync', db.name) | 33 | self.assertEqual('filesync', db.name) |
126 | 34 | 34 | ||
127 | 35 | 35 | ||
128 | === modified file 'src/backends/filesync/data/__init__.py' | |||
129 | --- src/backends/filesync/data/__init__.py 2015-08-17 00:09:45 +0000 | |||
130 | +++ src/backends/filesync/data/__init__.py 2015-09-05 01:43:14 +0000 | |||
131 | @@ -85,7 +85,7 @@ | |||
132 | 85 | 85 | ||
133 | 86 | from ubuntuone.storageprotocol.content_hash import content_hash_factory | 86 | from ubuntuone.storageprotocol.content_hash import content_hash_factory |
134 | 87 | 87 | ||
136 | 88 | from backends.filesync.data.dbmanager import get_storage_store # NOQA | 88 | from backends.filesync.data.dbmanager import get_filesync_store # NOQA |
137 | 89 | from backends.filesync.data.dbmanager import filesync_tm # NOQA | 89 | from backends.filesync.data.dbmanager import filesync_tm # NOQA |
138 | 90 | 90 | ||
139 | 91 | EMPTY_CONTENT_HASH = content_hash_factory().content_hash() | 91 | EMPTY_CONTENT_HASH = content_hash_factory().content_hash() |
140 | 92 | 92 | ||
141 | === modified file 'src/backends/filesync/data/adminservices.py' | |||
142 | --- src/backends/filesync/data/adminservices.py 2015-08-16 19:22:32 +0000 | |||
143 | +++ src/backends/filesync/data/adminservices.py 2015-09-05 01:43:14 +0000 | |||
144 | @@ -17,7 +17,7 @@ | |||
145 | 17 | 17 | ||
146 | 18 | """Services provided for administrative access to storage data.""" | 18 | """Services provided for administrative access to storage data.""" |
147 | 19 | 19 | ||
149 | 20 | from backends.filesync.data import get_storage_store, model, dao | 20 | from backends.filesync.data import get_filesync_store, model, dao |
150 | 21 | from backends.filesync.data.gateway import StorageUserGateway | 21 | from backends.filesync.data.gateway import StorageUserGateway |
151 | 22 | from backends.filesync.data.dbmanager import fsync_readonly | 22 | from backends.filesync.data.dbmanager import fsync_readonly |
152 | 23 | 23 | ||
153 | @@ -34,7 +34,7 @@ | |||
154 | 34 | 34 | ||
155 | 35 | def _find_users(self): | 35 | def _find_users(self): |
156 | 36 | """Perform storm query based on current filter.""" | 36 | """Perform storm query based on current filter.""" |
158 | 37 | store = get_storage_store() | 37 | store = get_filesync_store() |
159 | 38 | conditions = [] | 38 | conditions = [] |
160 | 39 | if self.filter is not None: | 39 | if self.filter is not None: |
161 | 40 | filter = unicode("%" + self.filter + "%") | 40 | filter = unicode("%" + self.filter + "%") |
162 | 41 | 41 | ||
163 | === modified file 'src/backends/filesync/data/dbmanager.py' | |||
164 | --- src/backends/filesync/data/dbmanager.py 2015-08-17 00:09:45 +0000 | |||
165 | +++ src/backends/filesync/data/dbmanager.py 2015-09-05 01:43:14 +0000 | |||
166 | @@ -17,7 +17,7 @@ | |||
167 | 17 | 17 | ||
168 | 18 | """Manage database connections and stores to the storage database.""" | 18 | """Manage database connections and stores to the storage database.""" |
169 | 19 | 19 | ||
171 | 20 | from backends.db.store import get_filesync_store | 20 | from backends.db.store import get_filesync_store # NOQA |
172 | 21 | from backends.db.dbtransaction import ( | 21 | from backends.db.dbtransaction import ( |
173 | 22 | get_storm_commit, | 22 | get_storm_commit, |
174 | 23 | get_storm_readonly, | 23 | get_storm_readonly, |
175 | @@ -28,11 +28,3 @@ | |||
176 | 28 | fsync_commit = get_storm_commit(filesync_tm) | 28 | fsync_commit = get_storm_commit(filesync_tm) |
177 | 29 | fsync_readonly = get_storm_readonly(filesync_tm) | 29 | fsync_readonly = get_storm_readonly(filesync_tm) |
178 | 30 | fsync_readonly_slave = get_storm_readonly(filesync_tm, use_ro_store=True) | 30 | fsync_readonly_slave = get_storm_readonly(filesync_tm, use_ro_store=True) |
179 | 31 | |||
180 | 32 | |||
181 | 33 | def get_storage_store(): | ||
182 | 34 | """Return the default storage store. | ||
183 | 35 | |||
184 | 36 | This is primarily for legacy tests while transaction handling is migrated | ||
185 | 37 | """ | ||
186 | 38 | return get_filesync_store('filesync') | ||
187 | 39 | 31 | ||
188 | === modified file 'src/backends/filesync/data/gateway.py' | |||
189 | --- src/backends/filesync/data/gateway.py 2015-08-29 00:03:11 +0000 | |||
190 | +++ src/backends/filesync/data/gateway.py 2015-09-05 01:43:14 +0000 | |||
191 | @@ -38,7 +38,7 @@ | |||
192 | 38 | from backends.db.dbtransaction import db_timeout, TRANSACTION_MAX_TIME | 38 | from backends.db.dbtransaction import db_timeout, TRANSACTION_MAX_TIME |
193 | 39 | from backends.filesync.data import model, errors, dao, utils | 39 | from backends.filesync.data import model, errors, dao, utils |
194 | 40 | from backends.filesync.notifier.notifier import get_notifier | 40 | from backends.filesync.notifier.notifier import get_notifier |
196 | 41 | from backends.filesync.data.dbmanager import get_storage_store | 41 | from backends.filesync.data.dbmanager import get_filesync_store |
197 | 42 | from config import config | 42 | from config import config |
198 | 43 | 43 | ||
199 | 44 | 44 | ||
200 | @@ -162,7 +162,7 @@ | |||
201 | 162 | @property | 162 | @property |
202 | 163 | def store(self): | 163 | def store(self): |
203 | 164 | """The main storage store.""" | 164 | """The main storage store.""" |
205 | 165 | return get_storage_store() | 165 | return get_filesync_store() |
206 | 166 | 166 | ||
207 | 167 | 167 | ||
208 | 168 | class SystemGateway(GatewayBase): | 168 | class SystemGateway(GatewayBase): |
209 | @@ -185,7 +185,7 @@ | |||
210 | 185 | user.status = 'Live' | 185 | user.status = 'Live' |
211 | 186 | user.subscription_status = 'Live' | 186 | user.subscription_status = 'Live' |
212 | 187 | # initialize the user's data | 187 | # initialize the user's data |
214 | 188 | store = get_storage_store() | 188 | store = get_filesync_store() |
215 | 189 | # create or update the user info table | 189 | # create or update the user info table |
216 | 190 | user_info = store.get(model.StorageUserInfo, user_id) | 190 | user_info = store.get(model.StorageUserInfo, user_id) |
217 | 191 | if user_info is None: | 191 | if user_info is None: |
218 | @@ -306,7 +306,7 @@ | |||
219 | 306 | download_key=None): | 306 | download_key=None): |
220 | 307 | """Make a new download object.""" | 307 | """Make a new download object.""" |
221 | 308 | self.get_user(user_id) | 308 | self.get_user(user_id) |
223 | 309 | store = get_storage_store() | 309 | store = get_filesync_store() |
224 | 310 | download = model.Download( | 310 | download = model.Download( |
225 | 311 | user_id, volume_id, file_path, download_url, download_key) | 311 | user_id, volume_id, file_path, download_url, download_key) |
226 | 312 | store.add(download) | 312 | store.add(download) |
227 | @@ -315,7 +315,7 @@ | |||
228 | 315 | def _get_download(self, user_id, download_id): | 315 | def _get_download(self, user_id, download_id): |
229 | 316 | """Internal function to get the download and owner.""" | 316 | """Internal function to get the download and owner.""" |
230 | 317 | user = self.get_user(user_id) | 317 | user = self.get_user(user_id) |
232 | 318 | store = get_storage_store() | 318 | store = get_filesync_store() |
233 | 319 | download = store.get(model.Download, download_id) | 319 | download = store.get(model.Download, download_id) |
234 | 320 | return user, download | 320 | return user, download |
235 | 321 | 321 | ||
236 | @@ -323,7 +323,7 @@ | |||
237 | 323 | download_key=None): | 323 | download_key=None): |
238 | 324 | """Get a download by its UDF, file path and download key.""" | 324 | """Get a download by its UDF, file path and download key.""" |
239 | 325 | self.get_user(user_id) | 325 | self.get_user(user_id) |
241 | 326 | store = get_storage_store() | 326 | store = get_filesync_store() |
242 | 327 | download = store.find( | 327 | download = store.find( |
243 | 328 | model.Download, | 328 | model.Download, |
244 | 329 | model.Download.owner_id == user_id, | 329 | model.Download.owner_id == user_id, |
245 | @@ -382,7 +382,7 @@ | |||
246 | 382 | 382 | ||
247 | 383 | def get_failed_downloads(self, start_date, end_date): | 383 | def get_failed_downloads(self, start_date, end_date): |
248 | 384 | """Get failed downloads.""" | 384 | """Get failed downloads.""" |
250 | 385 | store = get_storage_store() | 385 | store = get_filesync_store() |
251 | 386 | result = store.find( | 386 | result = store.find( |
252 | 387 | model.Download, | 387 | model.Download, |
253 | 388 | model.Download._status == model.DOWNLOAD_STATUS_ERROR, | 388 | model.Download._status == model.DOWNLOAD_STATUS_ERROR, |
254 | @@ -393,7 +393,7 @@ | |||
255 | 393 | 393 | ||
256 | 394 | def get_node(self, node_id): | 394 | def get_node(self, node_id): |
257 | 395 | """Get a node for the specified node_id.""" | 395 | """Get a node for the specified node_id.""" |
259 | 396 | store = get_storage_store() | 396 | store = get_filesync_store() |
260 | 397 | node = store.find( | 397 | node = store.find( |
261 | 398 | model.StorageObject, | 398 | model.StorageObject, |
262 | 399 | model.StorageObject.status == model.STATUS_LIVE, | 399 | model.StorageObject.status == model.STATUS_LIVE, |
263 | @@ -404,7 +404,7 @@ | |||
264 | 404 | 404 | ||
265 | 405 | def get_user_info(self, user_id): | 405 | def get_user_info(self, user_id): |
266 | 406 | """Get the UserInfo DAO for user_id""" | 406 | """Get the UserInfo DAO for user_id""" |
268 | 407 | store = get_storage_store() | 407 | store = get_filesync_store() |
269 | 408 | user_info = store.get(model.StorageUserInfo, user_id) | 408 | user_info = store.get(model.StorageUserInfo, user_id) |
270 | 409 | if user_info is None: | 409 | if user_info is None: |
271 | 410 | raise errors.DoesNotExist(self.user_dne_error) | 410 | raise errors.DoesNotExist(self.user_dne_error) |
272 | @@ -413,13 +413,13 @@ | |||
273 | 413 | def cleanup_uploadjobs(self, uploadjobs): | 413 | def cleanup_uploadjobs(self, uploadjobs): |
274 | 414 | """Delete uploadjobs.""" | 414 | """Delete uploadjobs.""" |
275 | 415 | uploadjob_ids = [job.id for job in uploadjobs] | 415 | uploadjob_ids = [job.id for job in uploadjobs] |
277 | 416 | store = get_storage_store() | 416 | store = get_filesync_store() |
278 | 417 | store.find(model.UploadJob, | 417 | store.find(model.UploadJob, |
279 | 418 | model.UploadJob.uploadjob_id.is_in(uploadjob_ids)).remove() | 418 | model.UploadJob.uploadjob_id.is_in(uploadjob_ids)).remove() |
280 | 419 | 419 | ||
281 | 420 | def get_abandoned_uploadjobs(self, last_active, limit=1000): | 420 | def get_abandoned_uploadjobs(self, last_active, limit=1000): |
282 | 421 | """Get uploadjobs that are older than last_active.""" | 421 | """Get uploadjobs that are older than last_active.""" |
284 | 422 | store = get_storage_store() | 422 | store = get_filesync_store() |
285 | 423 | jobs = store.find( | 423 | jobs = store.find( |
286 | 424 | model.UploadJob, | 424 | model.UploadJob, |
287 | 425 | model.UploadJob.when_last_active < last_active)[:limit] | 425 | model.UploadJob.when_last_active < last_active)[:limit] |
288 | @@ -431,7 +431,7 @@ | |||
289 | 431 | query = """SELECT id FROM StorageUser | 431 | query = """SELECT id FROM StorageUser |
290 | 432 | ORDER BY RANDOM() | 432 | ORDER BY RANDOM() |
291 | 433 | LIMIT 1""" | 433 | LIMIT 1""" |
293 | 434 | store = get_storage_store() | 434 | store = get_filesync_store() |
294 | 435 | result = store.execute(SQL(query)).get_one() | 435 | result = store.execute(SQL(query)).get_one() |
295 | 436 | return result[0] | 436 | return result[0] |
296 | 437 | 437 | ||
297 | @@ -455,7 +455,7 @@ | |||
298 | 455 | This typically only happens when a user's subscription changes. | 455 | This typically only happens when a user's subscription changes. |
299 | 456 | """ | 456 | """ |
300 | 457 | user = self.store.get(model.StorageUser, self.user.id) | 457 | user = self.store.get(model.StorageUser, self.user.id) |
302 | 458 | store = get_storage_store() | 458 | store = get_filesync_store() |
303 | 459 | 459 | ||
304 | 460 | # update the subscription in the user | 460 | # update the subscription in the user |
305 | 461 | if subscription is not None: | 461 | if subscription is not None: |
306 | @@ -482,14 +482,14 @@ | |||
307 | 482 | @timing_metric | 482 | @timing_metric |
308 | 483 | def get_quota(self): | 483 | def get_quota(self): |
309 | 484 | """Get the user's quota information.""" | 484 | """Get the user's quota information.""" |
311 | 485 | store = get_storage_store() | 485 | store = get_filesync_store() |
312 | 486 | info = store.get(model.StorageUserInfo, self.user.id) | 486 | info = store.get(model.StorageUserInfo, self.user.id) |
313 | 487 | return dao.UserInfo(info, gateway=self) | 487 | return dao.UserInfo(info, gateway=self) |
314 | 488 | 488 | ||
315 | 489 | @timing_metric | 489 | @timing_metric |
316 | 490 | def recalculate_quota(self): | 490 | def recalculate_quota(self): |
317 | 491 | """Recalculate a user's quota.""" | 491 | """Recalculate a user's quota.""" |
319 | 492 | store = get_storage_store() | 492 | store = get_filesync_store() |
320 | 493 | info = store.get(model.StorageUserInfo, self.user.id) | 493 | info = store.get(model.StorageUserInfo, self.user.id) |
321 | 494 | info.recalculate_used_bytes() | 494 | info.recalculate_used_bytes() |
322 | 495 | return dao.UserInfo(info, gateway=self) | 495 | return dao.UserInfo(info, gateway=self) |
323 | @@ -504,7 +504,7 @@ | |||
324 | 504 | if not self.user.is_active: | 504 | if not self.user.is_active: |
325 | 505 | raise errors.NoPermission(self.inactive_user_error) | 505 | raise errors.NoPermission(self.inactive_user_error) |
326 | 506 | # sanity check | 506 | # sanity check |
328 | 507 | store = get_storage_store() | 507 | store = get_filesync_store() |
329 | 508 | udf = store.find( | 508 | udf = store.find( |
330 | 509 | model.UserVolume, | 509 | model.UserVolume, |
331 | 510 | model.UserVolume.owner_id == self.user.id, | 510 | model.UserVolume.owner_id == self.user.id, |
332 | @@ -743,7 +743,7 @@ | |||
333 | 743 | """Create a UDF.""" | 743 | """Create a UDF.""" |
334 | 744 | if not self.user.is_active: | 744 | if not self.user.is_active: |
335 | 745 | raise errors.NoPermission(self.inactive_user_error) | 745 | raise errors.NoPermission(self.inactive_user_error) |
337 | 746 | store = get_storage_store() | 746 | store = get_filesync_store() |
338 | 747 | # need a lock here. | 747 | # need a lock here. |
339 | 748 | info = store.get(model.StorageUserInfo, self.user.id) | 748 | info = store.get(model.StorageUserInfo, self.user.id) |
340 | 749 | info.lock_for_update() | 749 | info.lock_for_update() |
341 | @@ -770,7 +770,7 @@ | |||
342 | 770 | """Get a UDF by the path parts.""" | 770 | """Get a UDF by the path parts.""" |
343 | 771 | if not self.user.is_active: | 771 | if not self.user.is_active: |
344 | 772 | raise errors.NoPermission(self.inactive_user_error) | 772 | raise errors.NoPermission(self.inactive_user_error) |
346 | 773 | store = get_storage_store() | 773 | store = get_filesync_store() |
347 | 774 | path = path.rstrip('/') | 774 | path = path.rstrip('/') |
348 | 775 | if from_full_path: | 775 | if from_full_path: |
349 | 776 | udfs = store.find( | 776 | udfs = store.find( |
350 | @@ -796,7 +796,7 @@ | |||
351 | 796 | """Delete a UDF.""" | 796 | """Delete a UDF.""" |
352 | 797 | if not self.user.is_active: | 797 | if not self.user.is_active: |
353 | 798 | raise errors.NoPermission(self.inactive_user_error) | 798 | raise errors.NoPermission(self.inactive_user_error) |
355 | 799 | store = get_storage_store() | 799 | store = get_filesync_store() |
356 | 800 | udf = store.find( | 800 | udf = store.find( |
357 | 801 | model.UserVolume, | 801 | model.UserVolume, |
358 | 802 | model.UserVolume.id == udf_id, | 802 | model.UserVolume.id == udf_id, |
359 | @@ -819,7 +819,7 @@ | |||
360 | 819 | """Get a UDF.""" | 819 | """Get a UDF.""" |
361 | 820 | if not self.user.is_active: | 820 | if not self.user.is_active: |
362 | 821 | raise errors.NoPermission(self.inactive_user_error) | 821 | raise errors.NoPermission(self.inactive_user_error) |
364 | 822 | store = get_storage_store() | 822 | store = get_filesync_store() |
365 | 823 | udf = store.find( | 823 | udf = store.find( |
366 | 824 | model.UserVolume, | 824 | model.UserVolume, |
367 | 825 | model.UserVolume.id == udf_id, | 825 | model.UserVolume.id == udf_id, |
368 | @@ -835,7 +835,7 @@ | |||
369 | 835 | """Return Live UDFs.""" | 835 | """Return Live UDFs.""" |
370 | 836 | if not self.user.is_active: | 836 | if not self.user.is_active: |
371 | 837 | raise errors.NoPermission(self.inactive_user_error) | 837 | raise errors.NoPermission(self.inactive_user_error) |
373 | 838 | store = get_storage_store() | 838 | store = get_filesync_store() |
374 | 839 | udfs = store.find( | 839 | udfs = store.find( |
375 | 840 | model.UserVolume, | 840 | model.UserVolume, |
376 | 841 | model.UserVolume.owner_id == self.user.id, | 841 | model.UserVolume.owner_id == self.user.id, |
377 | @@ -848,7 +848,7 @@ | |||
378 | 848 | @timing_metric | 848 | @timing_metric |
379 | 849 | def get_downloads(self): | 849 | def get_downloads(self): |
380 | 850 | """Get all downloads for a user.""" | 850 | """Get all downloads for a user.""" |
382 | 851 | store = get_storage_store() | 851 | store = get_filesync_store() |
383 | 852 | return [dao.Download(download) | 852 | return [dao.Download(download) |
384 | 853 | for download in store.find( | 853 | for download in store.find( |
385 | 854 | model.Download, | 854 | model.Download, |
386 | @@ -857,7 +857,7 @@ | |||
387 | 857 | @timing_metric | 857 | @timing_metric |
388 | 858 | def get_public_files(self): | 858 | def get_public_files(self): |
389 | 859 | """Get all public files for a user.""" | 859 | """Get all public files for a user.""" |
391 | 860 | store = get_storage_store() | 860 | store = get_filesync_store() |
392 | 861 | nodes = store.find( | 861 | nodes = store.find( |
393 | 862 | model.StorageObject, | 862 | model.StorageObject, |
394 | 863 | model.StorageObject.status == model.STATUS_LIVE, | 863 | model.StorageObject.status == model.STATUS_LIVE, |
395 | @@ -871,7 +871,7 @@ | |||
396 | 871 | @timing_metric | 871 | @timing_metric |
397 | 872 | def get_public_folders(self): | 872 | def get_public_folders(self): |
398 | 873 | """Get all public folders for a user.""" | 873 | """Get all public folders for a user.""" |
400 | 874 | store = get_storage_store() | 874 | store = get_filesync_store() |
401 | 875 | nodes = store.find( | 875 | nodes = store.find( |
402 | 876 | model.StorageObject, | 876 | model.StorageObject, |
403 | 877 | model.StorageObject.status == model.STATUS_LIVE, | 877 | model.StorageObject.status == model.STATUS_LIVE, |
404 | @@ -899,7 +899,7 @@ | |||
405 | 899 | @timing_metric | 899 | @timing_metric |
406 | 900 | def get_share_generation(self, share): | 900 | def get_share_generation(self, share): |
407 | 901 | """Get the generation of the speficied share.""" | 901 | """Get the generation of the speficied share.""" |
409 | 902 | store = get_storage_store() | 902 | store = get_filesync_store() |
410 | 903 | vol = store.find( | 903 | vol = store.find( |
411 | 904 | model.UserVolume, | 904 | model.UserVolume, |
412 | 905 | model.UserVolume.id == model.StorageObject.volume_id, | 905 | model.UserVolume.id == model.StorageObject.volume_id, |
413 | @@ -939,7 +939,7 @@ | |||
414 | 939 | WHERE o.id = t.parent_id::UUID AND | 939 | WHERE o.id = t.parent_id::UUID AND |
415 | 940 | o.volume_id=u.id AND u.status = E'Live' ; | 940 | o.volume_id=u.id AND u.status = E'Live' ; |
416 | 941 | """ % dict(owner_id=self.user.id) | 941 | """ % dict(owner_id=self.user.id) |
418 | 942 | store = get_storage_store() | 942 | store = get_filesync_store() |
419 | 943 | nodes = store.execute(SQL(sql)) | 943 | nodes = store.execute(SQL(sql)) |
420 | 944 | gws = {} | 944 | gws = {} |
421 | 945 | for n in nodes: | 945 | for n in nodes: |
422 | @@ -978,7 +978,7 @@ | |||
423 | 978 | 978 | ||
424 | 979 | def _get_reusable_content(self, hash_value, magic_hash): | 979 | def _get_reusable_content(self, hash_value, magic_hash): |
425 | 980 | """Get a contentblob for reusable content.""" | 980 | """Get a contentblob for reusable content.""" |
427 | 981 | store = get_storage_store() | 981 | store = get_filesync_store() |
428 | 982 | 982 | ||
429 | 983 | # check to see if we have the content blob for that hash | 983 | # check to see if we have the content blob for that hash |
430 | 984 | contentblob = store.find( | 984 | contentblob = store.find( |
431 | @@ -1104,7 +1104,7 @@ | |||
432 | 1104 | @property | 1104 | @property |
433 | 1105 | def store(self): | 1105 | def store(self): |
434 | 1106 | """The storm store to use.""" | 1106 | """The storm store to use.""" |
436 | 1107 | return get_storage_store() | 1107 | return get_filesync_store() |
437 | 1108 | 1108 | ||
438 | 1109 | def _get_root_node(self): | 1109 | def _get_root_node(self): |
439 | 1110 | """Get the root node for this volume.""" | 1110 | """Get the root node for this volume.""" |
440 | @@ -1142,7 +1142,7 @@ | |||
441 | 1142 | """Make sure the share is still good.""" | 1142 | """Make sure the share is still good.""" |
442 | 1143 | if self.share: | 1143 | if self.share: |
443 | 1144 | # if this is a share, make sure it's still valid | 1144 | # if this is a share, make sure it's still valid |
445 | 1145 | store = get_storage_store() | 1145 | store = get_filesync_store() |
446 | 1146 | share = store.find( | 1146 | share = store.find( |
447 | 1147 | model.Share, | 1147 | model.Share, |
448 | 1148 | model.Share.id == self.share.id, | 1148 | model.Share.id == self.share.id, |
449 | @@ -2214,10 +2214,10 @@ | |||
450 | 2214 | 2214 | ||
451 | 2215 | def fix_all_udfs_with_generation_out_of_sync( | 2215 | def fix_all_udfs_with_generation_out_of_sync( |
452 | 2216 | logger, sleep=0, dry_run=False, batch_size=500): | 2216 | logger, sleep=0, dry_run=False, batch_size=500): |
454 | 2217 | from backends.filesync.data.dbmanager import get_storage_store | 2217 | from backends.filesync.data.dbmanager import get_filesync_store |
455 | 2218 | if dry_run: | 2218 | if dry_run: |
456 | 2219 | logger.info("Dry-run enabled; not committing any changes.") | 2219 | logger.info("Dry-run enabled; not committing any changes.") |
458 | 2220 | store = get_storage_store() | 2220 | store = get_filesync_store() |
459 | 2221 | query = "SELECT id FROM StorageUser" | 2221 | query = "SELECT id FROM StorageUser" |
460 | 2222 | user_ids = [row[0] for row in store.execute(query)] | 2222 | user_ids = [row[0] for row in store.execute(query)] |
461 | 2223 | start = time.time() | 2223 | start = time.time() |
462 | 2224 | 2224 | ||
463 | === modified file 'src/backends/filesync/data/testing/ormtestcase.py' | |||
464 | --- src/backends/filesync/data/testing/ormtestcase.py 2015-08-29 00:03:11 +0000 | |||
465 | +++ src/backends/filesync/data/testing/ormtestcase.py 2015-09-05 01:43:14 +0000 | |||
466 | @@ -20,7 +20,7 @@ | |||
467 | 20 | import uuid | 20 | import uuid |
468 | 21 | 21 | ||
469 | 22 | from backends.filesync.data import model | 22 | from backends.filesync.data import model |
471 | 23 | from backends.filesync.data.dbmanager import get_storage_store, filesync_tm | 23 | from backends.filesync.data.dbmanager import get_filesync_store, filesync_tm |
472 | 24 | from backends.filesync.data.testing.testcase import DAOObjectFactory | 24 | from backends.filesync.data.testing.testcase import DAOObjectFactory |
473 | 25 | from backends.filesync.data.testing.testdata import get_fake_hash | 25 | from backends.filesync.data.testing.testdata import get_fake_hash |
474 | 26 | 26 | ||
475 | @@ -178,4 +178,4 @@ | |||
476 | 178 | @property | 178 | @property |
477 | 179 | def store(self): | 179 | def store(self): |
478 | 180 | """Get the store, dont cache, threading issues may arise""" | 180 | """Get the store, dont cache, threading issues may arise""" |
480 | 181 | return get_storage_store() | 181 | return get_filesync_store() |
481 | 182 | 182 | ||
482 | === modified file 'src/backends/filesync/data/testing/testcase.py' | |||
483 | --- src/backends/filesync/data/testing/testcase.py 2015-08-17 00:09:45 +0000 | |||
484 | +++ src/backends/filesync/data/testing/testcase.py 2015-09-05 01:43:14 +0000 | |||
485 | @@ -23,7 +23,7 @@ | |||
486 | 23 | 23 | ||
487 | 24 | from backends.filesync.data import utils, filesync_tm | 24 | from backends.filesync.data import utils, filesync_tm |
488 | 25 | from backends.filesync.data.gateway import SystemGateway | 25 | from backends.filesync.data.gateway import SystemGateway |
490 | 26 | from backends.filesync.data.dbmanager import get_storage_store | 26 | from backends.filesync.data.dbmanager import get_filesync_store |
491 | 27 | from backends.filesync.data.testing.testdata import get_fake_hash | 27 | from backends.filesync.data.testing.testdata import get_fake_hash |
492 | 28 | from backends.testing.testcase import DatabaseResourceTestCase | 28 | from backends.testing.testcase import DatabaseResourceTestCase |
493 | 29 | 29 | ||
494 | @@ -35,7 +35,7 @@ | |||
495 | 35 | """Set up.""" | 35 | """Set up.""" |
496 | 36 | super(StorageDALTestCase, self).setUp() | 36 | super(StorageDALTestCase, self).setUp() |
497 | 37 | self.obj_factory = DAOObjectFactory() | 37 | self.obj_factory = DAOObjectFactory() |
499 | 38 | self.store = get_storage_store() | 38 | self.store = get_filesync_store() |
500 | 39 | self.save_utils_set_public_uuid = utils.set_public_uuid | 39 | self.save_utils_set_public_uuid = utils.set_public_uuid |
501 | 40 | 40 | ||
502 | 41 | def tearDown(self): | 41 | def tearDown(self): |
503 | 42 | 42 | ||
504 | === modified file 'src/backends/filesync/data/tests/test_dao.py' | |||
505 | --- src/backends/filesync/data/tests/test_dao.py 2015-08-29 00:03:11 +0000 | |||
506 | +++ src/backends/filesync/data/tests/test_dao.py 2015-09-05 01:43:14 +0000 | |||
507 | @@ -33,7 +33,7 @@ | |||
508 | 33 | from backends.filesync.data.testing.testdata import ( | 33 | from backends.filesync.data.testing.testdata import ( |
509 | 34 | get_test_contentblob, get_fake_hash) | 34 | get_test_contentblob, get_fake_hash) |
510 | 35 | from backends.filesync.data import model, dao, errors, services, utils | 35 | from backends.filesync.data import model, dao, errors, services, utils |
512 | 36 | from backends.filesync.data.dbmanager import get_storage_store | 36 | from backends.filesync.data.dbmanager import get_filesync_store |
513 | 37 | 37 | ||
514 | 38 | 38 | ||
515 | 39 | class DAOInitTestCase(TestCase): | 39 | class DAOInitTestCase(TestCase): |
516 | @@ -1460,7 +1460,7 @@ | |||
517 | 1460 | 1460 | ||
518 | 1461 | def _flush_store(self): | 1461 | def _flush_store(self): |
519 | 1462 | """Flushes the store used in tests.""" | 1462 | """Flushes the store used in tests.""" |
521 | 1463 | get_storage_store().flush() | 1463 | get_filesync_store().flush() |
522 | 1464 | 1464 | ||
523 | 1465 | def _create_directory_with_five_files(self): | 1465 | def _create_directory_with_five_files(self): |
524 | 1466 | """Creates a DirectoryNode with 5 files inside it.""" | 1466 | """Creates a DirectoryNode with 5 files inside it.""" |
525 | 1467 | 1467 | ||
526 | === modified file 'src/backends/filesync/data/tests/test_gateway.py' | |||
527 | --- src/backends/filesync/data/tests/test_gateway.py 2015-08-29 00:03:11 +0000 | |||
528 | +++ src/backends/filesync/data/tests/test_gateway.py 2015-09-05 01:43:14 +0000 | |||
529 | @@ -42,7 +42,7 @@ | |||
530 | 42 | timing_metric, | 42 | timing_metric, |
531 | 43 | ) | 43 | ) |
532 | 44 | from backends.filesync.data.dbmanager import ( | 44 | from backends.filesync.data.dbmanager import ( |
534 | 45 | get_storage_store, filesync_tm as transaction) | 45 | get_filesync_store, filesync_tm as transaction) |
535 | 46 | from backends.filesync.data import dao, errors, model, utils | 46 | from backends.filesync.data import dao, errors, model, utils |
536 | 47 | from backends.filesync.data.testing.testdata import ( | 47 | from backends.filesync.data.testing.testdata import ( |
537 | 48 | get_fake_hash, get_test_contentblob) | 48 | get_fake_hash, get_test_contentblob) |
538 | @@ -263,7 +263,7 @@ | |||
539 | 263 | def test_handle_node_change_with_shares(self): | 263 | def test_handle_node_change_with_shares(self): |
540 | 264 | """Test the handle_node_change.""" | 264 | """Test the handle_node_change.""" |
541 | 265 | self.setup_shares() | 265 | self.setup_shares() |
543 | 266 | node = get_storage_store().get(model.StorageObject, self.d3.id) | 266 | node = get_filesync_store().get(model.StorageObject, self.d3.id) |
544 | 267 | self.vgw.handle_node_change(node) | 267 | self.vgw.handle_node_change(node) |
545 | 268 | transaction.commit() | 268 | transaction.commit() |
546 | 269 | self.assertIn(VolumeNewGeneration(self.user.id, None, node. | 269 | self.assertIn(VolumeNewGeneration(self.user.id, None, node. |
547 | @@ -282,7 +282,7 @@ | |||
548 | 282 | def test_handle_node_change_from_share(self): | 282 | def test_handle_node_change_from_share(self): |
549 | 283 | """Test the handle_node_change.""" | 283 | """Test the handle_node_change.""" |
550 | 284 | self.setup_shares() | 284 | self.setup_shares() |
552 | 285 | node = get_storage_store().get(model.StorageObject, self.d3.id) | 285 | node = get_filesync_store().get(model.StorageObject, self.d3.id) |
553 | 286 | share = self.user1.get_share(self.share1.id) | 286 | share = self.user1.get_share(self.share1.id) |
554 | 287 | vgw = ReadWriteVolumeGateway(self.user1, share=share) | 287 | vgw = ReadWriteVolumeGateway(self.user1, share=share) |
555 | 288 | vgw.handle_node_change(node) | 288 | vgw.handle_node_change(node) |
556 | @@ -315,7 +315,7 @@ | |||
557 | 315 | """Make sure make_file with magic content sends a notification.""" | 315 | """Make sure make_file with magic content sends a notification.""" |
558 | 316 | cb = get_test_contentblob("FakeContent") | 316 | cb = get_test_contentblob("FakeContent") |
559 | 317 | cb.magic_hash = 'magic' | 317 | cb.magic_hash = 'magic' |
561 | 318 | get_storage_store().add(cb) | 318 | get_filesync_store().add(cb) |
562 | 319 | f = self.vgw.make_file(self.root.id, u"filename", hash=cb.hash, | 319 | f = self.vgw.make_file(self.root.id, u"filename", hash=cb.hash, |
563 | 320 | magic_hash='magic') | 320 | magic_hash='magic') |
564 | 321 | transaction.commit() | 321 | transaction.commit() |
565 | @@ -548,7 +548,7 @@ | |||
566 | 548 | self.assertEqual(user.username, u"username") | 548 | self.assertEqual(user.username, u"username") |
567 | 549 | self.assertEqual(user.visible_name, u"Visible Name") | 549 | self.assertEqual(user.visible_name, u"Visible Name") |
568 | 550 | self.assertEqual(user._subscription_status, model.STATUS_LIVE) | 550 | self.assertEqual(user._subscription_status, model.STATUS_LIVE) |
570 | 551 | store = get_storage_store() | 551 | store = get_filesync_store() |
571 | 552 | info = store.get(model.StorageUserInfo, 1) | 552 | info = store.get(model.StorageUserInfo, 1) |
572 | 553 | self.assertEqual(info.max_storage_bytes, 1) | 553 | self.assertEqual(info.max_storage_bytes, 1) |
573 | 554 | root = model.StorageObject.get_root(store, user.id) | 554 | root = model.StorageObject.get_root(store, user.id) |
574 | @@ -561,7 +561,7 @@ | |||
575 | 561 | self.gw.create_or_update_user( | 561 | self.gw.create_or_update_user( |
576 | 562 | 1, u"username", u"Visible Name", 1) | 562 | 1, u"username", u"Visible Name", 1) |
577 | 563 | # update the user info. | 563 | # update the user info. |
579 | 564 | usr = get_storage_store().get(model.StorageUser, 1) | 564 | usr = get_filesync_store().get(model.StorageUser, 1) |
580 | 565 | usr.status = model.STATUS_DEAD | 565 | usr.status = model.STATUS_DEAD |
581 | 566 | usr.subscription_status = model.STATUS_DEAD | 566 | usr.subscription_status = model.STATUS_DEAD |
582 | 567 | transaction.commit() | 567 | transaction.commit() |
583 | @@ -579,7 +579,7 @@ | |||
584 | 579 | def test_get_shareoffer(self): | 579 | def test_get_shareoffer(self): |
585 | 580 | """Test get_shareoffer.""" | 580 | """Test get_shareoffer.""" |
586 | 581 | user1 = self.create_user(id=1, username=u"sharer") | 581 | user1 = self.create_user(id=1, username=u"sharer") |
588 | 582 | store = get_storage_store() | 582 | store = get_filesync_store() |
589 | 583 | root = model.StorageObject.get_root(store, user1.id) | 583 | root = model.StorageObject.get_root(store, user1.id) |
590 | 584 | share = model.Share(user1.id, root.id, None, u"Share", "View", | 584 | share = model.Share(user1.id, root.id, None, u"Share", "View", |
591 | 585 | email="fake@example.com") | 585 | email="fake@example.com") |
592 | @@ -614,7 +614,7 @@ | |||
593 | 614 | """ | 614 | """ |
594 | 615 | # setup the share_offer | 615 | # setup the share_offer |
595 | 616 | user1 = self.create_user(id=1, username=u"sharer") | 616 | user1 = self.create_user(id=1, username=u"sharer") |
597 | 617 | store = get_storage_store() | 617 | store = get_filesync_store() |
598 | 618 | root = model.StorageObject.get_root(store, user1.id) | 618 | root = model.StorageObject.get_root(store, user1.id) |
599 | 619 | share = model.Share(user1.id, root.id, None, u"Share", "View", | 619 | share = model.Share(user1.id, root.id, None, u"Share", "View", |
600 | 620 | email="fake@example.com") | 620 | email="fake@example.com") |
601 | @@ -660,7 +660,7 @@ | |||
602 | 660 | """Test that the claim_shareoffer function works properly.""" | 660 | """Test that the claim_shareoffer function works properly.""" |
603 | 661 | # setup the share_offer | 661 | # setup the share_offer |
604 | 662 | user1 = self.create_user(id=1, username=u"sharer") | 662 | user1 = self.create_user(id=1, username=u"sharer") |
606 | 663 | store = get_storage_store() | 663 | store = get_filesync_store() |
607 | 664 | root = model.StorageObject.get_root(store, user1.id) | 664 | root = model.StorageObject.get_root(store, user1.id) |
608 | 665 | share = model.Share(user1.id, root.id, None, u"Share", "View", | 665 | share = model.Share(user1.id, root.id, None, u"Share", "View", |
609 | 666 | email="fake@example.com") | 666 | email="fake@example.com") |
610 | @@ -669,7 +669,7 @@ | |||
611 | 669 | # user 2 does not exist | 669 | # user 2 does not exist |
612 | 670 | self.gw.claim_shareoffer(2, u"sharee", u"Sharee", share.id) | 670 | self.gw.claim_shareoffer(2, u"sharee", u"Sharee", share.id) |
613 | 671 | user2 = self.gw.get_user(2) | 671 | user2 = self.gw.get_user(2) |
615 | 672 | store = get_storage_store() | 672 | store = get_filesync_store() |
616 | 673 | root2 = model.StorageObject.get_root(store, user2.id) | 673 | root2 = model.StorageObject.get_root(store, user2.id) |
617 | 674 | self.assertTrue(root2 is not None) | 674 | self.assertTrue(root2 is not None) |
618 | 675 | self.assertEqual(user2.is_active, False) | 675 | self.assertEqual(user2.is_active, False) |
619 | @@ -682,7 +682,7 @@ | |||
620 | 682 | user = self.gw.create_or_update_user( | 682 | user = self.gw.create_or_update_user( |
621 | 683 | 1, u"username", u"Visible Name", 1) | 683 | 1, u"username", u"Visible Name", 1) |
622 | 684 | udf = model.UserVolume.create( | 684 | udf = model.UserVolume.create( |
624 | 685 | get_storage_store(), user.id, u"~/path/name") | 685 | get_filesync_store(), user.id, u"~/path/name") |
625 | 686 | dl_url = u"http://download/url" | 686 | dl_url = u"http://download/url" |
626 | 687 | download = self.gw.make_download( | 687 | download = self.gw.make_download( |
627 | 688 | user.id, udf.id, u"path", dl_url) | 688 | user.id, udf.id, u"path", dl_url) |
628 | @@ -698,7 +698,7 @@ | |||
629 | 698 | user = self.gw.create_or_update_user( | 698 | user = self.gw.create_or_update_user( |
630 | 699 | 1, u"username", u"Visible Name", 1) | 699 | 1, u"username", u"Visible Name", 1) |
631 | 700 | udf = model.UserVolume.create( | 700 | udf = model.UserVolume.create( |
633 | 701 | get_storage_store(), user.id, u"~/path/name") | 701 | get_filesync_store(), user.id, u"~/path/name") |
634 | 702 | download = self.gw.make_download( | 702 | download = self.gw.make_download( |
635 | 703 | user.id, udf.id, u"path", u"http://download/url", ["key"]) | 703 | user.id, udf.id, u"path", u"http://download/url", ["key"]) |
636 | 704 | self.assertTrue(isinstance(download, dao.Download)) | 704 | self.assertTrue(isinstance(download, dao.Download)) |
637 | @@ -715,7 +715,7 @@ | |||
638 | 715 | user = self.gw.create_or_update_user( | 715 | user = self.gw.create_or_update_user( |
639 | 716 | 1, u"username", u"Visible Name", 1) | 716 | 1, u"username", u"Visible Name", 1) |
640 | 717 | udf = model.UserVolume.create( | 717 | udf = model.UserVolume.create( |
642 | 718 | get_storage_store(), user.id, u"~/path/name") | 718 | get_filesync_store(), user.id, u"~/path/name") |
643 | 719 | download = self.gw.make_download( | 719 | download = self.gw.make_download( |
644 | 720 | user.id, udf.id, u"path", u"http://download/url") | 720 | user.id, udf.id, u"path", u"http://download/url") |
645 | 721 | 721 | ||
646 | @@ -728,7 +728,7 @@ | |||
647 | 728 | user = self.gw.create_or_update_user( | 728 | user = self.gw.create_or_update_user( |
648 | 729 | 1, u"username", u"Visible Name", 1) | 729 | 1, u"username", u"Visible Name", 1) |
649 | 730 | udf = model.UserVolume.create( | 730 | udf = model.UserVolume.create( |
651 | 731 | get_storage_store(), user.id, u"~/path/name") | 731 | get_filesync_store(), user.id, u"~/path/name") |
652 | 732 | download_url = u"http://download/url" | 732 | download_url = u"http://download/url" |
653 | 733 | file_path = u"path" | 733 | file_path = u"path" |
654 | 734 | download_id = uuid.uuid4() | 734 | download_id = uuid.uuid4() |
655 | @@ -738,7 +738,7 @@ | |||
656 | 738 | SQL = """INSERT INTO Download (id, owner_id, file_path, download_url, | 738 | SQL = """INSERT INTO Download (id, owner_id, file_path, download_url, |
657 | 739 | volume_id, status, status_change_date) | 739 | volume_id, status, status_change_date) |
658 | 740 | VALUES (?, ?, ?, ?, ?, 'Complete', now())""" | 740 | VALUES (?, ?, ?, ?, ?, 'Complete', now())""" |
660 | 741 | get_storage_store().execute( | 741 | get_filesync_store().execute( |
661 | 742 | SQL, (download_id, user.id, file_path, download_url, udf.id)) | 742 | SQL, (download_id, user.id, file_path, download_url, udf.id)) |
662 | 743 | 743 | ||
663 | 744 | download = self.gw.get_download( | 744 | download = self.gw.get_download( |
664 | @@ -751,7 +751,7 @@ | |||
665 | 751 | user = self.gw.create_or_update_user( | 751 | user = self.gw.create_or_update_user( |
666 | 752 | 1, u"username", u"Visible Name", 1) | 752 | 1, u"username", u"Visible Name", 1) |
667 | 753 | udf = model.UserVolume.create( | 753 | udf = model.UserVolume.create( |
669 | 754 | get_storage_store(), user.id, u"~/path/name") | 754 | get_filesync_store(), user.id, u"~/path/name") |
670 | 755 | 755 | ||
671 | 756 | file_path = u"path" | 756 | file_path = u"path" |
672 | 757 | download_key = u"mydownloadkey" | 757 | download_key = u"mydownloadkey" |
673 | @@ -775,7 +775,7 @@ | |||
674 | 775 | user = self.gw.create_or_update_user( | 775 | user = self.gw.create_or_update_user( |
675 | 776 | 1, u"username", u"Visible Name", 1) | 776 | 1, u"username", u"Visible Name", 1) |
676 | 777 | udf = model.UserVolume.create( | 777 | udf = model.UserVolume.create( |
678 | 778 | get_storage_store(), user.id, u"~/path/name") | 778 | get_filesync_store(), user.id, u"~/path/name") |
679 | 779 | key = ["some", "key"] | 779 | key = ["some", "key"] |
680 | 780 | download = self.gw.make_download( | 780 | download = self.gw.make_download( |
681 | 781 | user.id, udf.id, u"path", u"http://download/url", key) | 781 | user.id, udf.id, u"path", u"http://download/url", key) |
682 | @@ -791,7 +791,7 @@ | |||
683 | 791 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 791 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
684 | 792 | 1) | 792 | 1) |
685 | 793 | udf = model.UserVolume.create( | 793 | udf = model.UserVolume.create( |
687 | 794 | get_storage_store(), user.id, u"~/path/name") | 794 | get_filesync_store(), user.id, u"~/path/name") |
688 | 795 | key = ["some", "key"] | 795 | key = ["some", "key"] |
689 | 796 | download = self.gw.make_download( | 796 | download = self.gw.make_download( |
690 | 797 | user.id, udf.id, u"path", u"http://download/url/1", key) | 797 | user.id, udf.id, u"path", u"http://download/url/1", key) |
691 | @@ -808,7 +808,7 @@ | |||
692 | 808 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 808 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
693 | 809 | 1) | 809 | 1) |
694 | 810 | udf = model.UserVolume.create( | 810 | udf = model.UserVolume.create( |
696 | 811 | get_storage_store(), user.id, u"~/path/name") | 811 | get_filesync_store(), user.id, u"~/path/name") |
697 | 812 | download = self.gw.make_download( | 812 | download = self.gw.make_download( |
698 | 813 | user.id, udf.id, u"path", u"http://download/url") | 813 | user.id, udf.id, u"path", u"http://download/url") |
699 | 814 | 814 | ||
700 | @@ -820,7 +820,7 @@ | |||
701 | 820 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 820 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
702 | 821 | 1) | 821 | 1) |
703 | 822 | udf = model.UserVolume.create( | 822 | udf = model.UserVolume.create( |
705 | 823 | get_storage_store(), user.id, u"~/spath/name") | 823 | get_filesync_store(), user.id, u"~/spath/name") |
706 | 824 | download = self.gw.make_download( | 824 | download = self.gw.make_download( |
707 | 825 | user.id, udf.id, u"path", u"http://download/url") | 825 | user.id, udf.id, u"path", u"http://download/url") |
708 | 826 | new_download = self.gw.update_download( | 826 | new_download = self.gw.update_download( |
709 | @@ -834,7 +834,7 @@ | |||
710 | 834 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 834 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
711 | 835 | 1) | 835 | 1) |
712 | 836 | udf = model.UserVolume.create( | 836 | udf = model.UserVolume.create( |
714 | 837 | get_storage_store(), user.id, u"~/path/name") | 837 | get_filesync_store(), user.id, u"~/path/name") |
715 | 838 | download = self.gw.make_download( | 838 | download = self.gw.make_download( |
716 | 839 | user.id, udf.id, u"path", u"http://download/url") | 839 | user.id, udf.id, u"path", u"http://download/url") |
717 | 840 | a_file = udf.root_node.make_file(u"TheName") | 840 | a_file = udf.root_node.make_file(u"TheName") |
718 | @@ -849,7 +849,7 @@ | |||
719 | 849 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 849 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
720 | 850 | 1) | 850 | 1) |
721 | 851 | udf = model.UserVolume.create( | 851 | udf = model.UserVolume.create( |
723 | 852 | get_storage_store(), user.id, u"~/path/name") | 852 | get_filesync_store(), user.id, u"~/path/name") |
724 | 853 | download = self.gw.make_download( | 853 | download = self.gw.make_download( |
725 | 854 | user.id, udf.id, u"path", u"http://download/url") | 854 | user.id, udf.id, u"path", u"http://download/url") |
726 | 855 | new_download = self.gw.update_download( | 855 | new_download = self.gw.update_download( |
727 | @@ -878,7 +878,7 @@ | |||
728 | 878 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", | 878 | user = self.gw.create_or_update_user(1, u"username", u"Visible Name", |
729 | 879 | 1) | 879 | 1) |
730 | 880 | sgw = SystemGateway() | 880 | sgw = SystemGateway() |
732 | 881 | storage_store = get_storage_store() | 881 | storage_store = get_filesync_store() |
733 | 882 | root = model.StorageObject.get_root(storage_store, user.id) | 882 | root = model.StorageObject.get_root(storage_store, user.id) |
734 | 883 | node = root.make_file(u"TheName") | 883 | node = root.make_file(u"TheName") |
735 | 884 | node._content_hash = model.EMPTY_CONTENT_HASH | 884 | node._content_hash = model.EMPTY_CONTENT_HASH |
736 | @@ -925,7 +925,7 @@ | |||
737 | 925 | multipart_id=str(uuid.uuid4()), | 925 | multipart_id=str(uuid.uuid4()), |
738 | 926 | multipart_key=uuid.uuid4()) | 926 | multipart_key=uuid.uuid4()) |
739 | 927 | # change the when_started date for the test. | 927 | # change the when_started date for the test. |
741 | 928 | store = get_storage_store() | 928 | store = get_filesync_store() |
742 | 929 | uploadjob = store.get(model.UploadJob, up1.id) | 929 | uploadjob = store.get(model.UploadJob, up1.id) |
743 | 930 | uploadjob.when_last_active = ( | 930 | uploadjob.when_last_active = ( |
744 | 931 | datetime.datetime.now() - datetime.timedelta(uid)) | 931 | datetime.datetime.now() - datetime.timedelta(uid)) |
745 | @@ -954,7 +954,7 @@ | |||
746 | 954 | multipart_id=str(uuid.uuid4()), | 954 | multipart_id=str(uuid.uuid4()), |
747 | 955 | multipart_key=uuid.uuid4()) | 955 | multipart_key=uuid.uuid4()) |
748 | 956 | # change the when_started date for the test. | 956 | # change the when_started date for the test. |
750 | 957 | store = get_storage_store() | 957 | store = get_filesync_store() |
751 | 958 | uploadjob = store.get(model.UploadJob, up1.id) | 958 | uploadjob = store.get(model.UploadJob, up1.id) |
752 | 959 | uploadjob.when_last_active = ( | 959 | uploadjob.when_last_active = ( |
753 | 960 | datetime.datetime.now() - datetime.timedelta(10)) | 960 | datetime.datetime.now() - datetime.timedelta(10)) |
754 | @@ -1193,7 +1193,7 @@ | |||
755 | 1193 | self.assertEqual(quota.max_storage_bytes, 2) | 1193 | self.assertEqual(quota.max_storage_bytes, 2) |
756 | 1194 | self.assertEqual(user2._subscription_status, model.STATUS_LIVE) | 1194 | self.assertEqual(user2._subscription_status, model.STATUS_LIVE) |
757 | 1195 | # make sure the StorageUserInfo is updated as well | 1195 | # make sure the StorageUserInfo is updated as well |
759 | 1196 | store = get_storage_store() | 1196 | store = get_filesync_store() |
760 | 1197 | info = store.get(model.StorageUserInfo, user2.id) | 1197 | info = store.get(model.StorageUserInfo, user2.id) |
761 | 1198 | self.assertEqual(info.max_storage_bytes, 2) | 1198 | self.assertEqual(info.max_storage_bytes, 2) |
762 | 1199 | 1199 | ||
763 | @@ -1218,7 +1218,7 @@ | |||
764 | 1218 | def test_accept_share(self): | 1218 | def test_accept_share(self): |
765 | 1219 | """Test accepting a direct share.""" | 1219 | """Test accepting a direct share.""" |
766 | 1220 | user1 = self.create_user(id=2, username=u"sharer") | 1220 | user1 = self.create_user(id=2, username=u"sharer") |
768 | 1221 | store = get_storage_store() | 1221 | store = get_filesync_store() |
769 | 1222 | root = model.StorageObject.get_root(store, user1.id) | 1222 | root = model.StorageObject.get_root(store, user1.id) |
770 | 1223 | share = model.Share(user1.id, root.id, self.user.id, u"Share", "View") | 1223 | share = model.Share(user1.id, root.id, self.user.id, u"Share", "View") |
771 | 1224 | self.store.add(share) | 1224 | self.store.add(share) |
772 | @@ -1238,7 +1238,7 @@ | |||
773 | 1238 | def test_decline_share(self): | 1238 | def test_decline_share(self): |
774 | 1239 | """Test declinet a direct share.""" | 1239 | """Test declinet a direct share.""" |
775 | 1240 | user1 = self.create_user(id=2, username=u"sharer") | 1240 | user1 = self.create_user(id=2, username=u"sharer") |
777 | 1241 | store = get_storage_store() | 1241 | store = get_filesync_store() |
778 | 1242 | root = model.StorageObject.get_root(store, user1.id) | 1242 | root = model.StorageObject.get_root(store, user1.id) |
779 | 1243 | share = model.Share(user1.id, root.id, self.user.id, u"Share", "View") | 1243 | share = model.Share(user1.id, root.id, self.user.id, u"Share", "View") |
780 | 1244 | self.store.add(share) | 1244 | self.store.add(share) |
781 | @@ -1260,7 +1260,7 @@ | |||
782 | 1260 | def test_delete_share(self): | 1260 | def test_delete_share(self): |
783 | 1261 | """Test delete shares from share-er and share-ee""" | 1261 | """Test delete shares from share-er and share-ee""" |
784 | 1262 | user1 = self.create_user(id=2, username=u"sharer") | 1262 | user1 = self.create_user(id=2, username=u"sharer") |
786 | 1263 | store = get_storage_store() | 1263 | store = get_filesync_store() |
787 | 1264 | root = model.StorageObject.get_root(store, user1.id) | 1264 | root = model.StorageObject.get_root(store, user1.id) |
788 | 1265 | share = model.Share(self.user.id, root.id, user1.id, | 1265 | share = model.Share(self.user.id, root.id, user1.id, |
789 | 1266 | u"Share", "View") | 1266 | u"Share", "View") |
790 | @@ -1425,7 +1425,7 @@ | |||
791 | 1425 | usera = self.create_user(id=2, username=u"sharee1") | 1425 | usera = self.create_user(id=2, username=u"sharee1") |
792 | 1426 | userb = self.create_user(id=3, username=u"sharee2") | 1426 | userb = self.create_user(id=3, username=u"sharee2") |
793 | 1427 | userc = self.create_user(id=4, username=u"sharee3") | 1427 | userc = self.create_user(id=4, username=u"sharee3") |
795 | 1428 | store = get_storage_store() | 1428 | store = get_filesync_store() |
796 | 1429 | vgw = self.gw.get_root_gateway() | 1429 | vgw = self.gw.get_root_gateway() |
797 | 1430 | dir1 = vgw.make_subdirectory(vgw.get_root().id, u"shared1") | 1430 | dir1 = vgw.make_subdirectory(vgw.get_root().id, u"shared1") |
798 | 1431 | dir2 = vgw.make_subdirectory(dir1.id, u"shared2") | 1431 | dir2 = vgw.make_subdirectory(dir1.id, u"shared2") |
799 | @@ -1460,7 +1460,7 @@ | |||
800 | 1460 | usera = self.create_user(id=2, username=u"sharee1") | 1460 | usera = self.create_user(id=2, username=u"sharee1") |
801 | 1461 | sharea = vgw.make_share(dir1.id, u"sharea", user_id=usera.id) | 1461 | sharea = vgw.make_share(dir1.id, u"sharea", user_id=usera.id) |
802 | 1462 | usera._gateway.accept_share(sharea.id) | 1462 | usera._gateway.accept_share(sharea.id) |
804 | 1463 | store = get_storage_store() | 1463 | store = get_filesync_store() |
805 | 1464 | dir1 = store.get(model.StorageObject, dir1.id) | 1464 | dir1 = store.get(model.StorageObject, dir1.id) |
806 | 1465 | self.user._gateway.delete_related_shares(dir1) | 1465 | self.user._gateway.delete_related_shares(dir1) |
807 | 1466 | self.assertRaises( | 1466 | self.assertRaises( |
808 | @@ -1473,7 +1473,7 @@ | |||
809 | 1473 | self.assertEqual(dls, []) | 1473 | self.assertEqual(dls, []) |
810 | 1474 | sysgw = SystemGateway() | 1474 | sysgw = SystemGateway() |
811 | 1475 | udf = model.UserVolume.create( | 1475 | udf = model.UserVolume.create( |
813 | 1476 | get_storage_store(), | 1476 | get_filesync_store(), |
814 | 1477 | self.user.id, u"~/path/name") | 1477 | self.user.id, u"~/path/name") |
815 | 1478 | dl_url = u"http://download/url" | 1478 | dl_url = u"http://download/url" |
816 | 1479 | found_urls = {} | 1479 | found_urls = {} |
817 | @@ -1492,7 +1492,7 @@ | |||
818 | 1492 | def test_get_public_files(self): | 1492 | def test_get_public_files(self): |
819 | 1493 | """Test get_public_files method.""" | 1493 | """Test get_public_files method.""" |
820 | 1494 | vgw = self.gw.get_root_gateway() | 1494 | vgw = self.gw.get_root_gateway() |
822 | 1495 | storage_store = get_storage_store() | 1495 | storage_store = get_filesync_store() |
823 | 1496 | root = model.StorageObject.get_root(storage_store, self.user.id) | 1496 | root = model.StorageObject.get_root(storage_store, self.user.id) |
824 | 1497 | node = root.make_file(u"TheName") | 1497 | node = root.make_file(u"TheName") |
825 | 1498 | node._content_hash = model.EMPTY_CONTENT_HASH | 1498 | node._content_hash = model.EMPTY_CONTENT_HASH |
826 | @@ -1533,7 +1533,7 @@ | |||
827 | 1533 | def test_get_public_folders(self): | 1533 | def test_get_public_folders(self): |
828 | 1534 | """Test get_public_folders method.""" | 1534 | """Test get_public_folders method.""" |
829 | 1535 | vgw = self.gw.get_root_gateway() | 1535 | vgw = self.gw.get_root_gateway() |
831 | 1536 | storage_store = get_storage_store() | 1536 | storage_store = get_filesync_store() |
832 | 1537 | root = model.StorageObject.get_root(storage_store, self.user.id) | 1537 | root = model.StorageObject.get_root(storage_store, self.user.id) |
833 | 1538 | node = root.make_subdirectory(u'test_dir') | 1538 | node = root.make_subdirectory(u'test_dir') |
834 | 1539 | vgw.change_public_access(node.id, True, allow_directory=True) | 1539 | vgw.change_public_access(node.id, True, allow_directory=True) |
835 | @@ -1552,7 +1552,7 @@ | |||
836 | 1552 | def test_get_share_generation(self): | 1552 | def test_get_share_generation(self): |
837 | 1553 | """Test the get_share_generation method.""" | 1553 | """Test the get_share_generation method.""" |
838 | 1554 | user1 = self.create_user(id=2, username=u"sharer") | 1554 | user1 = self.create_user(id=2, username=u"sharer") |
840 | 1555 | store = get_storage_store() | 1555 | store = get_filesync_store() |
841 | 1556 | root = model.StorageObject.get_root(store, user1.id) | 1556 | root = model.StorageObject.get_root(store, user1.id) |
842 | 1557 | share = model.Share(self.user.id, root.id, user1.id, | 1557 | share = model.Share(self.user.id, root.id, user1.id, |
843 | 1558 | u"Share", "View") | 1558 | u"Share", "View") |
844 | @@ -1568,7 +1568,7 @@ | |||
845 | 1568 | def test_get_share_generation_None(self): | 1568 | def test_get_share_generation_None(self): |
846 | 1569 | """Test the get_share_generation method.""" | 1569 | """Test the get_share_generation method.""" |
847 | 1570 | user1 = self.create_user(id=2, username=u"sharer") | 1570 | user1 = self.create_user(id=2, username=u"sharer") |
849 | 1571 | store = get_storage_store() | 1571 | store = get_filesync_store() |
850 | 1572 | root = model.StorageObject.get_root(store, user1.id) | 1572 | root = model.StorageObject.get_root(store, user1.id) |
851 | 1573 | share = model.Share(self.user.id, root.id, user1.id, | 1573 | share = model.Share(self.user.id, root.id, user1.id, |
852 | 1574 | u"Share", "View") | 1574 | u"Share", "View") |
853 | @@ -1621,7 +1621,7 @@ | |||
854 | 1621 | """Test update_content will reuse owned content.""" | 1621 | """Test update_content will reuse owned content.""" |
855 | 1622 | hash_value = get_fake_hash() | 1622 | hash_value = get_fake_hash() |
856 | 1623 | node = self._make_file_with_content(hash_value) | 1623 | node = self._make_file_with_content(hash_value) |
858 | 1624 | get_storage_store().find( | 1624 | get_filesync_store().find( |
859 | 1625 | model.ContentBlob, | 1625 | model.ContentBlob, |
860 | 1626 | model.ContentBlob.hash == node.content_hash | 1626 | model.ContentBlob.hash == node.content_hash |
861 | 1627 | ).set(magic_hash='magic') | 1627 | ).set(magic_hash='magic') |
862 | @@ -1650,7 +1650,7 @@ | |||
863 | 1650 | 1650 | ||
864 | 1651 | hash_value = get_fake_hash() | 1651 | hash_value = get_fake_hash() |
865 | 1652 | node = self._make_file_with_content(hash_value, gw=user2._gateway) | 1652 | node = self._make_file_with_content(hash_value, gw=user2._gateway) |
867 | 1653 | get_storage_store().find( | 1653 | get_filesync_store().find( |
868 | 1654 | model.ContentBlob, | 1654 | model.ContentBlob, |
869 | 1655 | model.ContentBlob.hash == node.content_hash | 1655 | model.ContentBlob.hash == node.content_hash |
870 | 1656 | ).set(magic_hash='magic') | 1656 | ).set(magic_hash='magic') |
871 | @@ -1678,7 +1678,7 @@ | |||
872 | 1678 | """Test update_content will reuse owned content.""" | 1678 | """Test update_content will reuse owned content.""" |
873 | 1679 | hash_value = get_fake_hash() | 1679 | hash_value = get_fake_hash() |
874 | 1680 | node = self._make_file_with_content(hash_value) | 1680 | node = self._make_file_with_content(hash_value) |
876 | 1681 | get_storage_store().find( | 1681 | get_filesync_store().find( |
877 | 1682 | model.ContentBlob, | 1682 | model.ContentBlob, |
878 | 1683 | model.ContentBlob.hash == node.content_hash | 1683 | model.ContentBlob.hash == node.content_hash |
879 | 1684 | ).set(magic_hash='magic') | 1684 | ).set(magic_hash='magic') |
880 | @@ -1706,7 +1706,7 @@ | |||
881 | 1706 | 1706 | ||
882 | 1707 | hash_value = get_fake_hash() | 1707 | hash_value = get_fake_hash() |
883 | 1708 | node = self._make_file_with_content(hash_value, gw=user2._gateway) | 1708 | node = self._make_file_with_content(hash_value, gw=user2._gateway) |
885 | 1709 | get_storage_store().find( | 1709 | get_filesync_store().find( |
886 | 1710 | model.ContentBlob, | 1710 | model.ContentBlob, |
887 | 1711 | model.ContentBlob.hash == node.content_hash | 1711 | model.ContentBlob.hash == node.content_hash |
888 | 1712 | ).set(magic_hash='magic') | 1712 | ).set(magic_hash='magic') |
889 | @@ -1767,7 +1767,7 @@ | |||
890 | 1767 | max_storage_bytes=200) | 1767 | max_storage_bytes=200) |
891 | 1768 | self.user = self.gw.get_user(user.id) | 1768 | self.user = self.gw.get_user(user.id) |
892 | 1769 | self.user_quota = self.user._gateway.get_quota() | 1769 | self.user_quota = self.user._gateway.get_quota() |
894 | 1770 | self.storage_store = get_storage_store() | 1770 | self.storage_store = get_filesync_store() |
895 | 1771 | self.vgw = self.user._gateway.get_root_gateway() | 1771 | self.vgw = self.user._gateway.get_root_gateway() |
896 | 1772 | self.root = self.vgw.get_root() | 1772 | self.root = self.vgw.get_root() |
897 | 1773 | 1773 | ||
898 | @@ -2413,7 +2413,7 @@ | |||
899 | 2413 | max_storage_bytes=200) | 2413 | max_storage_bytes=200) |
900 | 2414 | self.user = self.gw.get_user(user.id) | 2414 | self.user = self.gw.get_user(user.id) |
901 | 2415 | self.user_quota = self.user._gateway.get_quota() | 2415 | self.user_quota = self.user._gateway.get_quota() |
903 | 2416 | self.storage_store = get_storage_store() | 2416 | self.storage_store = get_filesync_store() |
904 | 2417 | self.setup_volume() | 2417 | self.setup_volume() |
905 | 2418 | 2418 | ||
906 | 2419 | def setup_volume(self): | 2419 | def setup_volume(self): |
907 | @@ -2431,7 +2431,7 @@ | |||
908 | 2431 | def tweak_users_quota(self, user_id, max_bytes, used_bytes=0): | 2431 | def tweak_users_quota(self, user_id, max_bytes, used_bytes=0): |
909 | 2432 | """Utility to toy with the user's quota.""" | 2432 | """Utility to toy with the user's quota.""" |
910 | 2433 | self.gw.get_user(user_id) | 2433 | self.gw.get_user(user_id) |
912 | 2434 | store = get_storage_store() | 2434 | store = get_filesync_store() |
913 | 2435 | store.find( | 2435 | store.find( |
914 | 2436 | model.StorageUserInfo, | 2436 | model.StorageUserInfo, |
915 | 2437 | model.StorageUserInfo.id == user_id | 2437 | model.StorageUserInfo.id == user_id |
916 | @@ -2669,7 +2669,7 @@ | |||
917 | 2669 | """Test make_file method.""" | 2669 | """Test make_file method.""" |
918 | 2670 | cb = get_test_contentblob("FakeContent") | 2670 | cb = get_test_contentblob("FakeContent") |
919 | 2671 | cb.magic_hash = 'magic' | 2671 | cb.magic_hash = 'magic' |
921 | 2672 | get_storage_store().add(cb) | 2672 | get_filesync_store().add(cb) |
922 | 2673 | # make enough room | 2673 | # make enough room |
923 | 2674 | self.tweak_users_quota(self.owner.id, cb.deflated_size) | 2674 | self.tweak_users_quota(self.owner.id, cb.deflated_size) |
924 | 2675 | node = self.vgw.make_file(self.root.id, u"the file name", | 2675 | node = self.vgw.make_file(self.root.id, u"the file name", |
925 | @@ -2681,7 +2681,7 @@ | |||
926 | 2681 | # make a content blob with a magic hash | 2681 | # make a content blob with a magic hash |
927 | 2682 | cb = get_test_contentblob("FakeContent") | 2682 | cb = get_test_contentblob("FakeContent") |
928 | 2683 | cb.magic_hash = 'magic' | 2683 | cb.magic_hash = 'magic' |
930 | 2684 | get_storage_store().add(cb) | 2684 | get_filesync_store().add(cb) |
931 | 2685 | self.assertRaises(errors.HashMismatch, | 2685 | self.assertRaises(errors.HashMismatch, |
932 | 2686 | self.vgw.make_file, self.root.id, u"name.txt", | 2686 | self.vgw.make_file, self.root.id, u"name.txt", |
933 | 2687 | hash="wronghash") | 2687 | hash="wronghash") |
934 | @@ -3414,7 +3414,7 @@ | |||
935 | 3414 | self.user_quota = self.user._gateway.get_quota() | 3414 | self.user_quota = self.user._gateway.get_quota() |
936 | 3415 | self.owner = self.user | 3415 | self.owner = self.user |
937 | 3416 | self.owner_quota = self.user_quota | 3416 | self.owner_quota = self.user_quota |
939 | 3417 | self.storage_store = get_storage_store() | 3417 | self.storage_store = get_filesync_store() |
940 | 3418 | # make a test file using storm | 3418 | # make a test file using storm |
941 | 3419 | udf = model.UserVolume.create( | 3419 | udf = model.UserVolume.create( |
942 | 3420 | self.storage_store, self.user.id, u"~/thepath/thename") | 3420 | self.storage_store, self.user.id, u"~/thepath/thename") |
943 | @@ -3440,7 +3440,7 @@ | |||
944 | 3440 | id=2, username=u"sharer", max_storage_bytes=200) | 3440 | id=2, username=u"sharer", max_storage_bytes=200) |
945 | 3441 | self.owner = sharer | 3441 | self.owner = sharer |
946 | 3442 | self.owner_quota = sharer._gateway.get_quota() | 3442 | self.owner_quota = sharer._gateway.get_quota() |
948 | 3443 | self.storage_store = get_storage_store() | 3443 | self.storage_store = get_filesync_store() |
949 | 3444 | root = model.StorageObject.get_root(self.storage_store, sharer.id) | 3444 | root = model.StorageObject.get_root(self.storage_store, sharer.id) |
950 | 3445 | rw_node = root.make_subdirectory(u"WriteMe") | 3445 | rw_node = root.make_subdirectory(u"WriteMe") |
951 | 3446 | transaction.commit() | 3446 | transaction.commit() |
952 | @@ -3476,7 +3476,7 @@ | |||
953 | 3476 | self.gw = SystemGateway() | 3476 | self.gw = SystemGateway() |
954 | 3477 | user = self.create_user(username=u"testuser") | 3477 | user = self.create_user(username=u"testuser") |
955 | 3478 | self.user = self.gw.get_user(user.id, session_id="QWERTY") | 3478 | self.user = self.gw.get_user(user.id, session_id="QWERTY") |
957 | 3479 | self.storage_store = get_storage_store() | 3479 | self.storage_store = get_filesync_store() |
958 | 3480 | # make a test file | 3480 | # make a test file |
959 | 3481 | vgw = self.user._gateway.get_root_gateway() | 3481 | vgw = self.user._gateway.get_root_gateway() |
960 | 3482 | root = self.storage_store.get(model.StorageObject, vgw.get_root().id) | 3482 | root = self.storage_store.get(model.StorageObject, vgw.get_root().id) |
961 | @@ -3568,7 +3568,7 @@ | |||
962 | 3568 | self.gw = SystemGateway() | 3568 | self.gw = SystemGateway() |
963 | 3569 | user = self.create_user(username=u"testuser") | 3569 | user = self.create_user(username=u"testuser") |
964 | 3570 | self.user = self.gw.get_user(user.id, session_id="QWERTY") | 3570 | self.user = self.gw.get_user(user.id, session_id="QWERTY") |
966 | 3571 | self.storage_store = get_storage_store() | 3571 | self.storage_store = get_filesync_store() |
967 | 3572 | # make a test file using storm | 3572 | # make a test file using storm |
968 | 3573 | self.udf = model.UserVolume.create( | 3573 | self.udf = model.UserVolume.create( |
969 | 3574 | self.storage_store, self.user.id, u"~/thepath/thename") | 3574 | self.storage_store, self.user.id, u"~/thepath/thename") |
970 | @@ -3692,10 +3692,10 @@ | |||
971 | 3692 | self.gw = SystemGateway() | 3692 | self.gw = SystemGateway() |
972 | 3693 | user = self.create_user(username=u"testuser") | 3693 | user = self.create_user(username=u"testuser") |
973 | 3694 | self.user = self.gw.get_user(user.id, session_id="QWERTY") | 3694 | self.user = self.gw.get_user(user.id, session_id="QWERTY") |
975 | 3695 | self.storage_store = get_storage_store() | 3695 | self.storage_store = get_filesync_store() |
976 | 3696 | self.sharer = self.create_user(id=2, username=u"sharer") | 3696 | self.sharer = self.create_user(id=2, username=u"sharer") |
977 | 3697 | self.othersharee = self.create_user(id=3, username=u"sharee") | 3697 | self.othersharee = self.create_user(id=3, username=u"sharee") |
979 | 3698 | store = get_storage_store() | 3698 | store = get_filesync_store() |
980 | 3699 | root = model.StorageObject.get_root(store, self.sharer.id) | 3699 | root = model.StorageObject.get_root(store, self.sharer.id) |
981 | 3700 | self.r_node = root.make_subdirectory(u"NoWrite") | 3700 | self.r_node = root.make_subdirectory(u"NoWrite") |
982 | 3701 | self.file = self.r_node.make_file(u"A File for uploads") | 3701 | self.file = self.r_node.make_file(u"A File for uploads") |
983 | @@ -4190,7 +4190,7 @@ | |||
984 | 4190 | def setUp(self): | 4190 | def setUp(self): |
985 | 4191 | super(GenerationsTestCase, self).setUp() | 4191 | super(GenerationsTestCase, self).setUp() |
986 | 4192 | self.user = self.create_user(username=u"testuser") | 4192 | self.user = self.create_user(username=u"testuser") |
988 | 4193 | self.storage_store = get_storage_store() | 4193 | self.storage_store = get_filesync_store() |
989 | 4194 | # make a test file | 4194 | # make a test file |
990 | 4195 | self.ugw = StorageUserGateway(self.user) | 4195 | self.ugw = StorageUserGateway(self.user) |
991 | 4196 | self.vgw = self.ugw.get_root_gateway() | 4196 | self.vgw = self.ugw.get_root_gateway() |
992 | 4197 | 4197 | ||
993 | === modified file 'src/backends/testing/resources.py' | |||
994 | --- src/backends/testing/resources.py 2015-08-29 00:03:11 +0000 | |||
995 | +++ src/backends/testing/resources.py 2015-09-05 01:43:14 +0000 | |||
996 | @@ -31,7 +31,7 @@ | |||
997 | 31 | from backends.db.schemas import account as account_schema | 31 | from backends.db.schemas import account as account_schema |
998 | 32 | from backends.db.schemas import storage as storage_schema | 32 | from backends.db.schemas import storage as storage_schema |
999 | 33 | from backends.db.dbwatcher import DatabaseWatcher | 33 | from backends.db.dbwatcher import DatabaseWatcher |
1001 | 34 | from backends.db.db_admin_store import get_admin_store | 34 | from backends.db.store import get_filesync_store |
1002 | 35 | from backends.filesync.data.dbmanager import filesync_tm | 35 | from backends.filesync.data.dbmanager import filesync_tm |
1003 | 36 | 36 | ||
1004 | 37 | DEBUG_RESOURCES = bool(os.environ.get("DEBUG_RESOURCES")) | 37 | DEBUG_RESOURCES = bool(os.environ.get("DEBUG_RESOURCES")) |
1005 | @@ -41,12 +41,11 @@ | |||
1006 | 41 | """A resource that resets a database to a known state for each test.""" | 41 | """A resource that resets a database to a known state for each test.""" |
1007 | 42 | _watcher = None | 42 | _watcher = None |
1008 | 43 | 43 | ||
1010 | 44 | def __init__(self, dbname, schema_modules, store_name, autocommit=False, | 44 | def __init__(self, dbname, schema_modules, autocommit=False, |
1011 | 45 | tx_manager=transaction): | 45 | tx_manager=transaction): |
1012 | 46 | super(DatabaseResource, self).__init__() | 46 | super(DatabaseResource, self).__init__() |
1013 | 47 | self.dbname = dbname | 47 | self.dbname = dbname |
1014 | 48 | self.schema_modules = schema_modules | 48 | self.schema_modules = schema_modules |
1015 | 49 | self.store_name = store_name | ||
1016 | 50 | self.autocommit = autocommit | 49 | self.autocommit = autocommit |
1017 | 51 | self.saw_commit = False | 50 | self.saw_commit = False |
1018 | 52 | self.schemas = None | 51 | self.schemas = None |
1019 | @@ -72,7 +71,7 @@ | |||
1020 | 72 | watcher.enable(self.dbname) | 71 | watcher.enable(self.dbname) |
1021 | 73 | if self.schemas is None: | 72 | if self.schemas is None: |
1022 | 74 | self.schemas = [s.create_schema() for s in self.schema_modules] | 73 | self.schemas = [s.create_schema() for s in self.schema_modules] |
1024 | 75 | store = get_admin_store(self.store_name) | 74 | store = get_filesync_store() |
1025 | 76 | transaction.abort() | 75 | transaction.abort() |
1026 | 77 | for s in self.schemas: | 76 | for s in self.schemas: |
1027 | 78 | s.upgrade(store) | 77 | s.upgrade(store) |
1028 | @@ -94,7 +93,7 @@ | |||
1029 | 94 | self.tx_manager.abort() | 93 | self.tx_manager.abort() |
1030 | 95 | # Someone committed to the database: clean it up. | 94 | # Someone committed to the database: clean it up. |
1031 | 96 | if self.saw_commit: | 95 | if self.saw_commit: |
1033 | 97 | store = get_admin_store(self.store_name) | 96 | store = get_filesync_store() |
1034 | 98 | for s in reversed(self.schemas): | 97 | for s in reversed(self.schemas): |
1035 | 99 | s.delete(store) | 98 | s.delete(store) |
1036 | 100 | transaction.commit() | 99 | transaction.commit() |
1037 | @@ -116,5 +115,4 @@ | |||
1038 | 116 | FilesyncDatabaseResource = DatabaseResource( | 115 | FilesyncDatabaseResource = DatabaseResource( |
1039 | 117 | dbname='filesync', | 116 | dbname='filesync', |
1040 | 118 | schema_modules=[account_schema, storage_schema], | 117 | schema_modules=[account_schema, storage_schema], |
1041 | 119 | store_name='filesync', | ||
1042 | 120 | tx_manager=filesync_tm) | 118 | tx_manager=filesync_tm) |
1043 | 121 | 119 | ||
1044 | === modified file 'src/backends/txlog/model.py' | |||
1045 | --- src/backends/txlog/model.py 2015-09-03 14:23:04 +0000 | |||
1046 | +++ src/backends/txlog/model.py 2015-09-05 01:43:14 +0000 | |||
1047 | @@ -25,7 +25,7 @@ | |||
1048 | 25 | from storm.locals import Int, DateTime, Enum, Store, Unicode | 25 | from storm.locals import Int, DateTime, Enum, Store, Unicode |
1049 | 26 | from storm.store import AutoReload | 26 | from storm.store import AutoReload |
1050 | 27 | 27 | ||
1052 | 28 | from backends.filesync.data.dbmanager import get_storage_store | 28 | from backends.filesync.data.dbmanager import get_filesync_store |
1053 | 29 | from backends.filesync.data.model import ( | 29 | from backends.filesync.data.model import ( |
1054 | 30 | STATUS_LIVE, | 30 | STATUS_LIVE, |
1055 | 31 | Share, | 31 | Share, |
1056 | @@ -112,7 +112,7 @@ | |||
1057 | 112 | 112 | ||
1058 | 113 | @classmethod | 113 | @classmethod |
1059 | 114 | def bootstrap(cls, user): | 114 | def bootstrap(cls, user): |
1061 | 115 | store = get_storage_store() | 115 | store = get_filesync_store() |
1062 | 116 | cls.record_user_created(user) | 116 | cls.record_user_created(user) |
1063 | 117 | # Number of TransactionLog rows we inserted. | 117 | # Number of TransactionLog rows we inserted. |
1064 | 118 | rows = 1 | 118 | rows = 1 |
1065 | @@ -177,7 +177,8 @@ | |||
1066 | 177 | conditions = [Share.shared_by == user.id, | 177 | conditions = [Share.shared_by == user.id, |
1067 | 178 | Share.status == STATUS_LIVE, | 178 | Share.status == STATUS_LIVE, |
1068 | 179 | Share.accepted == True] # NOQA | 179 | Share.accepted == True] # NOQA |
1070 | 180 | shares = get_storage_store().using(share_join).find(Share, *conditions) | 180 | shares = get_filesync_store().using(share_join).find( |
1071 | 181 | Share, *conditions) | ||
1072 | 181 | for share in shares: | 182 | for share in shares: |
1073 | 182 | cls.record_share_accepted(share) | 183 | cls.record_share_accepted(share) |
1074 | 183 | rows += 1 | 184 | rows += 1 |
1075 | @@ -233,7 +234,7 @@ | |||
1076 | 233 | txlog = cls( | 234 | txlog = cls( |
1077 | 234 | None, user.id, None, cls.OP_USER_CREATED, None, None, | 235 | None, user.id, None, cls.OP_USER_CREATED, None, None, |
1078 | 235 | extra_data=extra_data.decode('ascii')) | 236 | extra_data=extra_data.decode('ascii')) |
1080 | 236 | store = get_storage_store() | 237 | store = get_filesync_store() |
1081 | 237 | return store.add(txlog) | 238 | return store.add(txlog) |
1082 | 238 | 239 | ||
1083 | 239 | @classmethod | 240 | @classmethod |
1084 | @@ -329,7 +330,7 @@ | |||
1085 | 329 | 330 | ||
1086 | 330 | @classmethod | 331 | @classmethod |
1087 | 331 | def _record_share_accepted_or_deleted(cls, share, op_type): | 332 | def _record_share_accepted_or_deleted(cls, share, op_type): |
1089 | 332 | store = get_storage_store() | 333 | store = get_filesync_store() |
1090 | 333 | node = store.get(StorageObject, share.subtree) | 334 | node = store.get(StorageObject, share.subtree) |
1091 | 334 | when_last_changed = share.when_last_changed | 335 | when_last_changed = share.when_last_changed |
1092 | 335 | extra_data = dict( | 336 | extra_data = dict( |
1093 | 336 | 337 | ||
1094 | === modified file 'src/backends/txlog/tests/test_model.py' | |||
1095 | --- src/backends/txlog/tests/test_model.py 2015-09-03 14:23:04 +0000 | |||
1096 | +++ src/backends/txlog/tests/test_model.py 2015-09-05 01:43:14 +0000 | |||
1097 | @@ -19,7 +19,7 @@ | |||
1098 | 19 | 19 | ||
1099 | 20 | from mock import patch | 20 | from mock import patch |
1100 | 21 | 21 | ||
1102 | 22 | from backends.filesync.data.dbmanager import get_storage_store | 22 | from backends.filesync.data.dbmanager import get_filesync_store |
1103 | 23 | from backends.filesync.data.gateway import SystemGateway | 23 | from backends.filesync.data.gateway import SystemGateway |
1104 | 24 | from backends.filesync.data.model import ( | 24 | from backends.filesync.data.model import ( |
1105 | 25 | PublicNode, STATUS_DEAD, StorageObject, StorageUser, UserVolume) | 25 | PublicNode, STATUS_DEAD, StorageObject, StorageUser, UserVolume) |
1106 | @@ -441,7 +441,7 @@ | |||
1107 | 441 | 441 | ||
1108 | 442 | user = StorageUser.new(self.store, user_id, name, visible_name) | 442 | user = StorageUser.new(self.store, user_id, name, visible_name) |
1109 | 443 | 443 | ||
1111 | 444 | store = get_storage_store() | 444 | store = get_filesync_store() |
1112 | 445 | txlog = store.find(TransactionLog, owner_id=user.id).one() | 445 | txlog = store.find(TransactionLog, owner_id=user.id).one() |
1113 | 446 | self.assertTxLogDetailsMatchesUserDetails(user, txlog) | 446 | self.assertTxLogDetailsMatchesUserDetails(user, txlog) |
1114 | 447 | 447 | ||
1115 | @@ -528,7 +528,7 @@ | |||
1116 | 528 | 528 | ||
1117 | 529 | TransactionLog.bootstrap(user) | 529 | TransactionLog.bootstrap(user) |
1118 | 530 | 530 | ||
1120 | 531 | txlog = get_storage_store().find( | 531 | txlog = get_filesync_store().find( |
1121 | 532 | TransactionLog, op_type=TransactionLog.OP_USER_CREATED).one() | 532 | TransactionLog, op_type=TransactionLog.OP_USER_CREATED).one() |
1122 | 533 | self.assertTxLogDetailsMatchesUserDetails(user, txlog) | 533 | self.assertTxLogDetailsMatchesUserDetails(user, txlog) |
1123 | 534 | 534 | ||
1124 | @@ -540,7 +540,7 @@ | |||
1125 | 540 | 540 | ||
1126 | 541 | TransactionLog.bootstrap(user) | 541 | TransactionLog.bootstrap(user) |
1127 | 542 | 542 | ||
1129 | 543 | txlog = get_storage_store().find( | 543 | txlog = get_filesync_store().find( |
1130 | 544 | TransactionLog, op_type=TransactionLog.OP_SHARE_ACCEPTED).one() | 544 | TransactionLog, op_type=TransactionLog.OP_SHARE_ACCEPTED).one() |
1131 | 545 | expected_attrs = self._get_dict_with_txlog_attrs_from_share( | 545 | expected_attrs = self._get_dict_with_txlog_attrs_from_share( |
1132 | 546 | share, directory, TransactionLog.OP_SHARE_ACCEPTED) | 546 | share, directory, TransactionLog.OP_SHARE_ACCEPTED) |
1133 | 547 | 547 | ||
1134 | === modified file 'src/backends/txlog/tests/test_utils.py' | |||
1135 | --- src/backends/txlog/tests/test_utils.py 2015-09-03 14:23:04 +0000 | |||
1136 | +++ src/backends/txlog/tests/test_utils.py 2015-09-05 01:43:14 +0000 | |||
1137 | @@ -133,7 +133,7 @@ | |||
1138 | 133 | """ | 133 | """ |
1139 | 134 | return type('DummyResultSet', (object,), dict(rowcount=0)) | 134 | return type('DummyResultSet', (object,), dict(rowcount=0)) |
1140 | 135 | 135 | ||
1142 | 136 | with patch.object(dbmanager, 'get_storage_store') as mock_get: | 136 | with patch.object(dbmanager, 'get_filesync_store') as mock_get: |
1143 | 137 | mock_get.return_value = DummyStore() | 137 | mock_get.return_value = DummyStore() |
1144 | 138 | 138 | ||
1145 | 139 | self.assertRaises( | 139 | self.assertRaises( |
1146 | 140 | 140 | ||
1147 | === modified file 'src/backends/txlog/utils.py' | |||
1148 | --- src/backends/txlog/utils.py 2015-08-16 19:22:32 +0000 | |||
1149 | +++ src/backends/txlog/utils.py 2015-09-05 01:43:14 +0000 | |||
1150 | @@ -46,7 +46,7 @@ | |||
1151 | 46 | fsync_commit. | 46 | fsync_commit. |
1152 | 47 | """ | 47 | """ |
1153 | 48 | worker_name = unicode(worker_name) | 48 | worker_name = unicode(worker_name) |
1155 | 49 | store = dbmanager.get_storage_store() | 49 | store = dbmanager.get_filesync_store() |
1156 | 50 | 50 | ||
1157 | 51 | last_row = store.execute(u"""SELECT row_id, timestamp | 51 | last_row = store.execute(u"""SELECT row_id, timestamp |
1158 | 52 | FROM txlog_db_worker_last_row | 52 | FROM txlog_db_worker_last_row |
1159 | @@ -73,7 +73,7 @@ | |||
1160 | 73 | decorated with fsync_commit. | 73 | decorated with fsync_commit. |
1161 | 74 | """ | 74 | """ |
1162 | 75 | worker_name = unicode(worker_name) | 75 | worker_name = unicode(worker_name) |
1164 | 76 | store = dbmanager.get_storage_store() | 76 | store = dbmanager.get_filesync_store() |
1165 | 77 | result = store.execute(u"""UPDATE txlog_db_worker_last_row | 77 | result = store.execute(u"""UPDATE txlog_db_worker_last_row |
1166 | 78 | SET row_id=?, timestamp=? | 78 | SET row_id=?, timestamp=? |
1167 | 79 | WHERE worker_id=?""", (row_id, timestamp, worker_name)) | 79 | WHERE worker_id=?""", (row_id, timestamp, worker_name)) |
1168 | @@ -111,7 +111,7 @@ | |||
1169 | 111 | """ | 111 | """ |
1170 | 112 | if expire_secs is None: | 112 | if expire_secs is None: |
1171 | 113 | expire_secs = UNSEEN_EXPIRES | 113 | expire_secs = UNSEEN_EXPIRES |
1173 | 114 | store = dbmanager.get_storage_store() | 114 | store = dbmanager.get_filesync_store() |
1174 | 115 | parameters = (last_id, ) | 115 | parameters = (last_id, ) |
1175 | 116 | select = u""" | 116 | select = u""" |
1176 | 117 | SELECT txlog.id, owner_id, node_id, volume_id, op_type, path, | 117 | SELECT txlog.id, owner_id, node_id, volume_id, op_type, path, |
1177 | @@ -195,7 +195,7 @@ | |||
1178 | 195 | if expire_secs is None: | 195 | if expire_secs is None: |
1179 | 196 | expire_secs = UNSEEN_EXPIRES | 196 | expire_secs = UNSEEN_EXPIRES |
1180 | 197 | worker_id = unicode(worker_id) | 197 | worker_id = unicode(worker_id) |
1182 | 198 | store = dbmanager.get_storage_store() | 198 | store = dbmanager.get_filesync_store() |
1183 | 199 | deleted = 0 | 199 | deleted = 0 |
1184 | 200 | condition = (u"created < TIMEZONE('UTC'::text, NOW()) " | 200 | condition = (u"created < TIMEZONE('UTC'::text, NOW()) " |
1185 | 201 | " - INTERVAL '{} seconds'".format(expire_secs)) | 201 | " - INTERVAL '{} seconds'".format(expire_secs)) |
1186 | @@ -234,7 +234,7 @@ | |||
1187 | 234 | be deleted. | 234 | be deleted. |
1188 | 235 | """ | 235 | """ |
1189 | 236 | 236 | ||
1191 | 237 | store = dbmanager.get_storage_store() | 237 | store = dbmanager.get_filesync_store() |
1192 | 238 | parameters = [timestamp_limit] | 238 | parameters = [timestamp_limit] |
1193 | 239 | inner_select = "SELECT id FROM txlog_transaction_log WHERE timestamp <= ?" | 239 | inner_select = "SELECT id FROM txlog_transaction_log WHERE timestamp <= ?" |
1194 | 240 | 240 | ||
1195 | @@ -257,7 +257,7 @@ | |||
1196 | 257 | precisely from the provided date (a datetime.date object). Also, the | 257 | precisely from the provided date (a datetime.date object). Also, the |
1197 | 258 | quantity_limit parameter is mandatory.""" | 258 | quantity_limit parameter is mandatory.""" |
1198 | 259 | 259 | ||
1200 | 260 | store = dbmanager.get_storage_store() | 260 | store = dbmanager.get_filesync_store() |
1201 | 261 | parameters = [date, quantity_limit] | 261 | parameters = [date, quantity_limit] |
1202 | 262 | inner_select = ("SELECT id FROM txlog_transaction_log " | 262 | inner_select = ("SELECT id FROM txlog_transaction_log " |
1203 | 263 | "WHERE timestamp::date = ? LIMIT ?") | 263 | "WHERE timestamp::date = ? LIMIT ?") |
1204 | @@ -271,7 +271,7 @@ | |||
1205 | 271 | 271 | ||
1206 | 272 | def get_row_by_time(timestamp): | 272 | def get_row_by_time(timestamp): |
1207 | 273 | """Return the smaller txlog row id in that timestamp (or greater).""" | 273 | """Return the smaller txlog row id in that timestamp (or greater).""" |
1209 | 274 | store = dbmanager.get_storage_store() | 274 | store = dbmanager.get_filesync_store() |
1210 | 275 | query = """ | 275 | query = """ |
1211 | 276 | SELECT id, timestamp FROM txlog_transaction_log | 276 | SELECT id, timestamp FROM txlog_transaction_log |
1212 | 277 | WHERE timestamp >= ? ORDER BY id LIMIT 1; | 277 | WHERE timestamp >= ? ORDER BY id LIMIT 1; |
1213 | @@ -287,7 +287,7 @@ | |||
1214 | 287 | def keep_last_rows_for_worker_names(worker_names): | 287 | def keep_last_rows_for_worker_names(worker_names): |
1215 | 288 | """Clean rows from txlog_db_worker_last_row that don't match the given | 288 | """Clean rows from txlog_db_worker_last_row that don't match the given |
1216 | 289 | worker names.""" | 289 | worker names.""" |
1218 | 290 | store = dbmanager.get_storage_store() | 290 | store = dbmanager.get_filesync_store() |
1219 | 291 | query = ("DELETE FROM txlog_db_worker_last_row " | 291 | query = ("DELETE FROM txlog_db_worker_last_row " |
1220 | 292 | "WHERE worker_id NOT IN ?;") | 292 | "WHERE worker_id NOT IN ?;") |
1221 | 293 | store.execute(query, (tuple(worker_names), )) | 293 | store.execute(query, (tuple(worker_names), )) |
1222 | 294 | 294 | ||
1223 | === modified file 'src/server/tests/test_account.py' | |||
1224 | --- src/server/tests/test_account.py 2015-08-29 00:03:11 +0000 | |||
1225 | +++ src/server/tests/test_account.py 2015-09-05 01:43:14 +0000 | |||
1226 | @@ -55,7 +55,7 @@ | |||
1227 | 55 | when over quota.""" | 55 | when over quota.""" |
1228 | 56 | self.usr0.update(max_storage_bytes=2 ** 16) | 56 | self.usr0.update(max_storage_bytes=2 ** 16) |
1229 | 57 | # need to do something that just can't happen normally | 57 | # need to do something that just can't happen normally |
1231 | 58 | store = dbmanager.get_storage_store() | 58 | store = dbmanager.get_filesync_store() |
1232 | 59 | info = store.get(model.StorageUserInfo, 0) | 59 | info = store.get(model.StorageUserInfo, 0) |
1233 | 60 | info.used_storage_bytes = 2 ** 17 | 60 | info.used_storage_bytes = 2 ** 17 |
1234 | 61 | store.commit() | 61 | store.commit() |
1235 | 62 | 62 | ||
1236 | === modified file 'src/server/tests/test_sharing.py' | |||
1237 | --- src/server/tests/test_sharing.py 2015-08-29 00:03:11 +0000 | |||
1238 | +++ src/server/tests/test_sharing.py 2015-09-05 01:43:14 +0000 | |||
1239 | @@ -778,7 +778,7 @@ | |||
1240 | 778 | subfile = subdir.make_file(u"subfile") | 778 | subfile = subdir.make_file(u"subfile") |
1241 | 779 | subsubdir = subdir.make_subdirectory(u"subsubdir") | 779 | subsubdir = subdir.make_subdirectory(u"subsubdir") |
1242 | 780 | subsubfile = subsubdir.make_file(u"subsubfile") | 780 | subsubfile = subsubdir.make_file(u"subsubfile") |
1244 | 781 | store = dbmanager.get_storage_store() | 781 | store = dbmanager.get_filesync_store() |
1245 | 782 | # set all files with an empty hash | 782 | # set all files with an empty hash |
1246 | 783 | store.find( | 783 | store.find( |
1247 | 784 | model.StorageObject, model.StorageObject.kind == 'File').set( | 784 | model.StorageObject, model.StorageObject.kind == 'File').set( |
1248 | 785 | 785 | ||
1249 | === modified file 'src/server/tests/test_throttling.py' | |||
1250 | --- src/server/tests/test_throttling.py 2015-08-17 00:09:45 +0000 | |||
1251 | +++ src/server/tests/test_throttling.py 2015-09-05 01:43:14 +0000 | |||
1252 | @@ -26,7 +26,7 @@ | |||
1253 | 26 | from twisted.internet.protocol import connectionDone | 26 | from twisted.internet.protocol import connectionDone |
1254 | 27 | 27 | ||
1255 | 28 | 28 | ||
1257 | 29 | from backends.filesync.data import get_storage_store, model, filesync_tm | 29 | from backends.filesync.data import get_filesync_store, model, filesync_tm |
1258 | 30 | 30 | ||
1259 | 31 | from ubuntuone.storageprotocol import request, client | 31 | from ubuntuone.storageprotocol import request, client |
1260 | 32 | from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32 | 32 | from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32 |
1261 | @@ -152,7 +152,7 @@ | |||
1262 | 152 | def _check_file(): | 152 | def _check_file(): |
1263 | 153 | filesync_tm.begin() | 153 | filesync_tm.begin() |
1264 | 154 | try: | 154 | try: |
1266 | 155 | store = get_storage_store() | 155 | store = get_filesync_store() |
1267 | 156 | content_blob = store.get(model.ContentBlob, hash_value) | 156 | content_blob = store.get(model.ContentBlob, hash_value) |
1268 | 157 | if not content_blob: | 157 | if not content_blob: |
1269 | 158 | raise ValueError("content blob is not there") | 158 | raise ValueError("content blob is not there") |
1270 | @@ -205,7 +205,7 @@ | |||
1271 | 205 | def _check_file(): | 205 | def _check_file(): |
1272 | 206 | filesync_tm.begin() | 206 | filesync_tm.begin() |
1273 | 207 | try: | 207 | try: |
1275 | 208 | store = get_storage_store() | 208 | store = get_filesync_store() |
1276 | 209 | content_blob = store.get(model.ContentBlob, hash_value) | 209 | content_blob = store.get(model.ContentBlob, hash_value) |
1277 | 210 | if not content_blob: | 210 | if not content_blob: |
1278 | 211 | raise ValueError("content blob is not there") | 211 | raise ValueError("content blob is not there") |
Pure syntactic renames; this merge also exercises the lander script.
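
Because the change is a pure rename, every call site follows the same pattern: only the imported accessor changes. A minimal before/after sketch of that pattern (import paths and function names are taken from the diff above; the surrounding store usage is illustrative only and assumes the magicicada-server tree is on the Python path):

    # Before this branch: callers fetched the Storm store under the old name.
    from backends.filesync.data.dbmanager import get_storage_store

    store = get_storage_store()

    # After this branch: the single Storm store has one canonical accessor.
    from backends.filesync.data.dbmanager import get_filesync_store

    store = get_filesync_store()

Code that previously mixed get_storage_store(), get_filesync_store() and get_admin_store('filesync') now goes through get_filesync_store() only, which is what guarantees a single Storm store is used across the project.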