Merge lp:~jderose/dmedia/icore into lp:dmedia
- icore
- Merge into trunk
Status: | Merged |
---|---|
Approved by: | James Raymond |
Approved revision: | 738 |
Merged at revision: | 722 |
Proposed branch: | lp:~jderose/dmedia/icore |
Merge into: | lp:dmedia |
Diff against target: |
1309 lines (+398/-404) 11 files modified
dmedia-gtk (+1/-1) dmedia-service (+10/-8) dmedia/core.py (+40/-65) dmedia/importer.py (+7/-5) dmedia/local.py (+10/-10) dmedia/metastore.py (+13/-4) dmedia/service/tests/test_avahi.py (+5/-3) dmedia/tests/test_core.py (+171/-234) dmedia/tests/test_importer.py (+66/-61) dmedia/tests/test_local.py (+56/-5) dmedia/tests/test_metastore.py (+19/-8) |
To merge this branch: | bzr merge lp:~jderose/dmedia/icore |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
James Raymond | Approve | ||
Review via email: mp+182010@code.launchpad.net |
Commit message
Description of the change
For background, see: https:/
Changes include:
* Core.__init__() now has two new required arguments, *machine* and *user*, the machine and user docs, thereby making the identity parts of the Core API non-optional
* Removed Core.load_identity() and Core.reset_local(); the identity docs are now passed to Core.__init__() directly (see the dmedia/core.py hunks below)
* Local file-stores and peers are now only saved in the dmedia/machine doc, are no longer saved in the _local/dmedia doc
* LocalStores.local_stores() now returns a dict keyed by FileStore id, with each value carrying the 'parentdir' and 'copies' info (previously keyed by parentdir with the id in the value) — presumably so stores can be stored per-machine; see the dmedia/local.py hunk
* As many different bits of code relied on the stores being in _local/dmedia, the rest of the change is basically porting to the new API and updating tests
James Raymond (jamesmr) : | # |
Preview Diff
1 | === modified file 'dmedia-gtk' | |||
2 | --- dmedia-gtk 2013-08-16 19:44:40 +0000 | |||
3 | +++ dmedia-gtk 2013-08-25 20:53:26 +0000 | |||
4 | @@ -102,7 +102,7 @@ | |||
5 | 102 | def on_batch_finished(self, hub, batch_id, stats, copies, msg): | 102 | def on_batch_finished(self, hub, batch_id, stats, copies, msg): |
6 | 103 | log.info('batch_finished: %s', batch_id) | 103 | log.info('batch_finished: %s', batch_id) |
7 | 104 | log.info('Calling Dmedia.SnapshotProject(%r)...', self.project_id) | 104 | log.info('Calling Dmedia.SnapshotProject(%r)...', self.project_id) |
9 | 105 | #self.proxy.SnapshotProject(self.project_id) | 105 | self.proxy.SnapshotProject(self.project_id) |
10 | 106 | self.blocking = True | 106 | self.blocking = True |
11 | 107 | self.batch_id = batch_id | 107 | self.batch_id = batch_id |
12 | 108 | self.copies = copies | 108 | self.copies = copies |
13 | 109 | 109 | ||
14 | === modified file 'dmedia-service' | |||
15 | --- dmedia-service 2013-07-29 12:43:06 +0000 | |||
16 | +++ dmedia-service 2013-08-25 20:53:26 +0000 | |||
17 | @@ -172,9 +172,11 @@ | |||
18 | 172 | env = self.couch.auto_bootstrap() | 172 | env = self.couch.auto_bootstrap() |
19 | 173 | log.info('%r', self.couch._welcome) | 173 | log.info('%r', self.couch._welcome) |
20 | 174 | log.info('Starting CouchDB took %.3f', time.monotonic() - start) | 174 | log.info('Starting CouchDB took %.3f', time.monotonic() - start) |
24 | 175 | self.core = Core(env, self.couch.get_ssl_config()) | 175 | self.core = Core(env, |
25 | 176 | self.core.reset_local() | 176 | self.couch.machine, |
26 | 177 | self.core.load_identity(self.couch.machine, self.couch.user) | 177 | self.couch.user, |
27 | 178 | self.couch.get_ssl_config() | ||
28 | 179 | ) | ||
29 | 178 | self.env_s = dumps(self.core.env, pretty=True) | 180 | self.env_s = dumps(self.core.env, pretty=True) |
30 | 179 | self.snapshots = Snapshots( | 181 | self.snapshots = Snapshots( |
31 | 180 | self.core.env, | 182 | self.core.env, |
32 | @@ -233,8 +235,8 @@ | |||
33 | 233 | port = env['port'] | 235 | port = env['port'] |
34 | 234 | self.avahi = Avahi(self.core, port) | 236 | self.avahi = Avahi(self.core, port) |
35 | 235 | self.avahi.run() | 237 | self.avahi.run() |
38 | 236 | GLib.timeout_add(5000, self.on_idle4) | 238 | GLib.timeout_add(9000, self.on_idle4) |
39 | 237 | GLib.timeout_add(90 * 1000, self.core.reclaim_if_possible) | 239 | GLib.timeout_add(45 * 1000, self.core.reclaim_if_possible) |
40 | 238 | 240 | ||
41 | 239 | def on_idle4(self): | 241 | def on_idle4(self): |
42 | 240 | """ | 242 | """ |
43 | @@ -458,14 +460,14 @@ | |||
44 | 458 | """ | 460 | """ |
45 | 459 | Return currently connected filestores | 461 | Return currently connected filestores |
46 | 460 | """ | 462 | """ |
48 | 461 | return dumps(self.core.local['stores'], pretty=True) | 463 | return dumps(self.core.machine['stores'], pretty=True) |
49 | 462 | 464 | ||
50 | 463 | @dbus.service.method(IFACE, in_signature='', out_signature='s') | 465 | @dbus.service.method(IFACE, in_signature='', out_signature='s') |
51 | 464 | def Peers(self): | 466 | def Peers(self): |
52 | 465 | """ | 467 | """ |
53 | 466 | Return peers currently known on local network. | 468 | Return peers currently known on local network. |
54 | 467 | """ | 469 | """ |
56 | 468 | return dumps(self.core.local['peers'], pretty=True) | 470 | return dumps(self.core.machine['peers'], pretty=True) |
57 | 469 | 471 | ||
58 | 470 | @dbus.service.method(IFACE, in_signature='s', out_signature='s') | 472 | @dbus.service.method(IFACE, in_signature='s', out_signature='s') |
59 | 471 | def CreateFileStore(self, parentdir): | 473 | def CreateFileStore(self, parentdir): |
60 | @@ -550,7 +552,7 @@ | |||
61 | 550 | if not self.UpdateProject(project_id): | 552 | if not self.UpdateProject(project_id): |
62 | 551 | self.pending_update = project_id | 553 | self.pending_update = project_id |
63 | 552 | self.Snapshot(schema.project_db_name(project_id)) | 554 | self.Snapshot(schema.project_db_name(project_id)) |
65 | 553 | self.Snapshot(schema.DB_NAME) | 555 | #self.Snapshot(schema.DB_NAME) |
66 | 554 | 556 | ||
67 | 555 | @dbus.service.method(IFACE, in_signature='s', out_signature='s') | 557 | @dbus.service.method(IFACE, in_signature='s', out_signature='s') |
68 | 556 | def AutoFormat(self, value): | 558 | def AutoFormat(self, value): |
69 | 557 | 559 | ||
70 | === modified file 'dmedia/core.py' | |||
71 | --- dmedia/core.py 2013-07-28 20:31:16 +0000 | |||
72 | +++ dmedia/core.py 2013-08-25 20:53:26 +0000 | |||
73 | @@ -467,6 +467,11 @@ | |||
74 | 467 | stores.pop(fs_id, None) | 467 | stores.pop(fs_id, None) |
75 | 468 | 468 | ||
76 | 469 | 469 | ||
77 | 470 | def mark_connected_stores(doc, atime, stores): | ||
78 | 471 | assert isinstance(stores, dict) | ||
79 | 472 | doc['atime'] = atime | ||
80 | 473 | doc['stores'] = stores | ||
81 | 474 | |||
82 | 470 | def mark_add_peer(doc, atime, peer_id, info): | 475 | def mark_add_peer(doc, atime, peer_id, info): |
83 | 471 | assert isinstance(info, dict) | 476 | assert isinstance(info, dict) |
84 | 472 | doc['atime'] = atime | 477 | doc['atime'] = atime |
85 | @@ -481,9 +486,12 @@ | |||
86 | 481 | 486 | ||
87 | 482 | 487 | ||
88 | 483 | class Core: | 488 | class Core: |
90 | 484 | def __init__(self, env, ssl_config=None): | 489 | def __init__(self, env, machine, user, ssl_config=None): |
91 | 490 | env.update({ | ||
92 | 491 | 'machine_id': machine['_id'], | ||
93 | 492 | 'user_id': user['_id'], | ||
94 | 493 | }) | ||
95 | 485 | self.env = env | 494 | self.env = env |
96 | 486 | self.ssl_config = ssl_config | ||
97 | 487 | self.db = util.get_db(env, init=True) | 495 | self.db = util.get_db(env, init=True) |
98 | 488 | self.log_db = self.db.database(schema.LOG_DB_NAME) | 496 | self.log_db = self.db.database(schema.LOG_DB_NAME) |
99 | 489 | self.log_db.ensure() | 497 | self.log_db.ensure() |
100 | @@ -491,26 +499,28 @@ | |||
101 | 491 | self.ms = MetaStore(self.db) | 499 | self.ms = MetaStore(self.db) |
102 | 492 | self.stores = LocalStores() | 500 | self.stores = LocalStores() |
103 | 493 | self.task_manager = TaskManager(env, ssl_config) | 501 | self.task_manager = TaskManager(env, ssl_config) |
104 | 502 | self.ssl_config = ssl_config | ||
105 | 494 | try: | 503 | try: |
106 | 495 | self.local = self.db.get(LOCAL_ID) | 504 | self.local = self.db.get(LOCAL_ID) |
107 | 496 | except NotFound: | 505 | except NotFound: |
116 | 497 | self.local = { | 506 | self.local = {'_id': LOCAL_ID} |
117 | 498 | '_id': LOCAL_ID, | 507 | self.local.update({ |
118 | 499 | 'stores': {}, | 508 | 'machine_id': machine['_id'], |
119 | 500 | 'peers': {}, | 509 | 'user_id': user['_id'], |
120 | 501 | } | 510 | }) |
121 | 502 | self.__local = deepcopy(self.local) | 511 | self.local.pop('stores', None) |
122 | 503 | self.machine = None | 512 | self.local.pop('peers', None) |
123 | 504 | self.user = None | 513 | (self.machine, self.user) = self.db.get_defaults([machine, user]) |
124 | 514 | self.machine.update({ | ||
125 | 515 | 'stores': {}, | ||
126 | 516 | 'peers': {}, | ||
127 | 517 | }) | ||
128 | 518 | self.db.save_many([self.local, self.machine, self.user]) | ||
129 | 519 | log.info('machine_id = %s', machine['_id']) | ||
130 | 520 | log.info('user_id = %s', user['_id']) | ||
131 | 505 | 521 | ||
132 | 506 | def save_local(self): | 522 | def save_local(self): |
140 | 507 | if self.local != self.__local: | 523 | self.db.save(self.local) |
134 | 508 | self.db.save(self.local) | ||
135 | 509 | self.__local = deepcopy(self.local) | ||
136 | 510 | |||
137 | 511 | def reset_local(self): | ||
138 | 512 | self.local['stores'] = {} | ||
139 | 513 | self.local['peers'] = {} | ||
141 | 514 | 524 | ||
142 | 515 | def start_background_tasks(self): | 525 | def start_background_tasks(self): |
143 | 516 | self.task_manager.start_tasks() | 526 | self.task_manager.start_tasks() |
144 | @@ -537,53 +547,29 @@ | |||
145 | 537 | self.local['skip_internal'] = flag | 547 | self.local['skip_internal'] = flag |
146 | 538 | self.save_local() | 548 | self.save_local() |
147 | 539 | 549 | ||
148 | 540 | def load_identity(self, machine, user, timestamp=None): | ||
149 | 541 | if timestamp is None: | ||
150 | 542 | timestamp = int(time.time()) | ||
151 | 543 | assert isinstance(timestamp, int) and timestamp > 0 | ||
152 | 544 | try: | ||
153 | 545 | self.db.save_many([machine, user]) | ||
154 | 546 | except BulkConflict: | ||
155 | 547 | pass | ||
156 | 548 | log.info('machine_id = %s', machine['_id']) | ||
157 | 549 | log.info('user_id = %s', user['_id']) | ||
158 | 550 | self.env['machine_id'] = machine['_id'] | ||
159 | 551 | self.env['user_id'] = user['_id'] | ||
160 | 552 | self.local['machine_id'] = machine['_id'] | ||
161 | 553 | self.local['user_id'] = user['_id'] | ||
162 | 554 | self.save_local() | ||
163 | 555 | self.machine = self.db.update(mark_machine_start, machine, timestamp) | ||
164 | 556 | |||
165 | 557 | def add_peer(self, peer_id, info): | 550 | def add_peer(self, peer_id, info): |
166 | 558 | assert isdb32(peer_id) and len(peer_id) == 48 | 551 | assert isdb32(peer_id) and len(peer_id) == 48 |
167 | 559 | assert isinstance(info, dict) | 552 | assert isinstance(info, dict) |
168 | 560 | assert isinstance(info['url'], str) | 553 | assert isinstance(info['url'], str) |
176 | 561 | self.local['peers'][peer_id] = info | 554 | self.machine = self.db.update( |
177 | 562 | self.save_local() | 555 | mark_add_peer, self.machine, int(time.time()), peer_id, info |
178 | 563 | if self.machine: | 556 | ) |
172 | 564 | atime = int(time.time()) | ||
173 | 565 | self.machine = self.db.update( | ||
174 | 566 | mark_add_peer, self.machine, atime, peer_id, info | ||
175 | 567 | ) | ||
179 | 568 | self.restart_vigilance() | 557 | self.restart_vigilance() |
180 | 569 | 558 | ||
181 | 570 | def remove_peer(self, peer_id): | 559 | def remove_peer(self, peer_id): |
193 | 571 | if self.machine: | 560 | if peer_id not in self.machine['peers']: |
183 | 572 | atime = int(time.time()) | ||
184 | 573 | self.machine = self.db.update( | ||
185 | 574 | mark_remove_peer, self.machine, atime, peer_id | ||
186 | 575 | ) | ||
187 | 576 | try: | ||
188 | 577 | del self.local['peers'][peer_id] | ||
189 | 578 | self.save_local() | ||
190 | 579 | self.restart_vigilance() | ||
191 | 580 | return True | ||
192 | 581 | except KeyError: | ||
194 | 582 | return False | 561 | return False |
195 | 562 | self.machine = self.db.update( | ||
196 | 563 | mark_remove_peer, self.machine, int(time.time()), peer_id | ||
197 | 564 | ) | ||
198 | 565 | self.restart_vigilance() | ||
199 | 566 | return True | ||
200 | 583 | 567 | ||
201 | 584 | def _sync_stores(self): | 568 | def _sync_stores(self): |
204 | 585 | self.local['stores'] = self.stores.local_stores() | 569 | stores = self.stores.local_stores() |
205 | 586 | self.save_local() | 570 | self.machine = self.db.update( |
206 | 571 | mark_connected_stores, self.machine, int(time.time()), stores | ||
207 | 572 | ) | ||
208 | 587 | self.restart_vigilance() | 573 | self.restart_vigilance() |
209 | 588 | 574 | ||
210 | 589 | def _add_filestore(self, fs): | 575 | def _add_filestore(self, fs): |
211 | @@ -601,23 +587,12 @@ | |||
212 | 601 | pass | 587 | pass |
213 | 602 | self.task_manager.queue_filestore_tasks(fs) | 588 | self.task_manager.queue_filestore_tasks(fs) |
214 | 603 | self._sync_stores() | 589 | self._sync_stores() |
215 | 604 | if self.machine: | ||
216 | 605 | atime = int(time.time()) | ||
217 | 606 | info = {'parentdir': fs.parentdir} | ||
218 | 607 | self.machine = self.db.update( | ||
219 | 608 | mark_add_filestore, self.machine, atime, fs.id, info | ||
220 | 609 | ) | ||
221 | 610 | 590 | ||
222 | 611 | def _remove_filestore(self, fs): | 591 | def _remove_filestore(self, fs): |
223 | 612 | log.info('Removing %r', fs) | 592 | log.info('Removing %r', fs) |
224 | 613 | self.stores.remove(fs) | 593 | self.stores.remove(fs) |
225 | 614 | self.task_manager.stop_filestore_tasks(fs) | 594 | self.task_manager.stop_filestore_tasks(fs) |
226 | 615 | self._sync_stores() | 595 | self._sync_stores() |
227 | 616 | if self.machine: | ||
228 | 617 | atime = int(time.time()) | ||
229 | 618 | self.machine = self.db.update( | ||
230 | 619 | mark_remove_filestore, self.machine, atime, fs.id | ||
231 | 620 | ) | ||
232 | 621 | 596 | ||
233 | 622 | def _iter_project_dbs(self): | 597 | def _iter_project_dbs(self): |
234 | 623 | for (name, _id) in projects_iter(self.server): | 598 | for (name, _id) in projects_iter(self.server): |
235 | 624 | 599 | ||
236 | === modified file 'dmedia/importer.py' | |||
237 | --- dmedia/importer.py 2013-08-16 19:44:40 +0000 | |||
238 | +++ dmedia/importer.py 2013-08-25 20:53:26 +0000 | |||
239 | @@ -43,7 +43,7 @@ | |||
240 | 43 | from dmedia.util import get_project_db | 43 | from dmedia.util import get_project_db |
241 | 44 | from dmedia.units import bytes10 | 44 | from dmedia.units import bytes10 |
242 | 45 | from dmedia import workers, schema | 45 | from dmedia import workers, schema |
244 | 46 | from dmedia.metastore import create_stored, merge_stored, TimeDelta | 46 | from dmedia.metastore import MetaStore, create_stored, merge_stored, TimeDelta |
245 | 47 | from dmedia.extractor import extract, merge_thumbnail | 47 | from dmedia.extractor import extract, merge_thumbnail |
246 | 48 | 48 | ||
247 | 49 | 49 | ||
248 | @@ -216,9 +216,9 @@ | |||
249 | 216 | # FIXME: Should pick up to 2 filestores based size of import and | 216 | # FIXME: Should pick up to 2 filestores based size of import and |
250 | 217 | # available space on the filestores. | 217 | # available space on the filestores. |
251 | 218 | stores = [] | 218 | stores = [] |
255 | 219 | for parentdir in sorted(self.env['stores']): | 219 | for _id in sorted(self.env['stores']): |
256 | 220 | info = self.env['stores'][parentdir] | 220 | info = self.env['stores'][_id] |
257 | 221 | fs = FileStore(parentdir, info['id']) | 221 | fs = FileStore(info['parentdir'], _id) |
258 | 222 | stores.append(fs) | 222 | stores.append(fs) |
259 | 223 | return stores | 223 | return stores |
260 | 224 | 224 | ||
261 | @@ -325,6 +325,7 @@ | |||
262 | 325 | self._reset() | 325 | self._reset() |
263 | 326 | if not workers.isregistered(ImportWorker): | 326 | if not workers.isregistered(ImportWorker): |
264 | 327 | workers.register(ImportWorker) | 327 | workers.register(ImportWorker) |
265 | 328 | self.ms = MetaStore(self.db) | ||
266 | 328 | 329 | ||
267 | 329 | def _reset(self): | 330 | def _reset(self): |
268 | 330 | self._error = None | 331 | self._error = None |
269 | @@ -340,7 +341,8 @@ | |||
270 | 340 | assert self.doc is None | 341 | assert self.doc is None |
271 | 341 | assert self._workers == {} | 342 | assert self._workers == {} |
272 | 342 | self._reset() | 343 | self._reset() |
274 | 343 | stores = self.db.get('_local/dmedia')['stores'] | 344 | self.machine = self.ms.get_machine() |
275 | 345 | stores = self.machine['stores'] | ||
276 | 344 | assert isinstance(stores, dict) | 346 | assert isinstance(stores, dict) |
277 | 345 | if not stores: | 347 | if not stores: |
278 | 346 | raise ValueError('No FileStores to import into!') | 348 | raise ValueError('No FileStores to import into!') |
279 | 347 | 349 | ||
280 | === modified file 'dmedia/local.py' | |||
281 | --- dmedia/local.py 2013-05-14 21:28:00 +0000 | |||
282 | +++ dmedia/local.py 2013-08-25 20:53:26 +0000 | |||
283 | @@ -191,25 +191,25 @@ | |||
284 | 191 | return fs | 191 | return fs |
285 | 192 | 192 | ||
286 | 193 | def local_stores(self): | 193 | def local_stores(self): |
292 | 194 | stores = {} | 194 | return dict( |
293 | 195 | for fs in self.ids.values(): | 195 | (fs.id, {'parentdir': fs.parentdir, 'copies': fs.copies}) |
294 | 196 | stores[fs.parentdir] = {'id': fs.id, 'copies': fs.copies} | 196 | for fs in self.ids.values() |
295 | 197 | return stores | 197 | ) |
291 | 198 | |||
296 | 199 | 198 | ||
297 | 200 | 199 | ||
298 | 201 | class LocalSlave: | 200 | class LocalSlave: |
299 | 202 | def __init__(self, env): | 201 | def __init__(self, env): |
300 | 203 | self.db = get_db(env) | 202 | self.db = get_db(env) |
301 | 203 | self.machine_id = env['machine_id'] | ||
302 | 204 | self.last_rev = None | 204 | self.last_rev = None |
303 | 205 | 205 | ||
304 | 206 | def update_stores(self): | 206 | def update_stores(self): |
308 | 207 | local = self.db.get('_local/dmedia') | 207 | machine = self.db.get(self.machine_id) |
309 | 208 | if local['_rev'] != self.last_rev: | 208 | if machine['_rev'] != self.last_rev: |
310 | 209 | self.last_rev = local['_rev'] | 209 | self.last_rev = machine['_rev'] |
311 | 210 | self.stores = LocalStores() | 210 | self.stores = LocalStores() |
314 | 211 | for (parentdir, info) in local['stores'].items(): | 211 | for (_id, info) in machine['stores'].items(): |
315 | 212 | fs = FileStore(parentdir, info['id']) | 212 | fs = FileStore(info['parentdir'], _id) |
316 | 213 | self.stores.add(fs) | 213 | self.stores.add(fs) |
317 | 214 | 214 | ||
318 | 215 | def get_doc(self, _id): | 215 | def get_doc(self, _id): |
319 | 216 | 216 | ||
320 | === modified file 'dmedia/metastore.py' | |||
321 | --- dmedia/metastore.py 2013-07-24 04:56:45 +0000 | |||
322 | +++ dmedia/metastore.py 2013-08-25 20:53:26 +0000 | |||
323 | @@ -354,17 +354,26 @@ | |||
324 | 354 | except NotFound: | 354 | except NotFound: |
325 | 355 | return {} | 355 | return {} |
326 | 356 | 356 | ||
327 | 357 | def get_machine(self): | ||
328 | 358 | machine_id = self.get_local_dmedia().get('machine_id') | ||
329 | 359 | if machine_id is None: | ||
330 | 360 | return {} | ||
331 | 361 | try: | ||
332 | 362 | return self.db.get(machine_id) | ||
333 | 363 | except NotFound: | ||
334 | 364 | return {} | ||
335 | 365 | |||
336 | 357 | def get_local_stores(self): | 366 | def get_local_stores(self): |
338 | 358 | doc = self.get_local_dmedia() | 367 | doc = self.get_machine() |
339 | 359 | stores = get_dict(doc, 'stores') | 368 | stores = get_dict(doc, 'stores') |
340 | 360 | local_stores = LocalStores() | 369 | local_stores = LocalStores() |
343 | 361 | for (parentdir, info) in stores.items(): | 370 | for (_id, info) in stores.items(): |
344 | 362 | fs = FileStore(parentdir, info['id']) | 371 | fs = FileStore(info['parentdir'], _id) |
345 | 363 | local_stores.add(fs) | 372 | local_stores.add(fs) |
346 | 364 | return local_stores | 373 | return local_stores |
347 | 365 | 374 | ||
348 | 366 | def get_local_peers(self): | 375 | def get_local_peers(self): |
350 | 367 | doc = self.get_local_dmedia() | 376 | doc = self.get_machine() |
351 | 368 | self._peers = get_dict(doc, 'peers') | 377 | self._peers = get_dict(doc, 'peers') |
352 | 369 | return self._peers | 378 | return self._peers |
353 | 370 | 379 | ||
354 | 371 | 380 | ||
355 | === modified file 'dmedia/service/tests/test_avahi.py' | |||
356 | --- dmedia/service/tests/test_avahi.py 2013-05-25 00:57:20 +0000 | |||
357 | +++ dmedia/service/tests/test_avahi.py 2013-08-25 20:53:26 +0000 | |||
358 | @@ -47,13 +47,15 @@ | |||
359 | 47 | def test_init(self): | 47 | def test_init(self): |
360 | 48 | pki = TempPKI(client_pki=True) | 48 | pki = TempPKI(client_pki=True) |
361 | 49 | ssl_config = pki.get_client_config() | 49 | ssl_config = pki.get_client_config() |
363 | 50 | core = Core(self.env, ssl_config) | 50 | machine = {'_id': random_id(30)} |
364 | 51 | user = {'_id': random_id(30)} | ||
365 | 52 | core = Core(self.env, machine, user, ssl_config) | ||
366 | 51 | port = random_port() | 53 | port = random_port() |
367 | 52 | inst = avahi.Avahi(core, port) | 54 | inst = avahi.Avahi(core, port) |
368 | 53 | self.assertIs(inst.core, core) | 55 | self.assertIs(inst.core, core) |
369 | 54 | self.assertEqual(inst.port, port) | 56 | self.assertEqual(inst.port, port) |
372 | 55 | self.assertEqual(inst.machine_id, self.machine_id) | 57 | self.assertEqual(inst.machine_id, machine['_id']) |
373 | 56 | self.assertEqual(inst.user_id, self.user_id) | 58 | self.assertEqual(inst.user_id, user['_id']) |
374 | 57 | self.assertIs(inst.server, core.server) | 59 | self.assertIs(inst.server, core.server) |
375 | 58 | self.assertIsInstance(inst.ssl_context, ssl.SSLContext) | 60 | self.assertIsInstance(inst.ssl_context, ssl.SSLContext) |
376 | 59 | self.assertEqual(inst.replications, {}) | 61 | self.assertEqual(inst.replications, {}) |
377 | 60 | 62 | ||
378 | === modified file 'dmedia/tests/test_core.py' | |||
379 | --- dmedia/tests/test_core.py 2013-07-28 01:11:13 +0000 | |||
380 | +++ dmedia/tests/test_core.py 2013-08-25 20:53:26 +0000 | |||
381 | @@ -40,6 +40,7 @@ | |||
382 | 40 | from filestore import FileStore | 40 | from filestore import FileStore |
383 | 41 | from filestore.misc import TempFileStore | 41 | from filestore.misc import TempFileStore |
384 | 42 | from filestore.migration import Migration, b32_to_db32 | 42 | from filestore.migration import Migration, b32_to_db32 |
385 | 43 | from usercouch.misc import CouchTestCase | ||
386 | 43 | 44 | ||
387 | 44 | from dmedia.local import LocalStores | 45 | from dmedia.local import LocalStores |
388 | 45 | from dmedia.metastore import MetaStore, get_mtime | 46 | from dmedia.metastore import MetaStore, get_mtime |
389 | @@ -148,6 +149,39 @@ | |||
390 | 148 | 'stores': {}, | 149 | 'stores': {}, |
391 | 149 | }) | 150 | }) |
392 | 150 | 151 | ||
393 | 152 | def test_mark_connected_stores(self): | ||
394 | 153 | atime = int(time.time()) | ||
395 | 154 | fs1 = TempFileStore() | ||
396 | 155 | fs2 = TempFileStore() | ||
397 | 156 | |||
398 | 157 | doc = {} | ||
399 | 158 | stores = { | ||
400 | 159 | fs1.id: {'parentdir': fs1.parentdir} | ||
401 | 160 | } | ||
402 | 161 | self.assertIsNone(core.mark_connected_stores(doc, atime, stores)) | ||
403 | 162 | self.assertEqual(doc, { | ||
404 | 163 | 'atime': atime, | ||
405 | 164 | 'stores': { | ||
406 | 165 | fs1.id: {'parentdir': fs1.parentdir} | ||
407 | 166 | }, | ||
408 | 167 | }) | ||
409 | 168 | self.assertIs(doc['stores'], stores) | ||
410 | 169 | |||
411 | 170 | doc = { | ||
412 | 171 | 'atime': atime - 123456, | ||
413 | 172 | 'stores': { | ||
414 | 173 | fs1.id: {'parentdir': fs1.parentdir}, | ||
415 | 174 | fs2.id: {'parentdir': fs2.parentdir}, | ||
416 | 175 | }, | ||
417 | 176 | } | ||
418 | 177 | stores = {} | ||
419 | 178 | self.assertIsNone(core.mark_connected_stores(doc, atime, stores)) | ||
420 | 179 | self.assertEqual(doc, { | ||
421 | 180 | 'atime': atime, | ||
422 | 181 | 'stores': {}, | ||
423 | 182 | }) | ||
424 | 183 | self.assertIs(doc['stores'], stores) | ||
425 | 184 | |||
426 | 151 | def test_mark_add_peer(self): | 185 | def test_mark_add_peer(self): |
427 | 152 | doc = {} | 186 | doc = {} |
428 | 153 | atime = int(time.time()) | 187 | atime = int(time.time()) |
429 | @@ -265,100 +299,81 @@ | |||
430 | 265 | self.assertEqual(tq.popitem(), ('a', ('aye', 2))) | 299 | self.assertEqual(tq.popitem(), ('a', ('aye', 2))) |
431 | 266 | 300 | ||
432 | 267 | 301 | ||
434 | 268 | class TestCore(CouchCase): | 302 | class TestCore(CouchTestCase): |
435 | 303 | def create(self): | ||
436 | 304 | self.machine_id = random_id(30) | ||
437 | 305 | self.user_id = random_id(30) | ||
438 | 306 | self.machine = {'_id': self.machine_id} | ||
439 | 307 | self.user = {'_id': self.user_id} | ||
440 | 308 | return core.Core(self.env, self.machine, self.user) | ||
441 | 309 | |||
442 | 269 | def test_init(self): | 310 | def test_init(self): |
443 | 270 | inst = core.Core(self.env) | ||
444 | 271 | self.assertIs(inst.env, self.env) | ||
445 | 272 | self.assertIsInstance(inst.db, microfiber.Database) | ||
446 | 273 | self.assertEqual(inst.db.name, DB_NAME) | ||
447 | 274 | self.assertIsInstance(inst.server, microfiber.Server) | ||
448 | 275 | self.assertIs(inst.db.ctx, inst.server.ctx) | ||
449 | 276 | self.assertIsInstance(inst.stores, LocalStores) | ||
450 | 277 | self.assertEqual(inst.local, | ||
451 | 278 | { | ||
452 | 279 | '_id': '_local/dmedia', | ||
453 | 280 | 'stores': {}, | ||
454 | 281 | 'peers': {}, | ||
455 | 282 | } | ||
456 | 283 | ) | ||
457 | 284 | self.assertIsNone(inst.machine) | ||
458 | 285 | self.assertIsNone(inst.user) | ||
459 | 286 | |||
460 | 287 | def test_load_identity(self): | ||
461 | 288 | timestamp = int(time.time()) | ||
462 | 289 | machine_id = random_id(30) | 311 | machine_id = random_id(30) |
463 | 290 | user_id = random_id(30) | 312 | user_id = random_id(30) |
532 | 291 | inst = core.Core(self.env) | 313 | machine = {'_id': machine_id} |
533 | 292 | self.assertIsNone( | 314 | user = {'_id': user_id} |
534 | 293 | inst.load_identity({'_id': machine_id}, {'_id': user_id}, timestamp) | 315 | |
535 | 294 | ) | 316 | inst = core.Core(self.env, machine, user) |
536 | 295 | 317 | self.assertIs(inst.env, self.env) | |
537 | 296 | machine = inst.db.get(machine_id) | 318 | self.assertEqual(inst.env['machine_id'], machine_id) |
538 | 297 | self.assertTrue(machine['_rev'].startswith('2-')) | 319 | self.assertEqual(inst.env['user_id'], user_id) |
539 | 298 | self.assertEqual(machine, { | 320 | self.assertIsInstance(inst.db, microfiber.Database) |
540 | 299 | '_id': machine_id, | 321 | self.assertEqual(inst.db.name, 'dmedia-1') |
541 | 300 | '_rev': machine['_rev'], | 322 | self.assertIsInstance(inst.log_db, microfiber.Database) |
542 | 301 | 'atime': timestamp, | 323 | self.assertEqual(inst.log_db.name, 'log-1') |
543 | 302 | 'stores': {}, | 324 | self.assertIsInstance(inst.server, microfiber.Server) |
544 | 303 | 'peers': {}, | 325 | self.assertIsInstance(inst.ms, MetaStore) |
545 | 304 | }) | 326 | self.assertIs(inst.ms.db, inst.db) |
546 | 305 | 327 | self.assertIsInstance(inst.stores, LocalStores) | |
547 | 306 | user = inst.db.get(user_id) | 328 | self.assertIsInstance(inst.task_manager, core.TaskManager) |
548 | 307 | self.assertEqual(set(user), set(['_id', '_rev'])) | 329 | self.assertIsNone(inst.ssl_config) |
549 | 308 | self.assertTrue(user['_rev'].startswith('1-')) | 330 | self.assertEqual(inst.db.get('_local/dmedia'), { |
550 | 309 | 331 | '_id': '_local/dmedia', | |
551 | 310 | self.assertEqual( | 332 | '_rev': '0-1', |
552 | 311 | inst.db.get('_local/dmedia'), | 333 | 'machine_id': machine_id, |
553 | 312 | { | 334 | 'user_id': user_id, |
554 | 313 | '_id': '_local/dmedia', | 335 | }) |
555 | 314 | '_rev': '0-1', | 336 | self.assertIs(inst.machine, machine) |
556 | 315 | 'stores': {}, | 337 | self.assertEqual(inst.db.get(machine_id), machine) |
557 | 316 | 'peers': {}, | 338 | self.assertEqual(inst.machine['_rev'][:2], '1-') |
558 | 317 | 'machine_id': machine_id, | 339 | self.assertEqual(inst.machine['stores'], {}) |
559 | 318 | 'user_id': user_id, | 340 | self.assertEqual(inst.machine['peers'], {}) |
560 | 319 | } | 341 | self.assertIs(inst.user, user) |
561 | 320 | ) | 342 | self.assertEqual(inst.db.get(user_id), user) |
562 | 321 | self.assertEqual(inst.local, inst.db.get('_local/dmedia')) | 343 | self.assertEqual(inst.user['_rev'][:2], '1-') |
563 | 322 | self.assertEqual(self.env['machine_id'], machine_id) | 344 | |
564 | 323 | self.assertEqual(self.env['user_id'], user_id) | 345 | ssl_config = random_id() |
565 | 324 | 346 | inst = core.Core(self.env, machine, user, ssl_config) | |
566 | 325 | # Now try when machine and user docs already exist: | 347 | self.assertIs(inst.env, self.env) |
567 | 326 | machine['atime'] = timestamp - 12345 | 348 | self.assertEqual(inst.env['machine_id'], machine_id) |
568 | 327 | machine['stores'] = 'foo' | 349 | self.assertEqual(inst.env['user_id'], user_id) |
569 | 328 | machine['peers'] = 'bar' | 350 | self.assertIsInstance(inst.db, microfiber.Database) |
570 | 329 | inst.db.save(machine) | 351 | self.assertEqual(inst.db.name, 'dmedia-1') |
571 | 330 | inst = core.Core(self.env) | 352 | self.assertIsInstance(inst.log_db, microfiber.Database) |
572 | 331 | self.assertIsNone( | 353 | self.assertEqual(inst.log_db.name, 'log-1') |
573 | 332 | inst.load_identity({'_id': machine_id}, {'_id': user_id}, timestamp) | 354 | self.assertIsInstance(inst.server, microfiber.Server) |
574 | 333 | ) | 355 | self.assertIsInstance(inst.ms, MetaStore) |
575 | 334 | 356 | self.assertIs(inst.ms.db, inst.db) | |
576 | 335 | machine = inst.db.get(machine_id) | 357 | self.assertIsInstance(inst.stores, LocalStores) |
577 | 336 | self.assertTrue(machine['_rev'].startswith('4-')) | 358 | self.assertIsInstance(inst.task_manager, core.TaskManager) |
578 | 337 | self.assertEqual(machine, { | 359 | self.assertIs(inst.ssl_config, ssl_config) |
579 | 338 | '_id': machine_id, | 360 | self.assertEqual(inst.db.get('_local/dmedia'), { |
580 | 339 | '_rev': machine['_rev'], | 361 | '_id': '_local/dmedia', |
581 | 340 | 'atime': timestamp, | 362 | '_rev': '0-2', |
582 | 341 | 'stores': {}, | 363 | 'machine_id': machine_id, |
583 | 342 | 'peers': {}, | 364 | 'user_id': user_id, |
584 | 343 | }) | 365 | }) |
585 | 344 | 366 | self.assertIsNot(inst.machine, machine) | |
586 | 345 | user = inst.db.get(user_id) | 367 | self.assertEqual(inst.db.get(machine_id), inst.machine) |
587 | 346 | self.assertEqual(set(user), set(['_id', '_rev'])) | 368 | self.assertEqual(inst.machine['_rev'][:2], '2-') |
588 | 347 | self.assertTrue(user['_rev'].startswith('1-')) | 369 | self.assertEqual(inst.machine['stores'], {}) |
589 | 348 | 370 | self.assertEqual(inst.machine['peers'], {}) | |
590 | 349 | self.assertEqual(inst.db.get('_local/dmedia'), | 371 | self.assertIsNot(inst.user, user) |
591 | 350 | { | 372 | self.assertEqual(inst.db.get(user_id), inst.user) |
592 | 351 | '_id': '_local/dmedia', | 373 | self.assertEqual(inst.user['_rev'][:2], '2-') |
525 | 352 | '_rev': '0-1', | ||
526 | 353 | 'stores': {}, | ||
527 | 354 | 'peers': {}, | ||
528 | 355 | 'machine_id': machine_id, | ||
529 | 356 | 'user_id': user_id, | ||
530 | 357 | } | ||
531 | 358 | ) | ||
593 | 359 | 374 | ||
594 | 360 | def test_add_peer(self): | 375 | def test_add_peer(self): |
596 | 361 | inst = core.Core(self.env) | 376 | inst = self.create() |
597 | 362 | id1 = random_id(30) | 377 | id1 = random_id(30) |
598 | 363 | info1 = { | 378 | info1 = { |
599 | 364 | 'host': 'jderose-Gazelle-Professional', | 379 | 'host': 'jderose-Gazelle-Professional', |
600 | @@ -371,109 +386,63 @@ | |||
601 | 371 | 386 | ||
602 | 372 | # id1 is not yet a peer: | 387 | # id1 is not yet a peer: |
603 | 373 | self.assertIsNone(inst.add_peer(id1, info1)) | 388 | self.assertIsNone(inst.add_peer(id1, info1)) |
614 | 374 | self.assertEqual(inst.db.get('_local/dmedia'), | 389 | self.assertEqual(inst.machine['peers'], {id1: info1}) |
615 | 375 | { | 390 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
606 | 376 | '_id': '_local/dmedia', | ||
607 | 377 | '_rev': '0-1', | ||
608 | 378 | 'stores': {}, | ||
609 | 379 | 'peers': { | ||
610 | 380 | id1: info1, | ||
611 | 381 | }, | ||
612 | 382 | } | ||
613 | 383 | ) | ||
616 | 384 | 391 | ||
617 | 385 | # id2 is not yet a peer: | 392 | # id2 is not yet a peer: |
618 | 386 | self.assertIsNone(inst.add_peer(id2, info2)) | 393 | self.assertIsNone(inst.add_peer(id2, info2)) |
630 | 387 | self.assertEqual(inst.db.get('_local/dmedia'), | 394 | self.assertEqual(inst.machine['peers'], {id1: info1, id2: info2}) |
631 | 388 | { | 395 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
621 | 389 | '_id': '_local/dmedia', | ||
622 | 390 | '_rev': '0-2', | ||
623 | 391 | 'stores': {}, | ||
624 | 392 | 'peers': { | ||
625 | 393 | id1: info1, | ||
626 | 394 | id2: info2, | ||
627 | 395 | }, | ||
628 | 396 | } | ||
629 | 397 | ) | ||
632 | 398 | 396 | ||
633 | 399 | # id1 is already a peer, make sure info is replaced | 397 | # id1 is already a peer, make sure info is replaced |
634 | 400 | new1 = {'url': random_id()} | 398 | new1 = {'url': random_id()} |
635 | 401 | self.assertIsNone(inst.add_peer(id1, new1)) | 399 | self.assertIsNone(inst.add_peer(id1, new1)) |
647 | 402 | self.assertEqual(inst.db.get('_local/dmedia'), | 400 | self.assertEqual(inst.machine['peers'], {id1: new1, id2: info2}) |
648 | 403 | { | 401 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
638 | 404 | '_id': '_local/dmedia', | ||
639 | 405 | '_rev': '0-3', | ||
640 | 406 | 'stores': {}, | ||
641 | 407 | 'peers': { | ||
642 | 408 | id1: new1, | ||
643 | 409 | id2: info2, | ||
644 | 410 | }, | ||
645 | 411 | } | ||
646 | 412 | ) | ||
649 | 413 | 402 | ||
650 | 414 | def test_remove_peer(self): | 403 | def test_remove_peer(self): |
651 | 404 | inst = self.create() | ||
652 | 415 | id1 = random_id(30) | 405 | id1 = random_id(30) |
653 | 416 | id2 = random_id(30) | 406 | id2 = random_id(30) |
654 | 417 | info1 = {'url': random_id()} | 407 | info1 = {'url': random_id()} |
655 | 418 | info2 = {'url': random_id()} | 408 | info2 = {'url': random_id()} |
669 | 419 | 409 | inst.machine['peers'] = {id1: info1, id2: info2} | |
670 | 420 | db = microfiber.Database('dmedia-1', self.env) | 410 | inst.db.save(inst.machine) |
671 | 421 | db.ensure() | 411 | self.assertEqual(inst.machine['_rev'][:2], '2-') |
659 | 422 | local = { | ||
660 | 423 | '_id': '_local/dmedia', | ||
661 | 424 | 'stores': {}, | ||
662 | 425 | 'peers': { | ||
663 | 426 | id1: info1, | ||
664 | 427 | id2: info2, | ||
665 | 428 | }, | ||
666 | 429 | } | ||
667 | 430 | db.save(local) | ||
668 | 431 | inst = core.Core(self.env) | ||
672 | 432 | 412 | ||
673 | 433 | # Test with a peer_id that doesn't exist: | 413 | # Test with a peer_id that doesn't exist: |
674 | 434 | nope = random_id(30) | 414 | nope = random_id(30) |
675 | 435 | self.assertIs(inst.remove_peer(nope), False) | 415 | self.assertIs(inst.remove_peer(nope), False) |
677 | 436 | self.assertEqual(db.get('_local/dmedia'), local) | 416 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
678 | 417 | self.assertEqual(inst.machine['peers'], {id1: info1, id2: info2}) | ||
679 | 418 | self.assertEqual(inst.machine['_rev'][:2], '2-') | ||
680 | 437 | 419 | ||
681 | 438 | # id1 is present | 420 | # id1 is present |
682 | 439 | self.assertIs(inst.remove_peer(id1), True) | 421 | self.assertIs(inst.remove_peer(id1), True) |
693 | 440 | self.assertEqual(db.get('_local/dmedia'), | 422 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
694 | 441 | { | 423 | self.assertEqual(inst.machine['peers'], {id2: info2}) |
695 | 442 | '_id': '_local/dmedia', | 424 | self.assertEqual(inst.machine['_rev'][:2], '3-') |
686 | 443 | '_rev': '0-2', | ||
687 | 444 | 'stores': {}, | ||
688 | 445 | 'peers': { | ||
689 | 446 | id2: info2, | ||
690 | 447 | }, | ||
691 | 448 | } | ||
692 | 449 | ) | ||
696 | 450 | 425 | ||
697 | 451 | # id1 is missing | 426 | # id1 is missing |
698 | 452 | self.assertIs(inst.remove_peer(id1), False) | 427 | self.assertIs(inst.remove_peer(id1), False) |
709 | 453 | self.assertEqual(db.get('_local/dmedia'), | 428 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
710 | 454 | { | 429 | self.assertEqual(inst.machine['peers'], {id2: info2}) |
711 | 455 | '_id': '_local/dmedia', | 430 | self.assertEqual(inst.machine['_rev'][:2], '3-') |
702 | 456 | '_rev': '0-2', | ||
703 | 457 | 'stores': {}, | ||
704 | 458 | 'peers': { | ||
705 | 459 | id2: info2, | ||
706 | 460 | }, | ||
707 | 461 | } | ||
708 | 462 | ) | ||
712 | 463 | 431 | ||
713 | 464 | # id2 is present | 432 | # id2 is present |
714 | 465 | self.assertIs(inst.remove_peer(id2), True) | 433 | self.assertIs(inst.remove_peer(id2), True) |
723 | 466 | self.assertEqual(db.get('_local/dmedia'), | 434 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
724 | 467 | { | 435 | self.assertEqual(inst.machine['peers'], {}) |
725 | 468 | '_id': '_local/dmedia', | 436 | self.assertEqual(inst.machine['_rev'][:2], '4-') |
726 | 469 | '_rev': '0-3', | 437 | |
727 | 470 | 'stores': {}, | 438 | # id2 is missing |
728 | 471 | 'peers': {}, | 439 | self.assertIs(inst.remove_peer(id2), False) |
729 | 472 | } | 440 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
730 | 473 | ) | 441 | self.assertEqual(inst.machine['peers'], {}) |
731 | 442 | self.assertEqual(inst.machine['_rev'][:2], '4-') | ||
732 | 474 | 443 | ||
733 | 475 | def test_create_filestore(self): | 444 | def test_create_filestore(self): |
735 | 476 | inst = core.Core(self.env) | 445 | inst = self.create() |
736 | 477 | 446 | ||
737 | 478 | # Test when a FileStore already exists | 447 | # Test when a FileStore already exists |
738 | 479 | tmp = TempDir() | 448 | tmp = TempDir() |
739 | @@ -493,34 +462,22 @@ | |||
740 | 493 | self.assertEqual(fs.copies, 1) | 462 | self.assertEqual(fs.copies, 1) |
741 | 494 | self.assertIs(inst.stores.by_id(fs.id), fs) | 463 | self.assertIs(inst.stores.by_id(fs.id), fs) |
742 | 495 | self.assertIs(inst.stores.by_parentdir(fs.parentdir), fs) | 464 | self.assertIs(inst.stores.by_parentdir(fs.parentdir), fs) |
754 | 496 | self.assertEqual( | 465 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
755 | 497 | inst.db.get('_local/dmedia'), | 466 | self.assertEqual(inst.machine['stores'], { |
756 | 498 | { | 467 | fs.id: {'parentdir': fs.parentdir, 'copies': 1}, |
757 | 499 | '_id': '_local/dmedia', | 468 | }) |
758 | 500 | '_rev': '0-1', | 469 | self.assertEqual(inst.machine['_rev'][:2], '2-') |
748 | 501 | 'stores': { | ||
749 | 502 | fs.parentdir: {'id': fs.id, 'copies': fs.copies}, | ||
750 | 503 | }, | ||
751 | 504 | 'peers': {}, | ||
752 | 505 | } | ||
753 | 506 | ) | ||
759 | 507 | 470 | ||
760 | 508 | # Make sure we can disconnect a store that was just created | 471 | # Make sure we can disconnect a store that was just created |
761 | 509 | inst.disconnect_filestore(fs.parentdir) | 472 | inst.disconnect_filestore(fs.parentdir) |
771 | 510 | self.assertEqual( | 473 | self.assertEqual(inst.db.get(self.machine_id), inst.machine) |
772 | 511 | inst.db.get('_local/dmedia'), | 474 | self.assertEqual(inst.machine['stores'], {}) |
773 | 512 | { | 475 | self.assertEqual(inst.machine['_rev'][:2], '3-') |
765 | 513 | '_id': '_local/dmedia', | ||
766 | 514 | '_rev': '0-2', | ||
767 | 515 | 'stores': {}, | ||
768 | 516 | 'peers': {}, | ||
769 | 517 | } | ||
770 | 518 | ) | ||
774 | 519 | 476 | ||
775 | 520 | def test_connect_filestore(self): | 477 | def test_connect_filestore(self): |
776 | 521 | tmp = TempDir() | 478 | tmp = TempDir() |
777 | 522 | basedir = tmp.join(filestore.DOTNAME) | 479 | basedir = tmp.join(filestore.DOTNAME) |
779 | 523 | inst = core.Core(self.env) | 480 | inst = self.create() |
780 | 524 | 481 | ||
781 | 525 | # Test when .dmedia/ doesn't exist | 482 | # Test when .dmedia/ doesn't exist |
782 | 526 | with self.assertRaises(FileNotFoundError) as cm: | 483 | with self.assertRaises(FileNotFoundError) as cm: |
783 | @@ -554,7 +511,7 @@ | |||
784 | 554 | ) | 511 | ) |
785 | 555 | 512 | ||
786 | 556 | # Test when expected_id is provided and matches: | 513 | # Test when expected_id is provided and matches: |
788 | 557 | inst = core.Core(self.env) | 514 | inst = self.create() |
789 | 558 | fs_b = inst.connect_filestore(tmp.dir, expected_id=fs.id) | 515 | fs_b = inst.connect_filestore(tmp.dir, expected_id=fs.id) |
790 | 559 | self.assertIsInstance(fs_b, FileStore) | 516 | self.assertIsInstance(fs_b, FileStore) |
791 | 560 | self.assertEqual(fs_b.parentdir, tmp.dir) | 517 | self.assertEqual(fs_b.parentdir, tmp.dir) |
792 | @@ -571,18 +528,15 @@ | |||
793 | 571 | self.assertEqual(fs2_a.copies, 1) | 528 | self.assertEqual(fs2_a.copies, 1) |
794 | 572 | self.assertIs(inst.stores.by_id(fs2.id), fs2_a) | 529 | self.assertIs(inst.stores.by_id(fs2.id), fs2_a) |
795 | 573 | self.assertIs(inst.stores.by_parentdir(fs2.parentdir), fs2_a) | 530 | self.assertIs(inst.stores.by_parentdir(fs2.parentdir), fs2_a) |
798 | 574 | self.assertEqual( | 531 | |
799 | 575 | inst.db.get('_local/dmedia'), | 532 | self.assertEqual(inst.machine, inst.db.get(self.machine_id)) |
800 | 533 | self.assertEqual(inst.machine['stores'], | ||
801 | 576 | { | 534 | { |
810 | 577 | '_id': '_local/dmedia', | 535 | fs.id: {'parentdir': fs.parentdir, 'copies': 1}, |
811 | 578 | '_rev': '0-2', | 536 | fs2.id: {'parentdir': fs2.parentdir, 'copies': 1}, |
812 | 579 | 'stores': { | 537 | }, |
805 | 580 | fs.parentdir: {'id': fs.id, 'copies': 1}, | ||
806 | 581 | fs2.parentdir: {'id': fs2.id, 'copies': 1}, | ||
807 | 582 | }, | ||
808 | 583 | 'peers': {}, | ||
809 | 584 | } | ||
813 | 585 | ) | 538 | ) |
814 | 539 | self.assertEqual(inst.machine['_rev'][:2], '3-') | ||
815 | 586 | 540 | ||
816 | 587 | # Test when migration is needed | 541 | # Test when migration is needed |
817 | 588 | tmp = TempDir() | 542 | tmp = TempDir() |
818 | @@ -592,12 +546,9 @@ | |||
819 | 592 | self.assertEqual(b32_to_db32(old['_id']), fs.id) | 546 | self.assertEqual(b32_to_db32(old['_id']), fs.id) |
820 | 593 | 547 | ||
821 | 594 | def test_disconnect_filestore(self): | 548 | def test_disconnect_filestore(self): |
828 | 595 | inst = core.Core(self.env) | 549 | inst = self.create() |
829 | 596 | 550 | fs1 = TempFileStore() | |
830 | 597 | tmp1 = TempDir() | 551 | fs2 = TempFileStore() |
825 | 598 | fs1 = FileStore.create(tmp1.dir) | ||
826 | 599 | tmp2 = TempDir() | ||
827 | 600 | fs2 = FileStore.create(tmp2.dir) | ||
831 | 601 | 552 | ||
832 | 602 | # Test when not connected: | 553 | # Test when not connected: |
833 | 603 | with self.assertRaises(KeyError) as cm: | 554 | with self.assertRaises(KeyError) as cm: |
834 | @@ -607,44 +558,30 @@ | |||
835 | 607 | # Connect both, then disconnect one by one | 558 | # Connect both, then disconnect one by one |
836 | 608 | inst.connect_filestore(fs1.parentdir, fs1.id) | 559 | inst.connect_filestore(fs1.parentdir, fs1.id) |
837 | 609 | inst.connect_filestore(fs2.parentdir, fs2.id) | 560 | inst.connect_filestore(fs2.parentdir, fs2.id) |
840 | 610 | self.assertEqual( | 561 | self.assertEqual(inst.machine, inst.db.get(self.machine_id)) |
841 | 611 | inst.db.get('_local/dmedia'), | 562 | self.assertEqual(inst.machine['stores'], |
842 | 612 | { | 563 | { |
851 | 613 | '_id': '_local/dmedia', | 564 | fs1.id: {'parentdir': fs1.parentdir, 'copies': 1}, |
852 | 614 | '_rev': '0-2', | 565 | fs2.id: {'parentdir': fs2.parentdir, 'copies': 1}, |
853 | 615 | 'stores': { | 566 | }, |
846 | 616 | fs1.parentdir: {'id': fs1.id, 'copies': 1}, | ||
847 | 617 | fs2.parentdir: {'id': fs2.id, 'copies': 1}, | ||
848 | 618 | }, | ||
849 | 619 | 'peers': {}, | ||
850 | 620 | } | ||
854 | 621 | ) | 567 | ) |
855 | 568 | self.assertEqual(inst.machine['_rev'][:2], '3-') | ||
856 | 622 | 569 | ||
857 | 623 | # Disconnect fs1 | 570 | # Disconnect fs1 |
858 | 624 | inst.disconnect_filestore(fs1.parentdir) | 571 | inst.disconnect_filestore(fs1.parentdir) |
861 | 625 | self.assertEqual( | 572 | self.assertEqual(inst.machine, inst.db.get(self.machine_id)) |
862 | 626 | inst.db.get('_local/dmedia'), | 573 | self.assertEqual(inst.machine['stores'], |
863 | 627 | { | 574 | { |
871 | 628 | '_id': '_local/dmedia', | 575 | fs2.id: {'parentdir': fs2.parentdir, 'copies': 1}, |
872 | 629 | '_rev': '0-3', | 576 | }, |
866 | 630 | 'stores': { | ||
867 | 631 | fs2.parentdir: {'id': fs2.id, 'copies': 1}, | ||
868 | 632 | }, | ||
869 | 633 | 'peers': {}, | ||
870 | 634 | } | ||
873 | 635 | ) | 577 | ) |
874 | 578 | self.assertEqual(inst.machine['_rev'][:2], '4-') | ||
875 | 636 | 579 | ||
876 | 637 | # Disconnect fs2 | 580 | # Disconnect fs2 |
877 | 638 | inst.disconnect_filestore(fs2.parentdir) | 581 | inst.disconnect_filestore(fs2.parentdir) |
887 | 639 | self.assertEqual( | 582 | self.assertEqual(inst.machine, inst.db.get(self.machine_id)) |
888 | 640 | inst.db.get('_local/dmedia'), | 583 | self.assertEqual(inst.machine['stores'], {}) |
889 | 641 | { | 584 | self.assertEqual(inst.machine['_rev'][:2], '5-') |
881 | 642 | '_id': '_local/dmedia', | ||
882 | 643 | '_rev': '0-4', | ||
883 | 644 | 'stores': {}, | ||
884 | 645 | 'peers': {}, | ||
885 | 646 | } | ||
886 | 647 | ) | ||
890 | 648 | 585 | ||
891 | 649 | # Again test when not connected: | 586 | # Again test when not connected: |
892 | 650 | with self.assertRaises(KeyError) as cm: | 587 | with self.assertRaises(KeyError) as cm: |
893 | @@ -655,7 +592,7 @@ | |||
894 | 655 | self.assertEqual(str(cm.exception), repr(fs1.parentdir)) | 592 | self.assertEqual(str(cm.exception), repr(fs1.parentdir)) |
895 | 656 | 593 | ||
896 | 657 | def test_resolve(self): | 594 | def test_resolve(self): |
898 | 658 | inst = core.Core(self.env) | 595 | inst = self.create() |
899 | 659 | 596 | ||
900 | 660 | bad_id1 = random_id(25) # Wrong length | 597 | bad_id1 = random_id(25) # Wrong length |
901 | 661 | self.assertEqual(inst.resolve(bad_id1), | 598 | self.assertEqual(inst.resolve(bad_id1), |
902 | @@ -705,7 +642,7 @@ | |||
903 | 705 | ) | 642 | ) |
904 | 706 | 643 | ||
905 | 707 | def test_resolve_many(self): | 644 | def test_resolve_many(self): |
907 | 708 | inst = core.Core(self.env) | 645 | inst = self.create() |
908 | 709 | tmp = TempDir() | 646 | tmp = TempDir() |
909 | 710 | fs = inst.create_filestore(tmp.dir) | 647 | fs = inst.create_filestore(tmp.dir) |
910 | 711 | 648 | ||
911 | @@ -765,7 +702,7 @@ | |||
912 | 765 | ) | 702 | ) |
913 | 766 | 703 | ||
914 | 767 | def test_allocate_tmp(self): | 704 | def test_allocate_tmp(self): |
916 | 768 | inst = core.Core(self.env) | 705 | inst = self.create() |
917 | 769 | 706 | ||
918 | 770 | with self.assertRaises(Exception) as cm: | 707 | with self.assertRaises(Exception) as cm: |
919 | 771 | inst.allocate_tmp() | 708 | inst.allocate_tmp() |
920 | @@ -778,7 +715,7 @@ | |||
921 | 778 | self.assertEqual(path.getsize(name), 0) | 715 | self.assertEqual(path.getsize(name), 0) |
922 | 779 | 716 | ||
923 | 780 | def test_hash_and_move(self): | 717 | def test_hash_and_move(self): |
925 | 781 | inst = core.Core(self.env) | 718 | inst = self.create() |
926 | 782 | tmp = TempDir() | 719 | tmp = TempDir() |
927 | 783 | fs = inst.create_filestore(tmp.dir) | 720 | fs = inst.create_filestore(tmp.dir) |
928 | 784 | tmp_fp = fs.allocate_tmp() | 721 | tmp_fp = fs.allocate_tmp() |
929 | 785 | 722 | ||
930 | === modified file 'dmedia/tests/test_importer.py' | |||
931 | --- dmedia/tests/test_importer.py 2013-05-15 19:43:08 +0000 | |||
932 | +++ dmedia/tests/test_importer.py 2013-08-25 20:53:26 +0000 | |||
933 | @@ -32,13 +32,14 @@ | |||
934 | 32 | from os import path | 32 | from os import path |
935 | 33 | 33 | ||
936 | 34 | import filestore | 34 | import filestore |
937 | 35 | from filestore.misc import TempFileStore | ||
938 | 36 | from usercouch.misc import CouchTestCase | ||
939 | 35 | from microfiber import random_id, Database | 37 | from microfiber import random_id, Database |
940 | 36 | 38 | ||
941 | 37 | from .couch import CouchCase | ||
942 | 38 | from .base import TempDir, DummyQueue, MagicLanternTestCase2 | 39 | from .base import TempDir, DummyQueue, MagicLanternTestCase2 |
943 | 39 | 40 | ||
944 | 40 | from dmedia.util import get_db | 41 | from dmedia.util import get_db |
946 | 41 | from dmedia.metastore import get_mtime | 42 | from dmedia.metastore import MetaStore, get_mtime |
947 | 42 | from dmedia import importer, schema | 43 | from dmedia import importer, schema |
948 | 43 | 44 | ||
949 | 44 | 45 | ||
950 | @@ -202,42 +203,40 @@ | |||
951 | 202 | ) | 203 | ) |
952 | 203 | 204 | ||
953 | 204 | 205 | ||
956 | 205 | class ImportCase(CouchCase): | 206 | class ImportCase(CouchTestCase): |
955 | 206 | |||
957 | 207 | def setUp(self): | 207 | def setUp(self): |
958 | 208 | super().setUp() | 208 | super().setUp() |
959 | 209 | self.q = DummyQueue() | 209 | self.q = DummyQueue() |
960 | 210 | |||
961 | 211 | self.src = TempDir() | 210 | self.src = TempDir() |
962 | 212 | 211 | ||
971 | 213 | temps = [TempDir() for i in range(2)] | 212 | filestores = [TempFileStore(copies=1), TempFileStore(copies=2)] |
972 | 214 | (self.dst1, self.dst2) = sorted(temps, key=lambda t: t.dir) | 213 | (self.fs1, self.fs2) = sorted(filestores, key=lambda fs: fs.id) |
965 | 215 | |||
966 | 216 | fs1 = filestore.FileStore.create(self.dst1.dir, copies=1) | ||
967 | 217 | fs2 = filestore.FileStore.create(self.dst2.dir, copies=2) | ||
968 | 218 | |||
969 | 219 | self.store1_id = fs1.id | ||
970 | 220 | self.store2_id = fs2.id | ||
973 | 221 | self.stores = { | 214 | self.stores = { |
979 | 222 | self.dst1.dir: {'id': self.store1_id, 'copies': 1}, | 215 | self.fs1.id: {'parentdir': self.fs1.parentdir, 'copies': 1}, |
980 | 223 | self.dst2.dir: {'id': self.store2_id, 'copies': 2}, | 216 | self.fs2.id: {'parentdir': self.fs2.parentdir, 'copies': 2}, |
981 | 224 | } | 217 | } |
982 | 225 | self.db = get_db(self.env) | 218 | |
983 | 226 | self.db.ensure() | 219 | self.machine_id = random_id(30) |
984 | 220 | self.env['machine_id'] = self.machine_id | ||
985 | 221 | machine = { | ||
986 | 222 | '_id': self.machine_id, | ||
987 | 223 | 'stores': self.stores, | ||
988 | 224 | } | ||
989 | 225 | self.db = get_db(self.env, True) | ||
990 | 226 | self.db.save(machine) | ||
991 | 227 | |||
992 | 227 | self.project_id = random_id() | 228 | self.project_id = random_id() |
993 | 228 | self.env['extract'] = False | ||
994 | 229 | self.env['project_id'] = self.project_id | 229 | self.env['project_id'] = self.project_id |
995 | 230 | 230 | ||
996 | 231 | def tearDown(self): | 231 | def tearDown(self): |
997 | 232 | super().tearDown() | 232 | super().tearDown() |
1002 | 233 | self.q = None | 233 | del self.q |
1003 | 234 | self.src = None | 234 | del self.src |
1004 | 235 | self.dst1 = None | 235 | del self.fs1 |
1005 | 236 | self.dst2 = None | 236 | del self.fs2 |
1006 | 237 | 237 | ||
1007 | 238 | 238 | ||
1008 | 239 | class TestImportWorker(ImportCase): | 239 | class TestImportWorker(ImportCase): |
1009 | 240 | |||
1010 | 241 | def setUp(self): | 240 | def setUp(self): |
1011 | 242 | super().setUp() | 241 | super().setUp() |
1012 | 243 | self.batch_id = random_id() | 242 | self.batch_id = random_id() |
1013 | @@ -301,15 +300,15 @@ | |||
1014 | 301 | self.assertEqual(len(stores), 2) | 300 | self.assertEqual(len(stores), 2) |
1015 | 302 | fs1 = stores[0] | 301 | fs1 = stores[0] |
1016 | 303 | self.assertIsInstance(fs1, filestore.FileStore) | 302 | self.assertIsInstance(fs1, filestore.FileStore) |
1020 | 304 | self.assertEquals(fs1.parentdir, self.dst1.dir) | 303 | self.assertEquals(fs1.parentdir, self.fs1.parentdir) |
1021 | 305 | self.assertEquals(fs1.id, self.store1_id) | 304 | self.assertEquals(fs1.id, self.fs1.id) |
1022 | 306 | self.assertEquals(fs1.copies, 1) | 305 | self.assertEquals(fs1.copies, self.fs1.copies) |
1023 | 307 | 306 | ||
1024 | 308 | fs2 = stores[1] | 307 | fs2 = stores[1] |
1025 | 309 | self.assertIsInstance(fs2, filestore.FileStore) | 308 | self.assertIsInstance(fs2, filestore.FileStore) |
1029 | 310 | self.assertEquals(fs2.parentdir, self.dst2.dir) | 309 | self.assertEquals(fs2.parentdir, self.fs2.parentdir) |
1030 | 311 | self.assertEquals(fs2.id, self.store2_id) | 310 | self.assertEquals(fs2.id, self.fs2.id) |
1031 | 312 | self.assertEquals(fs2.copies, 2) | 311 | self.assertEquals(fs2.copies, self.fs2.copies) |
1032 | 313 | 312 | ||
1033 | 314 | # import_all() | 313 | # import_all() |
1034 | 315 | for (file, ch) in result: | 314 | for (file, ch) in result: |
1035 | @@ -355,13 +354,13 @@ | |||
1036 | 355 | self.assertEqual(leaf_hashes, ch.leaf_hashes) | 354 | self.assertEqual(leaf_hashes, ch.leaf_hashes) |
1037 | 356 | self.assertEqual(doc['stored'], | 355 | self.assertEqual(doc['stored'], |
1038 | 357 | { | 356 | { |
1042 | 358 | self.store1_id: { | 357 | self.fs1.id: { |
1043 | 359 | 'copies': 1, | 358 | 'copies': self.fs1.copies, |
1044 | 360 | 'mtime': get_mtime(fs1, ch.id), | 359 | 'mtime': get_mtime(self.fs1, ch.id), |
1045 | 361 | }, | 360 | }, |
1049 | 362 | self.store2_id: { | 361 | self.fs2.id: { |
1050 | 363 | 'copies': 2, | 362 | 'copies': self.fs2.copies, |
1051 | 364 | 'mtime': get_mtime(fs2, ch.id), | 363 | 'mtime': get_mtime(self.fs2, ch.id), |
1052 | 365 | } | 364 | } |
1053 | 366 | 365 | ||
1054 | 367 | } | 366 | } |
1055 | @@ -395,13 +394,23 @@ | |||
1056 | 395 | super().setUp() | 394 | super().setUp() |
1057 | 396 | local = { | 395 | local = { |
1058 | 397 | '_id': '_local/dmedia', | 396 | '_id': '_local/dmedia', |
1060 | 398 | 'stores': self.stores, | 397 | 'machine_id': self.machine_id, |
1061 | 399 | } | 398 | } |
1062 | 400 | self.db.save(local) | 399 | self.db.save(local) |
1063 | 401 | 400 | ||
1064 | 402 | def new(self, callback=None): | 401 | def new(self, callback=None): |
1065 | 403 | return self.klass(self.env, callback) | 402 | return self.klass(self.env, callback) |
1066 | 404 | 403 | ||
1067 | 404 | def test_init(self): | ||
1068 | 405 | callback = DummyCallback() | ||
1069 | 406 | inst = importer.ImportManager(self.env, callback) | ||
1070 | 407 | self.assertIsNone(inst.doc) | ||
1071 | 408 | self.assertIsNone(inst._error) | ||
1072 | 409 | self.assertEqual(inst._progress, {}) | ||
1073 | 410 | self.assertIsInstance(inst.ms, MetaStore) | ||
1074 | 411 | self.assertIs(inst.ms.db, inst.db) | ||
1075 | 412 | self.assertEqual(inst.db.name, 'dmedia-1') | ||
1076 | 413 | |||
1077 | 405 | def test_first_worker_starting(self): | 414 | def test_first_worker_starting(self): |
1078 | 406 | callback = DummyCallback() | 415 | callback = DummyCallback() |
1079 | 407 | inst = self.new(callback) | 416 | inst = self.new(callback) |
1080 | @@ -821,10 +830,8 @@ | |||
1081 | 821 | ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats))) | 830 | ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats))) |
1082 | 822 | ) | 831 | ) |
1083 | 823 | 832 | ||
1088 | 824 | fs1 = filestore.FileStore(self.dst1.dir, self.store1_id) | 833 | self.assertEqual(set(st.id for st in self.fs1), ids) |
1089 | 825 | fs2 = filestore.FileStore(self.dst2.dir, self.store2_id) | 834 | self.assertEqual(set(st.id for st in self.fs2), ids) |
1086 | 826 | self.assertEqual(set(st.id for st in fs1), ids) | ||
1087 | 827 | self.assertEqual(set(st.id for st in fs2), ids) | ||
1090 | 828 | 835 | ||
1091 | 829 | # Check all the dmedia/file docs: | 836 | # Check all the dmedia/file docs: |
1092 | 830 | for (file, ch) in result: | 837 | for (file, ch) in result: |
1093 | @@ -842,13 +849,13 @@ | |||
1094 | 842 | self.assertEqual(leaf_hashes, ch.leaf_hashes) | 849 | self.assertEqual(leaf_hashes, ch.leaf_hashes) |
1095 | 843 | self.assertEqual(doc['stored'], | 850 | self.assertEqual(doc['stored'], |
1096 | 844 | { | 851 | { |
1100 | 845 | self.store1_id: { | 852 | self.fs1.id: { |
1101 | 846 | 'copies': 1, | 853 | 'copies': self.fs1.copies, |
1102 | 847 | 'mtime': get_mtime(fs1, ch.id), | 854 | 'mtime': get_mtime(self.fs1, ch.id), |
1103 | 848 | }, | 855 | }, |
1107 | 849 | self.store2_id: { | 856 | self.fs2.id: { |
1108 | 850 | 'copies': 2, | 857 | 'copies': self.fs2.copies, |
1109 | 851 | 'mtime': get_mtime(fs2, ch.id), | 858 | 'mtime': get_mtime(self.fs2, ch.id), |
1110 | 852 | } | 859 | } |
1111 | 853 | 860 | ||
1112 | 854 | } | 861 | } |
1113 | @@ -858,8 +865,8 @@ | |||
1114 | 858 | for (file, ch) in result: | 865 | for (file, ch) in result: |
1115 | 859 | if ch is None: | 866 | if ch is None: |
1116 | 860 | continue | 867 | continue |
1119 | 861 | self.assertEqual(fs1.verify(ch.id), ch) | 868 | self.assertEqual(self.fs1.verify(ch.id), ch) |
1120 | 862 | self.assertEqual(fs2.verify(ch.id), ch) | 869 | self.assertEqual(self.fs2.verify(ch.id), ch) |
1121 | 863 | 870 | ||
1122 | 864 | ################################################################## | 871 | ################################################################## |
1123 | 865 | # Okay, now run the whole thing again when they're all duplicates: | 872 | # Okay, now run the whole thing again when they're all duplicates: |
1124 | @@ -910,10 +917,8 @@ | |||
1125 | 910 | ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats))) | 917 | ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats))) |
1126 | 911 | ) | 918 | ) |
1127 | 912 | 919 | ||
1132 | 913 | fs1 = filestore.FileStore(self.dst1.dir) | 920 | self.assertEqual(set(st.id for st in self.fs1), ids) |
1133 | 914 | fs2 = filestore.FileStore(self.dst2.dir) | 921 | self.assertEqual(set(st.id for st in self.fs2), ids) |
1130 | 915 | self.assertEqual(set(st.id for st in fs1), ids) | ||
1131 | 916 | self.assertEqual(set(st.id for st in fs2), ids) | ||
1134 | 917 | 922 | ||
1135 | 918 | # Check all the dmedia/file docs: | 923 | # Check all the dmedia/file docs: |
1136 | 919 | for (file, ch) in result: | 924 | for (file, ch) in result: |
1137 | @@ -931,13 +936,13 @@ | |||
1138 | 931 | self.assertEqual(leaf_hashes, ch.leaf_hashes) | 936 | self.assertEqual(leaf_hashes, ch.leaf_hashes) |
1139 | 932 | self.assertEqual(doc['stored'], | 937 | self.assertEqual(doc['stored'], |
1140 | 933 | { | 938 | { |
1144 | 934 | self.store1_id: { | 939 | self.fs1.id: { |
1145 | 935 | 'copies': 1, | 940 | 'copies': self.fs1.copies, |
1146 | 936 | 'mtime': get_mtime(fs1, ch.id), | 941 | 'mtime': get_mtime(self.fs1, ch.id), |
1147 | 937 | }, | 942 | }, |
1151 | 938 | self.store2_id: { | 943 | self.fs2.id: { |
1152 | 939 | 'copies': 2, | 944 | 'copies': self.fs2.copies, |
1153 | 940 | 'mtime': get_mtime(fs2, ch.id), | 945 | 'mtime': get_mtime(self.fs2, ch.id), |
1154 | 941 | } | 946 | } |
1155 | 942 | 947 | ||
1156 | 943 | } | 948 | } |
1157 | @@ -947,8 +952,8 @@ | |||
1158 | 947 | for (file, ch) in result: | 952 | for (file, ch) in result: |
1159 | 948 | if ch is None: | 953 | if ch is None: |
1160 | 949 | continue | 954 | continue |
1163 | 950 | self.assertEqual(fs1.verify(ch.id), ch) | 955 | self.assertEqual(self.fs1.verify(ch.id), ch) |
1164 | 951 | self.assertEqual(fs2.verify(ch.id), ch) | 956 | self.assertEqual(self.fs2.verify(ch.id), ch) |
1165 | 952 | 957 | ||
1166 | 953 | 958 | ||
1167 | 954 | MAGIC_LANTERN = ( | 959 | MAGIC_LANTERN = ( |
1168 | 955 | 960 | ||
1169 | === modified file 'dmedia/tests/test_local.py' | |||
1170 | --- dmedia/tests/test_local.py 2013-05-14 21:28:00 +0000 | |||
1171 | +++ dmedia/tests/test_local.py 2013-08-25 20:53:26 +0000 | |||
1172 | @@ -27,6 +27,7 @@ | |||
1173 | 27 | from random import Random | 27 | from random import Random |
1174 | 28 | import time | 28 | import time |
1175 | 29 | 29 | ||
1176 | 30 | import microfiber | ||
1177 | 30 | import filestore | 31 | import filestore |
1178 | 31 | from filestore import DIGEST_BYTES | 32 | from filestore import DIGEST_BYTES |
1179 | 32 | from filestore.misc import TempFileStore | 33 | from filestore.misc import TempFileStore |
1180 | @@ -190,18 +191,17 @@ | |||
1181 | 190 | inst.add(fs1) | 191 | inst.add(fs1) |
1182 | 191 | self.assertEqual(inst.local_stores(), | 192 | self.assertEqual(inst.local_stores(), |
1183 | 192 | { | 193 | { |
1185 | 193 | fs1.parentdir: {'id': fs1.id, 'copies': 1}, | 194 | fs1.id: {'parentdir': fs1.parentdir, 'copies': 1}, |
1186 | 194 | } | 195 | } |
1187 | 195 | ) | 196 | ) |
1189 | 196 | 197 | ||
1190 | 197 | inst.add(fs2) | 198 | inst.add(fs2) |
1191 | 198 | self.assertEqual(inst.local_stores(), | 199 | self.assertEqual(inst.local_stores(), |
1192 | 199 | { | 200 | { |
1195 | 200 | fs1.parentdir: {'id': fs1.id, 'copies': 1}, | 201 | fs1.id: {'parentdir': fs1.parentdir, 'copies': 1}, |
1196 | 201 | fs2.parentdir: {'id': fs2.id, 'copies': 0}, | 202 | fs2.id: {'parentdir': fs2.parentdir, 'copies': 0}, |
1197 | 202 | } | 203 | } |
1198 | 203 | ) | 204 | ) |
1199 | 204 | |||
1200 | 205 | 205 | ||
1201 | 206 | 206 | ||
1202 | 207 | class TestLocalSlave(CouchCase): | 207 | class TestLocalSlave(CouchCase): |
1203 | @@ -209,6 +209,57 @@ | |||
1204 | 209 | super().setUp() | 209 | super().setUp() |
1205 | 210 | util.get_db(self.env, True) | 210 | util.get_db(self.env, True) |
1206 | 211 | 211 | ||
1207 | 212 | def test_init(self): | ||
1208 | 213 | inst = local.LocalSlave(self.env) | ||
1209 | 214 | self.assertIsInstance(inst.db, microfiber.Database) | ||
1210 | 215 | self.assertEqual(inst.machine_id, self.machine_id) | ||
1211 | 216 | self.assertIsNone(inst.last_rev) | ||
1212 | 217 | |||
1213 | 218 | def test_update_stores(self): | ||
1214 | 219 | inst = local.LocalSlave(self.env) | ||
1215 | 220 | machine = { | ||
1216 | 221 | '_id': self.machine_id, | ||
1217 | 222 | 'stores': {}, | ||
1218 | 223 | } | ||
1219 | 224 | inst.db.save(machine) | ||
1220 | 225 | |||
1221 | 226 | # No stores | ||
1222 | 227 | self.assertIsNone(inst.update_stores()) | ||
1223 | 228 | self.assertEqual(inst.last_rev, machine['_rev']) | ||
1224 | 229 | self.assertIsInstance(inst.stores, local.LocalStores) | ||
1225 | 230 | self.assertEqual(inst.stores.local_stores(), {}) | ||
1226 | 231 | |||
1227 | 232 | # One store | ||
1228 | 233 | fs1 = TempFileStore() | ||
1229 | 234 | machine['stores'] = { | ||
1230 | 235 | fs1.id: {'parentdir': fs1.parentdir, 'copies': fs1.copies}, | ||
1231 | 236 | } | ||
1232 | 237 | inst.db.save(machine) | ||
1233 | 238 | self.assertIsNone(inst.update_stores()) | ||
1234 | 239 | self.assertEqual(inst.last_rev, machine['_rev']) | ||
1235 | 240 | self.assertIsInstance(inst.stores, local.LocalStores) | ||
1236 | 241 | self.assertEqual(inst.stores.local_stores(), machine['stores']) | ||
1237 | 242 | |||
1238 | 243 | # Two stores | ||
1239 | 244 | fs2 = TempFileStore() | ||
1240 | 245 | machine['stores'] = { | ||
1241 | 246 | fs1.id: {'parentdir': fs1.parentdir, 'copies': fs1.copies}, | ||
1242 | 247 | fs2.id: {'parentdir': fs2.parentdir, 'copies': fs2.copies}, | ||
1243 | 248 | } | ||
1244 | 249 | inst.db.save(machine) | ||
1245 | 250 | self.assertIsNone(inst.update_stores()) | ||
1246 | 251 | self.assertEqual(inst.last_rev, machine['_rev']) | ||
1247 | 252 | self.assertIsInstance(inst.stores, local.LocalStores) | ||
1248 | 253 | self.assertEqual(inst.stores.local_stores(), machine['stores']) | ||
1249 | 254 | |||
1250 | 255 | # Make sure LocalStores doesn't needlessly get rebuilt | ||
1251 | 256 | old = inst.stores | ||
1252 | 257 | rev = inst.last_rev | ||
1253 | 258 | self.assertIsNone(inst.update_stores()) | ||
1254 | 259 | self.assertIs(inst.stores, old) | ||
1255 | 260 | self.assertIs(inst.last_rev, rev) | ||
1256 | 261 | self.assertEqual(inst.stores.local_stores(), machine['stores']) | ||
1257 | 262 | |||
1258 | 212 | def test_get_doc(self): | 263 | def test_get_doc(self): |
1259 | 213 | inst = local.LocalSlave(self.env) | 264 | inst = local.LocalSlave(self.env) |
1260 | 214 | 265 | ||
1261 | 215 | 266 | ||
1262 | === modified file 'dmedia/tests/test_metastore.py' | |||
1263 | --- dmedia/tests/test_metastore.py 2013-07-02 08:24:57 +0000 | |||
1264 | +++ dmedia/tests/test_metastore.py 2013-08-25 20:53:26 +0000 | |||
1265 | @@ -1432,6 +1432,7 @@ | |||
1266 | 1432 | db = util.get_db(self.env, True) | 1432 | db = util.get_db(self.env, True) |
1267 | 1433 | ms = metastore.MetaStore(db) | 1433 | ms = metastore.MetaStore(db) |
1268 | 1434 | local_id = '_local/dmedia' | 1434 | local_id = '_local/dmedia' |
1269 | 1435 | machine_id = random_id() | ||
1270 | 1435 | 1436 | ||
1271 | 1436 | # _local/dmedia NotFound: | 1437 | # _local/dmedia NotFound: |
1272 | 1437 | self.assertEqual(ms.get_local_peers(), {}) | 1438 | self.assertEqual(ms.get_local_peers(), {}) |
1273 | @@ -1439,18 +1440,28 @@ | |||
1274 | 1439 | with self.assertRaises(microfiber.NotFound) as cm: | 1440 | with self.assertRaises(microfiber.NotFound) as cm: |
1275 | 1440 | db.get(local_id) | 1441 | db.get(local_id) |
1276 | 1441 | 1442 | ||
1283 | 1442 | # _local/dmedia exists, but is missing doc['peers']: | 1443 | # _local/dmedia exists, but is missing 'machine_id': |
1284 | 1443 | doc = {'_id': local_id} | 1444 | local = {'_id': local_id} |
1285 | 1444 | db.save(doc) | 1445 | db.save(local) |
1286 | 1445 | self.assertEqual(ms.get_local_peers(), {}) | 1446 | self.assertEqual(ms.get_local_peers(), {}) |
1287 | 1446 | 1447 | ||
1288 | 1447 | # has doc['peers']: | 1448 | # _local/dmedia has 'machine_id', but machine doc is missing: |
1289 | 1449 | local['machine_id'] = machine_id | ||
1290 | 1450 | db.save(local) | ||
1291 | 1451 | self.assertEqual(ms.get_local_peers(), {}) | ||
1292 | 1452 | |||
1293 | 1453 | # machine exists, but is missing 'peers': | ||
1294 | 1454 | machine = {'_id': machine_id} | ||
1295 | 1455 | db.save(machine) | ||
1296 | 1456 | self.assertEqual(ms.get_local_peers(), {}) | ||
1297 | 1457 | |||
1298 | 1458 | # machine has 'peers': | ||
1299 | 1448 | peers = { | 1459 | peers = { |
1300 | 1449 | random_id(30): {'url': random_id()}, | 1460 | random_id(30): {'url': random_id()}, |
1301 | 1450 | random_id(30): {'url': random_id()}, | 1461 | random_id(30): {'url': random_id()}, |
1302 | 1451 | } | 1462 | } |
1305 | 1452 | doc['peers'] = peers | 1463 | machine['peers'] = peers |
1306 | 1453 | db.save(doc) | 1464 | db.save(machine) |
1307 | 1454 | self.assertEqual(ms.get_local_peers(), peers) | 1465 | self.assertEqual(ms.get_local_peers(), peers) |
1308 | 1455 | 1466 | ||
1309 | 1456 | def test_schema_check(self): | 1467 | def test_schema_check(self): |