Merge lp:~jderose/dmedia/icore into lp:dmedia

Proposed by Jason Gerard DeRose
Status: Merged
Approved by: James Raymond
Approved revision: 738
Merged at revision: 722
Proposed branch: lp:~jderose/dmedia/icore
Merge into: lp:dmedia
Diff against target: 1309 lines (+398/-404)
11 files modified
dmedia-gtk (+1/-1)
dmedia-service (+10/-8)
dmedia/core.py (+40/-65)
dmedia/importer.py (+7/-5)
dmedia/local.py (+10/-10)
dmedia/metastore.py (+13/-4)
dmedia/service/tests/test_avahi.py (+5/-3)
dmedia/tests/test_core.py (+171/-234)
dmedia/tests/test_importer.py (+66/-61)
dmedia/tests/test_local.py (+56/-5)
dmedia/tests/test_metastore.py (+19/-8)
To merge this branch: bzr merge lp:~jderose/dmedia/icore
Reviewer Review Type Date Requested Status
James Raymond Approve
Review via email: mp+182010@code.launchpad.net

Description of the change

For background, see: https://bugs.launchpad.net/dmedia/+bug/1216662

Changes include:

* Core.__init__() now has two new required arguments, *machine* and *user* (the machine and user docs), thereby making the identity parts of the Core API non-optional

* Removed Core.load_identity() method as Core.__init__() now does the same

* Local file-stores and peers are now saved only in the dmedia/machine doc, and are no longer saved in the _local/dmedia doc

* LocalStores.local_stores() now returns a dict whose keys are the FileStore.id rather than the FileStore.parentdir, which makes it easy to use set intersection to determine whether a given peer has a specific file, like this:

    set(peer['stores']).intersection(file['stored'])

* Because many different bits of code relied on the stores being in _local/dmedia, the rest of the change is basically a port to the new API plus the corresponding test updates

To post a comment you must log in.
Revision history for this message
James Raymond (jamesmr) :
review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'dmedia-gtk'
--- dmedia-gtk 2013-08-16 19:44:40 +0000
+++ dmedia-gtk 2013-08-25 20:53:26 +0000
@@ -102,7 +102,7 @@
102 def on_batch_finished(self, hub, batch_id, stats, copies, msg):102 def on_batch_finished(self, hub, batch_id, stats, copies, msg):
103 log.info('batch_finished: %s', batch_id)103 log.info('batch_finished: %s', batch_id)
104 log.info('Calling Dmedia.SnapshotProject(%r)...', self.project_id)104 log.info('Calling Dmedia.SnapshotProject(%r)...', self.project_id)
105 #self.proxy.SnapshotProject(self.project_id)105 self.proxy.SnapshotProject(self.project_id)
106 self.blocking = True106 self.blocking = True
107 self.batch_id = batch_id107 self.batch_id = batch_id
108 self.copies = copies108 self.copies = copies
109109
=== modified file 'dmedia-service'
--- dmedia-service 2013-07-29 12:43:06 +0000
+++ dmedia-service 2013-08-25 20:53:26 +0000
@@ -172,9 +172,11 @@
172 env = self.couch.auto_bootstrap()172 env = self.couch.auto_bootstrap()
173 log.info('%r', self.couch._welcome)173 log.info('%r', self.couch._welcome)
174 log.info('Starting CouchDB took %.3f', time.monotonic() - start)174 log.info('Starting CouchDB took %.3f', time.monotonic() - start)
175 self.core = Core(env, self.couch.get_ssl_config())175 self.core = Core(env,
176 self.core.reset_local()176 self.couch.machine,
177 self.core.load_identity(self.couch.machine, self.couch.user)177 self.couch.user,
178 self.couch.get_ssl_config()
179 )
178 self.env_s = dumps(self.core.env, pretty=True)180 self.env_s = dumps(self.core.env, pretty=True)
179 self.snapshots = Snapshots(181 self.snapshots = Snapshots(
180 self.core.env,182 self.core.env,
@@ -233,8 +235,8 @@
233 port = env['port']235 port = env['port']
234 self.avahi = Avahi(self.core, port)236 self.avahi = Avahi(self.core, port)
235 self.avahi.run()237 self.avahi.run()
236 GLib.timeout_add(5000, self.on_idle4)238 GLib.timeout_add(9000, self.on_idle4)
237 GLib.timeout_add(90 * 1000, self.core.reclaim_if_possible)239 GLib.timeout_add(45 * 1000, self.core.reclaim_if_possible)
238240
239 def on_idle4(self):241 def on_idle4(self):
240 """242 """
@@ -458,14 +460,14 @@
458 """460 """
459 Return currently connected filestores461 Return currently connected filestores
460 """462 """
461 return dumps(self.core.local['stores'], pretty=True)463 return dumps(self.core.machine['stores'], pretty=True)
462464
463 @dbus.service.method(IFACE, in_signature='', out_signature='s')465 @dbus.service.method(IFACE, in_signature='', out_signature='s')
464 def Peers(self):466 def Peers(self):
465 """467 """
466 Return peers currently known on local network.468 Return peers currently known on local network.
467 """469 """
468 return dumps(self.core.local['peers'], pretty=True)470 return dumps(self.core.machine['peers'], pretty=True)
469471
470 @dbus.service.method(IFACE, in_signature='s', out_signature='s')472 @dbus.service.method(IFACE, in_signature='s', out_signature='s')
471 def CreateFileStore(self, parentdir):473 def CreateFileStore(self, parentdir):
@@ -550,7 +552,7 @@
550 if not self.UpdateProject(project_id):552 if not self.UpdateProject(project_id):
551 self.pending_update = project_id553 self.pending_update = project_id
552 self.Snapshot(schema.project_db_name(project_id))554 self.Snapshot(schema.project_db_name(project_id))
553 self.Snapshot(schema.DB_NAME)555 #self.Snapshot(schema.DB_NAME)
554556
555 @dbus.service.method(IFACE, in_signature='s', out_signature='s')557 @dbus.service.method(IFACE, in_signature='s', out_signature='s')
556 def AutoFormat(self, value):558 def AutoFormat(self, value):
557559
=== modified file 'dmedia/core.py'
--- dmedia/core.py 2013-07-28 20:31:16 +0000
+++ dmedia/core.py 2013-08-25 20:53:26 +0000
@@ -467,6 +467,11 @@
467 stores.pop(fs_id, None)467 stores.pop(fs_id, None)
468468
469469
470def mark_connected_stores(doc, atime, stores):
471 assert isinstance(stores, dict)
472 doc['atime'] = atime
473 doc['stores'] = stores
474
470def mark_add_peer(doc, atime, peer_id, info):475def mark_add_peer(doc, atime, peer_id, info):
471 assert isinstance(info, dict)476 assert isinstance(info, dict)
472 doc['atime'] = atime477 doc['atime'] = atime
@@ -481,9 +486,12 @@
481486
482487
483class Core:488class Core:
484 def __init__(self, env, ssl_config=None):489 def __init__(self, env, machine, user, ssl_config=None):
490 env.update({
491 'machine_id': machine['_id'],
492 'user_id': user['_id'],
493 })
485 self.env = env494 self.env = env
486 self.ssl_config = ssl_config
487 self.db = util.get_db(env, init=True)495 self.db = util.get_db(env, init=True)
488 self.log_db = self.db.database(schema.LOG_DB_NAME)496 self.log_db = self.db.database(schema.LOG_DB_NAME)
489 self.log_db.ensure()497 self.log_db.ensure()
@@ -491,26 +499,28 @@
491 self.ms = MetaStore(self.db)499 self.ms = MetaStore(self.db)
492 self.stores = LocalStores()500 self.stores = LocalStores()
493 self.task_manager = TaskManager(env, ssl_config)501 self.task_manager = TaskManager(env, ssl_config)
502 self.ssl_config = ssl_config
494 try:503 try:
495 self.local = self.db.get(LOCAL_ID)504 self.local = self.db.get(LOCAL_ID)
496 except NotFound:505 except NotFound:
497 self.local = {506 self.local = {'_id': LOCAL_ID}
498 '_id': LOCAL_ID,507 self.local.update({
499 'stores': {},508 'machine_id': machine['_id'],
500 'peers': {},509 'user_id': user['_id'],
501 }510 })
502 self.__local = deepcopy(self.local)511 self.local.pop('stores', None)
503 self.machine = None512 self.local.pop('peers', None)
504 self.user = None513 (self.machine, self.user) = self.db.get_defaults([machine, user])
514 self.machine.update({
515 'stores': {},
516 'peers': {},
517 })
518 self.db.save_many([self.local, self.machine, self.user])
519 log.info('machine_id = %s', machine['_id'])
520 log.info('user_id = %s', user['_id'])
505521
506 def save_local(self):522 def save_local(self):
507 if self.local != self.__local:523 self.db.save(self.local)
508 self.db.save(self.local)
509 self.__local = deepcopy(self.local)
510
511 def reset_local(self):
512 self.local['stores'] = {}
513 self.local['peers'] = {}
514524
515 def start_background_tasks(self):525 def start_background_tasks(self):
516 self.task_manager.start_tasks()526 self.task_manager.start_tasks()
@@ -537,53 +547,29 @@
537 self.local['skip_internal'] = flag547 self.local['skip_internal'] = flag
538 self.save_local()548 self.save_local()
539549
540 def load_identity(self, machine, user, timestamp=None):
541 if timestamp is None:
542 timestamp = int(time.time())
543 assert isinstance(timestamp, int) and timestamp > 0
544 try:
545 self.db.save_many([machine, user])
546 except BulkConflict:
547 pass
548 log.info('machine_id = %s', machine['_id'])
549 log.info('user_id = %s', user['_id'])
550 self.env['machine_id'] = machine['_id']
551 self.env['user_id'] = user['_id']
552 self.local['machine_id'] = machine['_id']
553 self.local['user_id'] = user['_id']
554 self.save_local()
555 self.machine = self.db.update(mark_machine_start, machine, timestamp)
556
557 def add_peer(self, peer_id, info):550 def add_peer(self, peer_id, info):
558 assert isdb32(peer_id) and len(peer_id) == 48551 assert isdb32(peer_id) and len(peer_id) == 48
559 assert isinstance(info, dict)552 assert isinstance(info, dict)
560 assert isinstance(info['url'], str)553 assert isinstance(info['url'], str)
561 self.local['peers'][peer_id] = info554 self.machine = self.db.update(
562 self.save_local()555 mark_add_peer, self.machine, int(time.time()), peer_id, info
563 if self.machine:556 )
564 atime = int(time.time())
565 self.machine = self.db.update(
566 mark_add_peer, self.machine, atime, peer_id, info
567 )
568 self.restart_vigilance()557 self.restart_vigilance()
569558
570 def remove_peer(self, peer_id):559 def remove_peer(self, peer_id):
571 if self.machine:560 if peer_id not in self.machine['peers']:
572 atime = int(time.time())
573 self.machine = self.db.update(
574 mark_remove_peer, self.machine, atime, peer_id
575 )
576 try:
577 del self.local['peers'][peer_id]
578 self.save_local()
579 self.restart_vigilance()
580 return True
581 except KeyError:
582 return False561 return False
562 self.machine = self.db.update(
563 mark_remove_peer, self.machine, int(time.time()), peer_id
564 )
565 self.restart_vigilance()
566 return True
583567
584 def _sync_stores(self):568 def _sync_stores(self):
585 self.local['stores'] = self.stores.local_stores()569 stores = self.stores.local_stores()
586 self.save_local()570 self.machine = self.db.update(
571 mark_connected_stores, self.machine, int(time.time()), stores
572 )
587 self.restart_vigilance()573 self.restart_vigilance()
588574
589 def _add_filestore(self, fs):575 def _add_filestore(self, fs):
@@ -601,23 +587,12 @@
601 pass587 pass
602 self.task_manager.queue_filestore_tasks(fs)588 self.task_manager.queue_filestore_tasks(fs)
603 self._sync_stores()589 self._sync_stores()
604 if self.machine:
605 atime = int(time.time())
606 info = {'parentdir': fs.parentdir}
607 self.machine = self.db.update(
608 mark_add_filestore, self.machine, atime, fs.id, info
609 )
610590
611 def _remove_filestore(self, fs):591 def _remove_filestore(self, fs):
612 log.info('Removing %r', fs)592 log.info('Removing %r', fs)
613 self.stores.remove(fs)593 self.stores.remove(fs)
614 self.task_manager.stop_filestore_tasks(fs)594 self.task_manager.stop_filestore_tasks(fs)
615 self._sync_stores()595 self._sync_stores()
616 if self.machine:
617 atime = int(time.time())
618 self.machine = self.db.update(
619 mark_remove_filestore, self.machine, atime, fs.id
620 )
621596
622 def _iter_project_dbs(self):597 def _iter_project_dbs(self):
623 for (name, _id) in projects_iter(self.server):598 for (name, _id) in projects_iter(self.server):
624599
=== modified file 'dmedia/importer.py'
--- dmedia/importer.py 2013-08-16 19:44:40 +0000
+++ dmedia/importer.py 2013-08-25 20:53:26 +0000
@@ -43,7 +43,7 @@
43from dmedia.util import get_project_db43from dmedia.util import get_project_db
44from dmedia.units import bytes1044from dmedia.units import bytes10
45from dmedia import workers, schema45from dmedia import workers, schema
46from dmedia.metastore import create_stored, merge_stored, TimeDelta46from dmedia.metastore import MetaStore, create_stored, merge_stored, TimeDelta
47from dmedia.extractor import extract, merge_thumbnail47from dmedia.extractor import extract, merge_thumbnail
4848
4949
@@ -216,9 +216,9 @@
216 # FIXME: Should pick up to 2 filestores based size of import and216 # FIXME: Should pick up to 2 filestores based size of import and
217 # available space on the filestores.217 # available space on the filestores.
218 stores = []218 stores = []
219 for parentdir in sorted(self.env['stores']):219 for _id in sorted(self.env['stores']):
220 info = self.env['stores'][parentdir]220 info = self.env['stores'][_id]
221 fs = FileStore(parentdir, info['id'])221 fs = FileStore(info['parentdir'], _id)
222 stores.append(fs)222 stores.append(fs)
223 return stores223 return stores
224224
@@ -325,6 +325,7 @@
325 self._reset()325 self._reset()
326 if not workers.isregistered(ImportWorker):326 if not workers.isregistered(ImportWorker):
327 workers.register(ImportWorker)327 workers.register(ImportWorker)
328 self.ms = MetaStore(self.db)
328329
329 def _reset(self):330 def _reset(self):
330 self._error = None331 self._error = None
@@ -340,7 +341,8 @@
340 assert self.doc is None341 assert self.doc is None
341 assert self._workers == {}342 assert self._workers == {}
342 self._reset()343 self._reset()
343 stores = self.db.get('_local/dmedia')['stores']344 self.machine = self.ms.get_machine()
345 stores = self.machine['stores']
344 assert isinstance(stores, dict)346 assert isinstance(stores, dict)
345 if not stores:347 if not stores:
346 raise ValueError('No FileStores to import into!')348 raise ValueError('No FileStores to import into!')
347349
=== modified file 'dmedia/local.py'
--- dmedia/local.py 2013-05-14 21:28:00 +0000
+++ dmedia/local.py 2013-08-25 20:53:26 +0000
@@ -191,25 +191,25 @@
191 return fs191 return fs
192192
193 def local_stores(self):193 def local_stores(self):
194 stores = {}194 return dict(
195 for fs in self.ids.values():195 (fs.id, {'parentdir': fs.parentdir, 'copies': fs.copies})
196 stores[fs.parentdir] = {'id': fs.id, 'copies': fs.copies}196 for fs in self.ids.values()
197 return stores197 )
198
199198
200199
201class LocalSlave:200class LocalSlave:
202 def __init__(self, env):201 def __init__(self, env):
203 self.db = get_db(env)202 self.db = get_db(env)
203 self.machine_id = env['machine_id']
204 self.last_rev = None204 self.last_rev = None
205205
206 def update_stores(self):206 def update_stores(self):
207 local = self.db.get('_local/dmedia')207 machine = self.db.get(self.machine_id)
208 if local['_rev'] != self.last_rev:208 if machine['_rev'] != self.last_rev:
209 self.last_rev = local['_rev']209 self.last_rev = machine['_rev']
210 self.stores = LocalStores()210 self.stores = LocalStores()
211 for (parentdir, info) in local['stores'].items():211 for (_id, info) in machine['stores'].items():
212 fs = FileStore(parentdir, info['id'])212 fs = FileStore(info['parentdir'], _id)
213 self.stores.add(fs)213 self.stores.add(fs)
214214
215 def get_doc(self, _id):215 def get_doc(self, _id):
216216
=== modified file 'dmedia/metastore.py'
--- dmedia/metastore.py 2013-07-24 04:56:45 +0000
+++ dmedia/metastore.py 2013-08-25 20:53:26 +0000
@@ -354,17 +354,26 @@
354 except NotFound:354 except NotFound:
355 return {}355 return {}
356356
357 def get_machine(self):
358 machine_id = self.get_local_dmedia().get('machine_id')
359 if machine_id is None:
360 return {}
361 try:
362 return self.db.get(machine_id)
363 except NotFound:
364 return {}
365
357 def get_local_stores(self):366 def get_local_stores(self):
358 doc = self.get_local_dmedia()367 doc = self.get_machine()
359 stores = get_dict(doc, 'stores')368 stores = get_dict(doc, 'stores')
360 local_stores = LocalStores()369 local_stores = LocalStores()
361 for (parentdir, info) in stores.items():370 for (_id, info) in stores.items():
362 fs = FileStore(parentdir, info['id'])371 fs = FileStore(info['parentdir'], _id)
363 local_stores.add(fs)372 local_stores.add(fs)
364 return local_stores373 return local_stores
365374
366 def get_local_peers(self):375 def get_local_peers(self):
367 doc = self.get_local_dmedia()376 doc = self.get_machine()
368 self._peers = get_dict(doc, 'peers')377 self._peers = get_dict(doc, 'peers')
369 return self._peers378 return self._peers
370379
371380
=== modified file 'dmedia/service/tests/test_avahi.py'
--- dmedia/service/tests/test_avahi.py 2013-05-25 00:57:20 +0000
+++ dmedia/service/tests/test_avahi.py 2013-08-25 20:53:26 +0000
@@ -47,13 +47,15 @@
47 def test_init(self):47 def test_init(self):
48 pki = TempPKI(client_pki=True)48 pki = TempPKI(client_pki=True)
49 ssl_config = pki.get_client_config()49 ssl_config = pki.get_client_config()
50 core = Core(self.env, ssl_config)50 machine = {'_id': random_id(30)}
51 user = {'_id': random_id(30)}
52 core = Core(self.env, machine, user, ssl_config)
51 port = random_port()53 port = random_port()
52 inst = avahi.Avahi(core, port)54 inst = avahi.Avahi(core, port)
53 self.assertIs(inst.core, core)55 self.assertIs(inst.core, core)
54 self.assertEqual(inst.port, port)56 self.assertEqual(inst.port, port)
55 self.assertEqual(inst.machine_id, self.machine_id)57 self.assertEqual(inst.machine_id, machine['_id'])
56 self.assertEqual(inst.user_id, self.user_id)58 self.assertEqual(inst.user_id, user['_id'])
57 self.assertIs(inst.server, core.server)59 self.assertIs(inst.server, core.server)
58 self.assertIsInstance(inst.ssl_context, ssl.SSLContext)60 self.assertIsInstance(inst.ssl_context, ssl.SSLContext)
59 self.assertEqual(inst.replications, {})61 self.assertEqual(inst.replications, {})
6062
=== modified file 'dmedia/tests/test_core.py'
--- dmedia/tests/test_core.py 2013-07-28 01:11:13 +0000
+++ dmedia/tests/test_core.py 2013-08-25 20:53:26 +0000
@@ -40,6 +40,7 @@
40from filestore import FileStore40from filestore import FileStore
41from filestore.misc import TempFileStore41from filestore.misc import TempFileStore
42from filestore.migration import Migration, b32_to_db3242from filestore.migration import Migration, b32_to_db32
43from usercouch.misc import CouchTestCase
4344
44from dmedia.local import LocalStores45from dmedia.local import LocalStores
45from dmedia.metastore import MetaStore, get_mtime46from dmedia.metastore import MetaStore, get_mtime
@@ -148,6 +149,39 @@
148 'stores': {},149 'stores': {},
149 })150 })
150151
152 def test_mark_connected_stores(self):
153 atime = int(time.time())
154 fs1 = TempFileStore()
155 fs2 = TempFileStore()
156
157 doc = {}
158 stores = {
159 fs1.id: {'parentdir': fs1.parentdir}
160 }
161 self.assertIsNone(core.mark_connected_stores(doc, atime, stores))
162 self.assertEqual(doc, {
163 'atime': atime,
164 'stores': {
165 fs1.id: {'parentdir': fs1.parentdir}
166 },
167 })
168 self.assertIs(doc['stores'], stores)
169
170 doc = {
171 'atime': atime - 123456,
172 'stores': {
173 fs1.id: {'parentdir': fs1.parentdir},
174 fs2.id: {'parentdir': fs2.parentdir},
175 },
176 }
177 stores = {}
178 self.assertIsNone(core.mark_connected_stores(doc, atime, stores))
179 self.assertEqual(doc, {
180 'atime': atime,
181 'stores': {},
182 })
183 self.assertIs(doc['stores'], stores)
184
151 def test_mark_add_peer(self):185 def test_mark_add_peer(self):
152 doc = {}186 doc = {}
153 atime = int(time.time())187 atime = int(time.time())
@@ -265,100 +299,81 @@
265 self.assertEqual(tq.popitem(), ('a', ('aye', 2)))299 self.assertEqual(tq.popitem(), ('a', ('aye', 2)))
266300
267301
268class TestCore(CouchCase):302class TestCore(CouchTestCase):
303 def create(self):
304 self.machine_id = random_id(30)
305 self.user_id = random_id(30)
306 self.machine = {'_id': self.machine_id}
307 self.user = {'_id': self.user_id}
308 return core.Core(self.env, self.machine, self.user)
309
269 def test_init(self):310 def test_init(self):
270 inst = core.Core(self.env)
271 self.assertIs(inst.env, self.env)
272 self.assertIsInstance(inst.db, microfiber.Database)
273 self.assertEqual(inst.db.name, DB_NAME)
274 self.assertIsInstance(inst.server, microfiber.Server)
275 self.assertIs(inst.db.ctx, inst.server.ctx)
276 self.assertIsInstance(inst.stores, LocalStores)
277 self.assertEqual(inst.local,
278 {
279 '_id': '_local/dmedia',
280 'stores': {},
281 'peers': {},
282 }
283 )
284 self.assertIsNone(inst.machine)
285 self.assertIsNone(inst.user)
286
287 def test_load_identity(self):
288 timestamp = int(time.time())
289 machine_id = random_id(30)311 machine_id = random_id(30)
290 user_id = random_id(30)312 user_id = random_id(30)
291 inst = core.Core(self.env)313 machine = {'_id': machine_id}
292 self.assertIsNone(314 user = {'_id': user_id}
293 inst.load_identity({'_id': machine_id}, {'_id': user_id}, timestamp)315
294 )316 inst = core.Core(self.env, machine, user)
295317 self.assertIs(inst.env, self.env)
296 machine = inst.db.get(machine_id)318 self.assertEqual(inst.env['machine_id'], machine_id)
297 self.assertTrue(machine['_rev'].startswith('2-'))319 self.assertEqual(inst.env['user_id'], user_id)
298 self.assertEqual(machine, {320 self.assertIsInstance(inst.db, microfiber.Database)
299 '_id': machine_id,321 self.assertEqual(inst.db.name, 'dmedia-1')
300 '_rev': machine['_rev'],322 self.assertIsInstance(inst.log_db, microfiber.Database)
301 'atime': timestamp,323 self.assertEqual(inst.log_db.name, 'log-1')
302 'stores': {},324 self.assertIsInstance(inst.server, microfiber.Server)
303 'peers': {},325 self.assertIsInstance(inst.ms, MetaStore)
304 })326 self.assertIs(inst.ms.db, inst.db)
305327 self.assertIsInstance(inst.stores, LocalStores)
306 user = inst.db.get(user_id)328 self.assertIsInstance(inst.task_manager, core.TaskManager)
307 self.assertEqual(set(user), set(['_id', '_rev']))329 self.assertIsNone(inst.ssl_config)
308 self.assertTrue(user['_rev'].startswith('1-'))330 self.assertEqual(inst.db.get('_local/dmedia'), {
309331 '_id': '_local/dmedia',
310 self.assertEqual(332 '_rev': '0-1',
311 inst.db.get('_local/dmedia'),333 'machine_id': machine_id,
312 {334 'user_id': user_id,
313 '_id': '_local/dmedia',335 })
314 '_rev': '0-1',336 self.assertIs(inst.machine, machine)
315 'stores': {},337 self.assertEqual(inst.db.get(machine_id), machine)
316 'peers': {},338 self.assertEqual(inst.machine['_rev'][:2], '1-')
317 'machine_id': machine_id,339 self.assertEqual(inst.machine['stores'], {})
318 'user_id': user_id,340 self.assertEqual(inst.machine['peers'], {})
319 }341 self.assertIs(inst.user, user)
320 )342 self.assertEqual(inst.db.get(user_id), user)
321 self.assertEqual(inst.local, inst.db.get('_local/dmedia'))343 self.assertEqual(inst.user['_rev'][:2], '1-')
322 self.assertEqual(self.env['machine_id'], machine_id)344
323 self.assertEqual(self.env['user_id'], user_id)345 ssl_config = random_id()
324346 inst = core.Core(self.env, machine, user, ssl_config)
325 # Now try when machine and user docs already exist:347 self.assertIs(inst.env, self.env)
326 machine['atime'] = timestamp - 12345348 self.assertEqual(inst.env['machine_id'], machine_id)
327 machine['stores'] = 'foo'349 self.assertEqual(inst.env['user_id'], user_id)
328 machine['peers'] = 'bar'350 self.assertIsInstance(inst.db, microfiber.Database)
329 inst.db.save(machine)351 self.assertEqual(inst.db.name, 'dmedia-1')
330 inst = core.Core(self.env)352 self.assertIsInstance(inst.log_db, microfiber.Database)
331 self.assertIsNone(353 self.assertEqual(inst.log_db.name, 'log-1')
332 inst.load_identity({'_id': machine_id}, {'_id': user_id}, timestamp)354 self.assertIsInstance(inst.server, microfiber.Server)
333 )355 self.assertIsInstance(inst.ms, MetaStore)
334356 self.assertIs(inst.ms.db, inst.db)
335 machine = inst.db.get(machine_id)357 self.assertIsInstance(inst.stores, LocalStores)
336 self.assertTrue(machine['_rev'].startswith('4-'))358 self.assertIsInstance(inst.task_manager, core.TaskManager)
337 self.assertEqual(machine, {359 self.assertIs(inst.ssl_config, ssl_config)
338 '_id': machine_id,360 self.assertEqual(inst.db.get('_local/dmedia'), {
339 '_rev': machine['_rev'],361 '_id': '_local/dmedia',
340 'atime': timestamp,362 '_rev': '0-2',
341 'stores': {},363 'machine_id': machine_id,
342 'peers': {},364 'user_id': user_id,
343 })365 })
344366 self.assertIsNot(inst.machine, machine)
345 user = inst.db.get(user_id)367 self.assertEqual(inst.db.get(machine_id), inst.machine)
346 self.assertEqual(set(user), set(['_id', '_rev']))368 self.assertEqual(inst.machine['_rev'][:2], '2-')
347 self.assertTrue(user['_rev'].startswith('1-'))369 self.assertEqual(inst.machine['stores'], {})
348370 self.assertEqual(inst.machine['peers'], {})
349 self.assertEqual(inst.db.get('_local/dmedia'),371 self.assertIsNot(inst.user, user)
350 {372 self.assertEqual(inst.db.get(user_id), inst.user)
351 '_id': '_local/dmedia',373 self.assertEqual(inst.user['_rev'][:2], '2-')
352 '_rev': '0-1',
353 'stores': {},
354 'peers': {},
355 'machine_id': machine_id,
356 'user_id': user_id,
357 }
358 )
359374
360 def test_add_peer(self):375 def test_add_peer(self):
361 inst = core.Core(self.env)376 inst = self.create()
362 id1 = random_id(30)377 id1 = random_id(30)
363 info1 = {378 info1 = {
364 'host': 'jderose-Gazelle-Professional',379 'host': 'jderose-Gazelle-Professional',
@@ -371,109 +386,63 @@
371386
372 # id1 is not yet a peer:387 # id1 is not yet a peer:
373 self.assertIsNone(inst.add_peer(id1, info1))388 self.assertIsNone(inst.add_peer(id1, info1))
374 self.assertEqual(inst.db.get('_local/dmedia'),389 self.assertEqual(inst.machine['peers'], {id1: info1})
375 {390 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
376 '_id': '_local/dmedia',
377 '_rev': '0-1',
378 'stores': {},
379 'peers': {
380 id1: info1,
381 },
382 }
383 )
384391
385 # id2 is not yet a peer:392 # id2 is not yet a peer:
386 self.assertIsNone(inst.add_peer(id2, info2))393 self.assertIsNone(inst.add_peer(id2, info2))
387 self.assertEqual(inst.db.get('_local/dmedia'),394 self.assertEqual(inst.machine['peers'], {id1: info1, id2: info2})
388 {395 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
389 '_id': '_local/dmedia',
390 '_rev': '0-2',
391 'stores': {},
392 'peers': {
393 id1: info1,
394 id2: info2,
395 },
396 }
397 )
398396
399 # id1 is already a peer, make sure info is replaced397 # id1 is already a peer, make sure info is replaced
400 new1 = {'url': random_id()}398 new1 = {'url': random_id()}
401 self.assertIsNone(inst.add_peer(id1, new1))399 self.assertIsNone(inst.add_peer(id1, new1))
402 self.assertEqual(inst.db.get('_local/dmedia'),400 self.assertEqual(inst.machine['peers'], {id1: new1, id2: info2})
403 {401 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
404 '_id': '_local/dmedia',
405 '_rev': '0-3',
406 'stores': {},
407 'peers': {
408 id1: new1,
409 id2: info2,
410 },
411 }
412 )
413402
414 def test_remove_peer(self):403 def test_remove_peer(self):
404 inst = self.create()
415 id1 = random_id(30)405 id1 = random_id(30)
416 id2 = random_id(30)406 id2 = random_id(30)
417 info1 = {'url': random_id()}407 info1 = {'url': random_id()}
418 info2 = {'url': random_id()}408 info2 = {'url': random_id()}
419409 inst.machine['peers'] = {id1: info1, id2: info2}
420 db = microfiber.Database('dmedia-1', self.env)410 inst.db.save(inst.machine)
421 db.ensure()411 self.assertEqual(inst.machine['_rev'][:2], '2-')
422 local = {
423 '_id': '_local/dmedia',
424 'stores': {},
425 'peers': {
426 id1: info1,
427 id2: info2,
428 },
429 }
430 db.save(local)
431 inst = core.Core(self.env)
432412
433 # Test with a peer_id that doesn't exist:413 # Test with a peer_id that doesn't exist:
434 nope = random_id(30)414 nope = random_id(30)
435 self.assertIs(inst.remove_peer(nope), False)415 self.assertIs(inst.remove_peer(nope), False)
436 self.assertEqual(db.get('_local/dmedia'), local)416 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
417 self.assertEqual(inst.machine['peers'], {id1: info1, id2: info2})
418 self.assertEqual(inst.machine['_rev'][:2], '2-')
437419
438 # id1 is present420 # id1 is present
439 self.assertIs(inst.remove_peer(id1), True)421 self.assertIs(inst.remove_peer(id1), True)
440 self.assertEqual(db.get('_local/dmedia'),422 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
441 {423 self.assertEqual(inst.machine['peers'], {id2: info2})
442 '_id': '_local/dmedia',424 self.assertEqual(inst.machine['_rev'][:2], '3-')
443 '_rev': '0-2',
444 'stores': {},
445 'peers': {
446 id2: info2,
447 },
448 }
449 )
450425
451 # id1 is missing426 # id1 is missing
452 self.assertIs(inst.remove_peer(id1), False)427 self.assertIs(inst.remove_peer(id1), False)
453 self.assertEqual(db.get('_local/dmedia'),428 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
454 {429 self.assertEqual(inst.machine['peers'], {id2: info2})
455 '_id': '_local/dmedia',430 self.assertEqual(inst.machine['_rev'][:2], '3-')
456 '_rev': '0-2',
457 'stores': {},
458 'peers': {
459 id2: info2,
460 },
461 }
462 )
463431
464 # id2 is present432 # id2 is present
465 self.assertIs(inst.remove_peer(id2), True)433 self.assertIs(inst.remove_peer(id2), True)
466 self.assertEqual(db.get('_local/dmedia'),434 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
467 {435 self.assertEqual(inst.machine['peers'], {})
468 '_id': '_local/dmedia',436 self.assertEqual(inst.machine['_rev'][:2], '4-')
469 '_rev': '0-3',437
470 'stores': {},438 # id2 is missing
471 'peers': {},439 self.assertIs(inst.remove_peer(id2), False)
472 }440 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
473 )441 self.assertEqual(inst.machine['peers'], {})
442 self.assertEqual(inst.machine['_rev'][:2], '4-')
474443
475 def test_create_filestore(self):444 def test_create_filestore(self):
476 inst = core.Core(self.env)445 inst = self.create()
477446
478 # Test when a FileStore already exists447 # Test when a FileStore already exists
479 tmp = TempDir()448 tmp = TempDir()
@@ -493,34 +462,22 @@
493 self.assertEqual(fs.copies, 1)462 self.assertEqual(fs.copies, 1)
494 self.assertIs(inst.stores.by_id(fs.id), fs)463 self.assertIs(inst.stores.by_id(fs.id), fs)
495 self.assertIs(inst.stores.by_parentdir(fs.parentdir), fs)464 self.assertIs(inst.stores.by_parentdir(fs.parentdir), fs)
496 self.assertEqual(465 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
497 inst.db.get('_local/dmedia'),466 self.assertEqual(inst.machine['stores'], {
498 {467 fs.id: {'parentdir': fs.parentdir, 'copies': 1},
499 '_id': '_local/dmedia',468 })
500 '_rev': '0-1',469 self.assertEqual(inst.machine['_rev'][:2], '2-')
501 'stores': {
502 fs.parentdir: {'id': fs.id, 'copies': fs.copies},
503 },
504 'peers': {},
505 }
506 )
507470
508 # Make sure we can disconnect a store that was just created471 # Make sure we can disconnect a store that was just created
509 inst.disconnect_filestore(fs.parentdir)472 inst.disconnect_filestore(fs.parentdir)
510 self.assertEqual(473 self.assertEqual(inst.db.get(self.machine_id), inst.machine)
511 inst.db.get('_local/dmedia'),474 self.assertEqual(inst.machine['stores'], {})
512 {475 self.assertEqual(inst.machine['_rev'][:2], '3-')
513 '_id': '_local/dmedia',
514 '_rev': '0-2',
515 'stores': {},
516 'peers': {},
517 }
518 )
519476
520 def test_connect_filestore(self):477 def test_connect_filestore(self):
521 tmp = TempDir()478 tmp = TempDir()
522 basedir = tmp.join(filestore.DOTNAME)479 basedir = tmp.join(filestore.DOTNAME)
523 inst = core.Core(self.env)480 inst = self.create()
524481
525 # Test when .dmedia/ doesn't exist482 # Test when .dmedia/ doesn't exist
526 with self.assertRaises(FileNotFoundError) as cm:483 with self.assertRaises(FileNotFoundError) as cm:
@@ -554,7 +511,7 @@
554 )511 )
555512
556 # Test when expected_id is provided and matches:513 # Test when expected_id is provided and matches:
557 inst = core.Core(self.env)514 inst = self.create()
558 fs_b = inst.connect_filestore(tmp.dir, expected_id=fs.id)515 fs_b = inst.connect_filestore(tmp.dir, expected_id=fs.id)
559 self.assertIsInstance(fs_b, FileStore)516 self.assertIsInstance(fs_b, FileStore)
560 self.assertEqual(fs_b.parentdir, tmp.dir)517 self.assertEqual(fs_b.parentdir, tmp.dir)
@@ -571,18 +528,15 @@
571 self.assertEqual(fs2_a.copies, 1)528 self.assertEqual(fs2_a.copies, 1)
572 self.assertIs(inst.stores.by_id(fs2.id), fs2_a)529 self.assertIs(inst.stores.by_id(fs2.id), fs2_a)
573 self.assertIs(inst.stores.by_parentdir(fs2.parentdir), fs2_a)530 self.assertIs(inst.stores.by_parentdir(fs2.parentdir), fs2_a)
574 self.assertEqual(531
575 inst.db.get('_local/dmedia'),532 self.assertEqual(inst.machine, inst.db.get(self.machine_id))
533 self.assertEqual(inst.machine['stores'],
576 {534 {
577 '_id': '_local/dmedia',535 fs.id: {'parentdir': fs.parentdir, 'copies': 1},
578 '_rev': '0-2',536 fs2.id: {'parentdir': fs2.parentdir, 'copies': 1},
579 'stores': {537 },
580 fs.parentdir: {'id': fs.id, 'copies': 1},
581 fs2.parentdir: {'id': fs2.id, 'copies': 1},
582 },
583 'peers': {},
584 }
585 )538 )
539 self.assertEqual(inst.machine['_rev'][:2], '3-')
586540
587 # Test when migration is needed541 # Test when migration is needed
588 tmp = TempDir()542 tmp = TempDir()
@@ -592,12 +546,9 @@
592 self.assertEqual(b32_to_db32(old['_id']), fs.id)546 self.assertEqual(b32_to_db32(old['_id']), fs.id)
593547
594 def test_disconnect_filestore(self):548 def test_disconnect_filestore(self):
595 inst = core.Core(self.env)549 inst = self.create()
596550 fs1 = TempFileStore()
597 tmp1 = TempDir()551 fs2 = TempFileStore()
598 fs1 = FileStore.create(tmp1.dir)
599 tmp2 = TempDir()
600 fs2 = FileStore.create(tmp2.dir)
601552
602 # Test when not connected:553 # Test when not connected:
603 with self.assertRaises(KeyError) as cm:554 with self.assertRaises(KeyError) as cm:
@@ -607,44 +558,30 @@
607 # Connect both, then disconnect one by one558 # Connect both, then disconnect one by one
608 inst.connect_filestore(fs1.parentdir, fs1.id)559 inst.connect_filestore(fs1.parentdir, fs1.id)
609 inst.connect_filestore(fs2.parentdir, fs2.id)560 inst.connect_filestore(fs2.parentdir, fs2.id)
610 self.assertEqual(561 self.assertEqual(inst.machine, inst.db.get(self.machine_id))
611 inst.db.get('_local/dmedia'),562 self.assertEqual(inst.machine['stores'],
612 {563 {
613 '_id': '_local/dmedia',564 fs1.id: {'parentdir': fs1.parentdir, 'copies': 1},
614 '_rev': '0-2',565 fs2.id: {'parentdir': fs2.parentdir, 'copies': 1},
615 'stores': {566 },
616 fs1.parentdir: {'id': fs1.id, 'copies': 1},
617 fs2.parentdir: {'id': fs2.id, 'copies': 1},
618 },
619 'peers': {},
620 }
621 )567 )
568 self.assertEqual(inst.machine['_rev'][:2], '3-')
622569
623 # Disconnect fs1570 # Disconnect fs1
624 inst.disconnect_filestore(fs1.parentdir)571 inst.disconnect_filestore(fs1.parentdir)
625 self.assertEqual(572 self.assertEqual(inst.machine, inst.db.get(self.machine_id))
626 inst.db.get('_local/dmedia'),573 self.assertEqual(inst.machine['stores'],
627 {574 {
628 '_id': '_local/dmedia',575 fs2.id: {'parentdir': fs2.parentdir, 'copies': 1},
629 '_rev': '0-3',576 },
630 'stores': {
631 fs2.parentdir: {'id': fs2.id, 'copies': 1},
632 },
633 'peers': {},
634 }
635 )577 )
578 self.assertEqual(inst.machine['_rev'][:2], '4-')
636579
637 # Disconnect fs2580 # Disconnect fs2
638 inst.disconnect_filestore(fs2.parentdir)581 inst.disconnect_filestore(fs2.parentdir)
639 self.assertEqual(582 self.assertEqual(inst.machine, inst.db.get(self.machine_id))
640 inst.db.get('_local/dmedia'),583 self.assertEqual(inst.machine['stores'], {})
641 {584 self.assertEqual(inst.machine['_rev'][:2], '5-')
642 '_id': '_local/dmedia',
643 '_rev': '0-4',
644 'stores': {},
645 'peers': {},
646 }
647 )
648585
649 # Again test when not connected:586 # Again test when not connected:
650 with self.assertRaises(KeyError) as cm:587 with self.assertRaises(KeyError) as cm:
@@ -655,7 +592,7 @@
655 self.assertEqual(str(cm.exception), repr(fs1.parentdir))592 self.assertEqual(str(cm.exception), repr(fs1.parentdir))
656593
657 def test_resolve(self):594 def test_resolve(self):
658 inst = core.Core(self.env)595 inst = self.create()
659596
660 bad_id1 = random_id(25) # Wrong length597 bad_id1 = random_id(25) # Wrong length
661 self.assertEqual(inst.resolve(bad_id1),598 self.assertEqual(inst.resolve(bad_id1),
@@ -705,7 +642,7 @@
705 )642 )
706643
707 def test_resolve_many(self):644 def test_resolve_many(self):
708 inst = core.Core(self.env)645 inst = self.create()
709 tmp = TempDir()646 tmp = TempDir()
710 fs = inst.create_filestore(tmp.dir)647 fs = inst.create_filestore(tmp.dir)
711648
@@ -765,7 +702,7 @@
765 )702 )
766703
767 def test_allocate_tmp(self):704 def test_allocate_tmp(self):
768 inst = core.Core(self.env)705 inst = self.create()
769706
770 with self.assertRaises(Exception) as cm: 707 with self.assertRaises(Exception) as cm:
771 inst.allocate_tmp()708 inst.allocate_tmp()
@@ -778,7 +715,7 @@
778 self.assertEqual(path.getsize(name), 0)715 self.assertEqual(path.getsize(name), 0)
779716
780 def test_hash_and_move(self):717 def test_hash_and_move(self):
781 inst = core.Core(self.env)718 inst = self.create()
782 tmp = TempDir()719 tmp = TempDir()
783 fs = inst.create_filestore(tmp.dir)720 fs = inst.create_filestore(tmp.dir)
784 tmp_fp = fs.allocate_tmp()721 tmp_fp = fs.allocate_tmp()
785722
=== modified file 'dmedia/tests/test_importer.py'
--- dmedia/tests/test_importer.py 2013-05-15 19:43:08 +0000
+++ dmedia/tests/test_importer.py 2013-08-25 20:53:26 +0000
@@ -32,13 +32,14 @@
32from os import path32from os import path
3333
34import filestore34import filestore
35from filestore.misc import TempFileStore
36from usercouch.misc import CouchTestCase
35from microfiber import random_id, Database37from microfiber import random_id, Database
3638
37from .couch import CouchCase
38from .base import TempDir, DummyQueue, MagicLanternTestCase239from .base import TempDir, DummyQueue, MagicLanternTestCase2
3940
40from dmedia.util import get_db41from dmedia.util import get_db
41from dmedia.metastore import get_mtime42from dmedia.metastore import MetaStore, get_mtime
42from dmedia import importer, schema43from dmedia import importer, schema
4344
4445
@@ -202,42 +203,40 @@
202 )203 )
203204
204205
205class ImportCase(CouchCase):206class ImportCase(CouchTestCase):
206
207 def setUp(self):207 def setUp(self):
208 super().setUp()208 super().setUp()
209 self.q = DummyQueue()209 self.q = DummyQueue()
210
211 self.src = TempDir()210 self.src = TempDir()
212211
213 temps = [TempDir() for i in range(2)]212 filestores = [TempFileStore(copies=1), TempFileStore(copies=2)]
214 (self.dst1, self.dst2) = sorted(temps, key=lambda t: t.dir)213 (self.fs1, self.fs2) = sorted(filestores, key=lambda fs: fs.id)
215
216 fs1 = filestore.FileStore.create(self.dst1.dir, copies=1)
217 fs2 = filestore.FileStore.create(self.dst2.dir, copies=2)
218
219 self.store1_id = fs1.id
220 self.store2_id = fs2.id
221 self.stores = {214 self.stores = {
222 self.dst1.dir: {'id': self.store1_id, 'copies': 1},215 self.fs1.id: {'parentdir': self.fs1.parentdir, 'copies': 1},
223 self.dst2.dir: {'id': self.store2_id, 'copies': 2},216 self.fs2.id: {'parentdir': self.fs2.parentdir, 'copies': 2},
224 }217 }
225 self.db = get_db(self.env)218
226 self.db.ensure()219 self.machine_id = random_id(30)
220 self.env['machine_id'] = self.machine_id
221 machine = {
222 '_id': self.machine_id,
223 'stores': self.stores,
224 }
225 self.db = get_db(self.env, True)
226 self.db.save(machine)
227
227 self.project_id = random_id()228 self.project_id = random_id()
228 self.env['extract'] = False
229 self.env['project_id'] = self.project_id229 self.env['project_id'] = self.project_id
230230
231 def tearDown(self):231 def tearDown(self):
232 super().tearDown()232 super().tearDown()
233 self.q = None233 del self.q
234 self.src = None234 del self.src
235 self.dst1 = None235 del self.fs1
236 self.dst2 = None236 del self.fs2
237237
238238
239class TestImportWorker(ImportCase):239class TestImportWorker(ImportCase):
240
241 def setUp(self):240 def setUp(self):
242 super().setUp()241 super().setUp()
243 self.batch_id = random_id()242 self.batch_id = random_id()
@@ -301,15 +300,15 @@
301 self.assertEqual(len(stores), 2)300 self.assertEqual(len(stores), 2)
302 fs1 = stores[0]301 fs1 = stores[0]
303 self.assertIsInstance(fs1, filestore.FileStore)302 self.assertIsInstance(fs1, filestore.FileStore)
304 self.assertEquals(fs1.parentdir, self.dst1.dir)303 self.assertEquals(fs1.parentdir, self.fs1.parentdir)
305 self.assertEquals(fs1.id, self.store1_id)304 self.assertEquals(fs1.id, self.fs1.id)
306 self.assertEquals(fs1.copies, 1)305 self.assertEquals(fs1.copies, self.fs1.copies)
307306
308 fs2 = stores[1]307 fs2 = stores[1]
309 self.assertIsInstance(fs2, filestore.FileStore)308 self.assertIsInstance(fs2, filestore.FileStore)
310 self.assertEquals(fs2.parentdir, self.dst2.dir)309 self.assertEquals(fs2.parentdir, self.fs2.parentdir)
311 self.assertEquals(fs2.id, self.store2_id)310 self.assertEquals(fs2.id, self.fs2.id)
312 self.assertEquals(fs2.copies, 2)311 self.assertEquals(fs2.copies, self.fs2.copies)
313312
314 # import_all()313 # import_all()
315 for (file, ch) in result:314 for (file, ch) in result:
@@ -355,13 +354,13 @@
355 self.assertEqual(leaf_hashes, ch.leaf_hashes)354 self.assertEqual(leaf_hashes, ch.leaf_hashes)
356 self.assertEqual(doc['stored'],355 self.assertEqual(doc['stored'],
357 {356 {
358 self.store1_id: {357 self.fs1.id: {
359 'copies': 1,358 'copies': self.fs1.copies,
360 'mtime': get_mtime(fs1, ch.id),359 'mtime': get_mtime(self.fs1, ch.id),
361 },360 },
362 self.store2_id: {361 self.fs2.id: {
363 'copies': 2,362 'copies': self.fs2.copies,
364 'mtime': get_mtime(fs2, ch.id),363 'mtime': get_mtime(self.fs2, ch.id),
365 }364 }
366 365
367 }366 }
@@ -395,13 +394,23 @@
395 super().setUp()394 super().setUp()
396 local = {395 local = {
397 '_id': '_local/dmedia',396 '_id': '_local/dmedia',
398 'stores': self.stores,397 'machine_id': self.machine_id,
399 }398 }
400 self.db.save(local)399 self.db.save(local)
401400
402 def new(self, callback=None):401 def new(self, callback=None):
403 return self.klass(self.env, callback)402 return self.klass(self.env, callback)
404403
404 def test_init(self):
405 callback = DummyCallback()
406 inst = importer.ImportManager(self.env, callback)
407 self.assertIsNone(inst.doc)
408 self.assertIsNone(inst._error)
409 self.assertEqual(inst._progress, {})
410 self.assertIsInstance(inst.ms, MetaStore)
411 self.assertIs(inst.ms.db, inst.db)
412 self.assertEqual(inst.db.name, 'dmedia-1')
413
405 def test_first_worker_starting(self):414 def test_first_worker_starting(self):
406 callback = DummyCallback()415 callback = DummyCallback()
407 inst = self.new(callback)416 inst = self.new(callback)
@@ -821,10 +830,8 @@
821 ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats)))830 ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats)))
822 )831 )
823832
824 fs1 = filestore.FileStore(self.dst1.dir, self.store1_id)833 self.assertEqual(set(st.id for st in self.fs1), ids)
825 fs2 = filestore.FileStore(self.dst2.dir, self.store2_id)834 self.assertEqual(set(st.id for st in self.fs2), ids)
826 self.assertEqual(set(st.id for st in fs1), ids)
827 self.assertEqual(set(st.id for st in fs2), ids)
828835
829 # Check all the dmedia/file docs:836 # Check all the dmedia/file docs:
830 for (file, ch) in result:837 for (file, ch) in result:
@@ -842,13 +849,13 @@
842 self.assertEqual(leaf_hashes, ch.leaf_hashes)849 self.assertEqual(leaf_hashes, ch.leaf_hashes)
843 self.assertEqual(doc['stored'],850 self.assertEqual(doc['stored'],
844 {851 {
845 self.store1_id: {852 self.fs1.id: {
846 'copies': 1,853 'copies': self.fs1.copies,
847 'mtime': get_mtime(fs1, ch.id),854 'mtime': get_mtime(self.fs1, ch.id),
848 },855 },
849 self.store2_id: {856 self.fs2.id: {
850 'copies': 2,857 'copies': self.fs2.copies,
851 'mtime': get_mtime(fs2, ch.id),858 'mtime': get_mtime(self.fs2, ch.id),
852 }859 }
853 860
854 }861 }
@@ -858,8 +865,8 @@
858 for (file, ch) in result:865 for (file, ch) in result:
859 if ch is None:866 if ch is None:
860 continue867 continue
861 self.assertEqual(fs1.verify(ch.id), ch)868 self.assertEqual(self.fs1.verify(ch.id), ch)
862 self.assertEqual(fs2.verify(ch.id), ch)869 self.assertEqual(self.fs2.verify(ch.id), ch)
863870
864 ##################################################################871 ##################################################################
865 # Okay, now run the whole thing again when they're all duplicates:872 # Okay, now run the whole thing again when they're all duplicates:
@@ -910,10 +917,8 @@
910 ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats)))917 ('batch_finished', (batch_id, stats, 3, importer.notify_stats2(stats)))
911 )918 )
912919
913 fs1 = filestore.FileStore(self.dst1.dir)920 self.assertEqual(set(st.id for st in self.fs1), ids)
914 fs2 = filestore.FileStore(self.dst2.dir)921 self.assertEqual(set(st.id for st in self.fs2), ids)
915 self.assertEqual(set(st.id for st in fs1), ids)
916 self.assertEqual(set(st.id for st in fs2), ids)
917922
918 # Check all the dmedia/file docs:923 # Check all the dmedia/file docs:
919 for (file, ch) in result:924 for (file, ch) in result:
@@ -931,13 +936,13 @@
931 self.assertEqual(leaf_hashes, ch.leaf_hashes)936 self.assertEqual(leaf_hashes, ch.leaf_hashes)
932 self.assertEqual(doc['stored'],937 self.assertEqual(doc['stored'],
933 {938 {
934 self.store1_id: {939 self.fs1.id: {
935 'copies': 1,940 'copies': self.fs1.copies,
936 'mtime': get_mtime(fs1, ch.id),941 'mtime': get_mtime(self.fs1, ch.id),
937 },942 },
938 self.store2_id: {943 self.fs2.id: {
939 'copies': 2,944 'copies': self.fs2.copies,
940 'mtime': get_mtime(fs2, ch.id),945 'mtime': get_mtime(self.fs2, ch.id),
941 }946 }
942 947
943 }948 }
@@ -947,8 +952,8 @@
947 for (file, ch) in result:952 for (file, ch) in result:
948 if ch is None:953 if ch is None:
949 continue954 continue
950 self.assertEqual(fs1.verify(ch.id), ch)955 self.assertEqual(self.fs1.verify(ch.id), ch)
951 self.assertEqual(fs2.verify(ch.id), ch)956 self.assertEqual(self.fs2.verify(ch.id), ch)
952 957
953 958
954MAGIC_LANTERN = (959MAGIC_LANTERN = (
955960
=== modified file 'dmedia/tests/test_local.py'
--- dmedia/tests/test_local.py 2013-05-14 21:28:00 +0000
+++ dmedia/tests/test_local.py 2013-08-25 20:53:26 +0000
@@ -27,6 +27,7 @@
27from random import Random27from random import Random
28import time28import time
2929
30import microfiber
30import filestore31import filestore
31from filestore import DIGEST_BYTES32from filestore import DIGEST_BYTES
32from filestore.misc import TempFileStore33from filestore.misc import TempFileStore
@@ -190,18 +191,17 @@
190 inst.add(fs1)191 inst.add(fs1)
191 self.assertEqual(inst.local_stores(),192 self.assertEqual(inst.local_stores(),
192 {193 {
193 fs1.parentdir: {'id': fs1.id, 'copies': 1},194 fs1.id: {'parentdir': fs1.parentdir, 'copies': 1},
194 }195 }
195 )196 )
196 197
197 inst.add(fs2)198 inst.add(fs2)
198 self.assertEqual(inst.local_stores(),199 self.assertEqual(inst.local_stores(),
199 {200 {
200 fs1.parentdir: {'id': fs1.id, 'copies': 1},201 fs1.id: {'parentdir': fs1.parentdir, 'copies': 1},
201 fs2.parentdir: {'id': fs2.id, 'copies': 0}, 202 fs2.id: {'parentdir': fs2.parentdir, 'copies': 0},
202 }203 }
203 )204 )
204
205205
206206
207class TestLocalSlave(CouchCase):207class TestLocalSlave(CouchCase):
@@ -209,6 +209,57 @@
209 super().setUp()209 super().setUp()
210 util.get_db(self.env, True)210 util.get_db(self.env, True)
211211
212 def test_init(self):
213 inst = local.LocalSlave(self.env)
214 self.assertIsInstance(inst.db, microfiber.Database)
215 self.assertEqual(inst.machine_id, self.machine_id)
216 self.assertIsNone(inst.last_rev)
217
218 def test_update_stores(self):
219 inst = local.LocalSlave(self.env)
220 machine = {
221 '_id': self.machine_id,
222 'stores': {},
223 }
224 inst.db.save(machine)
225
226 # No stores
227 self.assertIsNone(inst.update_stores())
228 self.assertEqual(inst.last_rev, machine['_rev'])
229 self.assertIsInstance(inst.stores, local.LocalStores)
230 self.assertEqual(inst.stores.local_stores(), {})
231
232 # One store
233 fs1 = TempFileStore()
234 machine['stores'] = {
235 fs1.id: {'parentdir': fs1.parentdir, 'copies': fs1.copies},
236 }
237 inst.db.save(machine)
238 self.assertIsNone(inst.update_stores())
239 self.assertEqual(inst.last_rev, machine['_rev'])
240 self.assertIsInstance(inst.stores, local.LocalStores)
241 self.assertEqual(inst.stores.local_stores(), machine['stores'])
242
243 # Two stores
244 fs2 = TempFileStore()
245 machine['stores'] = {
246 fs1.id: {'parentdir': fs1.parentdir, 'copies': fs1.copies},
247 fs2.id: {'parentdir': fs2.parentdir, 'copies': fs2.copies},
248 }
249 inst.db.save(machine)
250 self.assertIsNone(inst.update_stores())
251 self.assertEqual(inst.last_rev, machine['_rev'])
252 self.assertIsInstance(inst.stores, local.LocalStores)
253 self.assertEqual(inst.stores.local_stores(), machine['stores'])
254
255 # Make sure LocalStores doesn't needlessly get rebuilt
256 old = inst.stores
257 rev = inst.last_rev
258 self.assertIsNone(inst.update_stores())
259 self.assertIs(inst.stores, old)
260 self.assertIs(inst.last_rev, rev)
261 self.assertEqual(inst.stores.local_stores(), machine['stores'])
262
212 def test_get_doc(self):263 def test_get_doc(self):
213 inst = local.LocalSlave(self.env)264 inst = local.LocalSlave(self.env)
214265
215266
=== modified file 'dmedia/tests/test_metastore.py'
--- dmedia/tests/test_metastore.py 2013-07-02 08:24:57 +0000
+++ dmedia/tests/test_metastore.py 2013-08-25 20:53:26 +0000
@@ -1432,6 +1432,7 @@
1432 db = util.get_db(self.env, True)1432 db = util.get_db(self.env, True)
1433 ms = metastore.MetaStore(db)1433 ms = metastore.MetaStore(db)
1434 local_id = '_local/dmedia'1434 local_id = '_local/dmedia'
1435 machine_id = random_id()
14351436
1436 # _local/dmedia NotFound:1437 # _local/dmedia NotFound:
1437 self.assertEqual(ms.get_local_peers(), {})1438 self.assertEqual(ms.get_local_peers(), {})
@@ -1439,18 +1440,28 @@
1439 with self.assertRaises(microfiber.NotFound) as cm:1440 with self.assertRaises(microfiber.NotFound) as cm:
1440 db.get(local_id)1441 db.get(local_id)
14411442
1442 # _local/dmedia exists, but is missing doc['peers']:1443 # _local/dmedia exists, but is missing 'machine_id':
1443 doc = {'_id': local_id}1444 local = {'_id': local_id}
1444 db.save(doc)1445 db.save(local)
1445 self.assertEqual(ms.get_local_peers(), {})1446 self.assertEqual(ms.get_local_peers(), {})
14461447
1447 # has doc['peers']:1448 # _local/dmedia has 'machine_id', but machine doc is missing:
1449 local['machine_id'] = machine_id
1450 db.save(local)
1451 self.assertEqual(ms.get_local_peers(), {})
1452
1453 # machine exists, but is missing 'peers':
1454 machine = {'_id': machine_id}
1455 db.save(machine)
1456 self.assertEqual(ms.get_local_peers(), {})
1457
1458 # machine has 'peers':
1448 peers = {1459 peers = {
1449 random_id(30): {'url': random_id()},1460 random_id(30): {'url': random_id()},
1450 random_id(30): {'url': random_id()},1461 random_id(30): {'url': random_id()},
1451 }1462 }
1452 doc['peers'] = peers1463 machine['peers'] = peers
1453 db.save(doc)1464 db.save(machine)
1454 self.assertEqual(ms.get_local_peers(), peers)1465 self.assertEqual(ms.get_local_peers(), peers)
14551466
1456 def test_schema_check(self):1467 def test_schema_check(self):

Subscribers

People subscribed via source and target branches