Merge lp:~jderose/dmedia/schema-ver0 into lp:dmedia

Proposed by Jason Gerard DeRose
Status: Merged
Merged at revision: 181
Proposed branch: lp:~jderose/dmedia/schema-ver0
Merge into: lp:dmedia
Diff against target: 1570 lines (+859/-232)
11 files modified
dmedia/extractor.py (+6/-1)
dmedia/filestore.py (+3/-0)
dmedia/gtkui/__init__.py (+3/-1)
dmedia/importer.py (+31/-75)
dmedia/metastore.py (+103/-16)
dmedia/schema.py (+409/-14)
dmedia/tests/helpers.py (+1/-0)
dmedia/tests/test_extractor.py (+16/-14)
dmedia/tests/test_importer.py (+8/-100)
dmedia/tests/test_schema.py (+275/-7)
dmedia/webui/data/browser.js (+4/-4)
To merge this branch: bzr merge lp:~jderose/dmedia/schema-ver0
Reviewer: Jason Gerard DeRose (status: Approve)
Review via email: mp+56680@code.launchpad.net

Description of the change

There is still a bit of work to do finalizing how tags work, but the important part is already enforced in the schema: 'tags' is a dictionary. I'll open another bug for finishing the tags schema, but this is already a fairly large change, so I'm proposing this for merge before it gets out of hand.
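
For reference, here is a minimal sketch of the dictionary shape the schema already enforces; the 'burp' tag and its start/end values are placeholder data borrowed from the test suite:

    # 'tags' maps each tag name to a dict of per-tag details,
    # rather than being a flat list of strings as before:
    doc['tags'] = {'burp': {'start': 6, 'end': 73}}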

Oh, and this includes an important unrelated bugfix: I goofed in the 0.5 transition from PyGTK to PyGI, so GObject.threads_init() was no longer being called when ./dmedia-service runs... oops. This was causing a lot of weird problems with GUI hangs and the logging order (in conjunction with multiprocessing) getting wacky. Anyway, I slipped that little one-line fix in here too (in dmedia/gtkui/__init__.py).
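
For the record, the fix is just the standard PyGI threading idiom, run once at import time; this mirrors the actual change to dmedia/gtkui/__init__.py in the diff below:

    from gi.repository import GObject

    # Must be called before any threads start; without it the GUI can hang
    # and log output (in conjunction with multiprocessing) gets interleaved:
    GObject.threads_init()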

Revision history for this message
Jason Gerard DeRose (jderose) wrote:

Okay, I'm gonna self-approve as I want to get the important PyGI fix into trunk. This needs to be an especially high-velocity month for dmedia, so I might break the rules often. To make up for it, I will write an unusually high number of unit tests, even by my standards.

"""
Jason, looks great! Amazingly there isn't even a single typo, I'm sure!

--Jason
"""

review: Approve

Preview Diff

1=== modified file 'dmedia/extractor.py'
2--- dmedia/extractor.py 2011-01-28 14:07:33 +0000
3+++ dmedia/extractor.py 2011-04-07 03:13:28 +0000
4@@ -223,13 +223,18 @@
5 def merge_metadata(src, doc):
6 ext = doc['ext']
7 attachments = doc.get('_attachments', {})
8+ meta = doc.get('meta', {})
9 if ext in _extractors:
10 callback = _extractors[ext]
11 for (key, value) in callback(src, attachments):
12- if key not in doc or key == 'mtime':
13+ if key == 'mtime':
14 doc[key] = value
15+ elif key not in meta:
16+ meta[key] = value
17 if attachments and '_attachments' not in doc:
18 doc['_attachments'] = attachments
19+ if meta and 'meta' not in doc:
20+ doc['meta'] = meta
21
22
23 def merge_exif(src, attachments):
24
25=== modified file 'dmedia/filestore.py'
26--- dmedia/filestore.py 2011-02-27 03:47:55 +0000
27+++ dmedia/filestore.py 2011-04-07 03:13:28 +0000
28@@ -613,6 +613,9 @@
29 raise ValueError('%s.base not a directory: %r' %
30 (self.__class__.__name__, self.base)
31 )
32+
33+ # FIXME: This is too high-level for FileStore; it should instead be dealt
34+ # with by the core API entry point as FileStore instances are first initialized
35 self.record = path.join(self.base, 'store.json')
36 try:
37 fp = open(self.record, 'rb')
38
39=== modified file 'dmedia/gtkui/__init__.py'
40--- dmedia/gtkui/__init__.py 2011-03-27 08:01:30 +0000
41+++ dmedia/gtkui/__init__.py 2011-04-07 03:13:28 +0000
42@@ -26,4 +26,6 @@
43 import gi
44 gi.require_version('Gtk', '2.0')
45 gi.require_version('WebKit', '1.0')
46-from gi.repository import Gtk, WebKit
47+from gi.repository import GObject, Gtk, WebKit
48+
49+GObject.threads_init()
50
51=== modified file 'dmedia/importer.py'
52--- dmedia/importer.py 2011-03-28 12:38:29 +0000
53+++ dmedia/importer.py 2011-04-07 03:13:28 +0000
54@@ -33,7 +33,7 @@
55
56 import couchdb
57
58-from .schema import random_id
59+from .schema import random_id, create_file, create_batch, create_import
60 from .errors import DuplicateFile
61 from .workers import (
62 CouchWorker, CouchManager, register, isregistered, exception_name
63@@ -48,6 +48,24 @@
64 log = logging.getLogger()
65
66
67+# FIXME: This needs to be done with some real inspection of the file contents,
68+# but this is just a stopgap for the sake of getting the schema stable:
69+MEDIA_MAP = {
70+ 'ogv': 'video',
71+ 'mov': 'video',
72+ 'avi': 'video',
73+
74+ 'oga': 'audio',
75+ 'flac': 'audio',
76+ 'wav': 'audio',
77+ 'mp3': 'audio',
78+
79+ 'jpg': 'image',
80+ 'cr2': 'image',
81+ 'png': 'image',
82+}
83+
84+
85 def normalize_ext(name):
86 """
87 Return (root, ext) from *name* where extension is normalized to lower-case.
88@@ -143,53 +161,6 @@
89 yield tup
90
91
92-def create_batch(machine_id=None):
93- """
94- Create initial 'dmedia/batch' accounting document.
95- """
96- return {
97- '_id': random_id(),
98- 'type': 'dmedia/batch',
99- 'time': time.time(),
100- 'machine_id': machine_id,
101- 'imports': [],
102- 'errors': [],
103- 'stats': {
104- 'considered': {'count': 0, 'bytes': 0},
105- 'imported': {'count': 0, 'bytes': 0},
106- 'skipped': {'count': 0, 'bytes': 0},
107- 'empty': {'count': 0, 'bytes': 0},
108- 'error': {'count': 0, 'bytes': 0},
109- }
110- }
111-
112-
113-def create_import(base, batch_id=None, machine_id=None):
114- """
115- Create initial 'dmedia/import' accounting document.
116- """
117- return {
118- '_id': random_id(),
119- 'type': 'dmedia/import',
120- 'time': time.time(),
121- 'batch_id': batch_id,
122- 'machine_id': machine_id,
123- 'base': base,
124- 'log': {
125- 'imported': [],
126- 'skipped': [],
127- 'empty': [],
128- 'error': [],
129- },
130- 'stats': {
131- 'imported': {'count': 0, 'bytes': 0},
132- 'skipped': {'count': 0, 'bytes': 0},
133- 'empty': {'count': 0, 'bytes': 0},
134- 'error': {'count': 0, 'bytes': 0},
135- }
136- }
137-
138-
139 class ImportWorker(CouchWorker):
140 def __init__(self, env, q, key, args):
141 super(ImportWorker, self).__init__(env, q, key, args)
142@@ -308,34 +279,19 @@
143 except couchdb.ResourceNotFound as e:
144 pass
145
146- ts = time.time()
147- doc = {
148- '_id': chash,
149- '_attachments': {
150- 'leaves': {
151- 'data': b64encode(pack_leaves(leaves)),
152- 'content_type': 'application/octet-stream',
153- }
154- },
155- 'type': 'dmedia/file',
156- 'time': ts,
157- 'bytes': stat.st_size,
158- 'ext': ext,
159- 'origin': 'user',
160- 'stored': {
161- self.filestore._id: {
162- 'copies': 1,
163- 'time': ts,
164- },
165- },
166-
167- 'import_id': self._id,
168- 'mtime': stat.st_mtime,
169- 'name': name,
170- 'dir': path.relpath(path.dirname(src), self.base),
171- }
172+ doc = create_file(stat.st_size, leaves, self.filestore._id,
173+ copies=1, ext=ext
174+ )
175+ assert doc['_id'] == chash
176+ doc.update(
177+ import_id=self._id,
178+ mtime=stat.st_mtime,
179+ name=name,
180+ dir=path.relpath(path.dirname(src), self.base),
181+ )
182 if ext:
183- doc['content_type'] = mimetypes.types_map.get('.' + ext)
184+ doc['mime'] = mimetypes.types_map.get('.' + ext)
185+ doc['media'] = MEDIA_MAP.get(ext)
186 if self.extract:
187 merge_metadata(src, doc)
188 (_id, _rev) = self.db.save(doc)
189
190=== modified file 'dmedia/metastore.py'
191--- dmedia/metastore.py 2011-03-27 09:05:32 +0000
192+++ dmedia/metastore.py 2011-04-07 03:13:28 +0000
193@@ -62,6 +62,19 @@
194 }
195 """
196
197+# views in the 'file' design only index docs for which doc.type == 'dmedia/file'
198+file_stored = """
199+// Get list of all files on a given store, total bytes on that store
200+function(doc) {
201+ if (doc.type == 'dmedia/file') {
202+ var key;
203+ for (key in doc.stored) {
204+ emit(key, doc.bytes);
205+ }
206+ }
207+}
208+"""
209+
210 file_bytes = """
211 function(doc) {
212 if (doc.type == 'dmedia/file' && typeof(doc.bytes) == 'number') {
213@@ -78,10 +91,10 @@
214 }
215 """
216
217-file_content_type = """
218+file_mime = """
219 function(doc) {
220 if (doc.type == 'dmedia/file') {
221- emit(doc.content_type, null);
222+ emit(doc.mime, null);
223 }
224 }
225 """
226@@ -94,16 +107,6 @@
227 }
228 """
229
230-file_tags = """
231-function(doc) {
232- if (doc.type == 'dmedia/file' && doc.tags) {
233- doc.tags.forEach(function(tag) {
234- emit(tag, null);
235- });
236- }
237-}
238-"""
239-
240 file_import_id = """
241 function(doc) {
242 if (doc.type == 'dmedia/file' && doc.import_id) {
243@@ -112,13 +115,87 @@
244 }
245 """
246
247+# views in the 'user' design only index docs for which doc.type == 'dmedia/file'
248+# and doc.origin == 'user'
249+user_copies = """
250+// Durability of user's personal files
251+function(doc) {
252+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
253+ var copies = 0;
254+ var key;
255+ for (key in doc.stored) {
256+ copies += doc.stored[key].copies;
257+ }
258+ emit(copies, null);
259+ }
260+}
261+"""
262+
263+user_media = """
264+function(doc) {
265+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
266+ emit(doc.media, null);
267+ }
268+}
269+"""
270+
271+user_tags = """
272+function(doc) {
273+ if (doc.type == 'dmedia/file' && doc.origin == 'user' && doc.tags) {
274+ var key;
275+ for (key in doc.tags) {
276+ emit(key, doc.tags[key]);
277+ }
278+ }
279+}
280+"""
281+
282+user_all = """
283+function(doc) {
284+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
285+ emit(doc.mtime, null);
286+ }
287+}
288+"""
289+
290+user_video = """
291+function(doc) {
292+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
293+ if (doc.media == 'video') {
294+ emit(doc.mtime, null);
295+ }
296+ }
297+}
298+"""
299+
300+user_image = """
301+function(doc) {
302+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
303+ if (doc.media == 'image') {
304+ emit(doc.mtime, null);
305+ }
306+ }
307+}
308+"""
309+
310+user_audio = """
311+function(doc) {
312+ if (doc.type == 'dmedia/file' && doc.origin == 'user') {
313+ if (doc.media == 'audio') {
314+ emit(doc.mtime, null);
315+ }
316+ }
317+}
318+"""
319+
320+
321 def build_design_doc(design, views):
322 _id = '_design/' + design
323 d = {}
324 for (view, map_, reduce_) in views:
325- d[view] = {'map': map_}
326+ d[view] = {'map': map_.strip()}
327 if reduce_ is not None:
328- d[view]['reduce'] = reduce_
329+ d[view]['reduce'] = reduce_.strip()
330 doc = {
331 '_id': _id,
332 'language': 'javascript',
333@@ -168,12 +245,22 @@
334 )),
335
336 ('file', (
337+ ('stored', file_stored, _sum),
338 ('import_id', file_import_id, None),
339 ('bytes', file_bytes, _sum),
340 ('ext', file_ext, _count),
341- ('content_type', file_content_type, _count),
342+ ('mime', file_mime, _count),
343 ('mtime', file_mtime, None),
344- ('tags', file_tags, _count),
345+ )),
346+
347+ ('user', (
348+ ('copies', user_copies, None),
349+ ('media', user_media, _count),
350+ ('tags', user_tags, _count),
351+ ('all', user_all, None),
352+ ('video', user_video, None),
353+ ('image', user_image, None),
354+ ('audio', user_audio, None),
355 )),
356 )
357
358
359=== modified file 'dmedia/schema.py'
360--- dmedia/schema.py 2011-03-27 09:05:32 +0000
361+++ dmedia/schema.py 2011-04-07 03:13:28 +0000
362@@ -33,6 +33,7 @@
363
364 >>> good = {
365 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
366+... 'ver': 0,
367 ... 'type': 'dmedia/foo',
368 ... 'time': 1234567890,
369 ... }
370@@ -40,6 +41,7 @@
371 >>> check_dmedia(good) # Returns None
372 >>> bad = {
373 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
374+... 'ver': 0,
375 ... 'kind': 'dmedia/foo',
376 ... 'timestamp': 1234567890,
377 ... }
378@@ -222,6 +224,7 @@
379
380 >>> doc = {
381 ... '_id': 'MZZG2ZDSOQVSW2TEMVZG643F',
382+... 'ver': 0,
383 ... 'type': 'dmedia/batch',
384 ... 'time': 1234567890,
385 ... }
386@@ -308,9 +311,11 @@
387 from __future__ import print_function
388
389 import os
390-from base64 import b32encode, b32decode
391+from hashlib import sha1
392+from base64 import b32encode, b32decode, b64encode
393 import re
394 import time
395+
396 from .constants import TYPE_ERROR, EXT_PAT
397
398 # Some private helper functions that don't directly define any schema.
399@@ -325,6 +330,201 @@
400 #
401 # That is all.
402
403+
404+# FIXME: These functions are a step toward making the checks more concise and
405+# the error messages consistent and even more helpful. However, these functions
406+# aren't used much yet... but all the schema checks should be ported to these
407+# functions eventually.
408+def _label(path):
409+ """
410+ Create a helpful debugging label to indicate the attribute in question.
411+
412+ For example:
413+
414+ >>> _label([])
415+ 'doc'
416+ >>> _label(['log'])
417+ "doc['log']"
418+ >>> _label(['log', 'considered', 2, 'src'])
419+ "doc['log']['considered'][2]['src']"
420+
421+
422+ See also `_value()`.
423+ """
424+ return 'doc' + ''.join('[{!r}]'.format(key) for key in path)
425+
426+
427+def _value(doc, path):
428+ """
429+ Retrieve value from *doc* by traversing *path*.
430+
431+ For example:
432+
433+ >>> doc = {'log': {'considered': [None, None, {'src': 'hello'}, None]}}
434+ >>> _value(doc, [])
435+ {'log': {'considered': [None, None, {'src': 'hello'}, None]}}
436+ >>> _value(doc, ['log'])
437+ {'considered': [None, None, {'src': 'hello'}, None]}
438+ >>> _value(doc, ['log', 'considered', 2, 'src'])
439+ 'hello'
440+
441+
442+ Or if you try to retrieve something that doesn't exist:
443+
444+ >>> _value(doc, ['log', 'considered', 7])
445+ Traceback (most recent call last):
446+ ...
447+ ValueError: doc['log']['considered'][7] does not exist
448+
449+
450+ Or if a key/index is missing higher up in the path:
451+
452+ >>> _value(doc, ['dog', 'considered', 7])
453+ Traceback (most recent call last):
454+ ...
455+ ValueError: doc['dog'] does not exist
456+
457+
458+ See also `_label()`.
459+ """
460+ value = doc
461+ p = []
462+ for key in path:
463+ p.append(key)
464+ try:
465+ value = value[key]
466+ except (KeyError, IndexError):
467+ raise ValueError(
468+ '{} does not exist'.format(_label(p))
469+ )
470+ return value
471+
472+
473+def _exists(doc, path):
474+ """
475+ Return ``True`` if the end of *path* exists.
476+
477+ For example:
478+
479+ >>> doc = {'foo': {'hello': 'world'}, 'bar': ['hello', 'naughty', 'nurse']}
480+ >>> _exists(doc, ['foo', 'hello'])
481+ True
482+ >>> _exists(doc, ['foo', 'sup'])
483+ False
484+ >>> _exists(doc, ['bar', 2])
485+ True
486+ >>> _exists(doc, ['bar', 3])
487+ False
488+
489+
490+ Or if a key/index is missing higher up the path:
491+
492+ >>> _exists(doc, ['stuff', 'junk'])
493+ Traceback (most recent call last):
494+ ...
495+ ValueError: doc['stuff'] does not exist
496+
497+
498+ See also `_check_if_exists()`.
499+ """
500+ if len(path) == 0:
501+ return True
502+ base = _value(doc, path[:-1])
503+ key = path[-1]
504+ try:
505+ value = base[key]
506+ return True
507+ except (KeyError, IndexError):
508+ return False
509+
510+
511+def _check(doc, path, *checks):
512+ """
513+ Run a series of *checks* on the value in *doc* addressed by *path*.
514+
515+ For example:
516+
517+ >>> doc = {'foo': [None, {'bar': 'aye'}, None]}
518+ >>> _check(doc, ['foo', 1, 'bar'],
519+ ... _check_str,
520+ ... (_check_in, 'bee', 'sea'),
521+ ... )
522+ ...
523+ Traceback (most recent call last):
524+ ...
525+ ValueError: doc['foo'][1]['bar'] value 'aye' not in ('bee', 'sea')
526+
527+
528+ Or if a value is missing:
529+
530+ >>> _check(doc, ['foo', 3],
531+ ... _can_be_none,
532+ ... )
533+ ...
534+ Traceback (most recent call last):
535+ ...
536+ ValueError: doc['foo'][3] does not exist
537+
538+
539+ See also `_check_if_exists()`.
540+ """
541+ value = _value(doc, path)
542+ label = _label(path)
543+ for c in checks:
544+ if isinstance(c, tuple):
545+ (c, args) = (c[0], c[1:])
546+ else:
547+ args = tuple()
548+ if c(value, label, *args) is True:
549+ break
550+
551+
552+def _check_if_exists(doc, path, *checks):
553+ """
554+ Run *checks* only if value at *path* exists.
555+
556+ For example:
557+
558+ >>> doc = {'name': 17}
559+ >>> _check_if_exists(doc, ['dir'], _check_str)
560+ >>> _check_if_exists(doc, ['name'], _check_str)
561+ Traceback (most recent call last):
562+ ...
563+ TypeError: doc['name']: need a <type 'basestring'>; got a <type 'int'>: 17
564+
565+
566+ See also `_check()` and `_exists()`.
567+ """
568+ if _exists(doc, path):
569+ _check(doc, path, *checks)
570+
571+
572+def _can_be_none(value, label):
573+ """
574+ Stop execution of check if *value* is ``None``.
575+
576+ `_check()` will abort upon a check function returning ``True``.
577+
578+ For example, here a ``TypeError`` is raised:
579+
580+ >>> doc = {'ext': None}
581+ >>> _check(doc, ['ext'], _check_str)
582+ Traceback (most recent call last):
583+ ...
584+ TypeError: doc['ext']: need a <type 'basestring'>; got a <type 'NoneType'>: None
585+
586+
587+ But here it is not:
588+
589+ >>> _check(doc, ['ext'], _can_be_none, _check_str)
590+
591+ """
592+ if value is None:
593+ return True
594+
595+# /FIXME new helper functions
596+
597+
598 def _check_dict(value, label):
599 """
600 Verify that *value* is a ``dict`` instance.
601@@ -387,13 +587,13 @@
602 if not isinstance(value, (int, float)):
603 raise TypeError(TYPE_ERROR % (label, (int, float), type(value), value))
604
605-def _check_at_least(value, minvalue, label):
606+def _check_at_least(value, label, minvalue=0):
607 """
608 Verify that *value* is greater than or equal to *minvalue*.
609
610 For example:
611
612- >>> _check_at_least(0, 1, 'bytes')
613+ >>> _check_at_least(0, 'bytes', 1)
614 Traceback (most recent call last):
615 ...
616 ValueError: bytes must be >= 1; got 0
617@@ -477,6 +677,25 @@
618 )
619
620
621+def _check_in(value, label, *possible):
622+ """
623+ Check that *value* is one of *possible*.
624+
625+ For example:
626+
627+ >>> _check_in('foo', "doc['media']", 'video', 'audio', 'image')
628+ Traceback (most recent call last):
629+ ...
630+ ValueError: doc['media'] value 'foo' not in ('video', 'audio', 'image')
631+
632+ """
633+ if value not in possible:
634+ raise ValueError(
635+ '{} value {!r} not in {!r}'.format(label, value, possible)
636+ )
637+
638+
639+
640 # The schema defining functions:
641
642 def check_base32(value, label='_id'):
643@@ -579,7 +798,7 @@
644
645 """
646 _check_int_float(value, label)
647- _check_at_least(value, 0, label)
648+ _check_at_least(value, label, 0)
649
650
651 def check_dmedia(doc):
652@@ -591,14 +810,17 @@
653
654 1. have '_id' that passes `check_base32()`
655
656- 2. have 'type' that passes `check_type()`
657-
658- 3. have 'time' that passes `check_time()`
659+ 2. have a 'ver' equal to ``0``
660+
661+ 3. have 'type' that passes `check_type()`
662+
663+ 4. have 'time' that passes `check_time()`
664
665 For example, a conforming value:
666
667 >>> doc = {
668 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
669+ ... 'ver': 0,
670 ... 'type': 'dmedia/file',
671 ... 'time': 1234567890,
672 ... }
673@@ -610,6 +832,7 @@
674
675 >>> doc = {
676 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
677+ ... 'ver': 0,
678 ... 'kind': 'dmedia/file',
679 ... 'timestamp': 1234567890,
680 ... }
681@@ -620,8 +843,13 @@
682 ValueError: doc missing keys: ['time', 'type']
683
684 """
685- _check_required(doc, ['_id', 'type', 'time'])
686+ _check_required(doc, ['_id', 'ver', 'type', 'time'])
687 check_base32(doc['_id'])
688+ _check_int(doc['ver'], 'ver')
689+ if doc['ver'] != 0:
690+ raise ValueError(
691+ "doc['ver'] must be 0; got {!r}".format(doc['ver'])
692+ )
693 check_type(doc['type'])
694 check_time(doc['time'])
695
696@@ -638,7 +866,7 @@
697
698 3. have values that are themselves ``dict`` instances
699
700- 4. values must have 'copies' that is an ``int`` >= 1
701+ 4. values must have 'copies' that is an ``int`` >= 0
702
703 5. values must have 'time' that conforms with `check_time()`
704
705@@ -686,7 +914,7 @@
706 copies = value['copies']
707 l3 = l2 + "['copies']"
708 _check_int(copies, l3)
709- _check_at_least(copies, 1, l3)
710+ _check_at_least(copies, l3, 0)
711
712 # Check 'time':
713 check_time(value['time'], l2 + "['time']")
714@@ -780,7 +1008,6 @@
715 raise ValueError('%s: %r not in %r' % (label, value, allowed))
716
717
718-
719 def check_dmedia_file(doc):
720 """
721 Verify that *doc* is a valid 'dmedia/file' record type.
722@@ -803,6 +1030,7 @@
723
724 >>> doc = {
725 ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
726+ ... 'ver': 0,
727 ... 'type': 'dmedia/file',
728 ... 'time': 1234567890,
729 ... 'bytes': 20202333,
730@@ -823,6 +1051,7 @@
731
732 >>> doc = {
733 ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
734+ ... 'ver': 0,
735 ... 'type': 'dmedia/file',
736 ... 'time': 1234567890,
737 ... 'bytes': 20202333,
738@@ -854,7 +1083,7 @@
739 # Check 'bytes':
740 b = doc['bytes']
741 _check_int(b, 'bytes')
742- _check_at_least(b, 1, 'bytes')
743+ _check_at_least(b, 'bytes', 1)
744
745 # Check 'ext':
746 check_ext(doc['ext'])
747@@ -865,6 +1094,63 @@
748 # Check 'stored'
749 check_stored(doc['stored'])
750
751+ check_dmedia_file_optional(doc)
752+
753+
754+def check_dmedia_file_optional(doc):
755+ """
756+ Check the optional attributes in a 'dmedia/file' document.
757+ """
758+ _check_dict(doc, 'doc')
759+
760+ # 'mime' like 'video/quicktime'
761+ _check_if_exists(doc, ['mime'],
762+ _can_be_none,
763+ _check_str,
764+ )
765+
766+ # 'media' like 'video'
767+ _check_if_exists(doc, ['media'],
768+ _can_be_none,
769+ _check_str,
770+ (_check_in, 'video', 'audio', 'image'),
771+ )
772+
773+ # 'mtime' like 1234567890
774+ _check_if_exists(doc, ['mtime'],
775+ check_time
776+ )
777+
778+ # 'atime' like 1234567890
779+ _check_if_exists(doc, ['atime'],
780+ check_time
781+ )
782+
783+ # name like 'MVI_5899.MOV'
784+ _check_if_exists(doc, ['name'],
785+ _check_str,
786+ )
787+
788+ # dir like 'DCIM/100EOS5D2'
789+ _check_if_exists(doc, ['dir'],
790+ _check_str,
791+ )
792+
793+ # 'meta' like {'iso': 800}
794+ _check_if_exists(doc, ['meta'],
795+ _check_dict
796+ )
797+
798+ # 'user' like {'title': 'cool sunset'}
799+ _check_if_exists(doc, ['user'],
800+ _check_dict
801+ )
802+
803+ # 'tags' like {'burp': {'start': 6, 'end': 73}}
804+ _check_if_exists(doc, ['tags'],
805+ _check_dict
806+ )
807+
808
809 def check_dmedia_store(doc):
810 """
811@@ -883,6 +1169,7 @@
812
813 >>> doc = {
814 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
815+ ... 'ver': 0,
816 ... 'type': 'dmedia/file',
817 ... 'time': 1234567890,
818 ... 'plugin': 'filestore',
819@@ -896,6 +1183,7 @@
820
821 >>> doc = {
822 ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
823+ ... 'ver': 0,
824 ... 'type': 'dmedia/file',
825 ... 'time': 1234567890,
826 ... 'dispatch': 'filestore',
827@@ -925,7 +1213,7 @@
828 key = 'copies'
829 dc = doc[key]
830 _check_int(dc, key)
831- _check_at_least(dc, 1, key)
832+ _check_at_least(dc, key, 1)
833
834
835 def random_id(random=None):
836@@ -950,10 +1238,68 @@
837 return b32encode(random)
838
839
840-# This should probably be moved
841+# FIXME: There is currently a recursive import issue with filestore, but FileStore
842+# shouldn't deal with the store.json file anyway, and should not import
843+# `schema.create_store()`
844+def tophash_personalization(file_size):
845+ return ' '.join(['dmedia/tophash', str(file_size)]).encode('utf-8')
846+
847+
848+def tophash(file_size, leaves):
849+ """
850+ Initialize hash for a file that is *file_size* bytes.
851+ """
852+ h = sha1(tophash_personalization(file_size))
853+ h.update(leaves)
854+ return b32encode(h.digest())
855+
856+
857+def create_file(file_size, leaves, store, copies=0, ext=None, origin='user'):
858+ """
859+ Create a minimal 'dmedia/file' document.
860+
861+ :param file_size: an ``int``, the file size in bytes, eg ``20202333``
862+ :param leaves: a ``list`` containing the content hash of each leaf
863+ :param store: the ID of the store where this file is initially stored, eg
864+ ``'Y4J3WQCMKV5GHATOCZZBHF4Y'``
865+ :param copies: an ``int`` to represent the durability of the file on this
866+ store; default is ``0``
867+ :param ext: the file extension, eg ``'mov'``; default is ``None``
868+ :param origin: the file's origin (for durability/reclamation purposes);
869+ default is ``'user'``
870+ """
871+ ts = time.time()
872+ packed = b''.join(leaves)
873+ return {
874+ '_id': tophash(file_size, packed),
875+ '_attachments': {
876+ 'leaves': {
877+ 'data': b64encode(packed),
878+ 'content_type': 'application/octet-stream',
879+ }
880+ },
881+ 'ver': 0,
882+ 'type': 'dmedia/file',
883+ 'time': ts,
884+ 'bytes': file_size,
885+ 'ext': ext,
886+ 'origin': origin,
887+ 'stored': {
888+ store: {
889+ 'copies': copies,
890+ 'time': ts,
891+ }
892+ }
893+ }
894+
895+
896 def create_store(base, machine_id, copies=1):
897+ """
898+ Create a 'dmedia/store' document.
899+ """
900 return {
901 '_id': random_id(),
902+ 'ver': 0,
903 'type': 'dmedia/store',
904 'time': time.time(),
905 'plugin': 'filestore',
906@@ -961,3 +1307,52 @@
907 'path': base,
908 'machine_id': machine_id,
909 }
910+
911+
912+def create_batch(machine_id=None):
913+ """
914+ Create initial 'dmedia/batch' accounting document.
915+ """
916+ return {
917+ '_id': random_id(),
918+ 'ver': 0,
919+ 'type': 'dmedia/batch',
920+ 'time': time.time(),
921+ 'machine_id': machine_id,
922+ 'imports': [],
923+ 'errors': [],
924+ 'stats': {
925+ 'considered': {'count': 0, 'bytes': 0},
926+ 'imported': {'count': 0, 'bytes': 0},
927+ 'skipped': {'count': 0, 'bytes': 0},
928+ 'empty': {'count': 0, 'bytes': 0},
929+ 'error': {'count': 0, 'bytes': 0},
930+ }
931+ }
932+
933+
934+def create_import(base, batch_id=None, machine_id=None):
935+ """
936+ Create initial 'dmedia/import' accounting document.
937+ """
938+ return {
939+ '_id': random_id(),
940+ 'ver': 0,
941+ 'type': 'dmedia/import',
942+ 'time': time.time(),
943+ 'batch_id': batch_id,
944+ 'machine_id': machine_id,
945+ 'base': base,
946+ 'log': {
947+ 'imported': [],
948+ 'skipped': [],
949+ 'empty': [],
950+ 'error': [],
951+ },
952+ 'stats': {
953+ 'imported': {'count': 0, 'bytes': 0},
954+ 'skipped': {'count': 0, 'bytes': 0},
955+ 'empty': {'count': 0, 'bytes': 0},
956+ 'error': {'count': 0, 'bytes': 0},
957+ }
958+ }
959
960=== modified file 'dmedia/tests/helpers.py'
961--- dmedia/tests/helpers.py 2011-02-22 14:07:47 +0000
962+++ dmedia/tests/helpers.py 2011-04-07 03:13:28 +0000
963@@ -34,6 +34,7 @@
964 from . import sample_mov, sample_thm
965
966 mov_hash = 'TGX33XXWU3EVHEEY5J7NBOJGKBFXLEBK'
967+mov_size = 20202333
968 mov_leaves = [
969 b32decode('IXJTSUCYYFECGSG6JIB2R77CAJVJK4W3'),
970 b32decode('MA3IAHUOKXR4TRG7CWAPOO7U4WCV5WJ4'),
971
972=== modified file 'dmedia/tests/test_extractor.py'
973--- dmedia/tests/test_extractor.py 2011-01-26 05:14:20 +0000
974+++ dmedia/tests/test_extractor.py 2011-04-07 03:13:28 +0000
975@@ -411,21 +411,23 @@
976 doc,
977 dict(
978 ext='mov',
979- width=1920,
980- height=1080,
981- duration=3,
982- codec_video='H.264 / AVC',
983- codec_audio='Raw 16-bit PCM audio',
984- sample_rate=48000,
985- fps=30,
986- channels='Stereo',
987- iso=100,
988- shutter=u'1/100',
989- aperture=11.0,
990- lens=u'Canon EF 70-200mm f/4L IS',
991- camera=u'Canon EOS 5D Mark II',
992- focal_length=u'138.0 mm',
993 mtime=1287520994 + 68 / 100.0,
994+ meta=dict(
995+ width=1920,
996+ height=1080,
997+ duration=3,
998+ codec_video='H.264 / AVC',
999+ codec_audio='Raw 16-bit PCM audio',
1000+ sample_rate=48000,
1001+ fps=30,
1002+ channels='Stereo',
1003+ iso=100,
1004+ shutter=u'1/100',
1005+ aperture=11.0,
1006+ lens=u'Canon EF 70-200mm f/4L IS',
1007+ camera=u'Canon EOS 5D Mark II',
1008+ focal_length=u'138.0 mm',
1009+ ),
1010 )
1011 )
1012
1013
1014=== modified file 'dmedia/tests/test_importer.py'
1015--- dmedia/tests/test_importer.py 2011-03-28 12:38:29 +0000
1016+++ dmedia/tests/test_importer.py 2011-04-07 03:13:28 +0000
1017@@ -132,103 +132,6 @@
1018 )
1019 os.chmod(subdir, 0o700)
1020
1021- def test_create_batch(self):
1022- f = importer.create_batch
1023- machine_id = random_id()
1024- doc = f(machine_id)
1025-
1026- self.assertEqual(schema.check_dmedia(doc), None)
1027- self.assertTrue(isinstance(doc, dict))
1028- self.assertEqual(
1029- set(doc),
1030- set([
1031- '_id',
1032- 'type',
1033- 'time',
1034- 'imports',
1035- 'errors',
1036- 'machine_id',
1037- 'stats',
1038- ])
1039- )
1040- _id = doc['_id']
1041- self.assertEqual(b32encode(b32decode(_id)), _id)
1042- self.assertEqual(len(_id), 24)
1043- self.assertEqual(doc['type'], 'dmedia/batch')
1044- self.assertTrue(isinstance(doc['time'], (int, float)))
1045- self.assertTrue(doc['time'] <= time.time())
1046- self.assertEqual(doc['imports'], [])
1047- self.assertEqual(doc['errors'], [])
1048- self.assertEqual(doc['machine_id'], machine_id)
1049- self.assertEqual(
1050- doc['stats'],
1051- {
1052- 'considered': {'count': 0, 'bytes': 0},
1053- 'imported': {'count': 0, 'bytes': 0},
1054- 'skipped': {'count': 0, 'bytes': 0},
1055- 'empty': {'count': 0, 'bytes': 0},
1056- 'error': {'count': 0, 'bytes': 0},
1057- }
1058- )
1059-
1060- def test_create_import(self):
1061- f = importer.create_import
1062-
1063- base = '/media/EOS_DIGITAL'
1064- batch_id = random_id()
1065- machine_id = random_id()
1066-
1067- keys = set([
1068- '_id',
1069- 'type',
1070- 'time',
1071- 'base',
1072- 'batch_id',
1073- 'machine_id',
1074- 'log',
1075- 'stats',
1076- ])
1077-
1078- doc = f(base, batch_id=batch_id, machine_id=machine_id)
1079- self.assertEqual(schema.check_dmedia(doc), None)
1080- self.assertTrue(isinstance(doc, dict))
1081- self.assertEqual(set(doc), keys)
1082-
1083- _id = doc['_id']
1084- self.assertEqual(b32encode(b32decode(_id)), _id)
1085- self.assertEqual(len(_id), 24)
1086-
1087- self.assertEqual(doc['type'], 'dmedia/import')
1088- self.assertTrue(isinstance(doc['time'], (int, float)))
1089- self.assertTrue(doc['time'] <= time.time())
1090- self.assertEqual(doc['base'], base)
1091- self.assertEqual(doc['batch_id'], batch_id)
1092- self.assertEqual(doc['machine_id'], machine_id)
1093-
1094- doc = f(base)
1095- self.assertEqual(schema.check_dmedia(doc), None)
1096- self.assertEqual(set(doc), keys)
1097- self.assertEqual(doc['batch_id'], None)
1098- self.assertEqual(doc['machine_id'], None)
1099- self.assertEqual(
1100- doc['log'],
1101- {
1102- 'imported': [],
1103- 'skipped': [],
1104- 'empty': [],
1105- 'error': [],
1106- }
1107- )
1108- self.assertEqual(
1109- doc['stats'],
1110- {
1111- 'imported': {'count': 0, 'bytes': 0},
1112- 'skipped': {'count': 0, 'bytes': 0},
1113- 'empty': {'count': 0, 'bytes': 0},
1114- 'error': {'count': 0, 'bytes': 0},
1115- }
1116- )
1117-
1118 def test_to_dbus_stats(self):
1119 f = importer.to_dbus_stats
1120 stats = dict(
1121@@ -406,6 +309,7 @@
1122 set([
1123 '_id',
1124 '_rev',
1125+ 'ver',
1126 'type',
1127 'time',
1128 'base',
1129@@ -507,6 +411,7 @@
1130 '_id',
1131 '_rev',
1132 '_attachments',
1133+ 'ver',
1134 'type',
1135 'time',
1136 'bytes',
1137@@ -518,7 +423,8 @@
1138 'mtime',
1139 'name',
1140 'dir',
1141- 'content_type',
1142+ 'mime',
1143+ 'media',
1144 ])
1145 )
1146 self.assertEqual(schema.check_dmedia_file(doc), None)
1147@@ -534,7 +440,7 @@
1148 self.assertEqual(doc['mtime'], path.getmtime(src1))
1149 self.assertEqual(doc['name'], 'MVI_5751.MOV')
1150 self.assertEqual(doc['dir'], 'DCIM/100EOS5D2')
1151- self.assertEqual(doc['content_type'], 'video/quicktime')
1152+ self.assertEqual(doc['mime'], 'video/quicktime')
1153
1154 # Test with duplicate
1155 (action, doc) = inst._import_file(src2)
1156@@ -834,7 +740,9 @@
1157 self.assertEqual(
1158 set(batch),
1159 set([
1160- '_id', '_rev',
1161+ '_id',
1162+ '_rev',
1163+ 'ver',
1164 'type',
1165 'time',
1166 'imports',
1167
1168=== modified file 'dmedia/tests/test_schema.py'
1169--- dmedia/tests/test_schema.py 2011-03-27 09:05:32 +0000
1170+++ dmedia/tests/test_schema.py 2011-04-07 03:13:28 +0000
1171@@ -24,10 +24,10 @@
1172 """
1173
1174 from unittest import TestCase
1175-from base64 import b32encode, b32decode
1176+from base64 import b32encode, b32decode, b64encode
1177 from copy import deepcopy
1178 import time
1179-from .helpers import raises, TempDir
1180+from .helpers import raises, TempDir, mov_hash, mov_leaves, mov_size
1181 from dmedia.constants import TYPE_ERROR
1182 from dmedia.schema import random_id
1183 from dmedia import schema
1184@@ -163,13 +163,24 @@
1185
1186 good = {
1187 '_id': 'MZZG2ZDSOQVSW2TEMVZG643F',
1188+ 'ver': 0,
1189 'type': 'dmedia/foo',
1190 'time': 1234567890,
1191 'foo': 'bar',
1192 }
1193 g = deepcopy(good)
1194 self.assertEqual(f(g), None)
1195- for key in ['_id', 'type', 'time']:
1196+
1197+ # check with bad ver:
1198+ bad = deepcopy(good)
1199+ bad['ver'] = 0.0
1200+ e = raises(TypeError, f, bad)
1201+ self.assertEqual(str(e), TYPE_ERROR % ('ver', int, float, 0.0))
1202+ bad['ver'] = 1
1203+ e = raises(ValueError, f, bad)
1204+ self.assertEqual(str(e), "doc['ver'] must be 0; got 1")
1205+
1206+ for key in ['_id', 'ver', 'type', 'time']:
1207 bad = deepcopy(good)
1208 del bad[key]
1209 e = raises(ValueError, f, bad)
1210@@ -190,7 +201,7 @@
1211 e = raises(ValueError, f, bad)
1212 self.assertEqual(
1213 str(e),
1214- 'doc missing keys: %r' % ['_id', 'time', 'type']
1215+ 'doc missing keys: %r' % ['_id', 'time', 'type', 'ver']
1216 )
1217
1218 def test_check_stored(self):
1219@@ -275,11 +286,11 @@
1220 TYPE_ERROR % (label, int, float, 2.0)
1221 )
1222 bad = deepcopy(good)
1223- bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = 0
1224+ bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = -2
1225 e = raises(ValueError, f, bad)
1226 self.assertEqual(
1227 str(e),
1228- '%s must be >= 1; got 0' % label
1229+ '%s must be >= 0; got -2' % label
1230 )
1231
1232 # Test with bad 'time' type/value:
1233@@ -420,6 +431,7 @@
1234 # Test with good doc:
1235 good = {
1236 '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
1237+ 'ver': 0,
1238 'type': 'dmedia/file',
1239 'time': 1234567890,
1240 'bytes': 20202333,
1241@@ -517,7 +529,103 @@
1242 e = raises(ValueError, f, bad)
1243 self.assertEqual(
1244 str(e),
1245- "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 1; got -1"
1246+ "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 0; got -1"
1247+ )
1248+
1249+ def test_check_dmedia_file_optional(self):
1250+ f = schema.check_dmedia_file_optional
1251+ f({})
1252+
1253+ # mime
1254+ self.assertIsNone(f({'mime': 'video/quicktime'}))
1255+ e = raises(TypeError, f, {'mime': 42})
1256+ self.assertEqual(
1257+ str(e),
1258+ TYPE_ERROR % ("doc['mime']", basestring, int, 42)
1259+ )
1260+
1261+ # media
1262+ self.assertIsNone(f({'media': None}))
1263+ self.assertIsNone(f({'media': 'video'}))
1264+ self.assertIsNone(f({'media': 'audio'}))
1265+ self.assertIsNone(f({'media': 'image'}))
1266+ e = raises(TypeError, f, {'media': 42})
1267+ self.assertEqual(
1268+ str(e),
1269+ TYPE_ERROR % ("doc['media']", basestring, int, 42)
1270+ )
1271+ e = raises(ValueError, f, {'media': 'stuff'})
1272+ self.assertEqual(
1273+ str(e),
1274+ "doc['media'] value 'stuff' not in ('video', 'audio', 'image')"
1275+ )
1276+
1277+ # mtime
1278+ self.assertIsNone(f({'mtime': 1302125982.946627}))
1279+ self.assertIsNone(f({'mtime': 1234567890}))
1280+ e = raises(TypeError, f, {'mtime': '1234567890'})
1281+ self.assertEqual(
1282+ str(e),
1283+ TYPE_ERROR % ("doc['mtime']", (int, float), str, '1234567890')
1284+ )
1285+ e = raises(ValueError, f, {'mtime': -1})
1286+ self.assertEqual(
1287+ str(e),
1288+ "doc['mtime'] must be >= 0; got -1"
1289+ )
1290+
1291+ # atime
1292+ self.assertIsNone(f({'atime': 1302125982.946627}))
1293+ self.assertIsNone(f({'atime': 1234567890}))
1294+ e = raises(TypeError, f, {'atime': '1234567890'})
1295+ self.assertEqual(
1296+ str(e),
1297+ TYPE_ERROR % ("doc['atime']", (int, float), str, '1234567890')
1298+ )
1299+ e = raises(ValueError, f, {'atime': -0.3})
1300+ self.assertEqual(
1301+ str(e),
1302+ "doc['atime'] must be >= 0; got -0.3"
1303+ )
1304+
1305+ # name
1306+ self.assertIsNone(f({'name': 'MVI_5899.MOV'}))
1307+ e = raises(TypeError, f, {'name': 42})
1308+ self.assertEqual(
1309+ str(e),
1310+ TYPE_ERROR % ("doc['name']", basestring, int, 42)
1311+ )
1312+
1313+ # dir
1314+ self.assertIsNone(f({'dir': 'DCIM/100EOS5D2'}))
1315+ e = raises(TypeError, f, {'dir': 42})
1316+ self.assertEqual(
1317+ str(e),
1318+ TYPE_ERROR % ("doc['dir']", basestring, int, 42)
1319+ )
1320+
1321+ # meta
1322+ self.assertIsNone(f({'meta': {'iso': 800}}))
1323+ e = raises(TypeError, f, {'meta': 42})
1324+ self.assertEqual(
1325+ str(e),
1326+ TYPE_ERROR % ("doc['meta']", dict, int, 42)
1327+ )
1328+
1329+ # user
1330+ self.assertIsNone(f({'user': {'title': 'cool sunset'}}))
1331+ e = raises(TypeError, f, {'user': 42})
1332+ self.assertEqual(
1333+ str(e),
1334+ TYPE_ERROR % ("doc['user']", dict, int, 42)
1335+ )
1336+
1337+ # tags
1338+ self.assertIsNone(f({'tags': {'burp': {'start': 6, 'end': 73}}}))
1339+ e = raises(TypeError, f, {'tags': 42})
1340+ self.assertEqual(
1341+ str(e),
1342+ TYPE_ERROR % ("doc['tags']", dict, int, 42)
1343 )
1344
1345
1346@@ -527,6 +635,7 @@
1347 # Test with good doc:
1348 good = {
1349 '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
1350+ 'ver': 0,
1351 'type': 'dmedia/file',
1352 'time': 1234567890,
1353 'plugin': 'filestore',
1354@@ -593,6 +702,64 @@
1355 self.assertEqual(len(binary), 15)
1356 self.assertEqual(b32encode(binary), _id)
1357
1358+ def test_create_file(self):
1359+ f = schema.create_file
1360+ store = schema.random_id()
1361+
1362+ d = f(mov_size, mov_leaves, store)
1363+ schema.check_dmedia_file(d)
1364+ self.assertEqual(
1365+ set(d),
1366+ set([
1367+ '_id',
1368+ '_attachments',
1369+ 'ver',
1370+ 'type',
1371+ 'time',
1372+ 'bytes',
1373+ 'ext',
1374+ 'origin',
1375+ 'stored',
1376+ ])
1377+ )
1378+ self.assertEqual(d['_id'], mov_hash)
1379+ self.assertEqual(
1380+ d['_attachments'],
1381+ {
1382+ 'leaves': {
1383+ 'data': b64encode(b''.join(mov_leaves)),
1384+ 'content_type': 'application/octet-stream',
1385+ }
1386+ }
1387+ )
1388+ self.assertEqual(d['ver'], 0)
1389+ self.assertEqual(d['type'], 'dmedia/file')
1390+ self.assertLessEqual(d['time'], time.time())
1391+ self.assertEqual(d['bytes'], mov_size)
1392+ self.assertIsNone(d['ext'])
1393+ self.assertEqual(d['origin'], 'user')
1394+
1395+ s = d['stored']
1396+ self.assertIsInstance(s, dict)
1397+ self.assertEqual(list(s), [store])
1398+ self.assertEqual(set(s[store]), set(['copies', 'time']))
1399+ self.assertEqual(s[store]['copies'], 0)
1400+ self.assertEqual(s[store]['time'], d['time'])
1401+
1402+ # Test overriding default kwarg values:
1403+ d = f(mov_size, mov_leaves, store, copies=2)
1404+ schema.check_dmedia_file(d)
1405+ self.assertEqual(d['stored'][store]['copies'], 2)
1406+
1407+ d = f(mov_size, mov_leaves, store, ext='mov')
1408+ schema.check_dmedia_file(d)
1409+ self.assertEqual(d['ext'], 'mov')
1410+
1411+ d = f(mov_size, mov_leaves, store, origin='proxy')
1412+ schema.check_dmedia_file(d)
1413+ self.assertEqual(d['origin'], 'proxy')
1414+
1415+
1416 def test_create_store(self):
1417 f = schema.create_store
1418 tmp = TempDir()
1419@@ -605,6 +772,7 @@
1420 set(doc),
1421 set([
1422 '_id',
1423+ 'ver',
1424 'type',
1425 'time',
1426 'plugin',
1427@@ -625,6 +793,7 @@
1428 set(doc),
1429 set([
1430 '_id',
1431+ 'ver',
1432 'type',
1433 'time',
1434 'plugin',
1435@@ -639,3 +808,102 @@
1436 self.assertEqual(doc['copies'], 3)
1437 self.assertEqual(doc['path'], base)
1438 self.assertEqual(doc['machine_id'], machine_id)
1439+
1440+ def test_create_batch(self):
1441+ f = schema.create_batch
1442+ machine_id = random_id()
1443+ doc = f(machine_id)
1444+
1445+ self.assertEqual(schema.check_dmedia(doc), None)
1446+ self.assertTrue(isinstance(doc, dict))
1447+ self.assertEqual(
1448+ set(doc),
1449+ set([
1450+ '_id',
1451+ 'ver',
1452+ 'type',
1453+ 'time',
1454+ 'imports',
1455+ 'errors',
1456+ 'machine_id',
1457+ 'stats',
1458+ ])
1459+ )
1460+ _id = doc['_id']
1461+ self.assertEqual(b32encode(b32decode(_id)), _id)
1462+ self.assertEqual(len(_id), 24)
1463+ self.assertEqual(doc['type'], 'dmedia/batch')
1464+ self.assertTrue(isinstance(doc['time'], (int, float)))
1465+ self.assertTrue(doc['time'] <= time.time())
1466+ self.assertEqual(doc['imports'], [])
1467+ self.assertEqual(doc['errors'], [])
1468+ self.assertEqual(doc['machine_id'], machine_id)
1469+ self.assertEqual(
1470+ doc['stats'],
1471+ {
1472+ 'considered': {'count': 0, 'bytes': 0},
1473+ 'imported': {'count': 0, 'bytes': 0},
1474+ 'skipped': {'count': 0, 'bytes': 0},
1475+ 'empty': {'count': 0, 'bytes': 0},
1476+ 'error': {'count': 0, 'bytes': 0},
1477+ }
1478+ )
1479+
1480+ def test_create_import(self):
1481+ f = schema.create_import
1482+
1483+ base = '/media/EOS_DIGITAL'
1484+ batch_id = random_id()
1485+ machine_id = random_id()
1486+
1487+ keys = set([
1488+ '_id',
1489+ 'ver',
1490+ 'type',
1491+ 'time',
1492+ 'base',
1493+ 'batch_id',
1494+ 'machine_id',
1495+ 'log',
1496+ 'stats',
1497+ ])
1498+
1499+ doc = f(base, batch_id=batch_id, machine_id=machine_id)
1500+ self.assertEqual(schema.check_dmedia(doc), None)
1501+ self.assertTrue(isinstance(doc, dict))
1502+ self.assertEqual(set(doc), keys)
1503+
1504+ _id = doc['_id']
1505+ self.assertEqual(b32encode(b32decode(_id)), _id)
1506+ self.assertEqual(len(_id), 24)
1507+
1508+ self.assertEqual(doc['type'], 'dmedia/import')
1509+ self.assertTrue(isinstance(doc['time'], (int, float)))
1510+ self.assertTrue(doc['time'] <= time.time())
1511+ self.assertEqual(doc['base'], base)
1512+ self.assertEqual(doc['batch_id'], batch_id)
1513+ self.assertEqual(doc['machine_id'], machine_id)
1514+
1515+ doc = f(base)
1516+ self.assertEqual(schema.check_dmedia(doc), None)
1517+ self.assertEqual(set(doc), keys)
1518+ self.assertEqual(doc['batch_id'], None)
1519+ self.assertEqual(doc['machine_id'], None)
1520+ self.assertEqual(
1521+ doc['log'],
1522+ {
1523+ 'imported': [],
1524+ 'skipped': [],
1525+ 'empty': [],
1526+ 'error': [],
1527+ }
1528+ )
1529+ self.assertEqual(
1530+ doc['stats'],
1531+ {
1532+ 'imported': {'count': 0, 'bytes': 0},
1533+ 'skipped': {'count': 0, 'bytes': 0},
1534+ 'empty': {'count': 0, 'bytes': 0},
1535+ 'error': {'count': 0, 'bytes': 0},
1536+ }
1537+ )
1538
1539=== modified file 'dmedia/webui/data/browser.js'
1540--- dmedia/webui/data/browser.js 2011-03-31 09:28:58 +0000
1541+++ dmedia/webui/data/browser.js 2011-04-07 03:13:28 +0000
1542@@ -53,8 +53,8 @@
1543 }
1544 Browser.prototype = {
1545 run: function() {
1546- var r = this.db.view('file', 'ext',
1547- {key: 'mov', reduce: false, include_docs: true}
1548+ var r = this.db.view('user', 'video',
1549+ {include_docs: true, descending: true}
1550 );
1551 this.load(r.rows);
1552 },
1553@@ -74,7 +74,7 @@
1554 };
1555
1556 var time = $el('div', {'class': 'time'});
1557- time.textContent = doc.duration + 's';
1558+ time.textContent = doc.meta.duration + 's';
1559
1560 div.appendChild(img);
1561 div.appendChild(time);
1562@@ -90,7 +90,7 @@
1563 names.forEach(function(n) {
1564 var el = $('meta.' + n);
1565 if (el) {
1566- el.textContent = doc[n];
1567+ el.textContent = doc.meta[n];
1568 }
1569 });
1570 },

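As a quick orientation, here is roughly how the new schema.create_file() factory is used by the importer in the diff above (a sketch; the leaf digest and store ID are made-up placeholders):

    from dmedia import schema

    store_id = schema.random_id()     # placeholder store ID
    leaves = [b'x' * 20]              # placeholder leaf digest(s)

    doc = schema.create_file(20202333, leaves, store_id, copies=1, ext='mov')
    assert doc['type'] == 'dmedia/file'
    assert doc['ver'] == 0
    assert doc['stored'][store_id]['copies'] == 1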