Merge lp:~jderose/dmedia/schema-ver0 into lp:dmedia

Proposed by Jason Gerard DeRose
Status: Merged
Merged at revision: 181
Proposed branch: lp:~jderose/dmedia/schema-ver0
Merge into: lp:dmedia
Diff against target: 1570 lines (+859/-232)
11 files modified
dmedia/extractor.py (+6/-1)
dmedia/filestore.py (+3/-0)
dmedia/gtkui/__init__.py (+3/-1)
dmedia/importer.py (+31/-75)
dmedia/metastore.py (+103/-16)
dmedia/schema.py (+409/-14)
dmedia/tests/helpers.py (+1/-0)
dmedia/tests/test_extractor.py (+16/-14)
dmedia/tests/test_importer.py (+8/-100)
dmedia/tests/test_schema.py (+275/-7)
dmedia/webui/data/browser.js (+4/-4)
To merge this branch: bzr merge lp:~jderose/dmedia/schema-ver0
Reviewer: Jason Gerard DeRose
Status: Approve
Review via email: mp+56680@code.launchpad.net

Description of the change

There is still a bit of work to do to finalize how tags work, but the important part is already enforced in the schema: 'tags' is a dictionary. I'll open another bug for finishing the tags schema; this is already a fairly large change, so I'm proposing it for merge before it gets out of hand.
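For reference, here's a minimal sketch of what the ver-0 schema enforces today; the 'burp' tag and its start/end payload come straight from the new tests, while the exact per-tag payload shape is still an open question:

    from dmedia import schema

    # 'tags' must be a dict; the per-tag value shape isn't finalized yet.
    doc = {'tags': {'burp': {'start': 6, 'end': 73}}}
    schema.check_dmedia_file_optional(doc)  # returns None

    # Anything that isn't a dict is rejected:
    schema.check_dmedia_file_optional({'tags': ['burp']})  # raises TypeError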

Oh, and this includes an important unrelated bugfix: I goofed in the 0.5 transition from PyGTK to PyGI and was no longer calling GObject.threads_init() when ./dmedia-service runs... oops. This was causing a lot of weird problems, from GUI hangs to the logging order (in conjunction with multiprocessing) getting wacky. Anyway, I slipped that little one-line fix in here too (in dmedia/gtkui/__init__.py).
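For context, this sketch shows the import-time ordering the fix restores; it's exactly what dmedia/gtkui/__init__.py now does, so anything importing dmedia.gtkui gets GLib threading initialized before any other GTK/WebKit code runs:

    import gi
    gi.require_version('Gtk', '2.0')
    gi.require_version('WebKit', '1.0')
    from gi.repository import GObject, Gtk, WebKit

    # The one-line fix: initialize GLib threading up front, before any
    # threads or multiprocessing touch the GUI; without it the main loop
    # can hang and log ordering gets scrambled.
    GObject.threads_init()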

Revision history for this message
Jason Gerard DeRose (jderose) wrote:

Okay, I'm gonna self-approve as I want to get the important PyGI fix into trunk. This month needs to be an especially high-velocity month for dmedia, so I might break the rules often. To make up for it, I will write an unusually high number of unit tests, even by my standards.

"""
Jason, looks great! Amazingly there isn't even a single typo, I'm sure!

--Jason
"""

review: Approve

Preview Diff

=== modified file 'dmedia/extractor.py'
--- dmedia/extractor.py 2011-01-28 14:07:33 +0000
+++ dmedia/extractor.py 2011-04-07 03:13:28 +0000
@@ -223,13 +223,18 @@
 def merge_metadata(src, doc):
     ext = doc['ext']
     attachments = doc.get('_attachments', {})
+    meta = doc.get('meta', {})
     if ext in _extractors:
         callback = _extractors[ext]
         for (key, value) in callback(src, attachments):
-            if key not in doc or key == 'mtime':
+            if key == 'mtime':
                 doc[key] = value
+            elif key not in meta:
+                meta[key] = value
     if attachments and '_attachments' not in doc:
         doc['_attachments'] = attachments
+    if meta and 'meta' not in doc:
+        doc['meta'] = meta
 
 
 def merge_exif(src, attachments):
 
=== modified file 'dmedia/filestore.py'
--- dmedia/filestore.py 2011-02-27 03:47:55 +0000
+++ dmedia/filestore.py 2011-04-07 03:13:28 +0000
@@ -613,6 +613,9 @@
             raise ValueError('%s.base not a directory: %r' %
                 (self.__class__.__name__, self.base)
             )
+
+        # FIXME: This is too high-level for FileStore, should instead be dealt
+        # with by the core API entry point as FileStores are first initialized
         self.record = path.join(self.base, 'store.json')
         try:
             fp = open(self.record, 'rb')
 
=== modified file 'dmedia/gtkui/__init__.py'
--- dmedia/gtkui/__init__.py 2011-03-27 08:01:30 +0000
+++ dmedia/gtkui/__init__.py 2011-04-07 03:13:28 +0000
@@ -26,4 +26,6 @@
 import gi
 gi.require_version('Gtk', '2.0')
 gi.require_version('WebKit', '1.0')
-from gi.repository import Gtk, WebKit
+from gi.repository import GObject, Gtk, WebKit
+
+GObject.threads_init()
 
=== modified file 'dmedia/importer.py'
--- dmedia/importer.py 2011-03-28 12:38:29 +0000
+++ dmedia/importer.py 2011-04-07 03:13:28 +0000
@@ -33,7 +33,7 @@
 
 import couchdb
 
-from .schema import random_id
+from .schema import random_id, create_file, create_batch, create_import
 from .errors import DuplicateFile
 from .workers import (
     CouchWorker, CouchManager, register, isregistered, exception_name
@@ -48,6 +48,24 @@
 log = logging.getLogger()
 
 
+# FIXME: This needs to be done with some real inspection of the file contents,
+# but this is just a stopgap for the sake of getting the schema stable:
+MEDIA_MAP = {
+    'ogv': 'video',
+    'mov': 'video',
+    'avi': 'video',
+
+    'oga': 'audio',
+    'flac': 'audio',
+    'wav': 'audio',
+    'mp3': 'audio',
+
+    'jpg': 'image',
+    'cr2': 'image',
+    'png': 'image',
+}
+
+
 def normalize_ext(name):
     """
     Return (root, ext) from *name* where extension is normalized to lower-case.
@@ -143,53 +161,6 @@
         yield tup
 
 
-def create_batch(machine_id=None):
-    """
-    Create initial 'dmedia/batch' accounting document.
-    """
-    return {
-        '_id': random_id(),
-        'type': 'dmedia/batch',
-        'time': time.time(),
-        'machine_id': machine_id,
-        'imports': [],
-        'errors': [],
-        'stats': {
-            'considered': {'count': 0, 'bytes': 0},
-            'imported': {'count': 0, 'bytes': 0},
-            'skipped': {'count': 0, 'bytes': 0},
-            'empty': {'count': 0, 'bytes': 0},
-            'error': {'count': 0, 'bytes': 0},
-        }
-    }
-
-
-def create_import(base, batch_id=None, machine_id=None):
-    """
-    Create initial 'dmedia/import' accounting document.
-    """
-    return {
-        '_id': random_id(),
-        'type': 'dmedia/import',
-        'time': time.time(),
-        'batch_id': batch_id,
-        'machine_id': machine_id,
-        'base': base,
-        'log': {
-            'imported': [],
-            'skipped': [],
-            'empty': [],
-            'error': [],
-        },
-        'stats': {
-            'imported': {'count': 0, 'bytes': 0},
-            'skipped': {'count': 0, 'bytes': 0},
-            'empty': {'count': 0, 'bytes': 0},
-            'error': {'count': 0, 'bytes': 0},
-        }
-    }
-
-
 class ImportWorker(CouchWorker):
     def __init__(self, env, q, key, args):
         super(ImportWorker, self).__init__(env, q, key, args)
@@ -308,34 +279,19 @@
         except couchdb.ResourceNotFound as e:
             pass
 
-        ts = time.time()
-        doc = {
-            '_id': chash,
-            '_attachments': {
-                'leaves': {
-                    'data': b64encode(pack_leaves(leaves)),
-                    'content_type': 'application/octet-stream',
-                }
-            },
-            'type': 'dmedia/file',
-            'time': ts,
-            'bytes': stat.st_size,
-            'ext': ext,
-            'origin': 'user',
-            'stored': {
-                self.filestore._id: {
-                    'copies': 1,
-                    'time': ts,
-                },
-            },
-
-            'import_id': self._id,
-            'mtime': stat.st_mtime,
-            'name': name,
-            'dir': path.relpath(path.dirname(src), self.base),
-        }
+        doc = create_file(stat.st_size, leaves, self.filestore._id,
+            copies=1, ext=ext
+        )
+        assert doc['_id'] == chash
+        doc.update(
+            import_id=self._id,
+            mtime=stat.st_mtime,
+            name=name,
+            dir=path.relpath(path.dirname(src), self.base),
+        )
         if ext:
-            doc['content_type'] = mimetypes.types_map.get('.' + ext)
+            doc['mime'] = mimetypes.types_map.get('.' + ext)
+            doc['media'] = MEDIA_MAP.get(ext)
         if self.extract:
             merge_metadata(src, doc)
         (_id, _rev) = self.db.save(doc)
 
=== modified file 'dmedia/metastore.py'
--- dmedia/metastore.py 2011-03-27 09:05:32 +0000
+++ dmedia/metastore.py 2011-04-07 03:13:28 +0000
@@ -62,6 +62,19 @@
 }
 """
 
+# views in the 'file' design only index docs for which doc.type == 'dmedia/file'
+file_stored = """
+// Get list of all files on a given store, total bytes on that store
+function(doc) {
+    if (doc.type == 'dmedia/file') {
+        var key;
+        for (key in doc.stored) {
+            emit(key, doc.bytes);
+        }
+    }
+}
+"""
+
 file_bytes = """
 function(doc) {
     if (doc.type == 'dmedia/file' && typeof(doc.bytes) == 'number') {
@@ -78,10 +91,10 @@
 }
 """
 
-file_content_type = """
+file_mime = """
 function(doc) {
     if (doc.type == 'dmedia/file') {
-        emit(doc.content_type, null);
+        emit(doc.mime, null);
     }
 }
 """
@@ -94,16 +107,6 @@
 }
 """
 
-file_tags = """
-function(doc) {
-    if (doc.type == 'dmedia/file' && doc.tags) {
-        doc.tags.forEach(function(tag) {
-            emit(tag, null);
-        });
-    }
-}
-"""
-
 file_import_id = """
 function(doc) {
     if (doc.type == 'dmedia/file' && doc.import_id) {
@@ -112,13 +115,87 @@
 }
 """
 
+# views in the 'user' design only index docs for which doc.type == 'dmedia/file'
+# and doc.origin == 'user'
+user_copies = """
+// Durability of user's personal files
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        var copies = 0;
+        var key;
+        for (key in doc.stored) {
+            copies += doc.stored[key].copies;
+        }
+        emit(copies, null);
+    }
+}
+"""
+
+user_media = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        emit(doc.media, null);
+    }
+}
+"""
+
+user_tags = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user' && doc.tags) {
+        var key;
+        for (key in doc.tags) {
+            emit(key, doc.tags[key]);
+        }
+    }
+}
+"""
+
+user_all = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        emit(doc.mtime, null);
+    }
+}
+"""
+
+user_video = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        if (doc.media == 'video') {
+            emit(doc.mtime, null);
+        }
+    }
+}
+"""
+
+user_image = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        if (doc.media == 'image') {
+            emit(doc.mtime, null);
+        }
+    }
+}
+"""
+
+user_audio = """
+function(doc) {
+    if (doc.type == 'dmedia/file' && doc.origin == 'user') {
+        if (doc.media == 'audio') {
+            emit(doc.mtime, null);
+        }
+    }
+}
+"""
+
+
 def build_design_doc(design, views):
     _id = '_design/' + design
     d = {}
     for (view, map_, reduce_) in views:
-        d[view] = {'map': map_}
+        d[view] = {'map': map_.strip()}
         if reduce_ is not None:
-            d[view]['reduce'] = reduce_
+            d[view]['reduce'] = reduce_.strip()
     doc = {
         '_id': _id,
         'language': 'javascript',
@@ -168,12 +245,22 @@
     )),
 
     ('file', (
+        ('stored', file_stored, _sum),
         ('import_id', file_import_id, None),
         ('bytes', file_bytes, _sum),
         ('ext', file_ext, _count),
-        ('content_type', file_content_type, _count),
+        ('mime', file_mime, _count),
         ('mtime', file_mtime, None),
-        ('tags', file_tags, _count),
+    )),
+
+    ('user', (
+        ('copies', user_copies, None),
+        ('media', user_media, _count),
+        ('tags', user_tags, _count),
+        ('all', user_all, None),
+        ('video', user_video, None),
+        ('image', user_image, None),
+        ('audio', user_audio, None),
     )),
 )
 
 
=== modified file 'dmedia/schema.py'
--- dmedia/schema.py 2011-03-27 09:05:32 +0000
+++ dmedia/schema.py 2011-04-07 03:13:28 +0000
@@ -33,6 +33,7 @@
 
 >>> good = {
 ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+...     'ver': 0,
 ...     'type': 'dmedia/foo',
 ...     'time': 1234567890,
 ... }
@@ -40,6 +41,7 @@
 >>> check_dmedia(good)  # Returns None
 >>> bad = {
 ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+...     'ver': 0,
 ...     'kind': 'dmedia/foo',
 ...     'timestamp': 1234567890,
 ... }
@@ -222,6 +224,7 @@
 
 >>> doc = {
 ...     '_id': 'MZZG2ZDSOQVSW2TEMVZG643F',
+...     'ver': 0,
 ...     'type': 'dmedia/batch',
 ...     'time': 1234567890,
 ... }
@@ -308,9 +311,11 @@
 from __future__ import print_function
 
 import os
-from base64 import b32encode, b32decode
+from hashlib import sha1
+from base64 import b32encode, b32decode, b64encode
 import re
 import time
+
 from .constants import TYPE_ERROR, EXT_PAT
 
 # Some private helper functions that don't directly define any schema.
@@ -325,6 +330,201 @@
 #
 # That is all.
 
+
+# FIXME: These functions are a step toward making the checks more concise and
+# the error messages consistent and even more helpful. However, these functions
+# aren't used much yet... but all the schema checks should be ported to these
+# functions eventually.
+def _label(path):
+    """
+    Create a helpful debugging label to indicate the attribute in question.
+
+    For example:
+
+    >>> _label([])
+    'doc'
+    >>> _label(['log'])
+    "doc['log']"
+    >>> _label(['log', 'considered', 2, 'src'])
+    "doc['log']['considered'][2]['src']"
+
+
+    See also `_value()`.
+    """
+    return 'doc' + ''.join('[{!r}]'.format(key) for key in path)
+
+
+def _value(doc, path):
+    """
+    Retrieve value from *doc* by traversing *path*.
+
+    For example:
+
+    >>> doc = {'log': {'considered': [None, None, {'src': 'hello'}, None]}}
+    >>> _value(doc, [])
+    {'log': {'considered': [None, None, {'src': 'hello'}, None]}}
+    >>> _value(doc, ['log'])
+    {'considered': [None, None, {'src': 'hello'}, None]}
+    >>> _value(doc, ['log', 'considered', 2, 'src'])
+    'hello'
+
+
+    Or if you try to retrieve something that doesn't exist:
+
+    >>> _value(doc, ['log', 'considered', 7])
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['log']['considered'][7] does not exist
+
+
+    Or if a key/index is missing higher up in the path:
+
+    >>> _value(doc, ['dog', 'considered', 7])
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['dog'] does not exist
+
+
+    See also `_label()`.
+    """
+    value = doc
+    p = []
+    for key in path:
+        p.append(key)
+        try:
+            value = value[key]
+        except (KeyError, IndexError):
+            raise ValueError(
+                '{} does not exist'.format(_label(p))
+            )
+    return value
+
+
+def _exists(doc, path):
+    """
+    Return ``True`` if the end of *path* exists.
+
+    For example:
+
+    >>> doc = {'foo': {'hello': 'world'}, 'bar': ['hello', 'naughty', 'nurse']}
+    >>> _exists(doc, ['foo', 'hello'])
+    True
+    >>> _exists(doc, ['foo', 'sup'])
+    False
+    >>> _exists(doc, ['bar', 2])
+    True
+    >>> _exists(doc, ['bar', 3])
+    False
+
+
+    Or if a key/index is missing higher up the path:
+
+    >>> _exists(doc, ['stuff', 'junk'])
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['stuff'] does not exist
+
+
+    See also `_check_if_exists()`.
+    """
+    if len(path) == 0:
+        return True
+    base = _value(doc, path[:-1])
+    key = path[-1]
+    try:
+        value = base[key]
+        return True
+    except (KeyError, IndexError):
+        return False
+
+
+def _check(doc, path, *checks):
+    """
+    Run a series of *checks* on the value in *doc* addressed by *path*.
+
+    For example:
+
+    >>> doc = {'foo': [None, {'bar': 'aye'}, None]}
+    >>> _check(doc, ['foo', 1, 'bar'],
+    ...     _check_str,
+    ...     (_check_in, 'bee', 'sea'),
+    ... )
+    ...
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['foo'][1]['bar'] value 'aye' not in ('bee', 'sea')
+
+
+    Or if a value is missing:
+
+    >>> _check(doc, ['foo', 3],
+    ...     _can_be_none,
+    ... )
+    ...
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['foo'][3] does not exist
+
+
+    See also `_check_if_exists()`.
+    """
+    value = _value(doc, path)
+    label = _label(path)
+    for c in checks:
+        if isinstance(c, tuple):
+            (c, args) = (c[0], c[1:])
+        else:
+            args = tuple()
+        if c(value, label, *args) is True:
+            break
+
+
+def _check_if_exists(doc, path, *checks):
+    """
+    Run *checks* only if value at *path* exists.
+
+    For example:
+
+    >>> doc = {'name': 17}
+    >>> _check_if_exists(doc, ['dir'], _check_str)
+    >>> _check_if_exists(doc, ['name'], _check_str)
+    Traceback (most recent call last):
+        ...
+    TypeError: doc['name']: need a <type 'basestring'>; got a <type 'int'>: 17
+
+
+    See also `_check()` and `_exists()`.
+    """
+    if _exists(doc, path):
+        _check(doc, path, *checks)
+
+
+def _can_be_none(value, label):
+    """
+    Stop execution of check if *value* is ``None``.
+
+    `_check()` will abort upon a check function returning ``True``.
+
+    For example, here a ``TypeError`` is raised:
+
+    >>> doc = {'ext': None}
+    >>> _check(doc, ['ext'], _check_str)
+    Traceback (most recent call last):
+        ...
+    TypeError: doc['ext']: need a <type 'basestring'>; got a <type 'NoneType'>: None
+
+
+    But here it is not:
+
+    >>> _check(doc, ['ext'], _can_be_none, _check_str)
+
+    """
+    if value is None:
+        return True
+
+# /FIXME new helper functions
+
+
 def _check_dict(value, label):
     """
     Verify that *value* is a ``dict`` instance.
@@ -387,13 +587,13 @@
     if not isinstance(value, (int, float)):
         raise TypeError(TYPE_ERROR % (label, (int, float), type(value), value))
 
-def _check_at_least(value, minvalue, label):
+def _check_at_least(value, label, minvalue=0):
     """
     Verify that *value* is greater than or equal to *minvalue*.
 
     For example:
 
-    >>> _check_at_least(0, 1, 'bytes')
+    >>> _check_at_least(0, 'bytes', 1)
     Traceback (most recent call last):
         ...
     ValueError: bytes must be >= 1; got 0
@@ -477,6 +677,25 @@
     )
 
 
+def _check_in(value, label, *possible):
+    """
+    Check that *value* is one of *possible*.
+
+    For example:
+
+    >>> _check_in('foo', "doc['media']", 'video', 'audio', 'image')
+    Traceback (most recent call last):
+        ...
+    ValueError: doc['media'] value 'foo' not in ('video', 'audio', 'image')
+
+    """
+    if value not in possible:
+        raise ValueError(
+            '{} value {!r} not in {!r}'.format(label, value, possible)
+        )
+
+
+
 # The schema defining functions:
 
 def check_base32(value, label='_id'):
@@ -579,7 +798,7 @@
 
     """
     _check_int_float(value, label)
-    _check_at_least(value, 0, label)
+    _check_at_least(value, label, 0)
 
 
 def check_dmedia(doc):
@@ -591,14 +810,17 @@
 
     1. have '_id' that passes `check_base32()`
 
-    2. have 'type' that passes `check_type()`
+    2. have a 'ver' equal to ``0``
 
-    3. have 'time' that passes `check_time()`
+    3. have 'type' that passes `check_type()`
+
+    4. have 'time' that passes `check_time()`
 
     For example, a conforming value:
 
     >>> doc = {
     ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+    ...     'ver': 0,
     ...     'type': 'dmedia/file',
     ...     'time': 1234567890,
     ... }
@@ -610,6 +832,7 @@
 
     >>> doc = {
     ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+    ...     'ver': 0,
     ...     'kind': 'dmedia/file',
     ...     'timestamp': 1234567890,
     ... }
@@ -620,8 +843,13 @@
     ValueError: doc missing keys: ['time', 'type']
 
     """
-    _check_required(doc, ['_id', 'type', 'time'])
+    _check_required(doc, ['_id', 'ver', 'type', 'time'])
     check_base32(doc['_id'])
+    _check_int(doc['ver'], 'ver')
+    if doc['ver'] != 0:
+        raise ValueError(
+            "doc['ver'] must be 0; got {!r}".format(doc['ver'])
+        )
     check_type(doc['type'])
     check_time(doc['time'])
 
@@ -638,7 +866,7 @@
 
     3. have values that are themselves ``dict`` instances
 
-    4. values must have 'copies' that is an ``int`` >= 1
+    4. values must have 'copies' that is an ``int`` >= 0
 
     5. values must have 'time' that conforms with `check_time()`
 
@@ -686,7 +914,7 @@
         copies = value['copies']
         l3 = l2 + "['copies']"
         _check_int(copies, l3)
-        _check_at_least(copies, 1, l3)
+        _check_at_least(copies, l3, 0)
 
         # Check 'time':
         check_time(value['time'], l2 + "['time']")
@@ -780,7 +1008,6 @@
         raise ValueError('%s: %r not in %r' % (label, value, allowed))
 
 
-
 def check_dmedia_file(doc):
     """
     Verify that *doc* is a valid 'dmedia/file' record type.
@@ -803,6 +1030,7 @@
 
     >>> doc = {
     ...     '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
+    ...     'ver': 0,
     ...     'type': 'dmedia/file',
     ...     'time': 1234567890,
     ...     'bytes': 20202333,
@@ -823,6 +1051,7 @@
 
     >>> doc = {
     ...     '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
+    ...     'ver': 0,
     ...     'type': 'dmedia/file',
     ...     'time': 1234567890,
     ...     'bytes': 20202333,
@@ -854,7 +1083,7 @@
     # Check 'bytes':
     b = doc['bytes']
     _check_int(b, 'bytes')
-    _check_at_least(b, 1, 'bytes')
+    _check_at_least(b, 'bytes', 1)
 
     # Check 'ext':
     check_ext(doc['ext'])
@@ -865,6 +1094,63 @@
     # Check 'stored'
     check_stored(doc['stored'])
 
+    check_dmedia_file_optional(doc)
+
+
+def check_dmedia_file_optional(doc):
+    """
+    Check the optional attributes in a 'dmedia/file' document.
+    """
+    _check_dict(doc, 'doc')
+
+    # 'mime' like 'video/quicktime'
+    _check_if_exists(doc, ['mime'],
+        _can_be_none,
+        _check_str,
+    )
+
+    # 'media' like 'video'
+    _check_if_exists(doc, ['media'],
+        _can_be_none,
+        _check_str,
+        (_check_in, 'video', 'audio', 'image'),
+    )
+
+    # 'mtime' like 1234567890
+    _check_if_exists(doc, ['mtime'],
+        check_time
+    )
+
+    # 'atime' like 1234567890
+    _check_if_exists(doc, ['atime'],
+        check_time
+    )
+
+    # name like 'MVI_5899.MOV'
+    _check_if_exists(doc, ['name'],
+        _check_str,
+    )
+
+    # dir like 'DCIM/100EOS5D2'
+    _check_if_exists(doc, ['dir'],
+        _check_str,
+    )
+
+    # 'meta' like {'iso': 800}
+    _check_if_exists(doc, ['meta'],
+        _check_dict
+    )
+
+    # 'user' like {'title': 'cool sunset'}
+    _check_if_exists(doc, ['user'],
+        _check_dict
+    )
+
+    # 'tags' like {'burp': {'start': 6, 'end': 73}}
+    _check_if_exists(doc, ['tags'],
+        _check_dict
+    )
+
 
 def check_dmedia_store(doc):
     """
@@ -883,6 +1169,7 @@
 
     >>> doc = {
     ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+    ...     'ver': 0,
     ...     'type': 'dmedia/file',
     ...     'time': 1234567890,
     ...     'plugin': 'filestore',
@@ -896,6 +1183,7 @@
 
     >>> doc = {
     ...     '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX',
+    ...     'ver': 0,
     ...     'type': 'dmedia/file',
     ...     'time': 1234567890,
     ...     'dispatch': 'filestore',
@@ -925,7 +1213,7 @@
     key = 'copies'
     dc = doc[key]
     _check_int(dc, key)
-    _check_at_least(dc, 1, key)
+    _check_at_least(dc, key, 1)
 
 
 def random_id(random=None):
@@ -950,10 +1238,68 @@
     return b32encode(random)
 
 
-# This should probably be moved
+# FIXME: There is currently a recursive import issue with filestore, but
+# FileStore shouldn't deal with the store.json file anyway, so it should not
+# import `schema.create_store()`
+def tophash_personalization(file_size):
+    return ' '.join(['dmedia/tophash', str(file_size)]).encode('utf-8')
+
+
+def tophash(file_size, leaves):
+    """
+    Initialize hash for a file that is *file_size* bytes.
+    """
+    h = sha1(tophash_personalization(file_size))
+    h.update(leaves)
+    return b32encode(h.digest())
+
+
+def create_file(file_size, leaves, store, copies=0, ext=None, origin='user'):
+    """
+    Create a minimal 'dmedia/file' document.
+
+    :param file_size: an ``int``, the file size in bytes, eg ``20202333``
+    :param leaves: a ``list`` containing the content hash of each leaf
+    :param store: the ID of the store where this file is initially stored, eg
+        ``'Y4J3WQCMKV5GHATOCZZBHF4Y'``
+    :param copies: an ``int`` to represent the durability of the file on this
+        store; default is ``0``
+    :param ext: the file extension, eg ``'mov'``; default is ``None``
+    :param origin: the file's origin (for durability/reclamation purposes);
+        default is ``'user'``
+    """
+    ts = time.time()
+    packed = b''.join(leaves)
+    return {
+        '_id': tophash(file_size, packed),
+        '_attachments': {
+            'leaves': {
+                'data': b64encode(packed),
+                'content_type': 'application/octet-stream',
+            }
+        },
+        'ver': 0,
+        'type': 'dmedia/file',
+        'time': ts,
+        'bytes': file_size,
+        'ext': ext,
+        'origin': origin,
+        'stored': {
+            store: {
+                'copies': copies,
+                'time': ts,
+            }
+        }
+    }
+
+
 def create_store(base, machine_id, copies=1):
+    """
+    Create a 'dmedia/store' document.
+    """
     return {
         '_id': random_id(),
+        'ver': 0,
         'type': 'dmedia/store',
         'time': time.time(),
         'plugin': 'filestore',
@@ -961,3 +1307,52 @@
         'path': base,
         'machine_id': machine_id,
     }
+
+
+def create_batch(machine_id=None):
+    """
+    Create initial 'dmedia/batch' accounting document.
+    """
+    return {
+        '_id': random_id(),
+        'ver': 0,
+        'type': 'dmedia/batch',
+        'time': time.time(),
+        'machine_id': machine_id,
+        'imports': [],
+        'errors': [],
+        'stats': {
+            'considered': {'count': 0, 'bytes': 0},
+            'imported': {'count': 0, 'bytes': 0},
+            'skipped': {'count': 0, 'bytes': 0},
+            'empty': {'count': 0, 'bytes': 0},
+            'error': {'count': 0, 'bytes': 0},
+        }
+    }
+
+
+def create_import(base, batch_id=None, machine_id=None):
+    """
+    Create initial 'dmedia/import' accounting document.
+    """
+    return {
+        '_id': random_id(),
+        'ver': 0,
+        'type': 'dmedia/import',
+        'time': time.time(),
+        'batch_id': batch_id,
+        'machine_id': machine_id,
+        'base': base,
+        'log': {
+            'imported': [],
+            'skipped': [],
+            'empty': [],
+            'error': [],
+        },
+        'stats': {
+            'imported': {'count': 0, 'bytes': 0},
+            'skipped': {'count': 0, 'bytes': 0},
+            'empty': {'count': 0, 'bytes': 0},
+            'error': {'count': 0, 'bytes': 0},
+        }
+    }
 
=== modified file 'dmedia/tests/helpers.py'
--- dmedia/tests/helpers.py 2011-02-22 14:07:47 +0000
+++ dmedia/tests/helpers.py 2011-04-07 03:13:28 +0000
@@ -34,6 +34,7 @@
 from . import sample_mov, sample_thm
 
 mov_hash = 'TGX33XXWU3EVHEEY5J7NBOJGKBFXLEBK'
+mov_size = 20202333
 mov_leaves = [
     b32decode('IXJTSUCYYFECGSG6JIB2R77CAJVJK4W3'),
     b32decode('MA3IAHUOKXR4TRG7CWAPOO7U4WCV5WJ4'),
 
=== modified file 'dmedia/tests/test_extractor.py'
--- dmedia/tests/test_extractor.py 2011-01-26 05:14:20 +0000
+++ dmedia/tests/test_extractor.py 2011-04-07 03:13:28 +0000
@@ -411,21 +411,23 @@
             doc,
             dict(
                 ext='mov',
-                width=1920,
-                height=1080,
-                duration=3,
-                codec_video='H.264 / AVC',
-                codec_audio='Raw 16-bit PCM audio',
-                sample_rate=48000,
-                fps=30,
-                channels='Stereo',
-                iso=100,
-                shutter=u'1/100',
-                aperture=11.0,
-                lens=u'Canon EF 70-200mm f/4L IS',
-                camera=u'Canon EOS 5D Mark II',
-                focal_length=u'138.0 mm',
                 mtime=1287520994 + 68 / 100.0,
+                meta=dict(
+                    width=1920,
+                    height=1080,
+                    duration=3,
+                    codec_video='H.264 / AVC',
+                    codec_audio='Raw 16-bit PCM audio',
+                    sample_rate=48000,
+                    fps=30,
+                    channels='Stereo',
+                    iso=100,
+                    shutter=u'1/100',
+                    aperture=11.0,
+                    lens=u'Canon EF 70-200mm f/4L IS',
+                    camera=u'Canon EOS 5D Mark II',
+                    focal_length=u'138.0 mm',
+                ),
             )
         )
 
 
=== modified file 'dmedia/tests/test_importer.py'
--- dmedia/tests/test_importer.py 2011-03-28 12:38:29 +0000
+++ dmedia/tests/test_importer.py 2011-04-07 03:13:28 +0000
@@ -132,103 +132,6 @@
         )
         os.chmod(subdir, 0o700)
 
-    def test_create_batch(self):
-        f = importer.create_batch
-        machine_id = random_id()
-        doc = f(machine_id)
-
-        self.assertEqual(schema.check_dmedia(doc), None)
-        self.assertTrue(isinstance(doc, dict))
-        self.assertEqual(
-            set(doc),
-            set([
-                '_id',
-                'type',
-                'time',
-                'imports',
-                'errors',
-                'machine_id',
-                'stats',
-            ])
-        )
-        _id = doc['_id']
-        self.assertEqual(b32encode(b32decode(_id)), _id)
-        self.assertEqual(len(_id), 24)
-        self.assertEqual(doc['type'], 'dmedia/batch')
-        self.assertTrue(isinstance(doc['time'], (int, float)))
-        self.assertTrue(doc['time'] <= time.time())
-        self.assertEqual(doc['imports'], [])
-        self.assertEqual(doc['errors'], [])
-        self.assertEqual(doc['machine_id'], machine_id)
-        self.assertEqual(
-            doc['stats'],
-            {
-                'considered': {'count': 0, 'bytes': 0},
-                'imported': {'count': 0, 'bytes': 0},
-                'skipped': {'count': 0, 'bytes': 0},
-                'empty': {'count': 0, 'bytes': 0},
-                'error': {'count': 0, 'bytes': 0},
-            }
-        )
-
-    def test_create_import(self):
-        f = importer.create_import
-
-        base = '/media/EOS_DIGITAL'
-        batch_id = random_id()
-        machine_id = random_id()
-
-        keys = set([
-            '_id',
-            'type',
-            'time',
-            'base',
-            'batch_id',
-            'machine_id',
-            'log',
-            'stats',
-        ])
-
-        doc = f(base, batch_id=batch_id, machine_id=machine_id)
-        self.assertEqual(schema.check_dmedia(doc), None)
-        self.assertTrue(isinstance(doc, dict))
-        self.assertEqual(set(doc), keys)
-
-        _id = doc['_id']
-        self.assertEqual(b32encode(b32decode(_id)), _id)
-        self.assertEqual(len(_id), 24)
-
-        self.assertEqual(doc['type'], 'dmedia/import')
-        self.assertTrue(isinstance(doc['time'], (int, float)))
-        self.assertTrue(doc['time'] <= time.time())
-        self.assertEqual(doc['base'], base)
-        self.assertEqual(doc['batch_id'], batch_id)
-        self.assertEqual(doc['machine_id'], machine_id)
-
-        doc = f(base)
-        self.assertEqual(schema.check_dmedia(doc), None)
-        self.assertEqual(set(doc), keys)
-        self.assertEqual(doc['batch_id'], None)
-        self.assertEqual(doc['machine_id'], None)
-        self.assertEqual(
-            doc['log'],
-            {
-                'imported': [],
-                'skipped': [],
-                'empty': [],
-                'error': [],
-            }
-        )
-        self.assertEqual(
-            doc['stats'],
-            {
-                'imported': {'count': 0, 'bytes': 0},
-                'skipped': {'count': 0, 'bytes': 0},
-                'empty': {'count': 0, 'bytes': 0},
-                'error': {'count': 0, 'bytes': 0},
-            }
-        )
-
     def test_to_dbus_stats(self):
         f = importer.to_dbus_stats
         stats = dict(
@@ -406,6 +309,7 @@
             set([
                 '_id',
                 '_rev',
+                'ver',
                 'type',
                 'time',
                 'base',
@@ -507,6 +411,7 @@
                 '_id',
                 '_rev',
                 '_attachments',
+                'ver',
                 'type',
                 'time',
                 'bytes',
@@ -518,7 +423,8 @@
                 'mtime',
                 'name',
                 'dir',
-                'content_type',
+                'mime',
+                'media',
             ])
         )
         self.assertEqual(schema.check_dmedia_file(doc), None)
@@ -534,7 +440,7 @@
         self.assertEqual(doc['mtime'], path.getmtime(src1))
         self.assertEqual(doc['name'], 'MVI_5751.MOV')
         self.assertEqual(doc['dir'], 'DCIM/100EOS5D2')
-        self.assertEqual(doc['content_type'], 'video/quicktime')
+        self.assertEqual(doc['mime'], 'video/quicktime')
 
         # Test with duplicate
         (action, doc) = inst._import_file(src2)
@@ -834,7 +740,9 @@
         self.assertEqual(
             set(batch),
             set([
-                '_id', '_rev',
+                '_id',
+                '_rev',
+                'ver',
                 'type',
                 'time',
                 'imports',
 
=== modified file 'dmedia/tests/test_schema.py'
--- dmedia/tests/test_schema.py 2011-03-27 09:05:32 +0000
+++ dmedia/tests/test_schema.py 2011-04-07 03:13:28 +0000
@@ -24,10 +24,10 @@
24"""24"""
2525
26from unittest import TestCase26from unittest import TestCase
27from base64 import b32encode, b32decode27from base64 import b32encode, b32decode, b64encode
28from copy import deepcopy28from copy import deepcopy
29import time29import time
30from .helpers import raises, TempDir30from .helpers import raises, TempDir, mov_hash, mov_leaves, mov_size
31from dmedia.constants import TYPE_ERROR31from dmedia.constants import TYPE_ERROR
32from dmedia.schema import random_id32from dmedia.schema import random_id
33from dmedia import schema33from dmedia import schema
@@ -163,13 +163,24 @@
 
         good = {
             '_id': 'MZZG2ZDSOQVSW2TEMVZG643F',
+            'ver': 0,
             'type': 'dmedia/foo',
             'time': 1234567890,
             'foo': 'bar',
         }
         g = deepcopy(good)
         self.assertEqual(f(g), None)
-        for key in ['_id', 'type', 'time']:
+
+        # check with bad ver:
+        bad = deepcopy(good)
+        bad['ver'] = 0.0
+        e = raises(TypeError, f, bad)
+        self.assertEqual(str(e), TYPE_ERROR % ('ver', int, float, 0.0))
+        bad['ver'] = 1
+        e = raises(ValueError, f, bad)
+        self.assertEqual(str(e), "doc['ver'] must be 0; got 1")
+
+        for key in ['_id', 'ver', 'type', 'time']:
             bad = deepcopy(good)
             del bad[key]
             e = raises(ValueError, f, bad)
@@ -190,7 +201,7 @@
         e = raises(ValueError, f, bad)
         self.assertEqual(
             str(e),
-            'doc missing keys: %r' % ['_id', 'time', 'type']
+            'doc missing keys: %r' % ['_id', 'time', 'type', 'ver']
         )
 
     def test_check_stored(self):
@@ -275,11 +286,11 @@
                 TYPE_ERROR % (label, int, float, 2.0)
             )
             bad = deepcopy(good)
-            bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = 0
+            bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = -2
            e = raises(ValueError, f, bad)
             self.assertEqual(
                 str(e),
-                '%s must be >= 1; got 0' % label
+                '%s must be >= 0; got -2' % label
             )
 
             # Test with bad 'time' type/value:
@@ -420,6 +431,7 @@
         # Test with good doc:
         good = {
             '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
+            'ver': 0,
             'type': 'dmedia/file',
             'time': 1234567890,
             'bytes': 20202333,
@@ -517,7 +529,103 @@
         e = raises(ValueError, f, bad)
         self.assertEqual(
             str(e),
-            "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 1; got -1"
+            "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 0; got -1"
+        )
+
+    def test_check_dmedia_file_optional(self):
+        f = schema.check_dmedia_file_optional
+        f({})
+
+        # mime
+        self.assertIsNone(f({'mime': 'video/quicktime'}))
+        e = raises(TypeError, f, {'mime': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['mime']", basestring, int, 42)
+        )
+
+        # media
+        self.assertIsNone(f({'media': None}))
+        self.assertIsNone(f({'media': 'video'}))
+        self.assertIsNone(f({'media': 'audio'}))
+        self.assertIsNone(f({'media': 'image'}))
+        e = raises(TypeError, f, {'media': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['media']", basestring, int, 42)
+        )
+        e = raises(ValueError, f, {'media': 'stuff'})
+        self.assertEqual(
+            str(e),
+            "doc['media'] value 'stuff' not in ('video', 'audio', 'image')"
+        )
+
+        # mtime
+        self.assertIsNone(f({'mtime': 1302125982.946627}))
+        self.assertIsNone(f({'mtime': 1234567890}))
+        e = raises(TypeError, f, {'mtime': '1234567890'})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['mtime']", (int, float), str, '1234567890')
+        )
+        e = raises(ValueError, f, {'mtime': -1})
+        self.assertEqual(
+            str(e),
+            "doc['mtime'] must be >= 0; got -1"
+        )
+
+        # atime
+        self.assertIsNone(f({'atime': 1302125982.946627}))
+        self.assertIsNone(f({'atime': 1234567890}))
+        e = raises(TypeError, f, {'atime': '1234567890'})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['atime']", (int, float), str, '1234567890')
+        )
+        e = raises(ValueError, f, {'atime': -0.3})
+        self.assertEqual(
+            str(e),
+            "doc['atime'] must be >= 0; got -0.3"
+        )
+
+        # name
+        self.assertIsNone(f({'name': 'MVI_5899.MOV'}))
+        e = raises(TypeError, f, {'name': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['name']", basestring, int, 42)
+        )
+
+        # dir
+        self.assertIsNone(f({'dir': 'DCIM/100EOS5D2'}))
+        e = raises(TypeError, f, {'dir': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['dir']", basestring, int, 42)
+        )
+
+        # meta
+        self.assertIsNone(f({'meta': {'iso': 800}}))
+        e = raises(TypeError, f, {'meta': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['meta']", dict, int, 42)
+        )
+
+        # user
+        self.assertIsNone(f({'user': {'title': 'cool sunset'}}))
+        e = raises(TypeError, f, {'user': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['user']", dict, int, 42)
+        )
+
+        # tags
+        self.assertIsNone(f({'tags': {'burp': {'start': 6, 'end': 73}}}))
+        e = raises(TypeError, f, {'tags': 42})
+        self.assertEqual(
+            str(e),
+            TYPE_ERROR % ("doc['tags']", dict, int, 42)
         )
 
 
@@ -527,6 +635,7 @@
         # Test with good doc:
         good = {
             '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O',
+            'ver': 0,
             'type': 'dmedia/file',
             'time': 1234567890,
             'plugin': 'filestore',
@@ -593,6 +702,64 @@
         self.assertEqual(len(binary), 15)
         self.assertEqual(b32encode(binary), _id)
 
+    def test_create_file(self):
+        f = schema.create_file
+        store = schema.random_id()
+
+        d = f(mov_size, mov_leaves, store)
+        schema.check_dmedia_file(d)
+        self.assertEqual(
+            set(d),
+            set([
+                '_id',
+                '_attachments',
+                'ver',
+                'type',
+                'time',
+                'bytes',
+                'ext',
+                'origin',
+                'stored',
+            ])
+        )
+        self.assertEqual(d['_id'], mov_hash)
+        self.assertEqual(
+            d['_attachments'],
+            {
+                'leaves': {
+                    'data': b64encode(b''.join(mov_leaves)),
+                    'content_type': 'application/octet-stream',
+                }
+            }
+        )
+        self.assertEqual(d['ver'], 0)
+        self.assertEqual(d['type'], 'dmedia/file')
+        self.assertLessEqual(d['time'], time.time())
+        self.assertEqual(d['bytes'], mov_size)
+        self.assertIsNone(d['ext'])
+        self.assertEqual(d['origin'], 'user')
+
+        s = d['stored']
+        self.assertIsInstance(s, dict)
+        self.assertEqual(list(s), [store])
+        self.assertEqual(set(s[store]), set(['copies', 'time']))
+        self.assertEqual(s[store]['copies'], 0)
+        self.assertEqual(s[store]['time'], d['time'])
+
+        # Test overriding default kwarg values:
+        d = f(mov_size, mov_leaves, store, copies=2)
+        schema.check_dmedia_file(d)
+        self.assertEqual(d['stored'][store]['copies'], 2)
+
+        d = f(mov_size, mov_leaves, store, ext='mov')
+        schema.check_dmedia_file(d)
+        self.assertEqual(d['ext'], 'mov')
+
+        d = f(mov_size, mov_leaves, store, origin='proxy')
+        schema.check_dmedia_file(d)
+        self.assertEqual(d['origin'], 'proxy')
+
+
     def test_create_store(self):
         f = schema.create_store
         tmp = TempDir()
@@ -605,6 +772,7 @@
             set(doc),
             set([
                 '_id',
+                'ver',
                 'type',
                 'time',
                 'plugin',
@@ -625,6 +793,7 @@
             set(doc),
             set([
                 '_id',
+                'ver',
                 'type',
                 'time',
                 'plugin',
@@ -639,3 +808,102 @@
         self.assertEqual(doc['copies'], 3)
         self.assertEqual(doc['path'], base)
         self.assertEqual(doc['machine_id'], machine_id)
+
+    def test_create_batch(self):
+        f = schema.create_batch
+        machine_id = random_id()
+        doc = f(machine_id)
+
+        self.assertEqual(schema.check_dmedia(doc), None)
+        self.assertTrue(isinstance(doc, dict))
+        self.assertEqual(
+            set(doc),
+            set([
+                '_id',
+                'ver',
+                'type',
+                'time',
+                'imports',
+                'errors',
+                'machine_id',
+                'stats',
+            ])
+        )
+        _id = doc['_id']
+        self.assertEqual(b32encode(b32decode(_id)), _id)
+        self.assertEqual(len(_id), 24)
+        self.assertEqual(doc['type'], 'dmedia/batch')
+        self.assertTrue(isinstance(doc['time'], (int, float)))
+        self.assertTrue(doc['time'] <= time.time())
+        self.assertEqual(doc['imports'], [])
+        self.assertEqual(doc['errors'], [])
+        self.assertEqual(doc['machine_id'], machine_id)
+        self.assertEqual(
+            doc['stats'],
+            {
+                'considered': {'count': 0, 'bytes': 0},
+                'imported': {'count': 0, 'bytes': 0},
+                'skipped': {'count': 0, 'bytes': 0},
+                'empty': {'count': 0, 'bytes': 0},
+                'error': {'count': 0, 'bytes': 0},
+            }
+        )
+
+    def test_create_import(self):
+        f = schema.create_import
+
+        base = '/media/EOS_DIGITAL'
+        batch_id = random_id()
+        machine_id = random_id()
+
+        keys = set([
+            '_id',
+            'ver',
+            'type',
+            'time',
+            'base',
+            'batch_id',
+            'machine_id',
+            'log',
+            'stats',
+        ])
+
+        doc = f(base, batch_id=batch_id, machine_id=machine_id)
+        self.assertEqual(schema.check_dmedia(doc), None)
+        self.assertTrue(isinstance(doc, dict))
+        self.assertEqual(set(doc), keys)
+
+        _id = doc['_id']
+        self.assertEqual(b32encode(b32decode(_id)), _id)
+        self.assertEqual(len(_id), 24)
+
+        self.assertEqual(doc['type'], 'dmedia/import')
+        self.assertTrue(isinstance(doc['time'], (int, float)))
+        self.assertTrue(doc['time'] <= time.time())
+        self.assertEqual(doc['base'], base)
+        self.assertEqual(doc['batch_id'], batch_id)
+        self.assertEqual(doc['machine_id'], machine_id)
+
+        doc = f(base)
+        self.assertEqual(schema.check_dmedia(doc), None)
+        self.assertEqual(set(doc), keys)
+        self.assertEqual(doc['batch_id'], None)
+        self.assertEqual(doc['machine_id'], None)
+        self.assertEqual(
+            doc['log'],
+            {
+                'imported': [],
+                'skipped': [],
+                'empty': [],
+                'error': [],
+            }
+        )
+        self.assertEqual(
+            doc['stats'],
+            {
+                'imported': {'count': 0, 'bytes': 0},
+                'skipped': {'count': 0, 'bytes': 0},
+                'empty': {'count': 0, 'bytes': 0},
+                'error': {'count': 0, 'bytes': 0},
+            }
+        )
 
=== modified file 'dmedia/webui/data/browser.js'
--- dmedia/webui/data/browser.js 2011-03-31 09:28:58 +0000
+++ dmedia/webui/data/browser.js 2011-04-07 03:13:28 +0000
@@ -53,8 +53,8 @@
 }
 Browser.prototype = {
     run: function() {
-        var r = this.db.view('file', 'ext',
-            {key: 'mov', reduce: false, include_docs: true}
+        var r = this.db.view('user', 'video',
+            {include_docs: true, descending: true}
         );
         this.load(r.rows);
     },
@@ -74,7 +74,7 @@
     };
 
     var time = $el('div', {'class': 'time'});
-    time.textContent = doc.duration + 's';
+    time.textContent = doc.meta.duration + 's';
 
     div.appendChild(img);
     div.appendChild(time);
@@ -90,7 +90,7 @@
     names.forEach(function(n) {
         var el = $('meta.' + n);
         if (el) {
-            el.textContent = doc[n];
+            el.textContent = doc.meta[n];
         }
     });
 },
