Merge lp:~jderose/dmedia/schema-ver0 into lp:dmedia
- schema-ver0
- Merge into trunk
Proposed by
Jason Gerard DeRose
Status: | Merged |
---|---|
Merged at revision: | 181 |
Proposed branch: | lp:~jderose/dmedia/schema-ver0 |
Merge into: | lp:dmedia |
Diff against target: |
1570 lines (+859/-232) 11 files modified
dmedia/extractor.py (+6/-1) dmedia/filestore.py (+3/-0) dmedia/gtkui/__init__.py (+3/-1) dmedia/importer.py (+31/-75) dmedia/metastore.py (+103/-16) dmedia/schema.py (+409/-14) dmedia/tests/helpers.py (+1/-0) dmedia/tests/test_extractor.py (+16/-14) dmedia/tests/test_importer.py (+8/-100) dmedia/tests/test_schema.py (+275/-7) dmedia/webui/data/browser.js (+4/-4) |
To merge this branch: | bzr merge lp:~jderose/dmedia/schema-ver0 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jason Gerard DeRose | Approve | ||
Review via email: mp+56680@code.launchpad.net |
Commit message
Description of the change
There is still a bit of work to do finalizing how tags work, but the important part is already enforced in the schema: 'tags' is a dictionary. I'll open another bug for finishing the tags schema, but this is already a fairly large change, so I'm proposing this for merge before it gets out of hand.
Oh, and this includes an important unrelated bugfix: I goofed in the 0.5 transition from PyGTK to PyGI and am no longer calling GObject.threads_init().
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'dmedia/extractor.py' | |||
2 | --- dmedia/extractor.py 2011-01-28 14:07:33 +0000 | |||
3 | +++ dmedia/extractor.py 2011-04-07 03:13:28 +0000 | |||
4 | @@ -223,13 +223,18 @@ | |||
5 | 223 | def merge_metadata(src, doc): | 223 | def merge_metadata(src, doc): |
6 | 224 | ext = doc['ext'] | 224 | ext = doc['ext'] |
7 | 225 | attachments = doc.get('_attachments', {}) | 225 | attachments = doc.get('_attachments', {}) |
8 | 226 | meta = doc.get('meta', {}) | ||
9 | 226 | if ext in _extractors: | 227 | if ext in _extractors: |
10 | 227 | callback = _extractors[ext] | 228 | callback = _extractors[ext] |
11 | 228 | for (key, value) in callback(src, attachments): | 229 | for (key, value) in callback(src, attachments): |
13 | 229 | if key not in doc or key == 'mtime': | 230 | if key == 'mtime': |
14 | 230 | doc[key] = value | 231 | doc[key] = value |
15 | 232 | elif key not in meta: | ||
16 | 233 | meta[key] = value | ||
17 | 231 | if attachments and '_attachments' not in doc: | 234 | if attachments and '_attachments' not in doc: |
18 | 232 | doc['_attachments'] = attachments | 235 | doc['_attachments'] = attachments |
19 | 236 | if meta and 'meta' not in doc: | ||
20 | 237 | doc['meta'] = meta | ||
21 | 233 | 238 | ||
22 | 234 | 239 | ||
23 | 235 | def merge_exif(src, attachments): | 240 | def merge_exif(src, attachments): |
24 | 236 | 241 | ||
25 | === modified file 'dmedia/filestore.py' | |||
26 | --- dmedia/filestore.py 2011-02-27 03:47:55 +0000 | |||
27 | +++ dmedia/filestore.py 2011-04-07 03:13:28 +0000 | |||
28 | @@ -613,6 +613,9 @@ | |||
29 | 613 | raise ValueError('%s.base not a directory: %r' % | 613 | raise ValueError('%s.base not a directory: %r' % |
30 | 614 | (self.__class__.__name__, self.base) | 614 | (self.__class__.__name__, self.base) |
31 | 615 | ) | 615 | ) |
32 | 616 | |||
33 | 617 | # FIXME: This is too high-level for FileStore, should instead be dealt | ||
34 | 618 | # with by the core API entry point as FileStores are first initialized | ||
35 | 616 | self.record = path.join(self.base, 'store.json') | 619 | self.record = path.join(self.base, 'store.json') |
36 | 617 | try: | 620 | try: |
37 | 618 | fp = open(self.record, 'rb') | 621 | fp = open(self.record, 'rb') |
38 | 619 | 622 | ||
39 | === modified file 'dmedia/gtkui/__init__.py' | |||
40 | --- dmedia/gtkui/__init__.py 2011-03-27 08:01:30 +0000 | |||
41 | +++ dmedia/gtkui/__init__.py 2011-04-07 03:13:28 +0000 | |||
42 | @@ -26,4 +26,6 @@ | |||
43 | 26 | import gi | 26 | import gi |
44 | 27 | gi.require_version('Gtk', '2.0') | 27 | gi.require_version('Gtk', '2.0') |
45 | 28 | gi.require_version('WebKit', '1.0') | 28 | gi.require_version('WebKit', '1.0') |
47 | 29 | from gi.repository import Gtk, WebKit | 29 | from gi.repository import GObject, Gtk, WebKit |
48 | 30 | |||
49 | 31 | GObject.threads_init() | ||
50 | 30 | 32 | ||
51 | === modified file 'dmedia/importer.py' | |||
52 | --- dmedia/importer.py 2011-03-28 12:38:29 +0000 | |||
53 | +++ dmedia/importer.py 2011-04-07 03:13:28 +0000 | |||
54 | @@ -33,7 +33,7 @@ | |||
55 | 33 | 33 | ||
56 | 34 | import couchdb | 34 | import couchdb |
57 | 35 | 35 | ||
59 | 36 | from .schema import random_id | 36 | from .schema import random_id, create_file, create_batch, create_import |
60 | 37 | from .errors import DuplicateFile | 37 | from .errors import DuplicateFile |
61 | 38 | from .workers import ( | 38 | from .workers import ( |
62 | 39 | CouchWorker, CouchManager, register, isregistered, exception_name | 39 | CouchWorker, CouchManager, register, isregistered, exception_name |
63 | @@ -48,6 +48,24 @@ | |||
64 | 48 | log = logging.getLogger() | 48 | log = logging.getLogger() |
65 | 49 | 49 | ||
66 | 50 | 50 | ||
67 | 51 | # FIXME: This needs to be done with some real inspection of the file contents, | ||
68 | 52 | # but this is just a stopgap for the sake of getting the schema stable: | ||
69 | 53 | MEDIA_MAP = { | ||
70 | 54 | 'ogv': 'video', | ||
71 | 55 | 'mov': 'video', | ||
72 | 56 | 'avi': 'video', | ||
73 | 57 | |||
74 | 58 | 'oga': 'audio', | ||
75 | 59 | 'flac': 'audio', | ||
76 | 60 | 'wav': 'audio', | ||
77 | 61 | 'mp3': 'audio', | ||
78 | 62 | |||
79 | 63 | 'jpg': 'image', | ||
80 | 64 | 'cr2': 'image', | ||
81 | 65 | 'png': 'image', | ||
82 | 66 | } | ||
83 | 67 | |||
84 | 68 | |||
85 | 51 | def normalize_ext(name): | 69 | def normalize_ext(name): |
86 | 52 | """ | 70 | """ |
87 | 53 | Return (root, ext) from *name* where extension is normalized to lower-case. | 71 | Return (root, ext) from *name* where extension is normalized to lower-case. |
88 | @@ -143,53 +161,6 @@ | |||
89 | 143 | yield tup | 161 | yield tup |
90 | 144 | 162 | ||
91 | 145 | 163 | ||
92 | 146 | def create_batch(machine_id=None): | ||
93 | 147 | """ | ||
94 | 148 | Create initial 'dmedia/batch' accounting document. | ||
95 | 149 | """ | ||
96 | 150 | return { | ||
97 | 151 | '_id': random_id(), | ||
98 | 152 | 'type': 'dmedia/batch', | ||
99 | 153 | 'time': time.time(), | ||
100 | 154 | 'machine_id': machine_id, | ||
101 | 155 | 'imports': [], | ||
102 | 156 | 'errors': [], | ||
103 | 157 | 'stats': { | ||
104 | 158 | 'considered': {'count': 0, 'bytes': 0}, | ||
105 | 159 | 'imported': {'count': 0, 'bytes': 0}, | ||
106 | 160 | 'skipped': {'count': 0, 'bytes': 0}, | ||
107 | 161 | 'empty': {'count': 0, 'bytes': 0}, | ||
108 | 162 | 'error': {'count': 0, 'bytes': 0}, | ||
109 | 163 | } | ||
110 | 164 | } | ||
111 | 165 | |||
112 | 166 | |||
113 | 167 | def create_import(base, batch_id=None, machine_id=None): | ||
114 | 168 | """ | ||
115 | 169 | Create initial 'dmedia/import' accounting document. | ||
116 | 170 | """ | ||
117 | 171 | return { | ||
118 | 172 | '_id': random_id(), | ||
119 | 173 | 'type': 'dmedia/import', | ||
120 | 174 | 'time': time.time(), | ||
121 | 175 | 'batch_id': batch_id, | ||
122 | 176 | 'machine_id': machine_id, | ||
123 | 177 | 'base': base, | ||
124 | 178 | 'log': { | ||
125 | 179 | 'imported': [], | ||
126 | 180 | 'skipped': [], | ||
127 | 181 | 'empty': [], | ||
128 | 182 | 'error': [], | ||
129 | 183 | }, | ||
130 | 184 | 'stats': { | ||
131 | 185 | 'imported': {'count': 0, 'bytes': 0}, | ||
132 | 186 | 'skipped': {'count': 0, 'bytes': 0}, | ||
133 | 187 | 'empty': {'count': 0, 'bytes': 0}, | ||
134 | 188 | 'error': {'count': 0, 'bytes': 0}, | ||
135 | 189 | } | ||
136 | 190 | } | ||
137 | 191 | |||
138 | 192 | |||
139 | 193 | class ImportWorker(CouchWorker): | 164 | class ImportWorker(CouchWorker): |
140 | 194 | def __init__(self, env, q, key, args): | 165 | def __init__(self, env, q, key, args): |
141 | 195 | super(ImportWorker, self).__init__(env, q, key, args) | 166 | super(ImportWorker, self).__init__(env, q, key, args) |
142 | @@ -308,34 +279,19 @@ | |||
143 | 308 | except couchdb.ResourceNotFound as e: | 279 | except couchdb.ResourceNotFound as e: |
144 | 309 | pass | 280 | pass |
145 | 310 | 281 | ||
172 | 311 | ts = time.time() | 282 | doc = create_file(stat.st_size, leaves, self.filestore._id, |
173 | 312 | doc = { | 283 | copies=1, ext=ext |
174 | 313 | '_id': chash, | 284 | ) |
175 | 314 | '_attachments': { | 285 | assert doc['_id'] == chash |
176 | 315 | 'leaves': { | 286 | doc.update( |
177 | 316 | 'data': b64encode(pack_leaves(leaves)), | 287 | import_id=self._id, |
178 | 317 | 'content_type': 'application/octet-stream', | 288 | mtime=stat.st_mtime, |
179 | 318 | } | 289 | name=name, |
180 | 319 | }, | 290 | dir=path.relpath(path.dirname(src), self.base), |
181 | 320 | 'type': 'dmedia/file', | 291 | ) |
156 | 321 | 'time': ts, | ||
157 | 322 | 'bytes': stat.st_size, | ||
158 | 323 | 'ext': ext, | ||
159 | 324 | 'origin': 'user', | ||
160 | 325 | 'stored': { | ||
161 | 326 | self.filestore._id: { | ||
162 | 327 | 'copies': 1, | ||
163 | 328 | 'time': ts, | ||
164 | 329 | }, | ||
165 | 330 | }, | ||
166 | 331 | |||
167 | 332 | 'import_id': self._id, | ||
168 | 333 | 'mtime': stat.st_mtime, | ||
169 | 334 | 'name': name, | ||
170 | 335 | 'dir': path.relpath(path.dirname(src), self.base), | ||
171 | 336 | } | ||
182 | 337 | if ext: | 292 | if ext: |
184 | 338 | doc['content_type'] = mimetypes.types_map.get('.' + ext) | 293 | doc['mime'] = mimetypes.types_map.get('.' + ext) |
185 | 294 | doc['media'] = MEDIA_MAP.get(ext) | ||
186 | 339 | if self.extract: | 295 | if self.extract: |
187 | 340 | merge_metadata(src, doc) | 296 | merge_metadata(src, doc) |
188 | 341 | (_id, _rev) = self.db.save(doc) | 297 | (_id, _rev) = self.db.save(doc) |
189 | 342 | 298 | ||
190 | === modified file 'dmedia/metastore.py' | |||
191 | --- dmedia/metastore.py 2011-03-27 09:05:32 +0000 | |||
192 | +++ dmedia/metastore.py 2011-04-07 03:13:28 +0000 | |||
193 | @@ -62,6 +62,19 @@ | |||
194 | 62 | } | 62 | } |
195 | 63 | """ | 63 | """ |
196 | 64 | 64 | ||
197 | 65 | # views in the 'file' design only index docs for which doc.type == 'dmedia/file' | ||
198 | 66 | file_stored = """ | ||
199 | 67 | // Get list of all files on a given store, total bytes on that store | ||
200 | 68 | function(doc) { | ||
201 | 69 | if (doc.type == 'dmedia/file') { | ||
202 | 70 | var key; | ||
203 | 71 | for (key in doc.stored) { | ||
204 | 72 | emit(key, doc.bytes); | ||
205 | 73 | } | ||
206 | 74 | } | ||
207 | 75 | } | ||
208 | 76 | """ | ||
209 | 77 | |||
210 | 65 | file_bytes = """ | 78 | file_bytes = """ |
211 | 66 | function(doc) { | 79 | function(doc) { |
212 | 67 | if (doc.type == 'dmedia/file' && typeof(doc.bytes) == 'number') { | 80 | if (doc.type == 'dmedia/file' && typeof(doc.bytes) == 'number') { |
213 | @@ -78,10 +91,10 @@ | |||
214 | 78 | } | 91 | } |
215 | 79 | """ | 92 | """ |
216 | 80 | 93 | ||
218 | 81 | file_content_type = """ | 94 | file_mime = """ |
219 | 82 | function(doc) { | 95 | function(doc) { |
220 | 83 | if (doc.type == 'dmedia/file') { | 96 | if (doc.type == 'dmedia/file') { |
222 | 84 | emit(doc.content_type, null); | 97 | emit(doc.mime, null); |
223 | 85 | } | 98 | } |
224 | 86 | } | 99 | } |
225 | 87 | """ | 100 | """ |
226 | @@ -94,16 +107,6 @@ | |||
227 | 94 | } | 107 | } |
228 | 95 | """ | 108 | """ |
229 | 96 | 109 | ||
230 | 97 | file_tags = """ | ||
231 | 98 | function(doc) { | ||
232 | 99 | if (doc.type == 'dmedia/file' && doc.tags) { | ||
233 | 100 | doc.tags.forEach(function(tag) { | ||
234 | 101 | emit(tag, null); | ||
235 | 102 | }); | ||
236 | 103 | } | ||
237 | 104 | } | ||
238 | 105 | """ | ||
239 | 106 | |||
240 | 107 | file_import_id = """ | 110 | file_import_id = """ |
241 | 108 | function(doc) { | 111 | function(doc) { |
242 | 109 | if (doc.type == 'dmedia/file' && doc.import_id) { | 112 | if (doc.type == 'dmedia/file' && doc.import_id) { |
243 | @@ -112,13 +115,87 @@ | |||
244 | 112 | } | 115 | } |
245 | 113 | """ | 116 | """ |
246 | 114 | 117 | ||
247 | 118 | # views in the 'user' design only index docs for which doc.type == 'dmedia/file' | ||
248 | 119 | # and doc.origin == 'user' | ||
249 | 120 | user_copies = """ | ||
250 | 121 | // Durability of user's personal files | ||
251 | 122 | function(doc) { | ||
252 | 123 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
253 | 124 | var copies = 0; | ||
254 | 125 | var key; | ||
255 | 126 | for (key in doc.stored) { | ||
256 | 127 | copies += doc.stored[key].copies; | ||
257 | 128 | } | ||
258 | 129 | emit(copies, null); | ||
259 | 130 | } | ||
260 | 131 | } | ||
261 | 132 | """ | ||
262 | 133 | |||
263 | 134 | user_media = """ | ||
264 | 135 | function(doc) { | ||
265 | 136 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
266 | 137 | emit(doc.media, null); | ||
267 | 138 | } | ||
268 | 139 | } | ||
269 | 140 | """ | ||
270 | 141 | |||
271 | 142 | user_tags = """ | ||
272 | 143 | function(doc) { | ||
273 | 144 | if (doc.type == 'dmedia/file' && doc.origin == 'user' && doc.tags) { | ||
274 | 145 | var key; | ||
275 | 146 | for (key in doc.tags) { | ||
276 | 147 | emit(key, doc.tags[key]); | ||
277 | 148 | } | ||
278 | 149 | } | ||
279 | 150 | } | ||
280 | 151 | """ | ||
281 | 152 | |||
282 | 153 | user_all = """ | ||
283 | 154 | function(doc) { | ||
284 | 155 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
285 | 156 | emit(doc.mtime, null); | ||
286 | 157 | } | ||
287 | 158 | } | ||
288 | 159 | """ | ||
289 | 160 | |||
290 | 161 | user_video = """ | ||
291 | 162 | function(doc) { | ||
292 | 163 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
293 | 164 | if (doc.media == 'video') { | ||
294 | 165 | emit(doc.mtime, null); | ||
295 | 166 | } | ||
296 | 167 | } | ||
297 | 168 | } | ||
298 | 169 | """ | ||
299 | 170 | |||
300 | 171 | user_image = """ | ||
301 | 172 | function(doc) { | ||
302 | 173 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
303 | 174 | if (doc.media == 'image') { | ||
304 | 175 | emit(doc.mtime, null); | ||
305 | 176 | } | ||
306 | 177 | } | ||
307 | 178 | } | ||
308 | 179 | """ | ||
309 | 180 | |||
310 | 181 | user_audio = """ | ||
311 | 182 | function(doc) { | ||
312 | 183 | if (doc.type == 'dmedia/file' && doc.origin == 'user') { | ||
313 | 184 | if (doc.media == 'audio') { | ||
314 | 185 | emit(doc.mtime, null); | ||
315 | 186 | } | ||
316 | 187 | } | ||
317 | 188 | } | ||
318 | 189 | """ | ||
319 | 190 | |||
320 | 191 | |||
321 | 115 | def build_design_doc(design, views): | 192 | def build_design_doc(design, views): |
322 | 116 | _id = '_design/' + design | 193 | _id = '_design/' + design |
323 | 117 | d = {} | 194 | d = {} |
324 | 118 | for (view, map_, reduce_) in views: | 195 | for (view, map_, reduce_) in views: |
326 | 119 | d[view] = {'map': map_} | 196 | d[view] = {'map': map_.strip()} |
327 | 120 | if reduce_ is not None: | 197 | if reduce_ is not None: |
329 | 121 | d[view]['reduce'] = reduce_ | 198 | d[view]['reduce'] = reduce_.strip() |
330 | 122 | doc = { | 199 | doc = { |
331 | 123 | '_id': _id, | 200 | '_id': _id, |
332 | 124 | 'language': 'javascript', | 201 | 'language': 'javascript', |
333 | @@ -168,12 +245,22 @@ | |||
334 | 168 | )), | 245 | )), |
335 | 169 | 246 | ||
336 | 170 | ('file', ( | 247 | ('file', ( |
337 | 248 | ('stored', file_stored, _sum), | ||
338 | 171 | ('import_id', file_import_id, None), | 249 | ('import_id', file_import_id, None), |
339 | 172 | ('bytes', file_bytes, _sum), | 250 | ('bytes', file_bytes, _sum), |
340 | 173 | ('ext', file_ext, _count), | 251 | ('ext', file_ext, _count), |
342 | 174 | ('content_type', file_content_type, _count), | 252 | ('mime', file_mime, _count), |
343 | 175 | ('mtime', file_mtime, None), | 253 | ('mtime', file_mtime, None), |
345 | 176 | ('tags', file_tags, _count), | 254 | )), |
346 | 255 | |||
347 | 256 | ('user', ( | ||
348 | 257 | ('copies', user_copies, None), | ||
349 | 258 | ('media', user_media, _count), | ||
350 | 259 | ('tags', user_tags, _count), | ||
351 | 260 | ('all', user_all, None), | ||
352 | 261 | ('video', user_video, None), | ||
353 | 262 | ('image', user_image, None), | ||
354 | 263 | ('audio', user_audio, None), | ||
355 | 177 | )), | 264 | )), |
356 | 178 | ) | 265 | ) |
357 | 179 | 266 | ||
358 | 180 | 267 | ||
359 | === modified file 'dmedia/schema.py' | |||
360 | --- dmedia/schema.py 2011-03-27 09:05:32 +0000 | |||
361 | +++ dmedia/schema.py 2011-04-07 03:13:28 +0000 | |||
362 | @@ -33,6 +33,7 @@ | |||
363 | 33 | 33 | ||
364 | 34 | >>> good = { | 34 | >>> good = { |
365 | 35 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 35 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
366 | 36 | ... 'ver': 0, | ||
367 | 36 | ... 'type': 'dmedia/foo', | 37 | ... 'type': 'dmedia/foo', |
368 | 37 | ... 'time': 1234567890, | 38 | ... 'time': 1234567890, |
369 | 38 | ... } | 39 | ... } |
370 | @@ -40,6 +41,7 @@ | |||
371 | 40 | >>> check_dmedia(good) # Returns None | 41 | >>> check_dmedia(good) # Returns None |
372 | 41 | >>> bad = { | 42 | >>> bad = { |
373 | 42 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 43 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
374 | 44 | ... 'ver': 0, | ||
375 | 43 | ... 'kind': 'dmedia/foo', | 45 | ... 'kind': 'dmedia/foo', |
376 | 44 | ... 'timestamp': 1234567890, | 46 | ... 'timestamp': 1234567890, |
377 | 45 | ... } | 47 | ... } |
378 | @@ -222,6 +224,7 @@ | |||
379 | 222 | 224 | ||
380 | 223 | >>> doc = { | 225 | >>> doc = { |
381 | 224 | ... '_id': 'MZZG2ZDSOQVSW2TEMVZG643F', | 226 | ... '_id': 'MZZG2ZDSOQVSW2TEMVZG643F', |
382 | 227 | ... 'ver': 0, | ||
383 | 225 | ... 'type': 'dmedia/batch', | 228 | ... 'type': 'dmedia/batch', |
384 | 226 | ... 'time': 1234567890, | 229 | ... 'time': 1234567890, |
385 | 227 | ... } | 230 | ... } |
386 | @@ -308,9 +311,11 @@ | |||
387 | 308 | from __future__ import print_function | 311 | from __future__ import print_function |
388 | 309 | 312 | ||
389 | 310 | import os | 313 | import os |
391 | 311 | from base64 import b32encode, b32decode | 314 | from hashlib import sha1 |
392 | 315 | from base64 import b32encode, b32decode, b64encode | ||
393 | 312 | import re | 316 | import re |
394 | 313 | import time | 317 | import time |
395 | 318 | |||
396 | 314 | from .constants import TYPE_ERROR, EXT_PAT | 319 | from .constants import TYPE_ERROR, EXT_PAT |
397 | 315 | 320 | ||
398 | 316 | # Some private helper functions that don't directly define any schema. | 321 | # Some private helper functions that don't directly define any schema. |
399 | @@ -325,6 +330,201 @@ | |||
400 | 325 | # | 330 | # |
401 | 326 | # That is all. | 331 | # That is all. |
402 | 327 | 332 | ||
403 | 333 | |||
404 | 334 | # FIXME: These functions are a step toward making the checks more concise and | ||
405 | 335 | # the error messages consistent and even more helpful. However, these functions | ||
406 | 336 | # aren't used much yet... but all the schema checks should be ported to these | ||
407 | 337 | # functions eventually. | ||
408 | 338 | def _label(path): | ||
409 | 339 | """ | ||
410 | 340 | Create a helpful debugging label to indicate the attribute in question. | ||
411 | 341 | |||
412 | 342 | For example: | ||
413 | 343 | |||
414 | 344 | >>> _label([]) | ||
415 | 345 | 'doc' | ||
416 | 346 | >>> _label(['log']) | ||
417 | 347 | "doc['log']" | ||
418 | 348 | >>> _label(['log', 'considered', 2, 'src']) | ||
419 | 349 | "doc['log']['considered'][2]['src']" | ||
420 | 350 | |||
421 | 351 | |||
422 | 352 | See also `_value()`. | ||
423 | 353 | """ | ||
424 | 354 | return 'doc' + ''.join('[{!r}]'.format(key) for key in path) | ||
425 | 355 | |||
426 | 356 | |||
427 | 357 | def _value(doc, path): | ||
428 | 358 | """ | ||
429 | 359 | Retrieve value from *doc* by traversing *path*. | ||
430 | 360 | |||
431 | 361 | For example: | ||
432 | 362 | |||
433 | 363 | >>> doc = {'log': {'considered': [None, None, {'src': 'hello'}, None]}} | ||
434 | 364 | >>> _value(doc, []) | ||
435 | 365 | {'log': {'considered': [None, None, {'src': 'hello'}, None]}} | ||
436 | 366 | >>> _value(doc, ['log']) | ||
437 | 367 | {'considered': [None, None, {'src': 'hello'}, None]} | ||
438 | 368 | >>> _value(doc, ['log', 'considered', 2, 'src']) | ||
439 | 369 | 'hello' | ||
440 | 370 | |||
441 | 371 | |||
442 | 372 | Or if you try to retrieve something that doesn't exist: | ||
443 | 373 | |||
444 | 374 | >>> _value(doc, ['log', 'considered', 7]) | ||
445 | 375 | Traceback (most recent call last): | ||
446 | 376 | ... | ||
447 | 377 | ValueError: doc['log']['considered'][7] does not exists | ||
448 | 378 | |||
449 | 379 | |||
450 | 380 | Or if a key/index is missing higher up in the path: | ||
451 | 381 | |||
452 | 382 | >>> _value(doc, ['dog', 'considered', 7]) | ||
453 | 383 | Traceback (most recent call last): | ||
454 | 384 | ... | ||
455 | 385 | ValueError: doc['dog'] does not exists | ||
456 | 386 | |||
457 | 387 | |||
458 | 388 | See also `_label()`. | ||
459 | 389 | """ | ||
460 | 390 | value = doc | ||
461 | 391 | p = [] | ||
462 | 392 | for key in path: | ||
463 | 393 | p.append(key) | ||
464 | 394 | try: | ||
465 | 395 | value = value[key] | ||
466 | 396 | except (KeyError, IndexError): | ||
467 | 397 | raise ValueError( | ||
468 | 398 | '{} does not exists'.format(_label(p)) | ||
469 | 399 | ) | ||
470 | 400 | return value | ||
471 | 401 | |||
472 | 402 | |||
473 | 403 | def _exists(doc, path): | ||
474 | 404 | """ | ||
475 | 405 | Return ``True`` if the end of *path* exists. | ||
476 | 406 | |||
477 | 407 | For example: | ||
478 | 408 | |||
479 | 409 | >>> doc = {'foo': {'hello': 'world'}, 'bar': ['hello', 'naughty', 'nurse']} | ||
480 | 410 | >>> _exists(doc, ['foo', 'hello']) | ||
481 | 411 | True | ||
482 | 412 | >>> _exists(doc, ['foo', 'sup']) | ||
483 | 413 | False | ||
484 | 414 | >>> _exists(doc, ['bar', 2]) | ||
485 | 415 | True | ||
486 | 416 | >>> _exists(doc, ['bar', 3]) | ||
487 | 417 | False | ||
488 | 418 | |||
489 | 419 | |||
490 | 420 | Or if a key/index is missing higher up the path: | ||
491 | 421 | |||
492 | 422 | >>> _exists(doc, ['stuff', 'junk']) | ||
493 | 423 | Traceback (most recent call last): | ||
494 | 424 | ... | ||
495 | 425 | ValueError: doc['stuff'] does not exists | ||
496 | 426 | |||
497 | 427 | |||
498 | 428 | See also `_check_if_exists()`. | ||
499 | 429 | """ | ||
500 | 430 | if len(path) == 0: | ||
501 | 431 | return True | ||
502 | 432 | base = _value(doc, path[:-1]) | ||
503 | 433 | key = path[-1] | ||
504 | 434 | try: | ||
505 | 435 | value = base[key] | ||
506 | 436 | return True | ||
507 | 437 | except (KeyError, IndexError): | ||
508 | 438 | return False | ||
509 | 439 | |||
510 | 440 | |||
511 | 441 | def _check(doc, path, *checks): | ||
512 | 442 | """ | ||
513 | 443 | Run a series of *checks* on the value in *doc* addressed by *path*. | ||
514 | 444 | |||
515 | 445 | For example: | ||
516 | 446 | |||
517 | 447 | >>> doc = {'foo': [None, {'bar': 'aye'}, None]} | ||
518 | 448 | >>> _check(doc, ['foo', 1, 'bar'], | ||
519 | 449 | ... _check_str, | ||
520 | 450 | ... (_check_in, 'bee', 'sea'), | ||
521 | 451 | ... ) | ||
522 | 452 | ... | ||
523 | 453 | Traceback (most recent call last): | ||
524 | 454 | ... | ||
525 | 455 | ValueError: doc['foo'][1]['bar'] value 'aye' not in ('bee', 'sea') | ||
526 | 456 | |||
527 | 457 | |||
528 | 458 | Or if a value is missing: | ||
529 | 459 | |||
530 | 460 | >>> _check(doc, ['foo', 3], | ||
531 | 461 | ... _can_be_none, | ||
532 | 462 | ... ) | ||
533 | 463 | ... | ||
534 | 464 | Traceback (most recent call last): | ||
535 | 465 | ... | ||
536 | 466 | ValueError: doc['foo'][3] does not exists | ||
537 | 467 | |||
538 | 468 | |||
539 | 469 | See also `_check_if_exists()`. | ||
540 | 470 | """ | ||
541 | 471 | value = _value(doc, path) | ||
542 | 472 | label = _label(path) | ||
543 | 473 | for c in checks: | ||
544 | 474 | if isinstance(c, tuple): | ||
545 | 475 | (c, args) = (c[0], c[1:]) | ||
546 | 476 | else: | ||
547 | 477 | args = tuple() | ||
548 | 478 | if c(value, label, *args) is True: | ||
549 | 479 | break | ||
550 | 480 | |||
551 | 481 | |||
552 | 482 | def _check_if_exists(doc, path, *checks): | ||
553 | 483 | """ | ||
554 | 484 | Run *checks* only if value at *path* exists. | ||
555 | 485 | |||
556 | 486 | For example: | ||
557 | 487 | |||
558 | 488 | >>> doc = {'name': 17} | ||
559 | 489 | >>> _check_if_exists(doc, ['dir'], _check_str) | ||
560 | 490 | >>> _check_if_exists(doc, ['name'], _check_str) | ||
561 | 491 | Traceback (most recent call last): | ||
562 | 492 | ... | ||
563 | 493 | TypeError: doc['name']: need a <type 'basestring'>; got a <type 'int'>: 17 | ||
564 | 494 | |||
565 | 495 | |||
566 | 496 | See also `_check()` and `_exists()`. | ||
567 | 497 | """ | ||
568 | 498 | if _exists(doc, path): | ||
569 | 499 | _check(doc, path, *checks) | ||
570 | 500 | |||
571 | 501 | |||
572 | 502 | def _can_be_none(value, label): | ||
573 | 503 | """ | ||
574 | 504 | Stop execution of check if *value* is ``None``. | ||
575 | 505 | |||
576 | 506 | `_check()` will abort upon a check function returning ``True``. | ||
577 | 507 | |||
578 | 508 | For example, here a ``TypeError`` is raised: | ||
579 | 509 | |||
580 | 510 | >>> doc = {'ext': None} | ||
581 | 511 | >>> _check(doc, ['ext'], _check_str) | ||
582 | 512 | Traceback (most recent call last): | ||
583 | 513 | ... | ||
584 | 514 | TypeError: doc['ext']: need a <type 'basestring'>; got a <type 'NoneType'>: None | ||
585 | 515 | |||
586 | 516 | |||
587 | 517 | But here it is not: | ||
588 | 518 | |||
589 | 519 | >>> _check(doc, ['ext'], _can_be_none, _check_str) | ||
590 | 520 | |||
591 | 521 | """ | ||
592 | 522 | if value is None: | ||
593 | 523 | return True | ||
594 | 524 | |||
595 | 525 | # /FIXME new helper functions | ||
596 | 526 | |||
597 | 527 | |||
598 | 328 | def _check_dict(value, label): | 528 | def _check_dict(value, label): |
599 | 329 | """ | 529 | """ |
600 | 330 | Verify that *value* is a ``dict`` instance. | 530 | Verify that *value* is a ``dict`` instance. |
601 | @@ -387,13 +587,13 @@ | |||
602 | 387 | if not isinstance(value, (int, float)): | 587 | if not isinstance(value, (int, float)): |
603 | 388 | raise TypeError(TYPE_ERROR % (label, (int, float), type(value), value)) | 588 | raise TypeError(TYPE_ERROR % (label, (int, float), type(value), value)) |
604 | 389 | 589 | ||
606 | 390 | def _check_at_least(value, minvalue, label): | 590 | def _check_at_least(value, label, minvalue=0): |
607 | 391 | """ | 591 | """ |
608 | 392 | Verify that *value* is greater than or equal to *minvalue*. | 592 | Verify that *value* is greater than or equal to *minvalue*. |
609 | 393 | 593 | ||
610 | 394 | For example: | 594 | For example: |
611 | 395 | 595 | ||
613 | 396 | >>> _check_at_least(0, 1, 'bytes') | 596 | >>> _check_at_least(0, 'bytes', 1) |
614 | 397 | Traceback (most recent call last): | 597 | Traceback (most recent call last): |
615 | 398 | ... | 598 | ... |
616 | 399 | ValueError: bytes must be >= 1; got 0 | 599 | ValueError: bytes must be >= 1; got 0 |
617 | @@ -477,6 +677,25 @@ | |||
618 | 477 | ) | 677 | ) |
619 | 478 | 678 | ||
620 | 479 | 679 | ||
621 | 680 | def _check_in(value, label, *possible): | ||
622 | 681 | """ | ||
623 | 682 | Check that *value* is one of *possible*. | ||
624 | 683 | |||
625 | 684 | For example: | ||
626 | 685 | |||
627 | 686 | >>> _check_in('foo', "doc['media']", 'video', 'audio', 'image') | ||
628 | 687 | Traceback (most recent call last): | ||
629 | 688 | ... | ||
630 | 689 | ValueError: doc['media'] value 'foo' not in ('video', 'audio', 'image') | ||
631 | 690 | |||
632 | 691 | """ | ||
633 | 692 | if value not in possible: | ||
634 | 693 | raise ValueError( | ||
635 | 694 | '{} value {!r} not in {!r}'.format(label, value, possible) | ||
636 | 695 | ) | ||
637 | 696 | |||
638 | 697 | |||
639 | 698 | |||
640 | 480 | # The schema defining functions: | 699 | # The schema defining functions: |
641 | 481 | 700 | ||
642 | 482 | def check_base32(value, label='_id'): | 701 | def check_base32(value, label='_id'): |
643 | @@ -579,7 +798,7 @@ | |||
644 | 579 | 798 | ||
645 | 580 | """ | 799 | """ |
646 | 581 | _check_int_float(value, label) | 800 | _check_int_float(value, label) |
648 | 582 | _check_at_least(value, 0, label) | 801 | _check_at_least(value, label, 0) |
649 | 583 | 802 | ||
650 | 584 | 803 | ||
651 | 585 | def check_dmedia(doc): | 804 | def check_dmedia(doc): |
652 | @@ -591,14 +810,17 @@ | |||
653 | 591 | 810 | ||
654 | 592 | 1. have '_id' that passes `check_base32()` | 811 | 1. have '_id' that passes `check_base32()` |
655 | 593 | 812 | ||
659 | 594 | 2. have 'type' that passes `check_type()` | 813 | 2. have a 'ver' equal to ``0`` |
660 | 595 | 814 | ||
661 | 596 | 3. have 'time' that passes `check_time()` | 815 | 3. have 'type' that passes `check_type()` |
662 | 816 | |||
663 | 817 | 4. have 'time' that passes `check_time()` | ||
664 | 597 | 818 | ||
665 | 598 | For example, a conforming value: | 819 | For example, a conforming value: |
666 | 599 | 820 | ||
667 | 600 | >>> doc = { | 821 | >>> doc = { |
668 | 601 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 822 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
669 | 823 | ... 'ver': 0, | ||
670 | 602 | ... 'type': 'dmedia/file', | 824 | ... 'type': 'dmedia/file', |
671 | 603 | ... 'time': 1234567890, | 825 | ... 'time': 1234567890, |
672 | 604 | ... } | 826 | ... } |
673 | @@ -610,6 +832,7 @@ | |||
674 | 610 | 832 | ||
675 | 611 | >>> doc = { | 833 | >>> doc = { |
676 | 612 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 834 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
677 | 835 | ... 'ver': 0, | ||
678 | 613 | ... 'kind': 'dmedia/file', | 836 | ... 'kind': 'dmedia/file', |
679 | 614 | ... 'timestamp': 1234567890, | 837 | ... 'timestamp': 1234567890, |
680 | 615 | ... } | 838 | ... } |
681 | @@ -620,8 +843,13 @@ | |||
682 | 620 | ValueError: doc missing keys: ['time', 'type'] | 843 | ValueError: doc missing keys: ['time', 'type'] |
683 | 621 | 844 | ||
684 | 622 | """ | 845 | """ |
686 | 623 | _check_required(doc, ['_id', 'type', 'time']) | 846 | _check_required(doc, ['_id', 'ver', 'type', 'time']) |
687 | 624 | check_base32(doc['_id']) | 847 | check_base32(doc['_id']) |
688 | 848 | _check_int(doc['ver'], 'ver') | ||
689 | 849 | if doc['ver'] != 0: | ||
690 | 850 | raise ValueError( | ||
691 | 851 | "doc['ver'] must be 0; got {!r}".format(doc['ver']) | ||
692 | 852 | ) | ||
693 | 625 | check_type(doc['type']) | 853 | check_type(doc['type']) |
694 | 626 | check_time(doc['time']) | 854 | check_time(doc['time']) |
695 | 627 | 855 | ||
696 | @@ -638,7 +866,7 @@ | |||
697 | 638 | 866 | ||
698 | 639 | 3. have values that are themselves ``dict`` instances | 867 | 3. have values that are themselves ``dict`` instances |
699 | 640 | 868 | ||
701 | 641 | 4. values must have 'copies' that is an ``int`` >= 1 | 869 | 4. values must have 'copies' that is an ``int`` >= 0 |
702 | 642 | 870 | ||
703 | 643 | 5. values must have 'time' that conforms with `check_time()` | 871 | 5. values must have 'time' that conforms with `check_time()` |
704 | 644 | 872 | ||
705 | @@ -686,7 +914,7 @@ | |||
706 | 686 | copies = value['copies'] | 914 | copies = value['copies'] |
707 | 687 | l3 = l2 + "['copies']" | 915 | l3 = l2 + "['copies']" |
708 | 688 | _check_int(copies, l3) | 916 | _check_int(copies, l3) |
710 | 689 | _check_at_least(copies, 1, l3) | 917 | _check_at_least(copies, l3, 0) |
711 | 690 | 918 | ||
712 | 691 | # Check 'time': | 919 | # Check 'time': |
713 | 692 | check_time(value['time'], l2 + "['time']") | 920 | check_time(value['time'], l2 + "['time']") |
714 | @@ -780,7 +1008,6 @@ | |||
715 | 780 | raise ValueError('%s: %r not in %r' % (label, value, allowed)) | 1008 | raise ValueError('%s: %r not in %r' % (label, value, allowed)) |
716 | 781 | 1009 | ||
717 | 782 | 1010 | ||
718 | 783 | |||
719 | 784 | def check_dmedia_file(doc): | 1011 | def check_dmedia_file(doc): |
720 | 785 | """ | 1012 | """ |
721 | 786 | Verify that *doc* is a valid 'dmedia/file' record type. | 1013 | Verify that *doc* is a valid 'dmedia/file' record type. |
722 | @@ -803,6 +1030,7 @@ | |||
723 | 803 | 1030 | ||
724 | 804 | >>> doc = { | 1031 | >>> doc = { |
725 | 805 | ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', | 1032 | ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', |
726 | 1033 | ... 'ver': 0, | ||
727 | 806 | ... 'type': 'dmedia/file', | 1034 | ... 'type': 'dmedia/file', |
728 | 807 | ... 'time': 1234567890, | 1035 | ... 'time': 1234567890, |
729 | 808 | ... 'bytes': 20202333, | 1036 | ... 'bytes': 20202333, |
730 | @@ -823,6 +1051,7 @@ | |||
731 | 823 | 1051 | ||
732 | 824 | >>> doc = { | 1052 | >>> doc = { |
733 | 825 | ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', | 1053 | ... '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', |
734 | 1054 | ... 'ver': 0, | ||
735 | 826 | ... 'type': 'dmedia/file', | 1055 | ... 'type': 'dmedia/file', |
736 | 827 | ... 'time': 1234567890, | 1056 | ... 'time': 1234567890, |
737 | 828 | ... 'bytes': 20202333, | 1057 | ... 'bytes': 20202333, |
738 | @@ -854,7 +1083,7 @@ | |||
739 | 854 | # Check 'bytes': | 1083 | # Check 'bytes': |
740 | 855 | b = doc['bytes'] | 1084 | b = doc['bytes'] |
741 | 856 | _check_int(b, 'bytes') | 1085 | _check_int(b, 'bytes') |
743 | 857 | _check_at_least(b, 1, 'bytes') | 1086 | _check_at_least(b, 'bytes', 1) |
744 | 858 | 1087 | ||
745 | 859 | # Check 'ext': | 1088 | # Check 'ext': |
746 | 860 | check_ext(doc['ext']) | 1089 | check_ext(doc['ext']) |
747 | @@ -865,6 +1094,63 @@ | |||
748 | 865 | # Check 'stored' | 1094 | # Check 'stored' |
749 | 866 | check_stored(doc['stored']) | 1095 | check_stored(doc['stored']) |
750 | 867 | 1096 | ||
751 | 1097 | check_dmedia_file_optional(doc) | ||
752 | 1098 | |||
753 | 1099 | |||
754 | 1100 | def check_dmedia_file_optional(doc): | ||
755 | 1101 | """ | ||
756 | 1102 | Check the optional attributes in a 'dmedia/file' document. | ||
757 | 1103 | """ | ||
758 | 1104 | _check_dict(doc, 'doc') | ||
759 | 1105 | |||
760 | 1106 | # 'mime' like 'video/quicktime' | ||
761 | 1107 | _check_if_exists(doc, ['mime'], | ||
762 | 1108 | _can_be_none, | ||
763 | 1109 | _check_str, | ||
764 | 1110 | ) | ||
765 | 1111 | |||
766 | 1112 | # 'media' like 'video' | ||
767 | 1113 | _check_if_exists(doc, ['media'], | ||
768 | 1114 | _can_be_none, | ||
769 | 1115 | _check_str, | ||
770 | 1116 | (_check_in, 'video', 'audio', 'image'), | ||
771 | 1117 | ) | ||
772 | 1118 | |||
773 | 1119 | # 'mtime' like 1234567890 | ||
774 | 1120 | _check_if_exists(doc, ['mtime'], | ||
775 | 1121 | check_time | ||
776 | 1122 | ) | ||
777 | 1123 | |||
778 | 1124 | # 'atime' like 1234567890 | ||
779 | 1125 | _check_if_exists(doc, ['atime'], | ||
780 | 1126 | check_time | ||
781 | 1127 | ) | ||
782 | 1128 | |||
783 | 1129 | # name like 'MVI_5899.MOV' | ||
784 | 1130 | _check_if_exists(doc, ['name'], | ||
785 | 1131 | _check_str, | ||
786 | 1132 | ) | ||
787 | 1133 | |||
788 | 1134 | # dir like 'DCIM/100EOS5D2' | ||
789 | 1135 | _check_if_exists(doc, ['dir'], | ||
790 | 1136 | _check_str, | ||
791 | 1137 | ) | ||
792 | 1138 | |||
793 | 1139 | # 'meta' like {'iso': 800} | ||
794 | 1140 | _check_if_exists(doc, ['meta'], | ||
795 | 1141 | _check_dict | ||
796 | 1142 | ) | ||
797 | 1143 | |||
798 | 1144 | # 'user' like {'title': 'cool sunset'} | ||
799 | 1145 | _check_if_exists(doc, ['user'], | ||
800 | 1146 | _check_dict | ||
801 | 1147 | ) | ||
802 | 1148 | |||
803 | 1149 | # 'tags' like {'burp': {'start': 6, 'end': 73}} | ||
804 | 1150 | _check_if_exists(doc, ['tags'], | ||
805 | 1151 | _check_dict | ||
806 | 1152 | ) | ||
807 | 1153 | |||
808 | 868 | 1154 | ||
809 | 869 | def check_dmedia_store(doc): | 1155 | def check_dmedia_store(doc): |
810 | 870 | """ | 1156 | """ |
811 | @@ -883,6 +1169,7 @@ | |||
812 | 883 | 1169 | ||
813 | 884 | >>> doc = { | 1170 | >>> doc = { |
814 | 885 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 1171 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
815 | 1172 | ... 'ver': 0, | ||
816 | 886 | ... 'type': 'dmedia/file', | 1173 | ... 'type': 'dmedia/file', |
817 | 887 | ... 'time': 1234567890, | 1174 | ... 'time': 1234567890, |
818 | 888 | ... 'plugin': 'filestore', | 1175 | ... 'plugin': 'filestore', |
819 | @@ -896,6 +1183,7 @@ | |||
820 | 896 | 1183 | ||
821 | 897 | >>> doc = { | 1184 | >>> doc = { |
822 | 898 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', | 1185 | ... '_id': 'NZXXMYLDOV2F6ZTUO5PWM5DX', |
823 | 1186 | ... 'ver': 0, | ||
824 | 899 | ... 'type': 'dmedia/file', | 1187 | ... 'type': 'dmedia/file', |
825 | 900 | ... 'time': 1234567890, | 1188 | ... 'time': 1234567890, |
826 | 901 | ... 'dispatch': 'filestore', | 1189 | ... 'dispatch': 'filestore', |
827 | @@ -925,7 +1213,7 @@ | |||
828 | 925 | key = 'copies' | 1213 | key = 'copies' |
829 | 926 | dc = doc[key] | 1214 | dc = doc[key] |
830 | 927 | _check_int(dc, key) | 1215 | _check_int(dc, key) |
832 | 928 | _check_at_least(dc, 1, key) | 1216 | _check_at_least(dc, key, 1) |
833 | 929 | 1217 | ||
834 | 930 | 1218 | ||
835 | 931 | def random_id(random=None): | 1219 | def random_id(random=None): |
836 | @@ -950,10 +1238,68 @@ | |||
837 | 950 | return b32encode(random) | 1238 | return b32encode(random) |
838 | 951 | 1239 | ||
839 | 952 | 1240 | ||
841 | 953 | # This should probably be moved | 1241 | # FIXME: There is currently a recursive import issue with filestore, but FileStore |
842 | 1242 | # shouldn't deal with the store.json file anyway, should not import | ||
843 | 1243 | # `schema.create_store()` | ||
844 | 1244 | def tophash_personalization(file_size): | ||
845 | 1245 | return ' '.join(['dmedia/tophash', str(file_size)]).encode('utf-8') | ||
846 | 1246 | |||
847 | 1247 | |||
848 | 1248 | def tophash(file_size, leaves): | ||
849 | 1249 | """ | ||
850 | 1250 | Initialize hash for a file that is *file_size* bytes. | ||
851 | 1251 | """ | ||
852 | 1252 | h = sha1(tophash_personalization(file_size)) | ||
853 | 1253 | h.update(leaves) | ||
854 | 1254 | return b32encode(h.digest()) | ||
855 | 1255 | |||
856 | 1256 | |||
857 | 1257 | def create_file(file_size, leaves, store, copies=0, ext=None, origin='user'): | ||
858 | 1258 | """ | ||
859 | 1259 | Create a minimal 'dmedia/file' document. | ||
860 | 1260 | |||
861 | 1261 | :param file_size: an ``int``, the file size in bytes, eg ``20202333`` | ||
862 | 1262 | :param leaves: a ``list`` containing the content hash of each leaf | ||
863 | 1263 | :param store: the ID of the store where this file is initially stored, eg | ||
864 | 1264 | ``'Y4J3WQCMKV5GHATOCZZBHF4Y'`` | ||
865 | 1265 | :param copies: an ``int`` to represent the durability of the file on this | ||
866 | 1266 | store; default is ``0`` | ||
867 | 1267 | :param ext: the file extension, eg ``'mov'``; default is ``None`` | ||
868 | 1268 | :param origin: the file's origin (for durability/reclamation purposes); | ||
869 | 1269 | default is ``'user'`` | ||
870 | 1270 | """ | ||
871 | 1271 | ts = time.time() | ||
872 | 1272 | packed = b''.join(leaves) | ||
873 | 1273 | return { | ||
874 | 1274 | '_id': tophash(file_size, packed), | ||
875 | 1275 | '_attachments': { | ||
876 | 1276 | 'leaves': { | ||
877 | 1277 | 'data': b64encode(packed), | ||
878 | 1278 | 'content_type': 'application/octet-stream', | ||
879 | 1279 | } | ||
880 | 1280 | }, | ||
881 | 1281 | 'ver': 0, | ||
882 | 1282 | 'type': 'dmedia/file', | ||
883 | 1283 | 'time': ts, | ||
884 | 1284 | 'bytes': file_size, | ||
885 | 1285 | 'ext': ext, | ||
886 | 1286 | 'origin': origin, | ||
887 | 1287 | 'stored': { | ||
888 | 1288 | store: { | ||
889 | 1289 | 'copies': copies, | ||
890 | 1290 | 'time': ts, | ||
891 | 1291 | } | ||
892 | 1292 | } | ||
893 | 1293 | } | ||
894 | 1294 | |||
895 | 1295 | |||
896 | 954 | def create_store(base, machine_id, copies=1): | 1296 | def create_store(base, machine_id, copies=1): |
897 | 1297 | """ | ||
898 | 1298 | Create a 'dmedia/store' document. | ||
899 | 1299 | """ | ||
900 | 955 | return { | 1300 | return { |
901 | 956 | '_id': random_id(), | 1301 | '_id': random_id(), |
902 | 1302 | 'ver': 0, | ||
903 | 957 | 'type': 'dmedia/store', | 1303 | 'type': 'dmedia/store', |
904 | 958 | 'time': time.time(), | 1304 | 'time': time.time(), |
905 | 959 | 'plugin': 'filestore', | 1305 | 'plugin': 'filestore', |
906 | @@ -961,3 +1307,52 @@ | |||
907 | 961 | 'path': base, | 1307 | 'path': base, |
908 | 962 | 'machine_id': machine_id, | 1308 | 'machine_id': machine_id, |
909 | 963 | } | 1309 | } |
910 | 1310 | |||
911 | 1311 | |||
912 | 1312 | def create_batch(machine_id=None): | ||
913 | 1313 | """ | ||
914 | 1314 | Create initial 'dmedia/batch' accounting document. | ||
915 | 1315 | """ | ||
916 | 1316 | return { | ||
917 | 1317 | '_id': random_id(), | ||
918 | 1318 | 'ver': 0, | ||
919 | 1319 | 'type': 'dmedia/batch', | ||
920 | 1320 | 'time': time.time(), | ||
921 | 1321 | 'machine_id': machine_id, | ||
922 | 1322 | 'imports': [], | ||
923 | 1323 | 'errors': [], | ||
924 | 1324 | 'stats': { | ||
925 | 1325 | 'considered': {'count': 0, 'bytes': 0}, | ||
926 | 1326 | 'imported': {'count': 0, 'bytes': 0}, | ||
927 | 1327 | 'skipped': {'count': 0, 'bytes': 0}, | ||
928 | 1328 | 'empty': {'count': 0, 'bytes': 0}, | ||
929 | 1329 | 'error': {'count': 0, 'bytes': 0}, | ||
930 | 1330 | } | ||
931 | 1331 | } | ||
932 | 1332 | |||
933 | 1333 | |||
934 | 1334 | def create_import(base, batch_id=None, machine_id=None): | ||
935 | 1335 | """ | ||
936 | 1336 | Create initial 'dmedia/import' accounting document. | ||
937 | 1337 | """ | ||
938 | 1338 | return { | ||
939 | 1339 | '_id': random_id(), | ||
940 | 1340 | 'ver': 0, | ||
941 | 1341 | 'type': 'dmedia/import', | ||
942 | 1342 | 'time': time.time(), | ||
943 | 1343 | 'batch_id': batch_id, | ||
944 | 1344 | 'machine_id': machine_id, | ||
945 | 1345 | 'base': base, | ||
946 | 1346 | 'log': { | ||
947 | 1347 | 'imported': [], | ||
948 | 1348 | 'skipped': [], | ||
949 | 1349 | 'empty': [], | ||
950 | 1350 | 'error': [], | ||
951 | 1351 | }, | ||
952 | 1352 | 'stats': { | ||
953 | 1353 | 'imported': {'count': 0, 'bytes': 0}, | ||
954 | 1354 | 'skipped': {'count': 0, 'bytes': 0}, | ||
955 | 1355 | 'empty': {'count': 0, 'bytes': 0}, | ||
956 | 1356 | 'error': {'count': 0, 'bytes': 0}, | ||
957 | 1357 | } | ||
958 | 1358 | } | ||
959 | 964 | 1359 | ||
960 | === modified file 'dmedia/tests/helpers.py' | |||
961 | --- dmedia/tests/helpers.py 2011-02-22 14:07:47 +0000 | |||
962 | +++ dmedia/tests/helpers.py 2011-04-07 03:13:28 +0000 | |||
963 | @@ -34,6 +34,7 @@ | |||
964 | 34 | from . import sample_mov, sample_thm | 34 | from . import sample_mov, sample_thm |
965 | 35 | 35 | ||
966 | 36 | mov_hash = 'TGX33XXWU3EVHEEY5J7NBOJGKBFXLEBK' | 36 | mov_hash = 'TGX33XXWU3EVHEEY5J7NBOJGKBFXLEBK' |
967 | 37 | mov_size = 20202333 | ||
968 | 37 | mov_leaves = [ | 38 | mov_leaves = [ |
969 | 38 | b32decode('IXJTSUCYYFECGSG6JIB2R77CAJVJK4W3'), | 39 | b32decode('IXJTSUCYYFECGSG6JIB2R77CAJVJK4W3'), |
970 | 39 | b32decode('MA3IAHUOKXR4TRG7CWAPOO7U4WCV5WJ4'), | 40 | b32decode('MA3IAHUOKXR4TRG7CWAPOO7U4WCV5WJ4'), |
971 | 40 | 41 | ||
972 | === modified file 'dmedia/tests/test_extractor.py' | |||
973 | --- dmedia/tests/test_extractor.py 2011-01-26 05:14:20 +0000 | |||
974 | +++ dmedia/tests/test_extractor.py 2011-04-07 03:13:28 +0000 | |||
975 | @@ -411,21 +411,23 @@ | |||
976 | 411 | doc, | 411 | doc, |
977 | 412 | dict( | 412 | dict( |
978 | 413 | ext='mov', | 413 | ext='mov', |
979 | 414 | width=1920, | ||
980 | 415 | height=1080, | ||
981 | 416 | duration=3, | ||
982 | 417 | codec_video='H.264 / AVC', | ||
983 | 418 | codec_audio='Raw 16-bit PCM audio', | ||
984 | 419 | sample_rate=48000, | ||
985 | 420 | fps=30, | ||
986 | 421 | channels='Stereo', | ||
987 | 422 | iso=100, | ||
988 | 423 | shutter=u'1/100', | ||
989 | 424 | aperture=11.0, | ||
990 | 425 | lens=u'Canon EF 70-200mm f/4L IS', | ||
991 | 426 | camera=u'Canon EOS 5D Mark II', | ||
992 | 427 | focal_length=u'138.0 mm', | ||
993 | 428 | mtime=1287520994 + 68 / 100.0, | 414 | mtime=1287520994 + 68 / 100.0, |
994 | 415 | meta=dict( | ||
995 | 416 | width=1920, | ||
996 | 417 | height=1080, | ||
997 | 418 | duration=3, | ||
998 | 419 | codec_video='H.264 / AVC', | ||
999 | 420 | codec_audio='Raw 16-bit PCM audio', | ||
1000 | 421 | sample_rate=48000, | ||
1001 | 422 | fps=30, | ||
1002 | 423 | channels='Stereo', | ||
1003 | 424 | iso=100, | ||
1004 | 425 | shutter=u'1/100', | ||
1005 | 426 | aperture=11.0, | ||
1006 | 427 | lens=u'Canon EF 70-200mm f/4L IS', | ||
1007 | 428 | camera=u'Canon EOS 5D Mark II', | ||
1008 | 429 | focal_length=u'138.0 mm', | ||
1009 | 430 | ), | ||
1010 | 429 | ) | 431 | ) |
1011 | 430 | ) | 432 | ) |
1012 | 431 | 433 | ||
1013 | 432 | 434 | ||
1014 | === modified file 'dmedia/tests/test_importer.py' | |||
1015 | --- dmedia/tests/test_importer.py 2011-03-28 12:38:29 +0000 | |||
1016 | +++ dmedia/tests/test_importer.py 2011-04-07 03:13:28 +0000 | |||
1017 | @@ -132,103 +132,6 @@ | |||
1018 | 132 | ) | 132 | ) |
1019 | 133 | os.chmod(subdir, 0o700) | 133 | os.chmod(subdir, 0o700) |
1020 | 134 | 134 | ||
1021 | 135 | def test_create_batch(self): | ||
1022 | 136 | f = importer.create_batch | ||
1023 | 137 | machine_id = random_id() | ||
1024 | 138 | doc = f(machine_id) | ||
1025 | 139 | |||
1026 | 140 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1027 | 141 | self.assertTrue(isinstance(doc, dict)) | ||
1028 | 142 | self.assertEqual( | ||
1029 | 143 | set(doc), | ||
1030 | 144 | set([ | ||
1031 | 145 | '_id', | ||
1032 | 146 | 'type', | ||
1033 | 147 | 'time', | ||
1034 | 148 | 'imports', | ||
1035 | 149 | 'errors', | ||
1036 | 150 | 'machine_id', | ||
1037 | 151 | 'stats', | ||
1038 | 152 | ]) | ||
1039 | 153 | ) | ||
1040 | 154 | _id = doc['_id'] | ||
1041 | 155 | self.assertEqual(b32encode(b32decode(_id)), _id) | ||
1042 | 156 | self.assertEqual(len(_id), 24) | ||
1043 | 157 | self.assertEqual(doc['type'], 'dmedia/batch') | ||
1044 | 158 | self.assertTrue(isinstance(doc['time'], (int, float))) | ||
1045 | 159 | self.assertTrue(doc['time'] <= time.time()) | ||
1046 | 160 | self.assertEqual(doc['imports'], []) | ||
1047 | 161 | self.assertEqual(doc['errors'], []) | ||
1048 | 162 | self.assertEqual(doc['machine_id'], machine_id) | ||
1049 | 163 | self.assertEqual( | ||
1050 | 164 | doc['stats'], | ||
1051 | 165 | { | ||
1052 | 166 | 'considered': {'count': 0, 'bytes': 0}, | ||
1053 | 167 | 'imported': {'count': 0, 'bytes': 0}, | ||
1054 | 168 | 'skipped': {'count': 0, 'bytes': 0}, | ||
1055 | 169 | 'empty': {'count': 0, 'bytes': 0}, | ||
1056 | 170 | 'error': {'count': 0, 'bytes': 0}, | ||
1057 | 171 | } | ||
1058 | 172 | ) | ||
1059 | 173 | |||
1060 | 174 | def test_create_import(self): | ||
1061 | 175 | f = importer.create_import | ||
1062 | 176 | |||
1063 | 177 | base = '/media/EOS_DIGITAL' | ||
1064 | 178 | batch_id = random_id() | ||
1065 | 179 | machine_id = random_id() | ||
1066 | 180 | |||
1067 | 181 | keys = set([ | ||
1068 | 182 | '_id', | ||
1069 | 183 | 'type', | ||
1070 | 184 | 'time', | ||
1071 | 185 | 'base', | ||
1072 | 186 | 'batch_id', | ||
1073 | 187 | 'machine_id', | ||
1074 | 188 | 'log', | ||
1075 | 189 | 'stats', | ||
1076 | 190 | ]) | ||
1077 | 191 | |||
1078 | 192 | doc = f(base, batch_id=batch_id, machine_id=machine_id) | ||
1079 | 193 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1080 | 194 | self.assertTrue(isinstance(doc, dict)) | ||
1081 | 195 | self.assertEqual(set(doc), keys) | ||
1082 | 196 | |||
1083 | 197 | _id = doc['_id'] | ||
1084 | 198 | self.assertEqual(b32encode(b32decode(_id)), _id) | ||
1085 | 199 | self.assertEqual(len(_id), 24) | ||
1086 | 200 | |||
1087 | 201 | self.assertEqual(doc['type'], 'dmedia/import') | ||
1088 | 202 | self.assertTrue(isinstance(doc['time'], (int, float))) | ||
1089 | 203 | self.assertTrue(doc['time'] <= time.time()) | ||
1090 | 204 | self.assertEqual(doc['base'], base) | ||
1091 | 205 | self.assertEqual(doc['batch_id'], batch_id) | ||
1092 | 206 | self.assertEqual(doc['machine_id'], machine_id) | ||
1093 | 207 | |||
1094 | 208 | doc = f(base) | ||
1095 | 209 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1096 | 210 | self.assertEqual(set(doc), keys) | ||
1097 | 211 | self.assertEqual(doc['batch_id'], None) | ||
1098 | 212 | self.assertEqual(doc['machine_id'], None) | ||
1099 | 213 | self.assertEqual( | ||
1100 | 214 | doc['log'], | ||
1101 | 215 | { | ||
1102 | 216 | 'imported': [], | ||
1103 | 217 | 'skipped': [], | ||
1104 | 218 | 'empty': [], | ||
1105 | 219 | 'error': [], | ||
1106 | 220 | } | ||
1107 | 221 | ) | ||
1108 | 222 | self.assertEqual( | ||
1109 | 223 | doc['stats'], | ||
1110 | 224 | { | ||
1111 | 225 | 'imported': {'count': 0, 'bytes': 0}, | ||
1112 | 226 | 'skipped': {'count': 0, 'bytes': 0}, | ||
1113 | 227 | 'empty': {'count': 0, 'bytes': 0}, | ||
1114 | 228 | 'error': {'count': 0, 'bytes': 0}, | ||
1115 | 229 | } | ||
1116 | 230 | ) | ||
1117 | 231 | |||
1118 | 232 | def test_to_dbus_stats(self): | 135 | def test_to_dbus_stats(self): |
1119 | 233 | f = importer.to_dbus_stats | 136 | f = importer.to_dbus_stats |
1120 | 234 | stats = dict( | 137 | stats = dict( |
1121 | @@ -406,6 +309,7 @@ | |||
1122 | 406 | set([ | 309 | set([ |
1123 | 407 | '_id', | 310 | '_id', |
1124 | 408 | '_rev', | 311 | '_rev', |
1125 | 312 | 'ver', | ||
1126 | 409 | 'type', | 313 | 'type', |
1127 | 410 | 'time', | 314 | 'time', |
1128 | 411 | 'base', | 315 | 'base', |
1129 | @@ -507,6 +411,7 @@ | |||
1130 | 507 | '_id', | 411 | '_id', |
1131 | 508 | '_rev', | 412 | '_rev', |
1132 | 509 | '_attachments', | 413 | '_attachments', |
1133 | 414 | 'ver', | ||
1134 | 510 | 'type', | 415 | 'type', |
1135 | 511 | 'time', | 416 | 'time', |
1136 | 512 | 'bytes', | 417 | 'bytes', |
1137 | @@ -518,7 +423,8 @@ | |||
1138 | 518 | 'mtime', | 423 | 'mtime', |
1139 | 519 | 'name', | 424 | 'name', |
1140 | 520 | 'dir', | 425 | 'dir', |
1142 | 521 | 'content_type', | 426 | 'mime', |
1143 | 427 | 'media', | ||
1144 | 522 | ]) | 428 | ]) |
1145 | 523 | ) | 429 | ) |
1146 | 524 | self.assertEqual(schema.check_dmedia_file(doc), None) | 430 | self.assertEqual(schema.check_dmedia_file(doc), None) |
1147 | @@ -534,7 +440,7 @@ | |||
1148 | 534 | self.assertEqual(doc['mtime'], path.getmtime(src1)) | 440 | self.assertEqual(doc['mtime'], path.getmtime(src1)) |
1149 | 535 | self.assertEqual(doc['name'], 'MVI_5751.MOV') | 441 | self.assertEqual(doc['name'], 'MVI_5751.MOV') |
1150 | 536 | self.assertEqual(doc['dir'], 'DCIM/100EOS5D2') | 442 | self.assertEqual(doc['dir'], 'DCIM/100EOS5D2') |
1152 | 537 | self.assertEqual(doc['content_type'], 'video/quicktime') | 443 | self.assertEqual(doc['mime'], 'video/quicktime') |
1153 | 538 | 444 | ||
1154 | 539 | # Test with duplicate | 445 | # Test with duplicate |
1155 | 540 | (action, doc) = inst._import_file(src2) | 446 | (action, doc) = inst._import_file(src2) |
1156 | @@ -834,7 +740,9 @@ | |||
1157 | 834 | self.assertEqual( | 740 | self.assertEqual( |
1158 | 835 | set(batch), | 741 | set(batch), |
1159 | 836 | set([ | 742 | set([ |
1161 | 837 | '_id', '_rev', | 743 | '_id', |
1162 | 744 | '_rev', | ||
1163 | 745 | 'ver', | ||
1164 | 838 | 'type', | 746 | 'type', |
1165 | 839 | 'time', | 747 | 'time', |
1166 | 840 | 'imports', | 748 | 'imports', |
1167 | 841 | 749 | ||
1168 | === modified file 'dmedia/tests/test_schema.py' | |||
1169 | --- dmedia/tests/test_schema.py 2011-03-27 09:05:32 +0000 | |||
1170 | +++ dmedia/tests/test_schema.py 2011-04-07 03:13:28 +0000 | |||
1171 | @@ -24,10 +24,10 @@ | |||
1172 | 24 | """ | 24 | """ |
1173 | 25 | 25 | ||
1174 | 26 | from unittest import TestCase | 26 | from unittest import TestCase |
1176 | 27 | from base64 import b32encode, b32decode | 27 | from base64 import b32encode, b32decode, b64encode |
1177 | 28 | from copy import deepcopy | 28 | from copy import deepcopy |
1178 | 29 | import time | 29 | import time |
1180 | 30 | from .helpers import raises, TempDir | 30 | from .helpers import raises, TempDir, mov_hash, mov_leaves, mov_size |
1181 | 31 | from dmedia.constants import TYPE_ERROR | 31 | from dmedia.constants import TYPE_ERROR |
1182 | 32 | from dmedia.schema import random_id | 32 | from dmedia.schema import random_id |
1183 | 33 | from dmedia import schema | 33 | from dmedia import schema |
1184 | @@ -163,13 +163,24 @@ | |||
1185 | 163 | 163 | ||
1186 | 164 | good = { | 164 | good = { |
1187 | 165 | '_id': 'MZZG2ZDSOQVSW2TEMVZG643F', | 165 | '_id': 'MZZG2ZDSOQVSW2TEMVZG643F', |
1188 | 166 | 'ver': 0, | ||
1189 | 166 | 'type': 'dmedia/foo', | 167 | 'type': 'dmedia/foo', |
1190 | 167 | 'time': 1234567890, | 168 | 'time': 1234567890, |
1191 | 168 | 'foo': 'bar', | 169 | 'foo': 'bar', |
1192 | 169 | } | 170 | } |
1193 | 170 | g = deepcopy(good) | 171 | g = deepcopy(good) |
1194 | 171 | self.assertEqual(f(g), None) | 172 | self.assertEqual(f(g), None) |
1196 | 172 | for key in ['_id', 'type', 'time']: | 173 | |
1197 | 174 | # check with bad ver: | ||
1198 | 175 | bad = deepcopy(good) | ||
1199 | 176 | bad['ver'] = 0.0 | ||
1200 | 177 | e = raises(TypeError, f, bad) | ||
1201 | 178 | self.assertEqual(str(e), TYPE_ERROR % ('ver', int, float, 0.0)) | ||
1202 | 179 | bad['ver'] = 1 | ||
1203 | 180 | e = raises(ValueError, f, bad) | ||
1204 | 181 | self.assertEqual(str(e), "doc['ver'] must be 0; got 1") | ||
1205 | 182 | |||
1206 | 183 | for key in ['_id', 'ver', 'type', 'time']: | ||
1207 | 173 | bad = deepcopy(good) | 184 | bad = deepcopy(good) |
1208 | 174 | del bad[key] | 185 | del bad[key] |
1209 | 175 | e = raises(ValueError, f, bad) | 186 | e = raises(ValueError, f, bad) |
1210 | @@ -190,7 +201,7 @@ | |||
1211 | 190 | e = raises(ValueError, f, bad) | 201 | e = raises(ValueError, f, bad) |
1212 | 191 | self.assertEqual( | 202 | self.assertEqual( |
1213 | 192 | str(e), | 203 | str(e), |
1215 | 193 | 'doc missing keys: %r' % ['_id', 'time', 'type'] | 204 | 'doc missing keys: %r' % ['_id', 'time', 'type', 'ver'] |
1216 | 194 | ) | 205 | ) |
1217 | 195 | 206 | ||
1218 | 196 | def test_check_stored(self): | 207 | def test_check_stored(self): |
1219 | @@ -275,11 +286,11 @@ | |||
1220 | 275 | TYPE_ERROR % (label, int, float, 2.0) | 286 | TYPE_ERROR % (label, int, float, 2.0) |
1221 | 276 | ) | 287 | ) |
1222 | 277 | bad = deepcopy(good) | 288 | bad = deepcopy(good) |
1224 | 278 | bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = 0 | 289 | bad['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] = -2 |
1225 | 279 | e = raises(ValueError, f, bad) | 290 | e = raises(ValueError, f, bad) |
1226 | 280 | self.assertEqual( | 291 | self.assertEqual( |
1227 | 281 | str(e), | 292 | str(e), |
1229 | 282 | '%s must be >= 1; got 0' % label | 293 | '%s must be >= 0; got -2' % label |
1230 | 283 | ) | 294 | ) |
1231 | 284 | 295 | ||
1232 | 285 | # Test with bad 'time' type/value: | 296 | # Test with bad 'time' type/value: |
1233 | @@ -420,6 +431,7 @@ | |||
1234 | 420 | # Test with good doc: | 431 | # Test with good doc: |
1235 | 421 | good = { | 432 | good = { |
1236 | 422 | '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', | 433 | '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', |
1237 | 434 | 'ver': 0, | ||
1238 | 423 | 'type': 'dmedia/file', | 435 | 'type': 'dmedia/file', |
1239 | 424 | 'time': 1234567890, | 436 | 'time': 1234567890, |
1240 | 425 | 'bytes': 20202333, | 437 | 'bytes': 20202333, |
1241 | @@ -517,7 +529,103 @@ | |||
1242 | 517 | e = raises(ValueError, f, bad) | 529 | e = raises(ValueError, f, bad) |
1243 | 518 | self.assertEqual( | 530 | self.assertEqual( |
1244 | 519 | str(e), | 531 | str(e), |
1246 | 520 | "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 1; got -1" | 532 | "stored['MZZG2ZDSOQVSW2TEMVZG643F']['copies'] must be >= 0; got -1" |
1247 | 533 | ) | ||
1248 | 534 | |||
1249 | 535 | def test_check_dmedia_file_optional(self): | ||
1250 | 536 | f = schema.check_dmedia_file_optional | ||
1251 | 537 | f({}) | ||
1252 | 538 | |||
1253 | 539 | # mime | ||
1254 | 540 | self.assertIsNone(f({'mime': 'video/quicktime'})) | ||
1255 | 541 | e = raises(TypeError, f, {'mime': 42}) | ||
1256 | 542 | self.assertEqual( | ||
1257 | 543 | str(e), | ||
1258 | 544 | TYPE_ERROR % ("doc['mime']", basestring, int, 42) | ||
1259 | 545 | ) | ||
1260 | 546 | |||
1261 | 547 | # media | ||
1262 | 548 | self.assertIsNone(f({'media': None})) | ||
1263 | 549 | self.assertIsNone(f({'media': 'video'})) | ||
1264 | 550 | self.assertIsNone(f({'media': 'audio'})) | ||
1265 | 551 | self.assertIsNone(f({'media': 'image'})) | ||
1266 | 552 | e = raises(TypeError, f, {'media': 42}) | ||
1267 | 553 | self.assertEqual( | ||
1268 | 554 | str(e), | ||
1269 | 555 | TYPE_ERROR % ("doc['media']", basestring, int, 42) | ||
1270 | 556 | ) | ||
1271 | 557 | e = raises(ValueError, f, {'media': 'stuff'}) | ||
1272 | 558 | self.assertEqual( | ||
1273 | 559 | str(e), | ||
1274 | 560 | "doc['media'] value 'stuff' not in ('video', 'audio', 'image')" | ||
1275 | 561 | ) | ||
1276 | 562 | |||
1277 | 563 | # mtime | ||
1278 | 564 | self.assertIsNone(f({'mtime': 1302125982.946627})) | ||
1279 | 565 | self.assertIsNone(f({'mtime': 1234567890})) | ||
1280 | 566 | e = raises(TypeError, f, {'mtime': '1234567890'}) | ||
1281 | 567 | self.assertEqual( | ||
1282 | 568 | str(e), | ||
1283 | 569 | TYPE_ERROR % ("doc['mtime']", (int, float), str, '1234567890') | ||
1284 | 570 | ) | ||
1285 | 571 | e = raises(ValueError, f, {'mtime': -1}) | ||
1286 | 572 | self.assertEqual( | ||
1287 | 573 | str(e), | ||
1288 | 574 | "doc['mtime'] must be >= 0; got -1" | ||
1289 | 575 | ) | ||
1290 | 576 | |||
1291 | 577 | # atime | ||
1292 | 578 | self.assertIsNone(f({'atime': 1302125982.946627})) | ||
1293 | 579 | self.assertIsNone(f({'atime': 1234567890})) | ||
1294 | 580 | e = raises(TypeError, f, {'atime': '1234567890'}) | ||
1295 | 581 | self.assertEqual( | ||
1296 | 582 | str(e), | ||
1297 | 583 | TYPE_ERROR % ("doc['atime']", (int, float), str, '1234567890') | ||
1298 | 584 | ) | ||
1299 | 585 | e = raises(ValueError, f, {'atime': -0.3}) | ||
1300 | 586 | self.assertEqual( | ||
1301 | 587 | str(e), | ||
1302 | 588 | "doc['atime'] must be >= 0; got -0.3" | ||
1303 | 589 | ) | ||
1304 | 590 | |||
1305 | 591 | # name | ||
1306 | 592 | self.assertIsNone(f({'name': 'MVI_5899.MOV'})) | ||
1307 | 593 | e = raises(TypeError, f, {'name': 42}) | ||
1308 | 594 | self.assertEqual( | ||
1309 | 595 | str(e), | ||
1310 | 596 | TYPE_ERROR % ("doc['name']", basestring, int, 42) | ||
1311 | 597 | ) | ||
1312 | 598 | |||
1313 | 599 | # dir | ||
1314 | 600 | self.assertIsNone(f({'dir': 'DCIM/100EOS5D2'})) | ||
1315 | 601 | e = raises(TypeError, f, {'dir': 42}) | ||
1316 | 602 | self.assertEqual( | ||
1317 | 603 | str(e), | ||
1318 | 604 | TYPE_ERROR % ("doc['dir']", basestring, int, 42) | ||
1319 | 605 | ) | ||
1320 | 606 | |||
1321 | 607 | # meta | ||
1322 | 608 | self.assertIsNone(f({'meta': {'iso': 800}})) | ||
1323 | 609 | e = raises(TypeError, f, {'meta': 42}) | ||
1324 | 610 | self.assertEqual( | ||
1325 | 611 | str(e), | ||
1326 | 612 | TYPE_ERROR % ("doc['meta']", dict, int, 42) | ||
1327 | 613 | ) | ||
1328 | 614 | |||
1329 | 615 | # user | ||
1330 | 616 | self.assertIsNone(f({'user': {'title': 'cool sunset'}})) | ||
1331 | 617 | e = raises(TypeError, f, {'user': 42}) | ||
1332 | 618 | self.assertEqual( | ||
1333 | 619 | str(e), | ||
1334 | 620 | TYPE_ERROR % ("doc['user']", dict, int, 42) | ||
1335 | 621 | ) | ||
1336 | 622 | |||
1337 | 623 | # tags | ||
1338 | 624 | self.assertIsNone(f({'tags': {'burp': {'start': 6, 'end': 73}}})) | ||
1339 | 625 | e = raises(TypeError, f, {'tags': 42}) | ||
1340 | 626 | self.assertEqual( | ||
1341 | 627 | str(e), | ||
1342 | 628 | TYPE_ERROR % ("doc['tags']", dict, int, 42) | ||
1343 | 521 | ) | 629 | ) |
1344 | 522 | 630 | ||
1345 | 523 | 631 | ||
1346 | @@ -527,6 +635,7 @@ | |||
1347 | 527 | # Test with good doc: | 635 | # Test with good doc: |
1348 | 528 | good = { | 636 | good = { |
1349 | 529 | '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', | 637 | '_id': 'ZR765XWSF6S7JQHLUI4GCG5BHGPE252O', |
1350 | 638 | 'ver': 0, | ||
1351 | 530 | 'type': 'dmedia/file', | 639 | 'type': 'dmedia/file', |
1352 | 531 | 'time': 1234567890, | 640 | 'time': 1234567890, |
1353 | 532 | 'plugin': 'filestore', | 641 | 'plugin': 'filestore', |
1354 | @@ -593,6 +702,64 @@ | |||
1355 | 593 | self.assertEqual(len(binary), 15) | 702 | self.assertEqual(len(binary), 15) |
1356 | 594 | self.assertEqual(b32encode(binary), _id) | 703 | self.assertEqual(b32encode(binary), _id) |
1357 | 595 | 704 | ||
1358 | 705 | def test_create_file(self): | ||
1359 | 706 | f = schema.create_file | ||
1360 | 707 | store = schema.random_id() | ||
1361 | 708 | |||
1362 | 709 | d = f(mov_size, mov_leaves, store) | ||
1363 | 710 | schema.check_dmedia_file(d) | ||
1364 | 711 | self.assertEqual( | ||
1365 | 712 | set(d), | ||
1366 | 713 | set([ | ||
1367 | 714 | '_id', | ||
1368 | 715 | '_attachments', | ||
1369 | 716 | 'ver', | ||
1370 | 717 | 'type', | ||
1371 | 718 | 'time', | ||
1372 | 719 | 'bytes', | ||
1373 | 720 | 'ext', | ||
1374 | 721 | 'origin', | ||
1375 | 722 | 'stored', | ||
1376 | 723 | ]) | ||
1377 | 724 | ) | ||
1378 | 725 | self.assertEqual(d['_id'], mov_hash) | ||
1379 | 726 | self.assertEqual( | ||
1380 | 727 | d['_attachments'], | ||
1381 | 728 | { | ||
1382 | 729 | 'leaves': { | ||
1383 | 730 | 'data': b64encode(b''.join(mov_leaves)), | ||
1384 | 731 | 'content_type': 'application/octet-stream', | ||
1385 | 732 | } | ||
1386 | 733 | } | ||
1387 | 734 | ) | ||
1388 | 735 | self.assertEqual(d['ver'], 0) | ||
1389 | 736 | self.assertEqual(d['type'], 'dmedia/file') | ||
1390 | 737 | self.assertLessEqual(d['time'], time.time()) | ||
1391 | 738 | self.assertEqual(d['bytes'], mov_size) | ||
1392 | 739 | self.assertIsNone(d['ext'], None) | ||
1393 | 740 | self.assertEqual(d['origin'], 'user') | ||
1394 | 741 | |||
1395 | 742 | s = d['stored'] | ||
1396 | 743 | self.assertIsInstance(s, dict) | ||
1397 | 744 | self.assertEqual(list(s), [store]) | ||
1398 | 745 | self.assertEqual(set(s[store]), set(['copies', 'time'])) | ||
1399 | 746 | self.assertEqual(s[store]['copies'], 0) | ||
1400 | 747 | self.assertEqual(s[store]['time'], d['time']) | ||
1401 | 748 | |||
1402 | 749 | # Test overriding default kwarg values: | ||
1403 | 750 | d = f(mov_size, mov_leaves, store, copies=2) | ||
1404 | 751 | schema.check_dmedia_file(d) | ||
1405 | 752 | self.assertEqual(d['stored'][store]['copies'], 2) | ||
1406 | 753 | |||
1407 | 754 | d = f(mov_size, mov_leaves, store, ext='mov') | ||
1408 | 755 | schema.check_dmedia_file(d) | ||
1409 | 756 | self.assertEqual(d['ext'], 'mov') | ||
1410 | 757 | |||
1411 | 758 | d = f(mov_size, mov_leaves, store, origin='proxy') | ||
1412 | 759 | schema.check_dmedia_file(d) | ||
1413 | 760 | self.assertEqual(d['origin'], 'proxy') | ||
1414 | 761 | |||
1415 | 762 | |||
1416 | 596 | def test_create_store(self): | 763 | def test_create_store(self): |
1417 | 597 | f = schema.create_store | 764 | f = schema.create_store |
1418 | 598 | tmp = TempDir() | 765 | tmp = TempDir() |
1419 | @@ -605,6 +772,7 @@ | |||
1420 | 605 | set(doc), | 772 | set(doc), |
1421 | 606 | set([ | 773 | set([ |
1422 | 607 | '_id', | 774 | '_id', |
1423 | 775 | 'ver', | ||
1424 | 608 | 'type', | 776 | 'type', |
1425 | 609 | 'time', | 777 | 'time', |
1426 | 610 | 'plugin', | 778 | 'plugin', |
1427 | @@ -625,6 +793,7 @@ | |||
1428 | 625 | set(doc), | 793 | set(doc), |
1429 | 626 | set([ | 794 | set([ |
1430 | 627 | '_id', | 795 | '_id', |
1431 | 796 | 'ver', | ||
1432 | 628 | 'type', | 797 | 'type', |
1433 | 629 | 'time', | 798 | 'time', |
1434 | 630 | 'plugin', | 799 | 'plugin', |
1435 | @@ -639,3 +808,102 @@ | |||
1436 | 639 | self.assertEqual(doc['copies'], 3) | 808 | self.assertEqual(doc['copies'], 3) |
1437 | 640 | self.assertEqual(doc['path'], base) | 809 | self.assertEqual(doc['path'], base) |
1438 | 641 | self.assertEqual(doc['machine_id'], machine_id) | 810 | self.assertEqual(doc['machine_id'], machine_id) |
1439 | 811 | |||
1440 | 812 | def test_create_batch(self): | ||
1441 | 813 | f = schema.create_batch | ||
1442 | 814 | machine_id = random_id() | ||
1443 | 815 | doc = f(machine_id) | ||
1444 | 816 | |||
1445 | 817 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1446 | 818 | self.assertTrue(isinstance(doc, dict)) | ||
1447 | 819 | self.assertEqual( | ||
1448 | 820 | set(doc), | ||
1449 | 821 | set([ | ||
1450 | 822 | '_id', | ||
1451 | 823 | 'ver', | ||
1452 | 824 | 'type', | ||
1453 | 825 | 'time', | ||
1454 | 826 | 'imports', | ||
1455 | 827 | 'errors', | ||
1456 | 828 | 'machine_id', | ||
1457 | 829 | 'stats', | ||
1458 | 830 | ]) | ||
1459 | 831 | ) | ||
1460 | 832 | _id = doc['_id'] | ||
1461 | 833 | self.assertEqual(b32encode(b32decode(_id)), _id) | ||
1462 | 834 | self.assertEqual(len(_id), 24) | ||
1463 | 835 | self.assertEqual(doc['type'], 'dmedia/batch') | ||
1464 | 836 | self.assertTrue(isinstance(doc['time'], (int, float))) | ||
1465 | 837 | self.assertTrue(doc['time'] <= time.time()) | ||
1466 | 838 | self.assertEqual(doc['imports'], []) | ||
1467 | 839 | self.assertEqual(doc['errors'], []) | ||
1468 | 840 | self.assertEqual(doc['machine_id'], machine_id) | ||
1469 | 841 | self.assertEqual( | ||
1470 | 842 | doc['stats'], | ||
1471 | 843 | { | ||
1472 | 844 | 'considered': {'count': 0, 'bytes': 0}, | ||
1473 | 845 | 'imported': {'count': 0, 'bytes': 0}, | ||
1474 | 846 | 'skipped': {'count': 0, 'bytes': 0}, | ||
1475 | 847 | 'empty': {'count': 0, 'bytes': 0}, | ||
1476 | 848 | 'error': {'count': 0, 'bytes': 0}, | ||
1477 | 849 | } | ||
1478 | 850 | ) | ||
1479 | 851 | |||
1480 | 852 | def test_create_import(self): | ||
1481 | 853 | f = schema.create_import | ||
1482 | 854 | |||
1483 | 855 | base = '/media/EOS_DIGITAL' | ||
1484 | 856 | batch_id = random_id() | ||
1485 | 857 | machine_id = random_id() | ||
1486 | 858 | |||
1487 | 859 | keys = set([ | ||
1488 | 860 | '_id', | ||
1489 | 861 | 'ver', | ||
1490 | 862 | 'type', | ||
1491 | 863 | 'time', | ||
1492 | 864 | 'base', | ||
1493 | 865 | 'batch_id', | ||
1494 | 866 | 'machine_id', | ||
1495 | 867 | 'log', | ||
1496 | 868 | 'stats', | ||
1497 | 869 | ]) | ||
1498 | 870 | |||
1499 | 871 | doc = f(base, batch_id=batch_id, machine_id=machine_id) | ||
1500 | 872 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1501 | 873 | self.assertTrue(isinstance(doc, dict)) | ||
1502 | 874 | self.assertEqual(set(doc), keys) | ||
1503 | 875 | |||
1504 | 876 | _id = doc['_id'] | ||
1505 | 877 | self.assertEqual(b32encode(b32decode(_id)), _id) | ||
1506 | 878 | self.assertEqual(len(_id), 24) | ||
1507 | 879 | |||
1508 | 880 | self.assertEqual(doc['type'], 'dmedia/import') | ||
1509 | 881 | self.assertTrue(isinstance(doc['time'], (int, float))) | ||
1510 | 882 | self.assertTrue(doc['time'] <= time.time()) | ||
1511 | 883 | self.assertEqual(doc['base'], base) | ||
1512 | 884 | self.assertEqual(doc['batch_id'], batch_id) | ||
1513 | 885 | self.assertEqual(doc['machine_id'], machine_id) | ||
1514 | 886 | |||
1515 | 887 | doc = f(base) | ||
1516 | 888 | self.assertEqual(schema.check_dmedia(doc), None) | ||
1517 | 889 | self.assertEqual(set(doc), keys) | ||
1518 | 890 | self.assertEqual(doc['batch_id'], None) | ||
1519 | 891 | self.assertEqual(doc['machine_id'], None) | ||
1520 | 892 | self.assertEqual( | ||
1521 | 893 | doc['log'], | ||
1522 | 894 | { | ||
1523 | 895 | 'imported': [], | ||
1524 | 896 | 'skipped': [], | ||
1525 | 897 | 'empty': [], | ||
1526 | 898 | 'error': [], | ||
1527 | 899 | } | ||
1528 | 900 | ) | ||
1529 | 901 | self.assertEqual( | ||
1530 | 902 | doc['stats'], | ||
1531 | 903 | { | ||
1532 | 904 | 'imported': {'count': 0, 'bytes': 0}, | ||
1533 | 905 | 'skipped': {'count': 0, 'bytes': 0}, | ||
1534 | 906 | 'empty': {'count': 0, 'bytes': 0}, | ||
1535 | 907 | 'error': {'count': 0, 'bytes': 0}, | ||
1536 | 908 | } | ||
1537 | 909 | ) | ||
1538 | 642 | 910 | ||
1539 | === modified file 'dmedia/webui/data/browser.js' | |||
1540 | --- dmedia/webui/data/browser.js 2011-03-31 09:28:58 +0000 | |||
1541 | +++ dmedia/webui/data/browser.js 2011-04-07 03:13:28 +0000 | |||
1542 | @@ -53,8 +53,8 @@ | |||
1543 | 53 | } | 53 | } |
1544 | 54 | Browser.prototype = { | 54 | Browser.prototype = { |
1545 | 55 | run: function() { | 55 | run: function() { |
1548 | 56 | var r = this.db.view('file', 'ext', | 56 | var r = this.db.view('user', 'video', |
1549 | 57 | {key: 'mov', reduce: false, include_docs: true} | 57 | {include_docs: true, descending: true} |
1550 | 58 | ); | 58 | ); |
1551 | 59 | this.load(r.rows); | 59 | this.load(r.rows); |
1552 | 60 | }, | 60 | }, |
1553 | @@ -74,7 +74,7 @@ | |||
1554 | 74 | }; | 74 | }; |
1555 | 75 | 75 | ||
1556 | 76 | var time = $el('div', {'class': 'time'}); | 76 | var time = $el('div', {'class': 'time'}); |
1558 | 77 | time.textContent = doc.duration + 's'; | 77 | time.textContent = doc.meta.duration + 's'; |
1559 | 78 | 78 | ||
1560 | 79 | div.appendChild(img); | 79 | div.appendChild(img); |
1561 | 80 | div.appendChild(time); | 80 | div.appendChild(time); |
1562 | @@ -90,7 +90,7 @@ | |||
1563 | 90 | names.forEach(function(n) { | 90 | names.forEach(function(n) { |
1564 | 91 | var el = $('meta.' + n); | 91 | var el = $('meta.' + n); |
1565 | 92 | if (el) { | 92 | if (el) { |
1567 | 93 | el.textContent = doc[n]; | 93 | el.textContent = doc.meta[n]; |
1568 | 94 | } | 94 | } |
1569 | 95 | }); | 95 | }); |
1570 | 96 | }, | 96 | }, |
Okay, I'm gonna self-approve as I want to get the important PyGI fix into trunk. This month needs to be an especially high-velocity month for dmedia, so I might break the rules often. To make up for it, I will write an unusually high number of unit tests, even by my standards.
"""
Jason, looks great! Amazingly, there isn't even a single typo, I'm sure!
--Jason
"""