Merge lp:~thisfred/u1db/put_after_delete into lp:u1db

Proposed by Eric Casteleijn
Status: Superseded
Proposed branch: lp:~thisfred/u1db/put_after_delete
Merge into: lp:u1db
Diff against target: 1363 lines (+349/-125)
22 files modified
include/u1db/u1db.h (+3/-2)
src/u1db.c (+20/-12)
src/u1db_http_sync_target.c (+2/-2)
src/u1db_query.c (+2/-1)
src/u1db_sync_target.c (+2/-2)
u1db/__init__.py (+9/-2)
u1db/backends/__init__.py (+4/-1)
u1db/backends/inmemory.py (+12/-7)
u1db/backends/sqlite_backend.py (+12/-7)
u1db/remote/http_app.py (+28/-13)
u1db/remote/http_client.py (+22/-8)
u1db/remote/http_database.py (+3/-2)
u1db/sync.py (+8/-5)
u1db/tests/__init__.py (+7/-0)
u1db/tests/c_backend_wrapper.pyx (+12/-12)
u1db/tests/commandline/test_client.py (+9/-8)
u1db/tests/test_backends.py (+61/-6)
u1db/tests/test_http_app.py (+54/-9)
u1db/tests/test_http_client.py (+23/-5)
u1db/tests/test_http_database.py (+17/-5)
u1db/tests/test_remote_sync_target.py (+8/-1)
u1db/tests/test_sync.py (+31/-15)
To merge this branch: bzr merge lp:~thisfred/u1db/put_after_delete
Reviewer: Ubuntu One hackers (status: Pending)
Review via email: mp+107445@code.launchpad.net

This proposal has been superseded by a proposal from 2012-05-25.

Commit message

Changed conflict detection so that creating a new document with the id of a deleted one does not trigger a conflict.

Description of the change

Changed conflict detection so that creating a new document with the id of a deleted one does not trigger a conflict.
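
For illustration, a minimal usage sketch of the intended behaviour (hypothetical snippet, not part of this branch's diff; assumes the InMemoryDatabase backend and made-up document id/content):

    from u1db.backends import inmemory

    db = inmemory.InMemoryDatabase('replica-uid')
    doc = db.create_doc('{"key": "value"}', doc_id='my-doc')
    db.delete_doc(doc)

    # By default deleted documents are hidden; include_deleted=True returns
    # the tombstone (a document with empty content).
    assert db.get_doc('my-doc') is None
    assert db.get_doc('my-doc', include_deleted=True).is_tombstone()

    # With this branch, re-creating a document with the id of the deleted one
    # no longer raises RevisionConflict; the new revision supersedes the
    # tombstone instead of conflicting with it.
    new_doc = db.create_doc('{"key": "value"}', doc_id='my-doc')
    assert db.get_doc('my-doc').rev == new_doc.rev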

lp:~thisfred/u1db/put_after_delete updated
313. By Eric Casteleijn

check vectorclocks and add test for put

314. By Eric Casteleijn

merged lp:~thisfred/u1db/exclude_deleted_documents

Revision history for this message
Eric Casteleijn (thisfred) wrote:

added vector clock checks in tests and fixed bugs thus exposed
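
(For reference, the vector clock check added to the tests asserts that the re-created revision strictly supersedes the deleted one. Roughly, as a sketch assuming an already-created database db and the simple_doc JSON fixture from the test suite:)

    from u1db import vectorclock

    doc = db.create_doc(simple_doc, doc_id='my_doc_id')
    db.delete_doc(doc)
    deleted_doc = db.get_doc('my_doc_id', include_deleted=True)
    new_doc = db.create_doc(simple_doc, doc_id='my_doc_id')
    # The new revision's vector clock must be strictly newer than the
    # tombstone's, so the re-created document supersedes the deletion
    # when the databases are synced.
    new_vc = vectorclock.VectorClockRev(new_doc.rev)
    deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
    assert new_vc.is_newer(deleted_vc)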

lp:~thisfred/u1db/put_after_delete updated
315. By Eric Casteleijn

expanded comment

316. By Eric Casteleijn

remerged trunk

Unmerged revisions

Preview Diff

1=== modified file 'include/u1db/u1db.h'
2--- include/u1db/u1db.h 2012-05-23 17:33:57 +0000
3+++ include/u1db/u1db.h 2012-05-25 19:26:18 +0000
4@@ -147,7 +147,8 @@
5 * @return status, will be U1DB_OK if there is no error, even if there is no
6 * document matching that doc_id.
7 */
8-int u1db_get_doc(u1database *db, const char *doc_id, u1db_document **doc);
9+int u1db_get_doc(u1database *db, const char *doc_id, int include_deleted,
10+ u1db_document **doc);
11
12
13 /**
14@@ -164,7 +165,7 @@
15 * u1db_free_doc.
16 */
17 int u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids,
18- int check_for_conflicts, void *context,
19+ int check_for_conflicts, int include_deleted, void *context,
20 u1db_doc_callback cb);
21
22 /**
23
24=== modified file 'src/u1db.c'
25--- src/u1db.c 2012-05-25 17:48:41 +0000
26+++ src/u1db.c 2012-05-25 19:26:18 +0000
27@@ -513,7 +513,7 @@
28 &old_content_len, &statement);
29 if (status != SQLITE_OK) { goto finish; }
30 if (doc->doc_rev == NULL) {
31- if (old_doc_rev == NULL) {
32+ if (old_doc_rev == NULL || old_content == NULL) {
33 // We are creating a new document from scratch. No problem.
34 status = 0;
35 } else {
36@@ -941,7 +941,8 @@
37
38
39 int
40-u1db_get_doc(u1database *db, const char *doc_id, u1db_document **doc)
41+u1db_get_doc(u1database *db, const char *doc_id, int include_deleted,
42+ u1db_document **doc)
43 {
44 int status = 0, content_len = 0;
45 sqlite3_stmt *statement;
46@@ -959,12 +960,16 @@
47 *doc = NULL;
48 goto finish;
49 }
50- *doc = u1db__allocate_document(doc_id, (const char*)doc_rev,
51- (const char*)content, 0);
52+ if (content != NULL || include_deleted) {
53+ *doc = u1db__allocate_document(doc_id, (const char*)doc_rev,
54+ (const char*)content, 0);
55
56- if (*doc != NULL) {
57- status = lookup_conflict(db, (*doc)->doc_id,
58- &((*doc)->has_conflicts));
59+ if (*doc != NULL) {
60+ status = lookup_conflict(db, (*doc)->doc_id,
61+ &((*doc)->has_conflicts));
62+ }
63+ } else {
64+ *doc = NULL;
65 }
66 } else {
67 *doc = NULL;
68@@ -976,7 +981,8 @@
69
70 int
71 u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids,
72- int check_for_conflicts, void *context, u1db_doc_callback cb)
73+ int check_for_conflicts, int include_deleted,
74+ void *context, u1db_doc_callback cb)
75 {
76 int status, i;
77 sqlite3_stmt *statement;
78@@ -1005,11 +1011,13 @@
79 u1db_document *doc;
80 revision = (char *)sqlite3_column_text(statement, 0);
81 content = (char *)sqlite3_column_text(statement, 1);
82- doc = u1db__allocate_document(doc_ids[i], revision, content, 0);
83- if (check_for_conflicts) {
84- status = lookup_conflict(db, doc_ids[i], &(doc->has_conflicts));
85+ if (content != NULL || include_deleted) {
86+ doc = u1db__allocate_document(doc_ids[i], revision, content, 0);
87+ if (check_for_conflicts) {
88+ status = lookup_conflict(db, doc_ids[i], &(doc->has_conflicts));
89+ }
90+ cb(context, doc);
91 }
92- cb(context, doc);
93 } else if (status == SQLITE_DONE) {
94 // This document doesn't exist
95 // TODO: I believe the python implementation returns the Null
96
97=== modified file 'src/u1db_http_sync_target.c'
98--- src/u1db_http_sync_target.c 2012-05-22 18:58:04 +0000
99+++ src/u1db_http_sync_target.c 2012-05-25 19:26:18 +0000
100@@ -921,7 +921,7 @@
101
102
103 int u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids,
104- int check_for_conflicts, void *context,
105+ int check_for_conflicts, int include_deleted, void *context,
106 u1db_doc_callback cb);
107
108 static int
109@@ -951,7 +951,7 @@
110 state.num = n_doc_ids;
111 state.generations = generations;
112 state.temp_fd = temp_fd;
113- status = u1db_get_docs(source_db, n_doc_ids, doc_ids, 0,
114+ status = u1db_get_docs(source_db, n_doc_ids, doc_ids, 0, 1,
115 &state, get_docs_to_tempfile);
116 if (status != U1DB_OK) { goto finish; }
117 status = finalize_and_send_temp_file(st, temp_fd, source_replica_uid, &req);
118
119=== modified file 'src/u1db_query.c'
120--- src/u1db_query.c 2012-05-23 10:57:02 +0000
121+++ src/u1db_query.c 2012-05-25 19:26:18 +0000
122@@ -591,7 +591,8 @@
123 doc_id = (char*)sqlite3_column_text(statement, 0);
124 // We use u1db_get_docs so we can pass check_for_conflicts=0, which is
125 // currently expected by the test suite.
126- status = u1db_get_docs(db, 1, (const char**)&doc_id, 0, context, cb);
127+ status = u1db_get_docs(
128+ db, 1, (const char**)&doc_id, 0, 0, context, cb);
129 if (status != U1DB_OK) { goto finish; }
130 status = sqlite3_step(statement);
131 }
132
133=== modified file 'src/u1db_sync_target.c'
134--- src/u1db_sync_target.c 2012-05-21 18:28:05 +0000
135+++ src/u1db_sync_target.c 2012-05-25 19:26:18 +0000
136@@ -422,7 +422,7 @@
137 if (se->num_doc_ids > 0) {
138 status = u1db_get_docs(se->db, se->num_doc_ids,
139 (const char **)se->doc_ids_to_return,
140- 0, &state, get_docs_to_gen_docs);
141+ 0, 1, &state, get_docs_to_gen_docs);
142 }
143 finish:
144 return status;
145@@ -469,7 +469,7 @@
146 (u1db_doc_gen_callback)u1db__sync_exchange_insert_doc_from_source;
147 get_doc_state.gen_for_doc_ids = generations;
148 return u1db_get_docs(source_db, n_doc_ids, doc_ids,
149- 0, &get_doc_state, get_docs_to_gen_docs);
150+ 0, 1, &get_doc_state, get_docs_to_gen_docs);
151 }
152
153
154
155=== modified file 'u1db/__init__.py'
156--- u1db/__init__.py 2012-05-23 13:11:46 +0000
157+++ u1db/__init__.py 2012-05-25 19:26:18 +0000
158@@ -68,20 +68,27 @@
159 """
160 raise NotImplementedError(self.whats_changed)
161
162- def get_doc(self, doc_id):
163+ def get_doc(self, doc_id, include_deleted=False):
164 """Get the JSON string for the given document.
165
166 :param doc_id: The unique document identifier
167+ :param include_deleted: If set to True, deleted documents will be
168+ returned with empty content. Otherwise asking for a deleted
169+ document will return None.
170 :return: a Document object.
171 """
172 raise NotImplementedError(self.get_doc)
173
174- def get_docs(self, doc_ids, check_for_conflicts=True):
175+ def get_docs(self, doc_ids, check_for_conflicts=True,
176+ include_deleted=False):
177 """Get the JSON content for many documents.
178
179 :param doc_ids: A list of document identifiers.
180 :param check_for_conflicts: If set to False, then the conflict check
181 will be skipped, and 'None' will be returned instead of True/False.
182+ :param include_deleted: If set to True, deleted documents will be
183+ returned with empty content. Otherwise deleted documents will not
184+ be included in the results.
185 :return: [Document] for each document id and matching doc_ids order.
186 """
187 raise NotImplementedError(self.get_docs)
188
189=== modified file 'u1db/backends/__init__.py'
190--- u1db/backends/__init__.py 2012-05-22 20:38:49 +0000
191+++ u1db/backends/__init__.py 2012-05-25 19:26:18 +0000
192@@ -81,10 +81,13 @@
193 def _put_and_update_indexes(self, doc_id, old_doc, new_rev, content):
194 raise NotImplementedError(self._put_and_update_indexes)
195
196- def get_docs(self, doc_ids, check_for_conflicts=True):
197+ def get_docs(self, doc_ids, check_for_conflicts=True,
198+ include_deleted=False):
199 result = []
200 for doc_id in doc_ids:
201 doc = self._get_doc(doc_id)
202+ if doc.is_tombstone() and not include_deleted:
203+ continue
204 if check_for_conflicts:
205 doc.has_conflicts = self._has_conflicts(doc_id)
206 result.append(doc)
207
208=== modified file 'u1db/backends/inmemory.py'
209--- u1db/backends/inmemory.py 2012-05-25 16:19:49 +0000
210+++ u1db/backends/inmemory.py 2012-05-25 19:26:18 +0000
211@@ -77,13 +77,16 @@
212 if self._has_conflicts(doc.doc_id):
213 raise errors.ConflictedDoc()
214 old_doc = self._get_doc(doc.doc_id)
215- if old_doc is not None:
216- if old_doc.rev != doc.rev:
217- raise errors.RevisionConflict()
218+ if old_doc and doc.rev is None and old_doc.is_tombstone():
219+ new_rev = self._allocate_doc_rev(old_doc.rev)
220 else:
221- if doc.rev is not None:
222- raise errors.RevisionConflict()
223- new_rev = self._allocate_doc_rev(doc.rev)
224+ if old_doc is not None:
225+ if old_doc.rev != doc.rev:
226+ raise errors.RevisionConflict()
227+ else:
228+ if doc.rev is not None:
229+ raise errors.RevisionConflict()
230+ new_rev = self._allocate_doc_rev(doc.rev)
231 doc.rev = new_rev
232 self._put_and_update_indexes(old_doc, doc)
233 return new_rev
234@@ -108,10 +111,12 @@
235 def _has_conflicts(self, doc_id):
236 return doc_id in self._conflicts
237
238- def get_doc(self, doc_id):
239+ def get_doc(self, doc_id, include_deleted=False):
240 doc = self._get_doc(doc_id)
241 if doc is None:
242 return None
243+ if doc.is_tombstone() and not include_deleted:
244+ return None
245 doc.has_conflicts = (doc.doc_id in self._conflicts)
246 return doc
247
248
249=== modified file 'u1db/backends/sqlite_backend.py'
250--- u1db/backends/sqlite_backend.py 2012-05-25 16:19:49 +0000
251+++ u1db/backends/sqlite_backend.py 2012-05-25 19:26:18 +0000
252@@ -291,10 +291,12 @@
253 else:
254 return True
255
256- def get_doc(self, doc_id):
257+ def get_doc(self, doc_id, include_deleted=False):
258 doc = self._get_doc(doc_id)
259 if doc is None:
260 return None
261+ if doc.is_tombstone() and not include_deleted:
262+ return None
263 # TODO: A doc which appears deleted could still have conflicts...
264 doc.has_conflicts = self._has_conflicts(doc.doc_id)
265 return doc
266@@ -307,13 +309,16 @@
267 if self._has_conflicts(doc.doc_id):
268 raise errors.ConflictedDoc()
269 old_doc = self._get_doc(doc.doc_id)
270- if old_doc is not None:
271- if old_doc.rev != doc.rev:
272- raise errors.RevisionConflict()
273+ if old_doc and doc.rev is None and old_doc.is_tombstone():
274+ new_rev = self._allocate_doc_rev(old_doc.rev)
275 else:
276- if doc.rev is not None:
277- raise errors.RevisionConflict()
278- new_rev = self._allocate_doc_rev(doc.rev)
279+ if old_doc is not None:
280+ if old_doc.rev != doc.rev:
281+ raise errors.RevisionConflict()
282+ else:
283+ if doc.rev is not None:
284+ raise errors.RevisionConflict()
285+ new_rev = self._allocate_doc_rev(doc.rev)
286 doc.rev = new_rev
287 self._put_and_update_indexes(old_doc, doc)
288 return new_rev
289
290=== modified file 'u1db/remote/http_app.py'
291--- u1db/remote/http_app.py 2012-05-23 13:11:46 +0000
292+++ u1db/remote/http_app.py 2012-05-25 19:26:18 +0000
293@@ -38,6 +38,13 @@
294 )
295
296
297+def parse_bool(expression):
298+ """Parse boolean querystring parameter."""
299+ if expression == 'true':
300+ return True
301+ return False
302+
303+
304 class BadRequest(Exception):
305 """Bad request."""
306
307@@ -89,7 +96,8 @@
308
309
310 def http_method(**control):
311- """Decoration for handling of query arguments and content for a HTTP method.
312+ """Decoration for handling of query arguments and content for a HTTP
313+ method.
314
315 args and content here are the query arguments and body of the incoming
316 HTTP requests.
317@@ -118,6 +126,7 @@
318 content_as_args = control.pop('content_as_args', False)
319 no_query = control.pop('no_query', False)
320 conversions = control.items()
321+
322 def wrap(f):
323 argspec = inspect.getargspec(f)
324 assert argspec.args[0] == "self"
325@@ -125,6 +134,7 @@
326 ndefaults = len(argspec.defaults or ())
327 required_args = set(argspec.args[1:nargs - ndefaults])
328 all_args = set(argspec.args)
329+
330 @functools.wraps(f)
331 def wrapper(self, args, content):
332 if no_query and args:
333@@ -147,7 +157,9 @@
334 except ValueError:
335 raise BadRequest()
336 return f(self, **args)
337+
338 return wrapper
339+
340 return wrap
341
342
343@@ -243,9 +255,9 @@
344 self.db.delete_doc(doc)
345 self.responder.send_response_json(200, rev=doc.rev)
346
347- @http_method()
348- def get(self):
349- doc = self.db.get_doc(self.id)
350+ @http_method(include_deleted=parse_bool)
351+ def get(self, include_deleted=False):
352+ doc = self.db.get_doc(self.id, include_deleted=include_deleted)
353 if doc is None:
354 wire_descr = errors.DocumentDoesNotExist.wire_description
355 self.responder.send_response_json(
356@@ -262,7 +274,8 @@
357 }
358 if doc.is_tombstone():
359 self.responder.send_response_json(
360- http_errors.wire_description_to_status[errors.DOCUMENT_DELETED],
361+ http_errors.wire_description_to_status[
362+ errors.DOCUMENT_DELETED],
363 error=errors.DOCUMENT_DELETED,
364 headers=headers)
365 else:
366@@ -288,11 +301,11 @@
367 @http_method()
368 def get(self):
369 result = self.target.get_sync_info(self.source_replica_uid)
370- self.responder.send_response_json(target_replica_uid=result[0],
371- target_replica_generation=result[1],
372- source_replica_uid=self.source_replica_uid,
373- source_replica_generation=result[2],
374- source_transaction_id=result[3])
375+ self.responder.send_response_json(
376+ target_replica_uid=result[0], target_replica_generation=result[1],
377+ source_replica_uid=self.source_replica_uid,
378+ source_replica_generation=result[2],
379+ source_transaction_id=result[3])
380
381 @http_method(generation=int,
382 content_as_args=True, no_query=True)
383@@ -421,7 +434,8 @@
384 args = urlparse.parse_qsl(self.environ['QUERY_STRING'],
385 strict_parsing=False)
386 try:
387- args = dict((k.decode('utf-8'), v.decode('utf-8')) for k, v in args)
388+ args = dict(
389+ (k.decode('utf-8'), v.decode('utf-8')) for k, v in args)
390 except ValueError:
391 raise BadRequest()
392 method = self.environ['REQUEST_METHOD'].lower()
393@@ -433,7 +447,7 @@
394 # to support chunked enconding
395 try:
396 content_length = int(self.environ['CONTENT_LENGTH'])
397- except (ValueError, KeyError), e:
398+ except (ValueError, KeyError):
399 raise BadRequest
400 if content_length <= 0:
401 raise BadRequest
402@@ -486,7 +500,8 @@
403 resource_cls, params = url_to_resource.match(environ['PATH_INFO'])
404 if resource_cls is None:
405 raise BadRequest # 404 instead?
406- resource = resource_cls(state=self.state, responder=responder, **params)
407+ resource = resource_cls(
408+ state=self.state, responder=responder, **params)
409 return resource
410
411 def __call__(self, environ, start_response):
412
413=== modified file 'u1db/remote/http_client.py'
414--- u1db/remote/http_client.py 2012-05-11 13:40:45 +0000
415+++ u1db/remote/http_client.py 2012-05-25 19:26:18 +0000
416@@ -42,6 +42,16 @@
417 CA_CERTS = "/etc/ssl/certs/ca-certificates.crt"
418
419
420+def _encode_query_parameter(value):
421+ """Encode query parameter."""
422+ if isinstance(value, bool):
423+ if value:
424+ value = 'true'
425+ else:
426+ value = 'false'
427+ return unicode(value).encode('utf-8')
428+
429+
430 class _VerifiedHTTPSConnection(httplib.HTTPSConnection):
431 """HTTPSConnection verifying server side certificates."""
432 # derived from httplib.py
433@@ -87,8 +97,9 @@
434
435 def set_oauth_credentials(self, consumer_key, consumer_secret,
436 token_key, token_secret):
437- self._oauth_creds = (oauth.OAuthConsumer(consumer_key, consumer_secret),
438- oauth.OAuthToken(token_key, token_secret))
439+ self._oauth_creds = (
440+ oauth.OAuthConsumer(consumer_key, consumer_secret),
441+ oauth.OAuthToken(token_key, token_secret))
442
443 def _ensure_connection(self):
444 if self._conn is not None:
445@@ -142,7 +153,8 @@
446 parameters=params,
447 http_url=full_url
448 )
449- oauth_req.sign_request(self.oauth_signature_method, consumer, token)
450+ oauth_req.sign_request(
451+ self.oauth_signature_method, consumer, token)
452 # Authorization: OAuth ...
453 return oauth_req.to_header().items()
454 else:
455@@ -160,18 +172,20 @@
456 for part in url_parts)
457 # oauth performs its own quoting
458 unquoted_url += '/'.join(url_parts)
459+ encoded_params = {}
460 if params:
461- params = dict((unicode(v).encode('utf-8'),
462- unicode(k).encode('utf-8'))
463- for v, k in params.items())
464- url_query += ('?' + urllib.urlencode(params))
465+ for key, value in params.items():
466+ key = unicode(key).encode('utf-8')
467+ encoded_params[key] = _encode_query_parameter(value)
468+ url_query += ('?' + urllib.urlencode(encoded_params))
469 if body is not None and not isinstance(body, basestring):
470 body = simplejson.dumps(body)
471 content_type = 'application/json'
472 headers = {}
473 if content_type:
474 headers['content-type'] = content_type
475- headers.update(self._sign_request(method, unquoted_url, params))
476+ headers.update(
477+ self._sign_request(method, unquoted_url, encoded_params))
478 self._conn.request(method, url_query, body, headers)
479 return self._response()
480
481
482=== modified file 'u1db/remote/http_database.py'
483--- u1db/remote/http_database.py 2012-05-23 13:11:46 +0000
484+++ u1db/remote/http_database.py 2012-05-25 19:26:18 +0000
485@@ -76,9 +76,10 @@
486 doc.rev = res['rev']
487 return res['rev']
488
489- def get_doc(self, doc_id):
490+ def get_doc(self, doc_id, include_deleted=False):
491 try:
492- res, headers = self._request('GET', ['doc', doc_id])
493+ res, headers = self._request(
494+ 'GET', ['doc', doc_id], {"include_deleted": include_deleted})
495 except errors.DocumentDoesNotExist:
496 return None
497 except errors.HTTPError, e:
498
499=== modified file 'u1db/sync.py'
500--- u1db/sync.py 2012-05-18 15:12:42 +0000
501+++ u1db/sync.py 2012-05-25 19:26:18 +0000
502@@ -95,19 +95,21 @@
503 # get target identifier, its current generation,
504 # and its last-seen database generation for this source
505 (self.target_replica_uid, target_gen, target_my_gen,
506- target_my_trans_id) = sync_target.get_sync_info(self.source._replica_uid)
507+ target_my_trans_id) = sync_target.get_sync_info(
508+ self.source._replica_uid)
509 # what's changed since that generation and this current gen
510 my_gen, _, changes = self.source.whats_changed(target_my_gen)
511
512 # this source last-seen database generation for the target
513- target_last_known_gen, target_trans_id = self.source._get_sync_gen_info(
514- self.target_replica_uid)
515+ (target_last_known_gen,
516+ target_trans_id) = self.source._get_sync_gen_info(
517+ self.target_replica_uid)
518 if not changes and target_last_known_gen == target_gen:
519 return my_gen
520 changed_doc_ids = [doc_id for doc_id, _, _ in changes]
521 # prepare to send all the changed docs
522 docs_to_send = self.source.get_docs(changed_doc_ids,
523- check_for_conflicts=False)
524+ check_for_conflicts=False, include_deleted=True)
525 docs_by_generation = zip(docs_to_send, (gen for _, gen, _ in changes))
526
527 # exchange documents and try to insert the returned ones with
528@@ -229,7 +231,8 @@
529 # return docs, including conflicts
530 changed_doc_ids = [doc_id for doc_id, _ in changes_to_return]
531 self._trace('before get_docs')
532- docs = self._db.get_docs(changed_doc_ids, check_for_conflicts=False)
533+ docs = self._db.get_docs(
534+ changed_doc_ids, check_for_conflicts=False, include_deleted=True)
535
536 docs_by_gen = izip(docs, (gen for _, gen in changes_to_return))
537 for doc, gen in docs_by_gen:
538
539=== modified file 'u1db/tests/__init__.py'
540--- u1db/tests/__init__.py 2012-05-22 18:58:04 +0000
541+++ u1db/tests/__init__.py 2012-05-25 19:26:18 +0000
542@@ -71,6 +71,13 @@
543 has_conflicts=has_conflicts)
544 self.assertEqual(exp_doc, db.get_doc(doc_id))
545
546+ def assertGetDocIncludeDeleted(self, db, doc_id, doc_rev, content,
547+ has_conflicts):
548+ """Assert that the document in the database looks correct."""
549+ exp_doc = self.make_document(doc_id, doc_rev, content,
550+ has_conflicts=has_conflicts)
551+ self.assertEqual(exp_doc, db.get_doc(doc_id, include_deleted=True))
552+
553 def assertGetDocConflicts(self, db, doc_id, conflicts):
554 """Assert what conflicts are stored for a given doc_id.
555
556
557=== modified file 'u1db/tests/c_backend_wrapper.pyx'
558--- u1db/tests/c_backend_wrapper.pyx 2012-05-23 17:33:57 +0000
559+++ u1db/tests/c_backend_wrapper.pyx 2012-05-25 19:26:18 +0000
560@@ -71,10 +71,11 @@
561 int u1db_create_doc(u1database *db, char *json, char *doc_id,
562 u1db_document **doc)
563 int u1db_delete_doc(u1database *db, u1db_document *doc)
564- int u1db_get_doc(u1database *db, char *doc_id, u1db_document **doc)
565+ int u1db_get_doc(u1database *db, char *doc_id, int include_deleted,
566+ u1db_document **doc)
567 int u1db_get_docs(u1database *db, int n_doc_ids, const_char_ptr *doc_ids,
568- int check_for_conflicts, void *context,
569- u1db_doc_callback cb)
570+ int check_for_conflicts, int include_deleted,
571+ void *context, u1db_doc_callback cb)
572 int u1db_put_doc(u1database *db, u1db_document *doc)
573 int u1db__put_doc_if_newer(u1database *db, u1db_document *doc,
574 int save_conflict, char *replica_uid,
575@@ -907,30 +908,29 @@
576 else:
577 raise RuntimeError("Unknown _put_doc_if_newer state: %d" % (state,))
578
579- def get_doc(self, doc_id):
580+ def get_doc(self, doc_id, include_deleted=False):
581 cdef u1db_document *doc = NULL
582-
583+ deleted = 1 if include_deleted else 0
584 handle_status("get_doc failed",
585- u1db_get_doc(self._db, doc_id, &doc))
586+ u1db_get_doc(self._db, doc_id, deleted, &doc))
587 if doc == NULL:
588 return None
589 pydoc = CDocument()
590 pydoc._doc = doc
591 return pydoc
592
593- def get_docs(self, doc_ids, check_for_conflicts=True):
594+ def get_docs(self, doc_ids, check_for_conflicts=True,
595+ include_deleted=False):
596 cdef int n_doc_ids, conflicts
597 cdef const_char_ptr *c_doc_ids
598
599 _list_to_array(doc_ids, &c_doc_ids, &n_doc_ids)
600- if check_for_conflicts:
601- conflicts = 1
602- else:
603- conflicts = 0
604+ deleted = 1 if include_deleted else 0
605+ conflicts = 1 if check_for_conflicts else 0
606 a_list = []
607 handle_status("get_docs",
608 u1db_get_docs(self._db, n_doc_ids, c_doc_ids,
609- conflicts, <void*>a_list, _append_doc_to_list))
610+ conflicts, deleted, <void*>a_list, _append_doc_to_list))
611 free(<void*>c_doc_ids)
612 return a_list
613
614
615=== modified file 'u1db/tests/commandline/test_client.py'
616--- u1db/tests/commandline/test_client.py 2012-05-24 14:50:19 +0000
617+++ u1db/tests/commandline/test_client.py 2012-05-25 19:26:18 +0000
618@@ -50,7 +50,7 @@
619 # parsing.
620 try:
621 return self.parser.parse_args(args)
622- except SystemExit, e:
623+ except SystemExit:
624 raise AssertionError('got SystemExit')
625
626 def test_create(self):
627@@ -203,7 +203,7 @@
628 doc = self.db.create_doc(tests.simple_doc)
629 cmd = self.make_command(client.CmdDelete)
630 cmd.run(self.db_path, doc.doc_id, doc.rev)
631- doc2 = self.db.get_doc(doc.doc_id)
632+ doc2 = self.db.get_doc(doc.doc_id, include_deleted=True)
633 self.assertEqual(doc.doc_id, doc2.doc_id)
634 self.assertNotEqual(doc.rev, doc2.rev)
635 self.assertIs(None, doc2.get_json())
636@@ -390,7 +390,7 @@
637 cmd = self.make_command(client.CmdCreateIndex)
638 retval = cmd.run(self.db_path, "foo", ["bar", "baz"])
639 self.assertEqual(self.db.list_indexes(), [('foo', ['bar', "baz"])])
640- self.assertEqual(retval, None) # conveniently mapped to 0
641+ self.assertEqual(retval, None) # conveniently mapped to 0
642 self.assertEqual(cmd.stdout.getvalue(), '')
643 self.assertEqual(cmd.stderr.getvalue(), '')
644
645@@ -688,7 +688,7 @@
646 doc = self.db.create_doc(tests.simple_doc, doc_id='test-id')
647 ret, stdout, stderr = self.run_main(
648 ['delete', self.db_path, 'test-id', doc.rev])
649- doc = self.db.get_doc('test-id')
650+ doc = self.db.get_doc('test-id', include_deleted=True)
651 self.assertEqual(0, ret)
652 self.assertEqual('', stdout)
653 self.assertEqual('rev: %s\n' % (doc.rev,), stderr)
654@@ -696,7 +696,7 @@
655 def test_init_db(self):
656 path = self.working_dir + '/test2.db'
657 ret, stdout, stderr = self.run_main(['init-db', path])
658- db2 = u1db_open(path, create=False)
659+ u1db_open(path, create=False)
660
661 def test_put(self):
662 doc = self.db.create_doc(tests.simple_doc, doc_id='test-id')
663@@ -720,7 +720,8 @@
664 self.assertEqual(0, ret)
665 self.assertEqual('', stdout)
666 self.assertEqual('', stderr)
667- self.assertGetDoc(self.db2, 'test-id', doc.rev, tests.simple_doc, False)
668+ self.assertGetDoc(
669+ self.db2, 'test-id', doc.rev, tests.simple_doc, False)
670
671
672 class TestHTTPIntegration(tests.TestCaseWithServer, RunMainHelper):
673@@ -743,7 +744,7 @@
674 def test_init_db(self):
675 url = self.getURL('new.db')
676 ret, stdout, stderr = self.run_main(['init-db', url])
677- db2 = u1db_open(self.getPath('new.db'), create=False)
678+ u1db_open(self.getPath('new.db'), create=False)
679
680 def test_create_get_put_delete(self):
681 db = u1db_open(self.getPath('test.db'), create=True)
682@@ -766,4 +767,4 @@
683 self.assertEqual(0, ret)
684 self.assertTrue(stderr.startswith('rev: '))
685 doc_rev2 = stderr[len('rev: '):].rstrip()
686- self.assertGetDoc(db, doc_id, doc_rev2, None, False)
687+ self.assertGetDocIncludeDeleted(db, doc_id, doc_rev2, None, False)
688
689=== modified file 'u1db/tests/test_backends.py'
690--- u1db/tests/test_backends.py 2012-05-25 16:19:49 +0000
691+++ u1db/tests/test_backends.py 2012-05-25 19:26:18 +0000
692@@ -152,6 +152,31 @@
693 self.assertRaises(errors.RevisionConflict, self.db.put_doc, bad_doc)
694 self.assertGetDoc(self.db, 'my_doc_id', old_rev, simple_doc, False)
695
696+ def test_create_succeeds_after_delete(self):
697+ doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
698+ self.db.delete_doc(doc)
699+ deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
700+ deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
701+ new_doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
702+ self.assertGetDoc(self.db, 'my_doc_id', new_doc.rev, simple_doc, False)
703+ new_vc = vectorclock.VectorClockRev(new_doc.rev)
704+ self.assertTrue(
705+ new_vc.is_newer(deleted_vc),
706+ "%s does not supersede %s" % (new_doc.rev, deleted_doc.rev))
707+
708+ def test_put_succeeds_after_delete(self):
709+ doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
710+ self.db.delete_doc(doc)
711+ deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
712+ deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
713+ doc2 = self.make_document('my_doc_id', None, simple_doc)
714+ self.db.put_doc(doc2)
715+ self.assertGetDoc(self.db, 'my_doc_id', doc2.rev, simple_doc, False)
716+ new_vc = vectorclock.VectorClockRev(doc2.rev)
717+ self.assertTrue(
718+ new_vc.is_newer(deleted_vc),
719+ "%s does not supersede %s" % (doc2.rev, deleted_doc.rev))
720+
721 def test_get_doc_after_put(self):
722 doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
723 self.assertGetDoc(self.db, 'my_doc_id', doc.rev, simple_doc, False)
724@@ -159,6 +184,17 @@
725 def test_get_doc_nonexisting(self):
726 self.assertIs(None, self.db.get_doc('non-existing'))
727
728+ def test_get_doc_deleted(self):
729+ doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
730+ self.db.delete_doc(doc)
731+ self.assertIs(None, self.db.get_doc('my_doc_id'))
732+
733+ def test_get_doc_include_deleted(self):
734+ doc = self.db.create_doc(simple_doc, doc_id='my_doc_id')
735+ self.db.delete_doc(doc)
736+ self.assertGetDocIncludeDeleted(
737+ self.db, doc.doc_id, doc.rev, None, False)
738+
739 def test_handles_nested_content(self):
740 doc = self.db.create_doc(nested_doc)
741 self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
742@@ -173,8 +209,9 @@
743 orig_rev = doc.rev
744 self.db.delete_doc(doc)
745 self.assertNotEqual(orig_rev, doc.rev)
746- self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False)
747- self.assertIsNot(None, self.db.get_doc(doc.doc_id))
748+ self.assertGetDocIncludeDeleted(
749+ self.db, doc.doc_id, doc.rev, None, False)
750+ self.assertIs(None, self.db.get_doc(doc.doc_id))
751
752 def test_delete_doc_non_existant(self):
753 doc = self.make_document('non-existing', 'other:1', simple_doc)
754@@ -186,7 +223,8 @@
755 self.db.delete_doc(doc)
756 self.assertRaises(errors.DocumentAlreadyDeleted,
757 self.db.delete_doc, doc)
758- self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False)
759+ self.assertGetDocIncludeDeleted(
760+ self.db, doc.doc_id, doc.rev, None, False)
761
762 def test_delete_doc_bad_rev(self):
763 doc1 = self.db.create_doc(simple_doc)
764@@ -216,7 +254,8 @@
765 def test_delete_then_put(self):
766 doc = self.db.create_doc(simple_doc)
767 self.db.delete_doc(doc)
768- self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False)
769+ self.assertGetDocIncludeDeleted(
770+ self.db, doc.doc_id, doc.rev, None, False)
771 doc.set_json(nested_doc)
772 self.db.put_doc(doc)
773 self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
774@@ -242,6 +281,20 @@
775 self.assertEqual([doc1, doc2],
776 self.db.get_docs([doc1.doc_id, doc2.doc_id]))
777
778+ def test_get_docs_deleted(self):
779+ doc1 = self.db.create_doc(simple_doc)
780+ doc2 = self.db.create_doc(nested_doc)
781+ self.db.delete_doc(doc1)
782+ self.assertEqual([doc2], self.db.get_docs([doc1.doc_id, doc2.doc_id]))
783+
784+ def test_get_docs_include_deleted(self):
785+ doc1 = self.db.create_doc(simple_doc)
786+ doc2 = self.db.create_doc(nested_doc)
787+ self.db.delete_doc(doc1)
788+ self.assertEqual(
789+ [doc1, doc2],
790+ self.db.get_docs([doc1.doc_id, doc2.doc_id], include_deleted=True))
791+
792 def test_get_docs_request_ordered(self):
793 doc1 = self.db.create_doc(simple_doc)
794 doc2 = self.db.create_doc(nested_doc)
795@@ -264,7 +317,8 @@
796 doc = self.make_document('my-doc-id', 'test:2', None)
797 state_at_gen = self.db._put_doc_if_newer(doc, save_conflict=False)
798 self.assertEqual(('inserted', 2), state_at_gen)
799- self.assertGetDoc(self.db, 'my-doc-id', 'test:2', None, False)
800+ self.assertGetDocIncludeDeleted(
801+ self.db, 'my-doc-id', 'test:2', None, False)
802
803 def test_put_doc_if_newer_already_superseded(self):
804 orig_doc = '{"new": "doc"}'
805@@ -518,7 +572,8 @@
806 (doc1.rev, None)])
807 self.db.resolve_doc(doc1, [doc1.rev, doc2.rev])
808 self.assertGetDocConflicts(self.db, doc1.doc_id, [])
809- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, None, False)
810+ self.assertGetDocIncludeDeleted(
811+ self.db, doc1.doc_id, doc1.rev, None, False)
812
813 def test_put_doc_if_newer_save_conflicted(self):
814 doc1 = self.db.create_doc(simple_doc)
815
816=== modified file 'u1db/tests/test_http_app.py'
817--- u1db/tests/test_http_app.py 2012-05-22 18:58:04 +0000
818+++ u1db/tests/test_http_app.py 2012-05-25 19:26:18 +0000
819@@ -292,7 +292,8 @@
820 res = invoke()
821 self.assertEqual('Put/end', res)
822 self.assertEqual({'a': '1', 'b': 2}, resource.args)
823- self.assertEqual(['{"entry": "x"}', '{"entry": "y"}'], resource.entries)
824+ self.assertEqual(
825+ ['{"entry": "x"}', '{"entry": "y"}'], resource.entries)
826 self.assertEqual(['a', 's', 's', 'e'], resource.order)
827
828 def _put_sync_stream(self, body):
829@@ -303,7 +304,7 @@
830 'CONTENT_TYPE': 'application/x-u1db-sync-stream'}
831 invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
832 parameters)
833- res = invoke()
834+ invoke()
835
836 def test_put_sync_stream_wrong_start(self):
837 self.assertRaises(http_app.BadRequest,
838@@ -362,9 +363,11 @@
839 'wsgi.input': StringIO.StringIO('{}'),
840 'CONTENT_LENGTH': '10000',
841 'CONTENT_TYPE': 'text/plain'}
842+
843 class params:
844 max_request_size = 5000
845 max_entry_size = sys.maxint # we don't get to use this
846+
847 invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
848 params)
849 self.assertRaises(http_app.BadRequest, invoke)
850@@ -430,8 +433,10 @@
851 self.status = status
852 self.headers = dict(headers)
853 self.response_body = []
854+
855 def write(data):
856 self.response_body.append(data)
857+
858 return write
859
860 def test_send_response_content_w_headers(self):
861@@ -621,7 +626,7 @@
862 def test_delete_doc(self):
863 doc = self.db0.create_doc('{"x": 1}', doc_id='doc1')
864 resp = self.app.delete('/db0/doc/doc1?old_rev=%s' % doc.rev)
865- doc = self.db0.get_doc('doc1')
866+ doc = self.db0.get_doc('doc1', include_deleted=True)
867 self.assertEqual(None, doc.content)
868 self.assertEqual(200, resp.status)
869 self.assertEqual('application/json', resp.header('content-type'))
870@@ -651,8 +656,30 @@
871 resp = self.app.get('/db0/doc/doc1', expect_errors=True)
872 self.assertEqual(404, resp.status)
873 self.assertEqual('application/json', resp.header('content-type'))
874- self.assertEqual({"error": errors.DOCUMENT_DELETED},
875- simplejson.loads(resp.body))
876+ self.assertEqual(
877+ {"error": errors.DocumentDoesNotExist.wire_description},
878+ simplejson.loads(resp.body))
879+
880+ def test_get_doc_deleted_explicit_exclude(self):
881+ doc = self.db0.create_doc('{"x": 1}', doc_id='doc1')
882+ self.db0.delete_doc(doc)
883+ resp = self.app.get(
884+ '/db0/doc/doc1?include_deleted=false', expect_errors=True)
885+ self.assertEqual(404, resp.status)
886+ self.assertEqual('application/json', resp.header('content-type'))
887+ self.assertEqual(
888+ {"error": errors.DocumentDoesNotExist.wire_description},
889+ simplejson.loads(resp.body))
890+
891+ def test_get_deleted_doc(self):
892+ doc = self.db0.create_doc('{"x": 1}', doc_id='doc1')
893+ self.db0.delete_doc(doc)
894+ resp = self.app.get(
895+ '/db0/doc/doc1?include_deleted=true', expect_errors=True)
896+ self.assertEqual(404, resp.status)
897+ self.assertEqual('application/json', resp.header('content-type'))
898+ self.assertEqual(
899+ {"error": errors.DOCUMENT_DELETED}, simplejson.loads(resp.body))
900 self.assertEqual(doc.rev, resp.header('x-u1db-rev'))
901 self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
902
903@@ -695,6 +722,7 @@
904
905 gens = []
906 _do_set_sync_info = self.db0._do_set_sync_info
907+
908 def set_sync_generation_witness(other_uid, other_gen, other_trans_id):
909 gens.append((other_uid, other_gen))
910 _do_set_sync_info(other_uid, other_gen, other_trans_id)
911@@ -767,8 +795,10 @@
912 def test_sync_exchange_error_in_stream(self):
913 args = dict(last_known_generation=0)
914 body = "[\r\n%s\r\n]" % simplejson.dumps(args)
915+
916 def boom(self, return_doc_cb):
917 raise errors.Unavailable
918+
919 self.patch(sync.SyncExchange, 'return_docs',
920 boom)
921 resp = self.app.post('/db0/sync-from/replica',
922@@ -797,32 +827,38 @@
923
924 def test_begin_and_done(self):
925 calls = []
926+
927 def begin(environ):
928 self.assertTrue('PATH_INFO' in environ)
929 calls.append('begin')
930+
931 def done(environ):
932 self.assertTrue('PATH_INFO' in environ)
933 calls.append('done')
934+
935 self.http_app.request_begin = begin
936 self.http_app.request_done = done
937
938 doc = self.db0.create_doc('{"x": 1}', doc_id='doc1')
939- resp = self.app.get('/db0/doc/%s' % doc.doc_id)
940+ self.app.get('/db0/doc/%s' % doc.doc_id)
941
942 self.assertEqual(['begin', 'done'], calls)
943
944 def test_bad_request(self):
945 calls = []
946+
947 def begin(environ):
948 self.assertTrue('PATH_INFO' in environ)
949 calls.append('begin')
950+
951 def bad_request(environ):
952 self.assertTrue('PATH_INFO' in environ)
953 calls.append('bad-request')
954+
955 self.http_app.request_begin = begin
956 self.http_app.request_bad_request = bad_request
957 # shouldn't be called
958- self.http_app.request_done = lambda env: borken
959+ self.http_app.request_done = lambda env: 1 / 0
960
961 resp = self.app.put('/db0/foo/doc1', params='{"x": 1}',
962 headers={'content-type': 'application/json'},
963@@ -843,6 +879,7 @@
964 super(TestHTTPAppErrorHandling, self).setUp()
965 self.exc = None
966 self.state = tests.ServerStateForTests()
967+
968 class ErroringResource(object):
969
970 def post(_, args, content):
971@@ -887,16 +924,19 @@
972
973 def test_generic_u1db_errors_hooks(self):
974 calls = []
975+
976 def begin(environ):
977 self.assertTrue('PATH_INFO' in environ)
978 calls.append('begin')
979+
980 def u1db_error(environ, exc):
981 self.assertTrue('PATH_INFO' in environ)
982 calls.append(('error', exc))
983+
984 self.http_app.request_begin = begin
985 self.http_app.request_u1db_error = u1db_error
986 # shouldn't be called
987- self.http_app.request_done = lambda env: borken
988+ self.http_app.request_done = lambda env: 1 / 0
989
990 self.exc = errors.U1DBError()
991 resp = self.app.post('/req', params='{}',
992@@ -916,15 +956,18 @@
993 class Failure(Exception):
994 pass
995 calls = []
996+
997 def begin(environ):
998 calls.append('begin')
999+
1000 def failed(environ):
1001 self.assertTrue('PATH_INFO' in environ)
1002 calls.append(('failed', sys.exc_info()))
1003+
1004 self.http_app.request_begin = begin
1005 self.http_app.request_failed = failed
1006 # shouldn't be called
1007- self.http_app.request_done = lambda env: borken
1008+ self.http_app.request_done = lambda env: 1 / 0
1009
1010 self.exc = Failure()
1011 self.assertRaises(Failure, self.app.post, '/req', params='{}',
1012@@ -945,9 +988,11 @@
1013 self.state.ensure_database('foo')
1014
1015 def test_plugging(self):
1016+
1017 class MySyncExchange(object):
1018 def __init__(self, db, source_replica_uid, last_known_generation):
1019 pass
1020+
1021 class MySyncResource(http_app.SyncResource):
1022 sync_exchange_class = MySyncExchange
1023
1024
1025=== modified file 'u1db/tests/test_http_client.py'
1026--- u1db/tests/test_http_client.py 2012-04-25 12:14:42 +0000
1027+++ u1db/tests/test_http_client.py 2012-05-25 19:26:18 +0000
1028@@ -29,6 +29,18 @@
1029 )
1030
1031
1032+class TestEncoder(tests.TestCase):
1033+
1034+ def test_encode_string(self):
1035+ self.assertEqual("foo", http_client._encode_query_parameter("foo"))
1036+
1037+ def test_encode_true(self):
1038+ self.assertEqual("true", http_client._encode_query_parameter(True))
1039+
1040+ def test_encode_false(self):
1041+ self.assertEqual("false", http_client._encode_query_parameter(False))
1042+
1043+
1044 class TestHTTPClientBase(tests.TestCaseWithServer):
1045
1046 def app(self, environ, start_response):
1047@@ -44,7 +56,8 @@
1048 return [simplejson.dumps(ret)]
1049 elif environ['PATH_INFO'].endswith('error'):
1050 content_length = int(environ['CONTENT_LENGTH'])
1051- error = simplejson.loads(environ['wsgi.input'].read(content_length))
1052+ error = simplejson.loads(
1053+ environ['wsgi.input'].read(content_length))
1054 response = error['response']
1055 # In debug mode, wsgiref has an assertion that the status parameter
1056 # is a 'str' object. However error['status'] returns a unicode
1057@@ -69,7 +82,8 @@
1058 oauth_server = oauth.OAuthServer(tests.testingOAuthStore)
1059 oauth_server.add_signature_method(tests.sign_meth_HMAC_SHA1)
1060 try:
1061- consumer, token, params = oauth_server.verify_request(oauth_req)
1062+ consumer, token, params = oauth_server.verify_request(
1063+ oauth_req)
1064 except oauth.OAuthError, e:
1065 start_response("401 Unauthorized",
1066 [('Content-Type', 'application/json')])
1067@@ -85,9 +99,11 @@
1068 srv = simple_server.WSGIServer(host_port, handler)
1069 srv.set_app(self.app)
1070 return srv
1071+
1072 class req_handler(simple_server.WSGIRequestHandler):
1073 def log_request(*args):
1074 pass # suppress
1075+
1076 return make_server, req_handler, "shutdown", "http"
1077
1078 def getClient(self):
1079@@ -153,7 +169,8 @@
1080
1081 def test__request_json(self):
1082 cli = self.getClient()
1083- res, headers = cli._request_json('POST', ['echo'], {'b': 2}, {'a': 'x'})
1084+ res, headers = cli._request_json(
1085+ 'POST', ['echo'], {'b': 2}, {'a': 'x'})
1086 self.assertEqual('application/json', headers['content-type'])
1087 self.assertEqual({'CONTENT_TYPE': 'application/json',
1088 'PATH_INFO': '/dbase/echo',
1089@@ -248,8 +265,9 @@
1090 # oauth does its own internal quoting
1091 params = {'x': u'\xf0', 'y': "foo"}
1092 res, headers = cli._request('GET', ['doc', 'oauth', 'foo bar'], params)
1093- self.assertEqual(['/dbase/doc/oauth/foo bar', tests.token1.key, params],
1094- simplejson.loads(res))
1095+ self.assertEqual(
1096+ ['/dbase/doc/oauth/foo bar', tests.token1.key, params],
1097+ simplejson.loads(res))
1098
1099 def test_oauth_Unauthorized(self):
1100 cli = self.getClient()
1101
1102=== modified file 'u1db/tests/test_http_database.py'
1103--- u1db/tests/test_http_database.py 2012-05-22 18:58:04 +0000
1104+++ u1db/tests/test_http_database.py 2012-05-25 19:26:18 +0000
1105@@ -107,26 +107,38 @@
1106 self.response_val = '{"v": 2}', {'x-u1db-rev': 'doc-rev',
1107 'x-u1db-has-conflicts': 'false'}
1108 self.assertGetDoc(self.db, 'doc-id', 'doc-rev', '{"v": 2}', False)
1109- self.assertEqual(('GET', ['doc', 'doc-id'], None, None, None),
1110- self.got)
1111+ self.assertEqual(
1112+ ('GET', ['doc', 'doc-id'], {'include_deleted': False}, None, None),
1113+ self.got)
1114
1115 def test_get_doc_non_existing(self):
1116 self.response_val = errors.DocumentDoesNotExist()
1117 self.assertIs(None, self.db.get_doc('not-there'))
1118- self.assertEqual(('GET', ['doc', 'not-there'], None, None, None),
1119- self.got)
1120+ self.assertEqual(
1121+ ('GET', ['doc', 'not-there'], {'include_deleted': False}, None,
1122+ None), self.got)
1123
1124 def test_get_doc_deleted(self):
1125+ self.response_val = errors.DocumentDoesNotExist()
1126+ self.assertIs(None, self.db.get_doc('deleted'))
1127+ self.assertEqual(
1128+ ('GET', ['doc', 'deleted'], {'include_deleted': False}, None,
1129+ None), self.got)
1130+
1131+ def test_get_doc_deleted_include_deleted(self):
1132 self.response_val = errors.HTTPError(404,
1133 simplejson.dumps(
1134 {"error": errors.DOCUMENT_DELETED}
1135 ),
1136 {'x-u1db-rev': 'doc-rev-gone',
1137 'x-u1db-has-conflicts': 'false'})
1138- doc = self.db.get_doc('deleted')
1139+ doc = self.db.get_doc('deleted', include_deleted=True)
1140 self.assertEqual('deleted', doc.doc_id)
1141 self.assertEqual('doc-rev-gone', doc.rev)
1142 self.assertIs(None, doc.content)
1143+ self.assertEqual(
1144+ ('GET', ['doc', 'deleted'], {'include_deleted': True}, None, None),
1145+ self.got)
1146
1147 def test_get_doc_pass_through_errors(self):
1148 self.response_val = errors.HTTPError(500, 'Crash.')
1149
1150=== modified file 'u1db/tests/test_remote_sync_target.py'
1151--- u1db/tests/test_remote_sync_target.py 2012-05-22 18:58:04 +0000
1152+++ u1db/tests/test_remote_sync_target.py 2012-05-25 19:26:18 +0000
1153@@ -242,21 +242,28 @@
1154
1155 def test_sync_exchange_in_stream_error(self):
1156 self.startServer()
1157+
1158 def blackhole_getstderr(inst):
1159 return cStringIO.StringIO()
1160+
1161 self.patch(self.server.RequestHandlerClass, 'get_stderr',
1162 blackhole_getstderr)
1163 db = self.request_state._create_database('test')
1164 doc = db.create_doc('{"value": "there"}')
1165- def bomb_get_docs(doc_ids, check_for_conflicts=None):
1166+
1167+ def bomb_get_docs(doc_ids, check_for_conflicts=None,
1168+ include_deleted=False):
1169 yield doc
1170 # delayed failure case
1171 raise errors.Unavailable
1172+
1173 self.patch(db, 'get_docs', bomb_get_docs)
1174 remote_target = self.getSyncTarget('test')
1175 other_changes = []
1176+
1177 def receive_doc(doc, gen):
1178 other_changes.append((doc.doc_id, doc.rev, doc.get_json(), gen))
1179+
1180 self.assertRaises(errors.Unavailable, remote_target.sync_exchange,
1181 [], 'replica', last_known_generation=0,
1182 return_doc_cb=receive_doc)
1183
1184=== modified file 'u1db/tests/test_sync.py'
1185--- u1db/tests/test_sync.py 2012-05-22 18:58:04 +0000
1186+++ u1db/tests/test_sync.py 2012-05-25 19:26:18 +0000
1187@@ -134,7 +134,7 @@
1188 def set_trace_hook(self, callback):
1189 try:
1190 self.st._set_trace_hook(callback)
1191- except NotImplementedError, e:
1192+ except NotImplementedError:
1193 self.skipTest("%s does not implement _set_trace_hook"
1194 % (self.st.__class__.__name__,))
1195
1196@@ -146,7 +146,7 @@
1197
1198 def test_create_doc_updates_sync_info(self):
1199 self.assertEqual(('test', 0, 0, ''), self.st.get_sync_info('other'))
1200- doc = self.db.create_doc(simple_doc)
1201+ self.db.create_doc(simple_doc)
1202 self.assertEqual(('test', 1, 0, ''), self.st.get_sync_info('other'))
1203
1204 def test_record_sync_info(self):
1205@@ -174,7 +174,8 @@
1206 new_gen = self.st.sync_exchange(docs_by_gen, 'replica',
1207 last_known_generation=0,
1208 return_doc_cb=self.receive_doc)
1209- self.assertGetDoc(self.db, doc.doc_id, edit_rev, None, False)
1210+ self.assertGetDocIncludeDeleted(
1211+ self.db, doc.doc_id, edit_rev, None, False)
1212 self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
1213 self.assertEqual(([], 2), (self.other_changes, new_gen))
1214 self.assertEqual(10, self.st.get_sync_info('replica')[2])
1215@@ -258,9 +259,10 @@
1216 (doc2.doc_id, doc2.rev, nested_doc, 2)], 2),
1217 (self.other_changes, new_gen))
1218 if self.whitebox:
1219- self.assertEqual(self.db._last_exchange_log['return'],
1220- {'last_gen': 2, 'docs': [(doc.doc_id, doc.rev),
1221- (doc2.doc_id, doc2.rev)]})
1222+ self.assertEqual(
1223+ self.db._last_exchange_log['return'],
1224+ {'last_gen': 2, 'docs':
1225+ [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]})
1226
1227 def test_sync_exchange_getting_newer_docs(self):
1228 doc = self.db.create_doc(simple_doc)
1229@@ -276,6 +278,7 @@
1230
1231 def test_sync_exchange_with_concurrent_updates_of_synced_doc(self):
1232 expected = []
1233+
1234 def before_whatschanged_cb(state):
1235 if state != 'before whats_changed':
1236 return
1237@@ -283,6 +286,7 @@
1238 conc_rev = self.db.put_doc(
1239 self.make_document(doc.doc_id, 'test:1|z:2', cont))
1240 expected.append((doc.doc_id, conc_rev, cont, 3))
1241+
1242 self.set_trace_hook(before_whatschanged_cb)
1243 doc = self.db.create_doc(simple_doc)
1244 self.assertTransactionLog([doc.doc_id], self.db)
1245@@ -351,8 +355,10 @@
1246
1247 def test__set_trace_hook(self):
1248 called = []
1249+
1250 def cb(state):
1251 called.append(state)
1252+
1253 self.set_trace_hook(cb)
1254 self.st.sync_exchange([], 'replica', 0, self.receive_doc)
1255 self.st.record_sync_info('replica', 0, 'T-sid')
1256@@ -465,10 +471,12 @@
1257 # new record.
1258 # When we finish synchronizing, we can notice that something locally
1259 # was updated, and we cannot tell c2 our new updated generation
1260+
1261 def before_get_docs(state):
1262 if state != 'before get_docs':
1263 return
1264 self.db1.create_doc(simple_doc)
1265+
1266 self.assertEqual(0, self.sync(self.db1, self.db2,
1267 trace_hook=before_get_docs))
1268 self.assertLastExchangeLog(self.db2,
1269@@ -482,7 +490,8 @@
1270 self.assertEqual((0, ''), self.db2._get_sync_gen_info('test1'))
1271
1272 def test_sync_doesnt_update_other_if_nothing_pulled(self):
1273- doc = self.db1.create_doc(simple_doc)
1274+ self.db1.create_doc(simple_doc)
1275+
1276 def no_record_sync_info(state):
1277 if state != 'record_sync_info':
1278 return
1279@@ -565,8 +574,9 @@
1280 'return': {'docs': [(doc_id, doc2.rev)],
1281 'last_gen': 2}})
1282 self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1)
1283- self.assertGetDoc(self.db1, doc_id, doc2.rev, None, True)
1284- self.assertGetDoc(self.db2, doc_id, doc2.rev, None, False)
1285+ self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True)
1286+ self.assertGetDocIncludeDeleted(
1287+ self.db2, doc_id, doc2.rev, None, False)
1288 self.assertEqual([], self.db1.get_from_index('test-idx', [('value',)]))
1289
1290 def test_sync_local_race_conflicted(self):
1291@@ -581,12 +591,14 @@
1292 self.db2.put_doc(doc)
1293 doc2_rev2 = doc.rev
1294 triggered = []
1295+
1296 def after_whatschanged(state):
1297 if state != 'after whats_changed':
1298 return
1299 triggered.append(True)
1300 doc = self.make_document(doc_id, doc1_rev, content1)
1301 self.db1.put_doc(doc)
1302+
1303 self.sync(self.db1, self.db2, trace_hook=after_whatschanged)
1304 self.assertEqual([True], triggered)
1305 self.assertGetDoc(self.db1, doc_id, doc2_rev2, content2, True)
1306@@ -599,7 +611,6 @@
1307 def test_sync_propagates_deletes(self):
1308 doc1 = self.db1.create_doc(simple_doc)
1309 doc_id = doc1.doc_id
1310- doc1_rev = doc1.rev
1311 self.db1.create_index('test-idx', ['key'])
1312 self.sync(self.db1, self.db2)
1313 self.db2.create_index('test-idx', ['key'])
1314@@ -613,8 +624,10 @@
1315 'source_uid': 'test1',
1316 'source_gen': 2, 'last_known_gen': 1},
1317 'return': {'docs': [], 'last_gen': 2}})
1318- self.assertGetDoc(self.db1, doc_id, deleted_rev, None, False)
1319- self.assertGetDoc(self.db2, doc_id, deleted_rev, None, False)
1320+ self.assertGetDocIncludeDeleted(
1321+ self.db1, doc_id, deleted_rev, None, False)
1322+ self.assertGetDocIncludeDeleted(
1323+ self.db2, doc_id, deleted_rev, None, False)
1324 self.assertEqual([], self.db1.get_from_index('test-idx', [('value',)]))
1325 self.assertEqual([], self.db2.get_from_index('test-idx', [('value',)]))
1326 self.sync(self.db2, self.db3)
1327@@ -623,7 +636,8 @@
1328 'source_uid': 'test2',
1329 'source_gen': 2, 'last_known_gen': 0},
1330 'return': {'docs': [], 'last_gen': 2}})
1331- self.assertGetDoc(self.db3, doc_id, deleted_rev, None, False)
1332+ self.assertGetDocIncludeDeleted(
1333+ self.db3, doc_id, deleted_rev, None, False)
1334
1335 def test_sync_propagates_resolution(self):
1336 doc1 = self.db1.create_doc('{"a": 1}', doc_id='the-doc')
1337@@ -659,8 +673,8 @@
1338 def test_sync_supersedes_conflicts(self):
1339 db3 = self.create_database('test3')
1340 doc1 = self.db1.create_doc('{"a": 1}', doc_id='the-doc')
1341- doc2 = self.db2.create_doc('{"b": 1}', doc_id='the-doc')
1342- doc3 = db3.create_doc('{"c": 1}', doc_id='the-doc')
1343+ self.db2.create_doc('{"b": 1}', doc_id='the-doc')
1344+ db3.create_doc('{"c": 1}', doc_id='the-doc')
1345 self.sync(db3, self.db1)
1346 self.sync(db3, self.db2)
1347 self.assertEqual(3, len(db3.get_doc_conflicts('the-doc')))
1348@@ -724,6 +738,7 @@
1349 progress1 = []
1350 progress2 = []
1351 _do_set_sync_info = self.db1._do_set_sync_info
1352+
1353 def set_sync_generation_witness1(other_uid, other_gen, trans_id):
1354 progress1.append((other_uid, other_gen,
1355 [d for d, t in self.db1._get_transaction_log()[2:]]))
1356@@ -732,6 +747,7 @@
1357 set_sync_generation_witness1)
1358
1359 _do_set_sync_info2 = self.db2._do_set_sync_info
1360+
1361 def set_sync_generation_witness2(other_uid, other_gen, trans_id):
1362 progress2.append((other_uid, other_gen,
1363 [d for d, t in self.db2._get_transaction_log()[2:]]))
