Merge lp:~thisfred/u1db/exclude_deleted_documents into lp:u1db
- exclude_deleted_documents
- Merge into trunk
Proposed by
Eric Casteleijn
Status: | Merged |
---|---|
Approved by: | Eric Casteleijn |
Approved revision: | 314 |
Merged at revision: | 312 |
Proposed branch: | lp:~thisfred/u1db/exclude_deleted_documents |
Merge into: | lp:u1db |
Diff against target: |
1276 lines (+305/-112) 22 files modified
include/u1db/u1db.h (+3/-2) src/u1db.c (+19/-11) src/u1db_http_sync_target.c (+2/-2) src/u1db_query.c (+2/-1) src/u1db_sync_target.c (+2/-2) u1db/__init__.py (+9/-2) u1db/backends/__init__.py (+4/-1) u1db/backends/inmemory.py (+3/-1) u1db/backends/sqlite_backend.py (+3/-1) u1db/remote/http_app.py (+28/-13) u1db/remote/http_client.py (+22/-8) u1db/remote/http_database.py (+3/-2) u1db/sync.py (+8/-5) u1db/tests/__init__.py (+7/-0) u1db/tests/c_backend_wrapper.pyx (+12/-12) u1db/tests/commandline/test_client.py (+9/-8) u1db/tests/test_backends.py (+36/-6) u1db/tests/test_http_app.py (+54/-9) u1db/tests/test_http_client.py (+23/-5) u1db/tests/test_http_database.py (+17/-5) u1db/tests/test_remote_sync_target.py (+8/-1) u1db/tests/test_sync.py (+31/-15) |
To merge this branch: | bzr merge lp:~thisfred/u1db/exclude_deleted_documents |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Samuele Pedroni | Approve | ||
Review via email: mp+107286@code.launchpad.net |
Commit message
made get_doc() and get_docs() not return deleted documents, and added an include_deleted argument in case one does want them
Description of the change
made get_doc() and get_docs() not return deleted documents, and added an include_deleted argument in case one does want them
To post a comment you must log in.
Revision history for this message
Eric Casteleijn (thisfred) wrote : | # |
fixed
- 313. By Eric Casteleijn
-
fixes to http layer
- 314. By Eric Casteleijn
-
factored out _encode_query_parameter and added test
Revision history for this message
Eric Casteleijn (thisfred) wrote : | # |
added unit test for encoding
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'include/u1db/u1db.h' |
2 | --- include/u1db/u1db.h 2012-05-23 17:33:57 +0000 |
3 | +++ include/u1db/u1db.h 2012-05-25 19:04:26 +0000 |
4 | @@ -147,7 +147,8 @@ |
5 | * @return status, will be U1DB_OK if there is no error, even if there is no |
6 | * document matching that doc_id. |
7 | */ |
8 | -int u1db_get_doc(u1database *db, const char *doc_id, u1db_document **doc); |
9 | +int u1db_get_doc(u1database *db, const char *doc_id, int include_deleted, |
10 | + u1db_document **doc); |
11 | |
12 | |
13 | /** |
14 | @@ -164,7 +165,7 @@ |
15 | * u1db_free_doc. |
16 | */ |
17 | int u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids, |
18 | - int check_for_conflicts, void *context, |
19 | + int check_for_conflicts, int include_deleted, void *context, |
20 | u1db_doc_callback cb); |
21 | |
22 | /** |
23 | |
24 | === modified file 'src/u1db.c' |
25 | --- src/u1db.c 2012-05-25 17:48:41 +0000 |
26 | +++ src/u1db.c 2012-05-25 19:04:26 +0000 |
27 | @@ -941,7 +941,8 @@ |
28 | |
29 | |
30 | int |
31 | -u1db_get_doc(u1database *db, const char *doc_id, u1db_document **doc) |
32 | +u1db_get_doc(u1database *db, const char *doc_id, int include_deleted, |
33 | + u1db_document **doc) |
34 | { |
35 | int status = 0, content_len = 0; |
36 | sqlite3_stmt *statement; |
37 | @@ -959,12 +960,16 @@ |
38 | *doc = NULL; |
39 | goto finish; |
40 | } |
41 | - *doc = u1db__allocate_document(doc_id, (const char*)doc_rev, |
42 | - (const char*)content, 0); |
43 | + if (content != NULL || include_deleted) { |
44 | + *doc = u1db__allocate_document(doc_id, (const char*)doc_rev, |
45 | + (const char*)content, 0); |
46 | |
47 | - if (*doc != NULL) { |
48 | - status = lookup_conflict(db, (*doc)->doc_id, |
49 | - &((*doc)->has_conflicts)); |
50 | + if (*doc != NULL) { |
51 | + status = lookup_conflict(db, (*doc)->doc_id, |
52 | + &((*doc)->has_conflicts)); |
53 | + } |
54 | + } else { |
55 | + *doc = NULL; |
56 | } |
57 | } else { |
58 | *doc = NULL; |
59 | @@ -976,7 +981,8 @@ |
60 | |
61 | int |
62 | u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids, |
63 | - int check_for_conflicts, void *context, u1db_doc_callback cb) |
64 | + int check_for_conflicts, int include_deleted, |
65 | + void *context, u1db_doc_callback cb) |
66 | { |
67 | int status, i; |
68 | sqlite3_stmt *statement; |
69 | @@ -1005,11 +1011,13 @@ |
70 | u1db_document *doc; |
71 | revision = (char *)sqlite3_column_text(statement, 0); |
72 | content = (char *)sqlite3_column_text(statement, 1); |
73 | - doc = u1db__allocate_document(doc_ids[i], revision, content, 0); |
74 | - if (check_for_conflicts) { |
75 | - status = lookup_conflict(db, doc_ids[i], &(doc->has_conflicts)); |
76 | + if (content != NULL || include_deleted) { |
77 | + doc = u1db__allocate_document(doc_ids[i], revision, content, 0); |
78 | + if (check_for_conflicts) { |
79 | + status = lookup_conflict(db, doc_ids[i], &(doc->has_conflicts)); |
80 | + } |
81 | + cb(context, doc); |
82 | } |
83 | - cb(context, doc); |
84 | } else if (status == SQLITE_DONE) { |
85 | // This document doesn't exist |
86 | // TODO: I believe the python implementation returns the Null |
87 | |
88 | === modified file 'src/u1db_http_sync_target.c' |
89 | --- src/u1db_http_sync_target.c 2012-05-22 18:58:04 +0000 |
90 | +++ src/u1db_http_sync_target.c 2012-05-25 19:04:26 +0000 |
91 | @@ -921,7 +921,7 @@ |
92 | |
93 | |
94 | int u1db_get_docs(u1database *db, int n_doc_ids, const char **doc_ids, |
95 | - int check_for_conflicts, void *context, |
96 | + int check_for_conflicts, int include_deleted, void *context, |
97 | u1db_doc_callback cb); |
98 | |
99 | static int |
100 | @@ -951,7 +951,7 @@ |
101 | state.num = n_doc_ids; |
102 | state.generations = generations; |
103 | state.temp_fd = temp_fd; |
104 | - status = u1db_get_docs(source_db, n_doc_ids, doc_ids, 0, |
105 | + status = u1db_get_docs(source_db, n_doc_ids, doc_ids, 0, 1, |
106 | &state, get_docs_to_tempfile); |
107 | if (status != U1DB_OK) { goto finish; } |
108 | status = finalize_and_send_temp_file(st, temp_fd, source_replica_uid, &req); |
109 | |
110 | === modified file 'src/u1db_query.c' |
111 | --- src/u1db_query.c 2012-05-23 10:57:02 +0000 |
112 | +++ src/u1db_query.c 2012-05-25 19:04:26 +0000 |
113 | @@ -591,7 +591,8 @@ |
114 | doc_id = (char*)sqlite3_column_text(statement, 0); |
115 | // We use u1db_get_docs so we can pass check_for_conflicts=0, which is |
116 | // currently expected by the test suite. |
117 | - status = u1db_get_docs(db, 1, (const char**)&doc_id, 0, context, cb); |
118 | + status = u1db_get_docs( |
119 | + db, 1, (const char**)&doc_id, 0, 0, context, cb); |
120 | if (status != U1DB_OK) { goto finish; } |
121 | status = sqlite3_step(statement); |
122 | } |
123 | |
124 | === modified file 'src/u1db_sync_target.c' |
125 | --- src/u1db_sync_target.c 2012-05-21 18:28:05 +0000 |
126 | +++ src/u1db_sync_target.c 2012-05-25 19:04:26 +0000 |
127 | @@ -422,7 +422,7 @@ |
128 | if (se->num_doc_ids > 0) { |
129 | status = u1db_get_docs(se->db, se->num_doc_ids, |
130 | (const char **)se->doc_ids_to_return, |
131 | - 0, &state, get_docs_to_gen_docs); |
132 | + 0, 1, &state, get_docs_to_gen_docs); |
133 | } |
134 | finish: |
135 | return status; |
136 | @@ -469,7 +469,7 @@ |
137 | (u1db_doc_gen_callback)u1db__sync_exchange_insert_doc_from_source; |
138 | get_doc_state.gen_for_doc_ids = generations; |
139 | return u1db_get_docs(source_db, n_doc_ids, doc_ids, |
140 | - 0, &get_doc_state, get_docs_to_gen_docs); |
141 | + 0, 1, &get_doc_state, get_docs_to_gen_docs); |
142 | } |
143 | |
144 | |
145 | |
146 | === modified file 'u1db/__init__.py' |
147 | --- u1db/__init__.py 2012-05-23 13:11:46 +0000 |
148 | +++ u1db/__init__.py 2012-05-25 19:04:26 +0000 |
149 | @@ -68,20 +68,27 @@ |
150 | """ |
151 | raise NotImplementedError(self.whats_changed) |
152 | |
153 | - def get_doc(self, doc_id): |
154 | + def get_doc(self, doc_id, include_deleted=False): |
155 | """Get the JSON string for the given document. |
156 | |
157 | :param doc_id: The unique document identifier |
158 | + :param include_deleted: If set to True, deleted documents will be |
159 | + returned with empty content. Otherwise asking for a deleted |
160 | + document will return None. |
161 | :return: a Document object. |
162 | """ |
163 | raise NotImplementedError(self.get_doc) |
164 | |
165 | - def get_docs(self, doc_ids, check_for_conflicts=True): |
166 | + def get_docs(self, doc_ids, check_for_conflicts=True, |
167 | + include_deleted=False): |
168 | """Get the JSON content for many documents. |
169 | |
170 | :param doc_ids: A list of document identifiers. |
171 | :param check_for_conflicts: If set to False, then the conflict check |
172 | will be skipped, and 'None' will be returned instead of True/False. |
173 | + :param include_deleted: If set to True, deleted documents will be |
174 | + returned with empty content. Otherwise deleted documents will not |
175 | + be included in the results. |
176 | :return: [Document] for each document id and matching doc_ids order. |
177 | """ |
178 | raise NotImplementedError(self.get_docs) |
179 | |
180 | === modified file 'u1db/backends/__init__.py' |
181 | --- u1db/backends/__init__.py 2012-05-22 20:38:49 +0000 |
182 | +++ u1db/backends/__init__.py 2012-05-25 19:04:26 +0000 |
183 | @@ -81,10 +81,13 @@ |
184 | def _put_and_update_indexes(self, doc_id, old_doc, new_rev, content): |
185 | raise NotImplementedError(self._put_and_update_indexes) |
186 | |
187 | - def get_docs(self, doc_ids, check_for_conflicts=True): |
188 | + def get_docs(self, doc_ids, check_for_conflicts=True, |
189 | + include_deleted=False): |
190 | result = [] |
191 | for doc_id in doc_ids: |
192 | doc = self._get_doc(doc_id) |
193 | + if doc.is_tombstone() and not include_deleted: |
194 | + continue |
195 | if check_for_conflicts: |
196 | doc.has_conflicts = self._has_conflicts(doc_id) |
197 | result.append(doc) |
198 | |
199 | === modified file 'u1db/backends/inmemory.py' |
200 | --- u1db/backends/inmemory.py 2012-05-25 16:19:49 +0000 |
201 | +++ u1db/backends/inmemory.py 2012-05-25 19:04:26 +0000 |
202 | @@ -108,10 +108,12 @@ |
203 | def _has_conflicts(self, doc_id): |
204 | return doc_id in self._conflicts |
205 | |
206 | - def get_doc(self, doc_id): |
207 | + def get_doc(self, doc_id, include_deleted=False): |
208 | doc = self._get_doc(doc_id) |
209 | if doc is None: |
210 | return None |
211 | + if doc.is_tombstone() and not include_deleted: |
212 | + return None |
213 | doc.has_conflicts = (doc.doc_id in self._conflicts) |
214 | return doc |
215 | |
216 | |
217 | === modified file 'u1db/backends/sqlite_backend.py' |
218 | --- u1db/backends/sqlite_backend.py 2012-05-25 16:19:49 +0000 |
219 | +++ u1db/backends/sqlite_backend.py 2012-05-25 19:04:26 +0000 |
220 | @@ -291,10 +291,12 @@ |
221 | else: |
222 | return True |
223 | |
224 | - def get_doc(self, doc_id): |
225 | + def get_doc(self, doc_id, include_deleted=False): |
226 | doc = self._get_doc(doc_id) |
227 | if doc is None: |
228 | return None |
229 | + if doc.is_tombstone() and not include_deleted: |
230 | + return None |
231 | # TODO: A doc which appears deleted could still have conflicts... |
232 | doc.has_conflicts = self._has_conflicts(doc.doc_id) |
233 | return doc |
234 | |
235 | === modified file 'u1db/remote/http_app.py' |
236 | --- u1db/remote/http_app.py 2012-05-23 13:11:46 +0000 |
237 | +++ u1db/remote/http_app.py 2012-05-25 19:04:26 +0000 |
238 | @@ -38,6 +38,13 @@ |
239 | ) |
240 | |
241 | |
242 | +def parse_bool(expression): |
243 | + """Parse boolean querystring parameter.""" |
244 | + if expression == 'true': |
245 | + return True |
246 | + return False |
247 | + |
248 | + |
249 | class BadRequest(Exception): |
250 | """Bad request.""" |
251 | |
252 | @@ -89,7 +96,8 @@ |
253 | |
254 | |
255 | def http_method(**control): |
256 | - """Decoration for handling of query arguments and content for a HTTP method. |
257 | + """Decoration for handling of query arguments and content for a HTTP |
258 | + method. |
259 | |
260 | args and content here are the query arguments and body of the incoming |
261 | HTTP requests. |
262 | @@ -118,6 +126,7 @@ |
263 | content_as_args = control.pop('content_as_args', False) |
264 | no_query = control.pop('no_query', False) |
265 | conversions = control.items() |
266 | + |
267 | def wrap(f): |
268 | argspec = inspect.getargspec(f) |
269 | assert argspec.args[0] == "self" |
270 | @@ -125,6 +134,7 @@ |
271 | ndefaults = len(argspec.defaults or ()) |
272 | required_args = set(argspec.args[1:nargs - ndefaults]) |
273 | all_args = set(argspec.args) |
274 | + |
275 | @functools.wraps(f) |
276 | def wrapper(self, args, content): |
277 | if no_query and args: |
278 | @@ -147,7 +157,9 @@ |
279 | except ValueError: |
280 | raise BadRequest() |
281 | return f(self, **args) |
282 | + |
283 | return wrapper |
284 | + |
285 | return wrap |
286 | |
287 | |
288 | @@ -243,9 +255,9 @@ |
289 | self.db.delete_doc(doc) |
290 | self.responder.send_response_json(200, rev=doc.rev) |
291 | |
292 | - @http_method() |
293 | - def get(self): |
294 | - doc = self.db.get_doc(self.id) |
295 | + @http_method(include_deleted=parse_bool) |
296 | + def get(self, include_deleted=False): |
297 | + doc = self.db.get_doc(self.id, include_deleted=include_deleted) |
298 | if doc is None: |
299 | wire_descr = errors.DocumentDoesNotExist.wire_description |
300 | self.responder.send_response_json( |
301 | @@ -262,7 +274,8 @@ |
302 | } |
303 | if doc.is_tombstone(): |
304 | self.responder.send_response_json( |
305 | - http_errors.wire_description_to_status[errors.DOCUMENT_DELETED], |
306 | + http_errors.wire_description_to_status[ |
307 | + errors.DOCUMENT_DELETED], |
308 | error=errors.DOCUMENT_DELETED, |
309 | headers=headers) |
310 | else: |
311 | @@ -288,11 +301,11 @@ |
312 | @http_method() |
313 | def get(self): |
314 | result = self.target.get_sync_info(self.source_replica_uid) |
315 | - self.responder.send_response_json(target_replica_uid=result[0], |
316 | - target_replica_generation=result[1], |
317 | - source_replica_uid=self.source_replica_uid, |
318 | - source_replica_generation=result[2], |
319 | - source_transaction_id=result[3]) |
320 | + self.responder.send_response_json( |
321 | + target_replica_uid=result[0], target_replica_generation=result[1], |
322 | + source_replica_uid=self.source_replica_uid, |
323 | + source_replica_generation=result[2], |
324 | + source_transaction_id=result[3]) |
325 | |
326 | @http_method(generation=int, |
327 | content_as_args=True, no_query=True) |
328 | @@ -421,7 +434,8 @@ |
329 | args = urlparse.parse_qsl(self.environ['QUERY_STRING'], |
330 | strict_parsing=False) |
331 | try: |
332 | - args = dict((k.decode('utf-8'), v.decode('utf-8')) for k, v in args) |
333 | + args = dict( |
334 | + (k.decode('utf-8'), v.decode('utf-8')) for k, v in args) |
335 | except ValueError: |
336 | raise BadRequest() |
337 | method = self.environ['REQUEST_METHOD'].lower() |
338 | @@ -433,7 +447,7 @@ |
339 | # to support chunked enconding |
340 | try: |
341 | content_length = int(self.environ['CONTENT_LENGTH']) |
342 | - except (ValueError, KeyError), e: |
343 | + except (ValueError, KeyError): |
344 | raise BadRequest |
345 | if content_length <= 0: |
346 | raise BadRequest |
347 | @@ -486,7 +500,8 @@ |
348 | resource_cls, params = url_to_resource.match(environ['PATH_INFO']) |
349 | if resource_cls is None: |
350 | raise BadRequest # 404 instead? |
351 | - resource = resource_cls(state=self.state, responder=responder, **params) |
352 | + resource = resource_cls( |
353 | + state=self.state, responder=responder, **params) |
354 | return resource |
355 | |
356 | def __call__(self, environ, start_response): |
357 | |
358 | === modified file 'u1db/remote/http_client.py' |
359 | --- u1db/remote/http_client.py 2012-05-11 13:40:45 +0000 |
360 | +++ u1db/remote/http_client.py 2012-05-25 19:04:26 +0000 |
361 | @@ -42,6 +42,16 @@ |
362 | CA_CERTS = "/etc/ssl/certs/ca-certificates.crt" |
363 | |
364 | |
365 | +def _encode_query_parameter(value): |
366 | + """Encode query parameter.""" |
367 | + if isinstance(value, bool): |
368 | + if value: |
369 | + value = 'true' |
370 | + else: |
371 | + value = 'false' |
372 | + return unicode(value).encode('utf-8') |
373 | + |
374 | + |
375 | class _VerifiedHTTPSConnection(httplib.HTTPSConnection): |
376 | """HTTPSConnection verifying server side certificates.""" |
377 | # derived from httplib.py |
378 | @@ -87,8 +97,9 @@ |
379 | |
380 | def set_oauth_credentials(self, consumer_key, consumer_secret, |
381 | token_key, token_secret): |
382 | - self._oauth_creds = (oauth.OAuthConsumer(consumer_key, consumer_secret), |
383 | - oauth.OAuthToken(token_key, token_secret)) |
384 | + self._oauth_creds = ( |
385 | + oauth.OAuthConsumer(consumer_key, consumer_secret), |
386 | + oauth.OAuthToken(token_key, token_secret)) |
387 | |
388 | def _ensure_connection(self): |
389 | if self._conn is not None: |
390 | @@ -142,7 +153,8 @@ |
391 | parameters=params, |
392 | http_url=full_url |
393 | ) |
394 | - oauth_req.sign_request(self.oauth_signature_method, consumer, token) |
395 | + oauth_req.sign_request( |
396 | + self.oauth_signature_method, consumer, token) |
397 | # Authorization: OAuth ... |
398 | return oauth_req.to_header().items() |
399 | else: |
400 | @@ -160,18 +172,20 @@ |
401 | for part in url_parts) |
402 | # oauth performs its own quoting |
403 | unquoted_url += '/'.join(url_parts) |
404 | + encoded_params = {} |
405 | if params: |
406 | - params = dict((unicode(v).encode('utf-8'), |
407 | - unicode(k).encode('utf-8')) |
408 | - for v, k in params.items()) |
409 | - url_query += ('?' + urllib.urlencode(params)) |
410 | + for key, value in params.items(): |
411 | + key = unicode(key).encode('utf-8') |
412 | + encoded_params[key] = _encode_query_parameter(value) |
413 | + url_query += ('?' + urllib.urlencode(encoded_params)) |
414 | if body is not None and not isinstance(body, basestring): |
415 | body = simplejson.dumps(body) |
416 | content_type = 'application/json' |
417 | headers = {} |
418 | if content_type: |
419 | headers['content-type'] = content_type |
420 | - headers.update(self._sign_request(method, unquoted_url, params)) |
421 | + headers.update( |
422 | + self._sign_request(method, unquoted_url, encoded_params)) |
423 | self._conn.request(method, url_query, body, headers) |
424 | return self._response() |
425 | |
426 | |
427 | === modified file 'u1db/remote/http_database.py' |
428 | --- u1db/remote/http_database.py 2012-05-23 13:11:46 +0000 |
429 | +++ u1db/remote/http_database.py 2012-05-25 19:04:26 +0000 |
430 | @@ -76,9 +76,10 @@ |
431 | doc.rev = res['rev'] |
432 | return res['rev'] |
433 | |
434 | - def get_doc(self, doc_id): |
435 | + def get_doc(self, doc_id, include_deleted=False): |
436 | try: |
437 | - res, headers = self._request('GET', ['doc', doc_id]) |
438 | + res, headers = self._request( |
439 | + 'GET', ['doc', doc_id], {"include_deleted": include_deleted}) |
440 | except errors.DocumentDoesNotExist: |
441 | return None |
442 | except errors.HTTPError, e: |
443 | |
444 | === modified file 'u1db/sync.py' |
445 | --- u1db/sync.py 2012-05-18 15:12:42 +0000 |
446 | +++ u1db/sync.py 2012-05-25 19:04:26 +0000 |
447 | @@ -95,19 +95,21 @@ |
448 | # get target identifier, its current generation, |
449 | # and its last-seen database generation for this source |
450 | (self.target_replica_uid, target_gen, target_my_gen, |
451 | - target_my_trans_id) = sync_target.get_sync_info(self.source._replica_uid) |
452 | + target_my_trans_id) = sync_target.get_sync_info( |
453 | + self.source._replica_uid) |
454 | # what's changed since that generation and this current gen |
455 | my_gen, _, changes = self.source.whats_changed(target_my_gen) |
456 | |
457 | # this source last-seen database generation for the target |
458 | - target_last_known_gen, target_trans_id = self.source._get_sync_gen_info( |
459 | - self.target_replica_uid) |
460 | + (target_last_known_gen, |
461 | + target_trans_id) = self.source._get_sync_gen_info( |
462 | + self.target_replica_uid) |
463 | if not changes and target_last_known_gen == target_gen: |
464 | return my_gen |
465 | changed_doc_ids = [doc_id for doc_id, _, _ in changes] |
466 | # prepare to send all the changed docs |
467 | docs_to_send = self.source.get_docs(changed_doc_ids, |
468 | - check_for_conflicts=False) |
469 | + check_for_conflicts=False, include_deleted=True) |
470 | docs_by_generation = zip(docs_to_send, (gen for _, gen, _ in changes)) |
471 | |
472 | # exchange documents and try to insert the returned ones with |
473 | @@ -229,7 +231,8 @@ |
474 | # return docs, including conflicts |
475 | changed_doc_ids = [doc_id for doc_id, _ in changes_to_return] |
476 | self._trace('before get_docs') |
477 | - docs = self._db.get_docs(changed_doc_ids, check_for_conflicts=False) |
478 | + docs = self._db.get_docs( |
479 | + changed_doc_ids, check_for_conflicts=False, include_deleted=True) |
480 | |
481 | docs_by_gen = izip(docs, (gen for _, gen in changes_to_return)) |
482 | for doc, gen in docs_by_gen: |
483 | |
484 | === modified file 'u1db/tests/__init__.py' |
485 | --- u1db/tests/__init__.py 2012-05-22 18:58:04 +0000 |
486 | +++ u1db/tests/__init__.py 2012-05-25 19:04:26 +0000 |
487 | @@ -71,6 +71,13 @@ |
488 | has_conflicts=has_conflicts) |
489 | self.assertEqual(exp_doc, db.get_doc(doc_id)) |
490 | |
491 | + def assertGetDocIncludeDeleted(self, db, doc_id, doc_rev, content, |
492 | + has_conflicts): |
493 | + """Assert that the document in the database looks correct.""" |
494 | + exp_doc = self.make_document(doc_id, doc_rev, content, |
495 | + has_conflicts=has_conflicts) |
496 | + self.assertEqual(exp_doc, db.get_doc(doc_id, include_deleted=True)) |
497 | + |
498 | def assertGetDocConflicts(self, db, doc_id, conflicts): |
499 | """Assert what conflicts are stored for a given doc_id. |
500 | |
501 | |
502 | === modified file 'u1db/tests/c_backend_wrapper.pyx' |
503 | --- u1db/tests/c_backend_wrapper.pyx 2012-05-23 17:33:57 +0000 |
504 | +++ u1db/tests/c_backend_wrapper.pyx 2012-05-25 19:04:26 +0000 |
505 | @@ -71,10 +71,11 @@ |
506 | int u1db_create_doc(u1database *db, char *json, char *doc_id, |
507 | u1db_document **doc) |
508 | int u1db_delete_doc(u1database *db, u1db_document *doc) |
509 | - int u1db_get_doc(u1database *db, char *doc_id, u1db_document **doc) |
510 | + int u1db_get_doc(u1database *db, char *doc_id, int include_deleted, |
511 | + u1db_document **doc) |
512 | int u1db_get_docs(u1database *db, int n_doc_ids, const_char_ptr *doc_ids, |
513 | - int check_for_conflicts, void *context, |
514 | - u1db_doc_callback cb) |
515 | + int check_for_conflicts, int include_deleted, |
516 | + void *context, u1db_doc_callback cb) |
517 | int u1db_put_doc(u1database *db, u1db_document *doc) |
518 | int u1db__put_doc_if_newer(u1database *db, u1db_document *doc, |
519 | int save_conflict, char *replica_uid, |
520 | @@ -907,30 +908,29 @@ |
521 | else: |
522 | raise RuntimeError("Unknown _put_doc_if_newer state: %d" % (state,)) |
523 | |
524 | - def get_doc(self, doc_id): |
525 | + def get_doc(self, doc_id, include_deleted=False): |
526 | cdef u1db_document *doc = NULL |
527 | - |
528 | + deleted = 1 if include_deleted else 0 |
529 | handle_status("get_doc failed", |
530 | - u1db_get_doc(self._db, doc_id, &doc)) |
531 | + u1db_get_doc(self._db, doc_id, deleted, &doc)) |
532 | if doc == NULL: |
533 | return None |
534 | pydoc = CDocument() |
535 | pydoc._doc = doc |
536 | return pydoc |
537 | |
538 | - def get_docs(self, doc_ids, check_for_conflicts=True): |
539 | + def get_docs(self, doc_ids, check_for_conflicts=True, |
540 | + include_deleted=False): |
541 | cdef int n_doc_ids, conflicts |
542 | cdef const_char_ptr *c_doc_ids |
543 | |
544 | _list_to_array(doc_ids, &c_doc_ids, &n_doc_ids) |
545 | - if check_for_conflicts: |
546 | - conflicts = 1 |
547 | - else: |
548 | - conflicts = 0 |
549 | + deleted = 1 if include_deleted else 0 |
550 | + conflicts = 1 if check_for_conflicts else 0 |
551 | a_list = [] |
552 | handle_status("get_docs", |
553 | u1db_get_docs(self._db, n_doc_ids, c_doc_ids, |
554 | - conflicts, <void*>a_list, _append_doc_to_list)) |
555 | + conflicts, deleted, <void*>a_list, _append_doc_to_list)) |
556 | free(<void*>c_doc_ids) |
557 | return a_list |
558 | |
559 | |
560 | === modified file 'u1db/tests/commandline/test_client.py' |
561 | --- u1db/tests/commandline/test_client.py 2012-05-24 14:50:19 +0000 |
562 | +++ u1db/tests/commandline/test_client.py 2012-05-25 19:04:26 +0000 |
563 | @@ -50,7 +50,7 @@ |
564 | # parsing. |
565 | try: |
566 | return self.parser.parse_args(args) |
567 | - except SystemExit, e: |
568 | + except SystemExit: |
569 | raise AssertionError('got SystemExit') |
570 | |
571 | def test_create(self): |
572 | @@ -203,7 +203,7 @@ |
573 | doc = self.db.create_doc(tests.simple_doc) |
574 | cmd = self.make_command(client.CmdDelete) |
575 | cmd.run(self.db_path, doc.doc_id, doc.rev) |
576 | - doc2 = self.db.get_doc(doc.doc_id) |
577 | + doc2 = self.db.get_doc(doc.doc_id, include_deleted=True) |
578 | self.assertEqual(doc.doc_id, doc2.doc_id) |
579 | self.assertNotEqual(doc.rev, doc2.rev) |
580 | self.assertIs(None, doc2.get_json()) |
581 | @@ -390,7 +390,7 @@ |
582 | cmd = self.make_command(client.CmdCreateIndex) |
583 | retval = cmd.run(self.db_path, "foo", ["bar", "baz"]) |
584 | self.assertEqual(self.db.list_indexes(), [('foo', ['bar', "baz"])]) |
585 | - self.assertEqual(retval, None) # conveniently mapped to 0 |
586 | + self.assertEqual(retval, None) # conveniently mapped to 0 |
587 | self.assertEqual(cmd.stdout.getvalue(), '') |
588 | self.assertEqual(cmd.stderr.getvalue(), '') |
589 | |
590 | @@ -688,7 +688,7 @@ |
591 | doc = self.db.create_doc(tests.simple_doc, doc_id='test-id') |
592 | ret, stdout, stderr = self.run_main( |
593 | ['delete', self.db_path, 'test-id', doc.rev]) |
594 | - doc = self.db.get_doc('test-id') |
595 | + doc = self.db.get_doc('test-id', include_deleted=True) |
596 | self.assertEqual(0, ret) |
597 | self.assertEqual('', stdout) |
598 | self.assertEqual('rev: %s\n' % (doc.rev,), stderr) |
599 | @@ -696,7 +696,7 @@ |
600 | def test_init_db(self): |
601 | path = self.working_dir + '/test2.db' |
602 | ret, stdout, stderr = self.run_main(['init-db', path]) |
603 | - db2 = u1db_open(path, create=False) |
604 | + u1db_open(path, create=False) |
605 | |
606 | def test_put(self): |
607 | doc = self.db.create_doc(tests.simple_doc, doc_id='test-id') |
608 | @@ -720,7 +720,8 @@ |
609 | self.assertEqual(0, ret) |
610 | self.assertEqual('', stdout) |
611 | self.assertEqual('', stderr) |
612 | - self.assertGetDoc(self.db2, 'test-id', doc.rev, tests.simple_doc, False) |
613 | + self.assertGetDoc( |
614 | + self.db2, 'test-id', doc.rev, tests.simple_doc, False) |
615 | |
616 | |
617 | class TestHTTPIntegration(tests.TestCaseWithServer, RunMainHelper): |
618 | @@ -743,7 +744,7 @@ |
619 | def test_init_db(self): |
620 | url = self.getURL('new.db') |
621 | ret, stdout, stderr = self.run_main(['init-db', url]) |
622 | - db2 = u1db_open(self.getPath('new.db'), create=False) |
623 | + u1db_open(self.getPath('new.db'), create=False) |
624 | |
625 | def test_create_get_put_delete(self): |
626 | db = u1db_open(self.getPath('test.db'), create=True) |
627 | @@ -766,4 +767,4 @@ |
628 | self.assertEqual(0, ret) |
629 | self.assertTrue(stderr.startswith('rev: ')) |
630 | doc_rev2 = stderr[len('rev: '):].rstrip() |
631 | - self.assertGetDoc(db, doc_id, doc_rev2, None, False) |
632 | + self.assertGetDocIncludeDeleted(db, doc_id, doc_rev2, None, False) |
633 | |
634 | === modified file 'u1db/tests/test_backends.py' |
635 | --- u1db/tests/test_backends.py 2012-05-25 16:19:49 +0000 |
636 | +++ u1db/tests/test_backends.py 2012-05-25 19:04:26 +0000 |
637 | @@ -159,6 +159,17 @@ |
638 | def test_get_doc_nonexisting(self): |
639 | self.assertIs(None, self.db.get_doc('non-existing')) |
640 | |
641 | + def test_get_doc_deleted(self): |
642 | + doc = self.db.create_doc(simple_doc, doc_id='my_doc_id') |
643 | + self.db.delete_doc(doc) |
644 | + self.assertIs(None, self.db.get_doc('my_doc_id')) |
645 | + |
646 | + def test_get_doc_include_deleted(self): |
647 | + doc = self.db.create_doc(simple_doc, doc_id='my_doc_id') |
648 | + self.db.delete_doc(doc) |
649 | + self.assertGetDocIncludeDeleted( |
650 | + self.db, doc.doc_id, doc.rev, None, False) |
651 | + |
652 | def test_handles_nested_content(self): |
653 | doc = self.db.create_doc(nested_doc) |
654 | self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False) |
655 | @@ -173,8 +184,9 @@ |
656 | orig_rev = doc.rev |
657 | self.db.delete_doc(doc) |
658 | self.assertNotEqual(orig_rev, doc.rev) |
659 | - self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False) |
660 | - self.assertIsNot(None, self.db.get_doc(doc.doc_id)) |
661 | + self.assertGetDocIncludeDeleted( |
662 | + self.db, doc.doc_id, doc.rev, None, False) |
663 | + self.assertIs(None, self.db.get_doc(doc.doc_id)) |
664 | |
665 | def test_delete_doc_non_existant(self): |
666 | doc = self.make_document('non-existing', 'other:1', simple_doc) |
667 | @@ -186,7 +198,8 @@ |
668 | self.db.delete_doc(doc) |
669 | self.assertRaises(errors.DocumentAlreadyDeleted, |
670 | self.db.delete_doc, doc) |
671 | - self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False) |
672 | + self.assertGetDocIncludeDeleted( |
673 | + self.db, doc.doc_id, doc.rev, None, False) |
674 | |
675 | def test_delete_doc_bad_rev(self): |
676 | doc1 = self.db.create_doc(simple_doc) |
677 | @@ -216,7 +229,8 @@ |
678 | def test_delete_then_put(self): |
679 | doc = self.db.create_doc(simple_doc) |
680 | self.db.delete_doc(doc) |
681 | - self.assertGetDoc(self.db, doc.doc_id, doc.rev, None, False) |
682 | + self.assertGetDocIncludeDeleted( |
683 | + self.db, doc.doc_id, doc.rev, None, False) |
684 | doc.set_json(nested_doc) |
685 | self.db.put_doc(doc) |
686 | self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False) |
687 | @@ -242,6 +256,20 @@ |
688 | self.assertEqual([doc1, doc2], |
689 | self.db.get_docs([doc1.doc_id, doc2.doc_id])) |
690 | |
691 | + def test_get_docs_deleted(self): |
692 | + doc1 = self.db.create_doc(simple_doc) |
693 | + doc2 = self.db.create_doc(nested_doc) |
694 | + self.db.delete_doc(doc1) |
695 | + self.assertEqual([doc2], self.db.get_docs([doc1.doc_id, doc2.doc_id])) |
696 | + |
697 | + def test_get_docs_include_deleted(self): |
698 | + doc1 = self.db.create_doc(simple_doc) |
699 | + doc2 = self.db.create_doc(nested_doc) |
700 | + self.db.delete_doc(doc1) |
701 | + self.assertEqual( |
702 | + [doc1, doc2], |
703 | + self.db.get_docs([doc1.doc_id, doc2.doc_id], include_deleted=True)) |
704 | + |
705 | def test_get_docs_request_ordered(self): |
706 | doc1 = self.db.create_doc(simple_doc) |
707 | doc2 = self.db.create_doc(nested_doc) |
708 | @@ -264,7 +292,8 @@ |
709 | doc = self.make_document('my-doc-id', 'test:2', None) |
710 | state_at_gen = self.db._put_doc_if_newer(doc, save_conflict=False) |
711 | self.assertEqual(('inserted', 2), state_at_gen) |
712 | - self.assertGetDoc(self.db, 'my-doc-id', 'test:2', None, False) |
713 | + self.assertGetDocIncludeDeleted( |
714 | + self.db, 'my-doc-id', 'test:2', None, False) |
715 | |
716 | def test_put_doc_if_newer_already_superseded(self): |
717 | orig_doc = '{"new": "doc"}' |
718 | @@ -518,7 +547,8 @@ |
719 | (doc1.rev, None)]) |
720 | self.db.resolve_doc(doc1, [doc1.rev, doc2.rev]) |
721 | self.assertGetDocConflicts(self.db, doc1.doc_id, []) |
722 | - self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, None, False) |
723 | + self.assertGetDocIncludeDeleted( |
724 | + self.db, doc1.doc_id, doc1.rev, None, False) |
725 | |
726 | def test_put_doc_if_newer_save_conflicted(self): |
727 | doc1 = self.db.create_doc(simple_doc) |
728 | |
729 | === modified file 'u1db/tests/test_http_app.py' |
730 | --- u1db/tests/test_http_app.py 2012-05-22 18:58:04 +0000 |
731 | +++ u1db/tests/test_http_app.py 2012-05-25 19:04:26 +0000 |
732 | @@ -292,7 +292,8 @@ |
733 | res = invoke() |
734 | self.assertEqual('Put/end', res) |
735 | self.assertEqual({'a': '1', 'b': 2}, resource.args) |
736 | - self.assertEqual(['{"entry": "x"}', '{"entry": "y"}'], resource.entries) |
737 | + self.assertEqual( |
738 | + ['{"entry": "x"}', '{"entry": "y"}'], resource.entries) |
739 | self.assertEqual(['a', 's', 's', 'e'], resource.order) |
740 | |
741 | def _put_sync_stream(self, body): |
742 | @@ -303,7 +304,7 @@ |
743 | 'CONTENT_TYPE': 'application/x-u1db-sync-stream'} |
744 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
745 | parameters) |
746 | - res = invoke() |
747 | + invoke() |
748 | |
749 | def test_put_sync_stream_wrong_start(self): |
750 | self.assertRaises(http_app.BadRequest, |
751 | @@ -362,9 +363,11 @@ |
752 | 'wsgi.input': StringIO.StringIO('{}'), |
753 | 'CONTENT_LENGTH': '10000', |
754 | 'CONTENT_TYPE': 'text/plain'} |
755 | + |
756 | class params: |
757 | max_request_size = 5000 |
758 | max_entry_size = sys.maxint # we don't get to use this |
759 | + |
760 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
761 | params) |
762 | self.assertRaises(http_app.BadRequest, invoke) |
763 | @@ -430,8 +433,10 @@ |
764 | self.status = status |
765 | self.headers = dict(headers) |
766 | self.response_body = [] |
767 | + |
768 | def write(data): |
769 | self.response_body.append(data) |
770 | + |
771 | return write |
772 | |
773 | def test_send_response_content_w_headers(self): |
774 | @@ -621,7 +626,7 @@ |
775 | def test_delete_doc(self): |
776 | doc = self.db0.create_doc('{"x": 1}', doc_id='doc1') |
777 | resp = self.app.delete('/db0/doc/doc1?old_rev=%s' % doc.rev) |
778 | - doc = self.db0.get_doc('doc1') |
779 | + doc = self.db0.get_doc('doc1', include_deleted=True) |
780 | self.assertEqual(None, doc.content) |
781 | self.assertEqual(200, resp.status) |
782 | self.assertEqual('application/json', resp.header('content-type')) |
783 | @@ -651,8 +656,30 @@ |
784 | resp = self.app.get('/db0/doc/doc1', expect_errors=True) |
785 | self.assertEqual(404, resp.status) |
786 | self.assertEqual('application/json', resp.header('content-type')) |
787 | - self.assertEqual({"error": errors.DOCUMENT_DELETED}, |
788 | - simplejson.loads(resp.body)) |
789 | + self.assertEqual( |
790 | + {"error": errors.DocumentDoesNotExist.wire_description}, |
791 | + simplejson.loads(resp.body)) |
792 | + |
793 | + def test_get_doc_deleted_explicit_exclude(self): |
794 | + doc = self.db0.create_doc('{"x": 1}', doc_id='doc1') |
795 | + self.db0.delete_doc(doc) |
796 | + resp = self.app.get( |
797 | + '/db0/doc/doc1?include_deleted=false', expect_errors=True) |
798 | + self.assertEqual(404, resp.status) |
799 | + self.assertEqual('application/json', resp.header('content-type')) |
800 | + self.assertEqual( |
801 | + {"error": errors.DocumentDoesNotExist.wire_description}, |
802 | + simplejson.loads(resp.body)) |
803 | + |
804 | + def test_get_deleted_doc(self): |
805 | + doc = self.db0.create_doc('{"x": 1}', doc_id='doc1') |
806 | + self.db0.delete_doc(doc) |
807 | + resp = self.app.get( |
808 | + '/db0/doc/doc1?include_deleted=true', expect_errors=True) |
809 | + self.assertEqual(404, resp.status) |
810 | + self.assertEqual('application/json', resp.header('content-type')) |
811 | + self.assertEqual( |
812 | + {"error": errors.DOCUMENT_DELETED}, simplejson.loads(resp.body)) |
813 | self.assertEqual(doc.rev, resp.header('x-u1db-rev')) |
814 | self.assertEqual('false', resp.header('x-u1db-has-conflicts')) |
815 | |
816 | @@ -695,6 +722,7 @@ |
817 | |
818 | gens = [] |
819 | _do_set_sync_info = self.db0._do_set_sync_info |
820 | + |
821 | def set_sync_generation_witness(other_uid, other_gen, other_trans_id): |
822 | gens.append((other_uid, other_gen)) |
823 | _do_set_sync_info(other_uid, other_gen, other_trans_id) |
824 | @@ -767,8 +795,10 @@ |
825 | def test_sync_exchange_error_in_stream(self): |
826 | args = dict(last_known_generation=0) |
827 | body = "[\r\n%s\r\n]" % simplejson.dumps(args) |
828 | + |
829 | def boom(self, return_doc_cb): |
830 | raise errors.Unavailable |
831 | + |
832 | self.patch(sync.SyncExchange, 'return_docs', |
833 | boom) |
834 | resp = self.app.post('/db0/sync-from/replica', |
835 | @@ -797,32 +827,38 @@ |
836 | |
837 | def test_begin_and_done(self): |
838 | calls = [] |
839 | + |
840 | def begin(environ): |
841 | self.assertTrue('PATH_INFO' in environ) |
842 | calls.append('begin') |
843 | + |
844 | def done(environ): |
845 | self.assertTrue('PATH_INFO' in environ) |
846 | calls.append('done') |
847 | + |
848 | self.http_app.request_begin = begin |
849 | self.http_app.request_done = done |
850 | |
851 | doc = self.db0.create_doc('{"x": 1}', doc_id='doc1') |
852 | - resp = self.app.get('/db0/doc/%s' % doc.doc_id) |
853 | + self.app.get('/db0/doc/%s' % doc.doc_id) |
854 | |
855 | self.assertEqual(['begin', 'done'], calls) |
856 | |
857 | def test_bad_request(self): |
858 | calls = [] |
859 | + |
860 | def begin(environ): |
861 | self.assertTrue('PATH_INFO' in environ) |
862 | calls.append('begin') |
863 | + |
864 | def bad_request(environ): |
865 | self.assertTrue('PATH_INFO' in environ) |
866 | calls.append('bad-request') |
867 | + |
868 | self.http_app.request_begin = begin |
869 | self.http_app.request_bad_request = bad_request |
870 | # shouldn't be called |
871 | - self.http_app.request_done = lambda env: borken |
872 | + self.http_app.request_done = lambda env: 1 / 0 |
873 | |
874 | resp = self.app.put('/db0/foo/doc1', params='{"x": 1}', |
875 | headers={'content-type': 'application/json'}, |
876 | @@ -843,6 +879,7 @@ |
877 | super(TestHTTPAppErrorHandling, self).setUp() |
878 | self.exc = None |
879 | self.state = tests.ServerStateForTests() |
880 | + |
881 | class ErroringResource(object): |
882 | |
883 | def post(_, args, content): |
884 | @@ -887,16 +924,19 @@ |
885 | |
886 | def test_generic_u1db_errors_hooks(self): |
887 | calls = [] |
888 | + |
889 | def begin(environ): |
890 | self.assertTrue('PATH_INFO' in environ) |
891 | calls.append('begin') |
892 | + |
893 | def u1db_error(environ, exc): |
894 | self.assertTrue('PATH_INFO' in environ) |
895 | calls.append(('error', exc)) |
896 | + |
897 | self.http_app.request_begin = begin |
898 | self.http_app.request_u1db_error = u1db_error |
899 | # shouldn't be called |
900 | - self.http_app.request_done = lambda env: borken |
901 | + self.http_app.request_done = lambda env: 1 / 0 |
902 | |
903 | self.exc = errors.U1DBError() |
904 | resp = self.app.post('/req', params='{}', |
905 | @@ -916,15 +956,18 @@ |
906 | class Failure(Exception): |
907 | pass |
908 | calls = [] |
909 | + |
910 | def begin(environ): |
911 | calls.append('begin') |
912 | + |
913 | def failed(environ): |
914 | self.assertTrue('PATH_INFO' in environ) |
915 | calls.append(('failed', sys.exc_info())) |
916 | + |
917 | self.http_app.request_begin = begin |
918 | self.http_app.request_failed = failed |
919 | # shouldn't be called |
920 | - self.http_app.request_done = lambda env: borken |
921 | + self.http_app.request_done = lambda env: 1 / 0 |
922 | |
923 | self.exc = Failure() |
924 | self.assertRaises(Failure, self.app.post, '/req', params='{}', |
925 | @@ -945,9 +988,11 @@ |
926 | self.state.ensure_database('foo') |
927 | |
928 | def test_plugging(self): |
929 | + |
930 | class MySyncExchange(object): |
931 | def __init__(self, db, source_replica_uid, last_known_generation): |
932 | pass |
933 | + |
934 | class MySyncResource(http_app.SyncResource): |
935 | sync_exchange_class = MySyncExchange |
936 | |
937 | |
938 | === modified file 'u1db/tests/test_http_client.py' |
939 | --- u1db/tests/test_http_client.py 2012-04-25 12:14:42 +0000 |
940 | +++ u1db/tests/test_http_client.py 2012-05-25 19:04:26 +0000 |
941 | @@ -29,6 +29,18 @@ |
942 | ) |
943 | |
944 | |
945 | +class TestEncoder(tests.TestCase): |
946 | + |
947 | + def test_encode_string(self): |
948 | + self.assertEqual("foo", http_client._encode_query_parameter("foo")) |
949 | + |
950 | + def test_encode_true(self): |
951 | + self.assertEqual("true", http_client._encode_query_parameter(True)) |
952 | + |
953 | + def test_encode_false(self): |
954 | + self.assertEqual("false", http_client._encode_query_parameter(False)) |
955 | + |
956 | + |
957 | class TestHTTPClientBase(tests.TestCaseWithServer): |
958 | |
959 | def app(self, environ, start_response): |
960 | @@ -44,7 +56,8 @@ |
961 | return [simplejson.dumps(ret)] |
962 | elif environ['PATH_INFO'].endswith('error'): |
963 | content_length = int(environ['CONTENT_LENGTH']) |
964 | - error = simplejson.loads(environ['wsgi.input'].read(content_length)) |
965 | + error = simplejson.loads( |
966 | + environ['wsgi.input'].read(content_length)) |
967 | response = error['response'] |
968 | # In debug mode, wsgiref has an assertion that the status parameter |
969 | # is a 'str' object. However error['status'] returns a unicode |
970 | @@ -69,7 +82,8 @@ |
971 | oauth_server = oauth.OAuthServer(tests.testingOAuthStore) |
972 | oauth_server.add_signature_method(tests.sign_meth_HMAC_SHA1) |
973 | try: |
974 | - consumer, token, params = oauth_server.verify_request(oauth_req) |
975 | + consumer, token, params = oauth_server.verify_request( |
976 | + oauth_req) |
977 | except oauth.OAuthError, e: |
978 | start_response("401 Unauthorized", |
979 | [('Content-Type', 'application/json')]) |
980 | @@ -85,9 +99,11 @@ |
981 | srv = simple_server.WSGIServer(host_port, handler) |
982 | srv.set_app(self.app) |
983 | return srv |
984 | + |
985 | class req_handler(simple_server.WSGIRequestHandler): |
986 | def log_request(*args): |
987 | pass # suppress |
988 | + |
989 | return make_server, req_handler, "shutdown", "http" |
990 | |
991 | def getClient(self): |
992 | @@ -153,7 +169,8 @@ |
993 | |
994 | def test__request_json(self): |
995 | cli = self.getClient() |
996 | - res, headers = cli._request_json('POST', ['echo'], {'b': 2}, {'a': 'x'}) |
997 | + res, headers = cli._request_json( |
998 | + 'POST', ['echo'], {'b': 2}, {'a': 'x'}) |
999 | self.assertEqual('application/json', headers['content-type']) |
1000 | self.assertEqual({'CONTENT_TYPE': 'application/json', |
1001 | 'PATH_INFO': '/dbase/echo', |
1002 | @@ -248,8 +265,9 @@ |
1003 | # oauth does its own internal quoting |
1004 | params = {'x': u'\xf0', 'y': "foo"} |
1005 | res, headers = cli._request('GET', ['doc', 'oauth', 'foo bar'], params) |
1006 | - self.assertEqual(['/dbase/doc/oauth/foo bar', tests.token1.key, params], |
1007 | - simplejson.loads(res)) |
1008 | + self.assertEqual( |
1009 | + ['/dbase/doc/oauth/foo bar', tests.token1.key, params], |
1010 | + simplejson.loads(res)) |
1011 | |
1012 | def test_oauth_Unauthorized(self): |
1013 | cli = self.getClient() |
1014 | |
1015 | === modified file 'u1db/tests/test_http_database.py' |
1016 | --- u1db/tests/test_http_database.py 2012-05-22 18:58:04 +0000 |
1017 | +++ u1db/tests/test_http_database.py 2012-05-25 19:04:26 +0000 |
1018 | @@ -107,26 +107,38 @@ |
1019 | self.response_val = '{"v": 2}', {'x-u1db-rev': 'doc-rev', |
1020 | 'x-u1db-has-conflicts': 'false'} |
1021 | self.assertGetDoc(self.db, 'doc-id', 'doc-rev', '{"v": 2}', False) |
1022 | - self.assertEqual(('GET', ['doc', 'doc-id'], None, None, None), |
1023 | - self.got) |
1024 | + self.assertEqual( |
1025 | + ('GET', ['doc', 'doc-id'], {'include_deleted': False}, None, None), |
1026 | + self.got) |
1027 | |
1028 | def test_get_doc_non_existing(self): |
1029 | self.response_val = errors.DocumentDoesNotExist() |
1030 | self.assertIs(None, self.db.get_doc('not-there')) |
1031 | - self.assertEqual(('GET', ['doc', 'not-there'], None, None, None), |
1032 | - self.got) |
1033 | + self.assertEqual( |
1034 | + ('GET', ['doc', 'not-there'], {'include_deleted': False}, None, |
1035 | + None), self.got) |
1036 | |
1037 | def test_get_doc_deleted(self): |
1038 | + self.response_val = errors.DocumentDoesNotExist() |
1039 | + self.assertIs(None, self.db.get_doc('deleted')) |
1040 | + self.assertEqual( |
1041 | + ('GET', ['doc', 'deleted'], {'include_deleted': False}, None, |
1042 | + None), self.got) |
1043 | + |
1044 | + def test_get_doc_deleted_include_deleted(self): |
1045 | self.response_val = errors.HTTPError(404, |
1046 | simplejson.dumps( |
1047 | {"error": errors.DOCUMENT_DELETED} |
1048 | ), |
1049 | {'x-u1db-rev': 'doc-rev-gone', |
1050 | 'x-u1db-has-conflicts': 'false'}) |
1051 | - doc = self.db.get_doc('deleted') |
1052 | + doc = self.db.get_doc('deleted', include_deleted=True) |
1053 | self.assertEqual('deleted', doc.doc_id) |
1054 | self.assertEqual('doc-rev-gone', doc.rev) |
1055 | self.assertIs(None, doc.content) |
1056 | + self.assertEqual( |
1057 | + ('GET', ['doc', 'deleted'], {'include_deleted': True}, None, None), |
1058 | + self.got) |
1059 | |
1060 | def test_get_doc_pass_through_errors(self): |
1061 | self.response_val = errors.HTTPError(500, 'Crash.') |
1062 | |
1063 | === modified file 'u1db/tests/test_remote_sync_target.py' |
1064 | --- u1db/tests/test_remote_sync_target.py 2012-05-22 18:58:04 +0000 |
1065 | +++ u1db/tests/test_remote_sync_target.py 2012-05-25 19:04:26 +0000 |
1066 | @@ -242,21 +242,28 @@ |
1067 | |
1068 | def test_sync_exchange_in_stream_error(self): |
1069 | self.startServer() |
1070 | + |
1071 | def blackhole_getstderr(inst): |
1072 | return cStringIO.StringIO() |
1073 | + |
1074 | self.patch(self.server.RequestHandlerClass, 'get_stderr', |
1075 | blackhole_getstderr) |
1076 | db = self.request_state._create_database('test') |
1077 | doc = db.create_doc('{"value": "there"}') |
1078 | - def bomb_get_docs(doc_ids, check_for_conflicts=None): |
1079 | + |
1080 | + def bomb_get_docs(doc_ids, check_for_conflicts=None, |
1081 | + include_deleted=False): |
1082 | yield doc |
1083 | # delayed failure case |
1084 | raise errors.Unavailable |
1085 | + |
1086 | self.patch(db, 'get_docs', bomb_get_docs) |
1087 | remote_target = self.getSyncTarget('test') |
1088 | other_changes = [] |
1089 | + |
1090 | def receive_doc(doc, gen): |
1091 | other_changes.append((doc.doc_id, doc.rev, doc.get_json(), gen)) |
1092 | + |
1093 | self.assertRaises(errors.Unavailable, remote_target.sync_exchange, |
1094 | [], 'replica', last_known_generation=0, |
1095 | return_doc_cb=receive_doc) |
1096 | |
1097 | === modified file 'u1db/tests/test_sync.py' |
1098 | --- u1db/tests/test_sync.py 2012-05-22 18:58:04 +0000 |
1099 | +++ u1db/tests/test_sync.py 2012-05-25 19:04:26 +0000 |
1100 | @@ -134,7 +134,7 @@ |
1101 | def set_trace_hook(self, callback): |
1102 | try: |
1103 | self.st._set_trace_hook(callback) |
1104 | - except NotImplementedError, e: |
1105 | + except NotImplementedError: |
1106 | self.skipTest("%s does not implement _set_trace_hook" |
1107 | % (self.st.__class__.__name__,)) |
1108 | |
1109 | @@ -146,7 +146,7 @@ |
1110 | |
1111 | def test_create_doc_updates_sync_info(self): |
1112 | self.assertEqual(('test', 0, 0, ''), self.st.get_sync_info('other')) |
1113 | - doc = self.db.create_doc(simple_doc) |
1114 | + self.db.create_doc(simple_doc) |
1115 | self.assertEqual(('test', 1, 0, ''), self.st.get_sync_info('other')) |
1116 | |
1117 | def test_record_sync_info(self): |
1118 | @@ -174,7 +174,8 @@ |
1119 | new_gen = self.st.sync_exchange(docs_by_gen, 'replica', |
1120 | last_known_generation=0, |
1121 | return_doc_cb=self.receive_doc) |
1122 | - self.assertGetDoc(self.db, doc.doc_id, edit_rev, None, False) |
1123 | + self.assertGetDocIncludeDeleted( |
1124 | + self.db, doc.doc_id, edit_rev, None, False) |
1125 | self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db) |
1126 | self.assertEqual(([], 2), (self.other_changes, new_gen)) |
1127 | self.assertEqual(10, self.st.get_sync_info('replica')[2]) |
1128 | @@ -258,9 +259,10 @@ |
1129 | (doc2.doc_id, doc2.rev, nested_doc, 2)], 2), |
1130 | (self.other_changes, new_gen)) |
1131 | if self.whitebox: |
1132 | - self.assertEqual(self.db._last_exchange_log['return'], |
1133 | - {'last_gen': 2, 'docs': [(doc.doc_id, doc.rev), |
1134 | - (doc2.doc_id, doc2.rev)]}) |
1135 | + self.assertEqual( |
1136 | + self.db._last_exchange_log['return'], |
1137 | + {'last_gen': 2, 'docs': |
1138 | + [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]}) |
1139 | |
1140 | def test_sync_exchange_getting_newer_docs(self): |
1141 | doc = self.db.create_doc(simple_doc) |
1142 | @@ -276,6 +278,7 @@ |
1143 | |
1144 | def test_sync_exchange_with_concurrent_updates_of_synced_doc(self): |
1145 | expected = [] |
1146 | + |
1147 | def before_whatschanged_cb(state): |
1148 | if state != 'before whats_changed': |
1149 | return |
1150 | @@ -283,6 +286,7 @@ |
1151 | conc_rev = self.db.put_doc( |
1152 | self.make_document(doc.doc_id, 'test:1|z:2', cont)) |
1153 | expected.append((doc.doc_id, conc_rev, cont, 3)) |
1154 | + |
1155 | self.set_trace_hook(before_whatschanged_cb) |
1156 | doc = self.db.create_doc(simple_doc) |
1157 | self.assertTransactionLog([doc.doc_id], self.db) |
1158 | @@ -351,8 +355,10 @@ |
1159 | |
1160 | def test__set_trace_hook(self): |
1161 | called = [] |
1162 | + |
1163 | def cb(state): |
1164 | called.append(state) |
1165 | + |
1166 | self.set_trace_hook(cb) |
1167 | self.st.sync_exchange([], 'replica', 0, self.receive_doc) |
1168 | self.st.record_sync_info('replica', 0, 'T-sid') |
1169 | @@ -465,10 +471,12 @@ |
1170 | # new record. |
1171 | # When we finish synchronizing, we can notice that something locally |
1172 | # was updated, and we cannot tell c2 our new updated generation |
1173 | + |
1174 | def before_get_docs(state): |
1175 | if state != 'before get_docs': |
1176 | return |
1177 | self.db1.create_doc(simple_doc) |
1178 | + |
1179 | self.assertEqual(0, self.sync(self.db1, self.db2, |
1180 | trace_hook=before_get_docs)) |
1181 | self.assertLastExchangeLog(self.db2, |
1182 | @@ -482,7 +490,8 @@ |
1183 | self.assertEqual((0, ''), self.db2._get_sync_gen_info('test1')) |
1184 | |
1185 | def test_sync_doesnt_update_other_if_nothing_pulled(self): |
1186 | - doc = self.db1.create_doc(simple_doc) |
1187 | + self.db1.create_doc(simple_doc) |
1188 | + |
1189 | def no_record_sync_info(state): |
1190 | if state != 'record_sync_info': |
1191 | return |
1192 | @@ -565,8 +574,9 @@ |
1193 | 'return': {'docs': [(doc_id, doc2.rev)], |
1194 | 'last_gen': 2}}) |
1195 | self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1) |
1196 | - self.assertGetDoc(self.db1, doc_id, doc2.rev, None, True) |
1197 | - self.assertGetDoc(self.db2, doc_id, doc2.rev, None, False) |
1198 | + self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True) |
1199 | + self.assertGetDocIncludeDeleted( |
1200 | + self.db2, doc_id, doc2.rev, None, False) |
1201 | self.assertEqual([], self.db1.get_from_index('test-idx', [('value',)])) |
1202 | |
1203 | def test_sync_local_race_conflicted(self): |
1204 | @@ -581,12 +591,14 @@ |
1205 | self.db2.put_doc(doc) |
1206 | doc2_rev2 = doc.rev |
1207 | triggered = [] |
1208 | + |
1209 | def after_whatschanged(state): |
1210 | if state != 'after whats_changed': |
1211 | return |
1212 | triggered.append(True) |
1213 | doc = self.make_document(doc_id, doc1_rev, content1) |
1214 | self.db1.put_doc(doc) |
1215 | + |
1216 | self.sync(self.db1, self.db2, trace_hook=after_whatschanged) |
1217 | self.assertEqual([True], triggered) |
1218 | self.assertGetDoc(self.db1, doc_id, doc2_rev2, content2, True) |
1219 | @@ -599,7 +611,6 @@ |
1220 | def test_sync_propagates_deletes(self): |
1221 | doc1 = self.db1.create_doc(simple_doc) |
1222 | doc_id = doc1.doc_id |
1223 | - doc1_rev = doc1.rev |
1224 | self.db1.create_index('test-idx', ['key']) |
1225 | self.sync(self.db1, self.db2) |
1226 | self.db2.create_index('test-idx', ['key']) |
1227 | @@ -613,8 +624,10 @@ |
1228 | 'source_uid': 'test1', |
1229 | 'source_gen': 2, 'last_known_gen': 1}, |
1230 | 'return': {'docs': [], 'last_gen': 2}}) |
1231 | - self.assertGetDoc(self.db1, doc_id, deleted_rev, None, False) |
1232 | - self.assertGetDoc(self.db2, doc_id, deleted_rev, None, False) |
1233 | + self.assertGetDocIncludeDeleted( |
1234 | + self.db1, doc_id, deleted_rev, None, False) |
1235 | + self.assertGetDocIncludeDeleted( |
1236 | + self.db2, doc_id, deleted_rev, None, False) |
1237 | self.assertEqual([], self.db1.get_from_index('test-idx', [('value',)])) |
1238 | self.assertEqual([], self.db2.get_from_index('test-idx', [('value',)])) |
1239 | self.sync(self.db2, self.db3) |
1240 | @@ -623,7 +636,8 @@ |
1241 | 'source_uid': 'test2', |
1242 | 'source_gen': 2, 'last_known_gen': 0}, |
1243 | 'return': {'docs': [], 'last_gen': 2}}) |
1244 | - self.assertGetDoc(self.db3, doc_id, deleted_rev, None, False) |
1245 | + self.assertGetDocIncludeDeleted( |
1246 | + self.db3, doc_id, deleted_rev, None, False) |
1247 | |
1248 | def test_sync_propagates_resolution(self): |
1249 | doc1 = self.db1.create_doc('{"a": 1}', doc_id='the-doc') |
1250 | @@ -659,8 +673,8 @@ |
1251 | def test_sync_supersedes_conflicts(self): |
1252 | db3 = self.create_database('test3') |
1253 | doc1 = self.db1.create_doc('{"a": 1}', doc_id='the-doc') |
1254 | - doc2 = self.db2.create_doc('{"b": 1}', doc_id='the-doc') |
1255 | - doc3 = db3.create_doc('{"c": 1}', doc_id='the-doc') |
1256 | + self.db2.create_doc('{"b": 1}', doc_id='the-doc') |
1257 | + db3.create_doc('{"c": 1}', doc_id='the-doc') |
1258 | self.sync(db3, self.db1) |
1259 | self.sync(db3, self.db2) |
1260 | self.assertEqual(3, len(db3.get_doc_conflicts('the-doc'))) |
1261 | @@ -724,6 +738,7 @@ |
1262 | progress1 = [] |
1263 | progress2 = [] |
1264 | _do_set_sync_info = self.db1._do_set_sync_info |
1265 | + |
1266 | def set_sync_generation_witness1(other_uid, other_gen, trans_id): |
1267 | progress1.append((other_uid, other_gen, |
1268 | [d for d, t in self.db1._get_transaction_log()[2:]])) |
1269 | @@ -732,6 +747,7 @@ |
1270 | set_sync_generation_witness1) |
1271 | |
1272 | _do_set_sync_info2 = self.db2._do_set_sync_info |
1273 | + |
1274 | def set_sync_generation_witness2(other_uid, other_gen, trans_id): |
1275 | progress2.append((other_uid, other_gen, |
1276 | [d for d, t in self.db2._get_transaction_log()[2:]])) |
- you don't need the extra constant DOCUMENT_DOES_NOT_EXIST, it's by construction DocumentDoesNotExist.wire_description
- lines 369 and 370 shouldn't be needed, what's missing is proper encoding/decoding of the values of include_deleted in the query_params