Merge lp:~gholt/swift/timeout_baseexception into lp:~hudson-openstack/swift/trunk

Proposed by gholt
Status: Merged
Approved by: Mike Barton
Approved revision: 264
Merged at revision: 265
Proposed branch: lp:~gholt/swift/timeout_baseexception
Merge into: lp:~hudson-openstack/swift/trunk
Diff against target: 75 lines (+26/-5)
2 files modified
swift/obj/replicator.py (+20/-3)
swift/obj/server.py (+6/-2)
To merge this branch: bzr merge lp:~gholt/swift/timeout_baseexception
Reviewer: Swift Core security contacts (review status: Pending)
Review via email: mp+57256@code.launchpad.net

Description of the change

Handle eventlet Timeouts with a tpooled get_hashes wrapper
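
The change wraps get_hashes so that an eventlet Timeout raised while the hashing runs in a tpool worker thread comes back to the caller as a return value and is re-raised there. A minimal sketch of the same pattern, assuming only eventlet is installed; slow_hashing, tpooled_slow_hashing and caller are hypothetical stand-ins for the get_hashes / tpooled_get_hashes pair in swift/obj/replicator.py, not code from this branch:

from eventlet import Timeout, tpool


def slow_hashing(path):
    # Hypothetical stand-in for get_hashes(); anything in here may raise an
    # eventlet Timeout while running in a tpool worker thread.
    return 0, {}


def tpooled_slow_hashing(*args, **kwargs):
    # Per the in-diff comment, tpool does not catch and reraise Timeout
    # (a BaseException), so hand it back as an ordinary return value.
    try:
        return slow_hashing(*args, **kwargs)
    except Timeout as err:
        return err, err


def caller(path):
    hashed, hashes = tpool.execute(tpooled_slow_hashing, path)
    # Re-raise the smuggled Timeout so the existing except Timeout handling
    # around the call sites keeps working unchanged.
    if isinstance(hashed, BaseException):
        raise hashed
    return hashed, hashes

Returning err, err keeps the two-value (hashed, hashes) shape the call sites already unpack, so each tpool.execute() call only needs the isinstance(..., BaseException) check added after it.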

Preview Diff

=== modified file 'swift/obj/replicator.py'
--- swift/obj/replicator.py 2011-03-20 22:14:03 +0000
+++ swift/obj/replicator.py 2011-04-11 23:09:56 +0000
@@ -163,6 +163,16 @@
     return hashed, hashes
 
 
+# Hack to work around Eventlet's tpool not catching and reraising Timeouts. We
+# return the Timeout, Timeout if it's raised, the caller looks for it and
+# reraises it if found.
+def tpooled_get_hashes(*args, **kwargs):
+    try:
+        return get_hashes(*args, **kwargs)
+    except Timeout, err:
+        return err, err
+
+
 class ObjectReplicator(Daemon):
     """
     Replicate objects.
@@ -336,9 +346,12 @@
         self.replication_count += 1
         begin = time.time()
         try:
-            hashed, local_hash = tpool.execute(get_hashes, job['path'],
+            hashed, local_hash = tpool.execute(tpooled_get_hashes, job['path'],
                 do_listdir=(self.replication_count % 10) == 0,
                 reclaim_age=self.reclaim_age)
+            # See tpooled_get_hashes "Hack".
+            if isinstance(hashed, BaseException):
+                raise hashed
             self.suffix_hash += hashed
             attempts_left = self.object_ring.replica_count - 1
             nodes = itertools.chain(job['nodes'],
@@ -368,8 +381,12 @@
                             local_hash[suffix] != remote_hash.get(suffix, -1)]
                     if not suffixes:
                         continue
-                    hashed, local_hash = tpool.execute(get_hashes, job['path'],
-                        recalculate=suffixes, reclaim_age=self.reclaim_age)
+                    hashed, local_hash = tpool.execute(tpooled_get_hashes,
+                        job['path'], recalculate=suffixes,
+                        reclaim_age=self.reclaim_age)
+                    # See tpooled_get_hashes "Hack".
+                    if isinstance(hashed, BaseException):
+                        raise hashed
                     suffixes = [suffix for suffix in local_hash if
                             local_hash[suffix] != remote_hash.get(suffix, -1)]
                     self.rsync(node, job, suffixes)

=== modified file 'swift/obj/server.py'
--- swift/obj/server.py 2011-03-29 02:29:20 +0000
+++ swift/obj/server.py 2011-04-11 23:09:56 +0000
@@ -44,7 +44,7 @@
     check_float, check_utf8
 from swift.common.exceptions import ConnectionTimeout, DiskFileError, \
     DiskFileNotExist
-from swift.obj.replicator import get_hashes, invalidate_hash
+from swift.obj.replicator import tpooled_get_hashes, invalidate_hash
 
 
 DATADIR = 'objects'
@@ -708,7 +708,11 @@
         if not os.path.exists(path):
             mkdirs(path)
         suffixes = suffix.split('-') if suffix else []
-        _junk, hashes = tpool.execute(get_hashes, path, recalculate=suffixes)
+        _junk, hashes = tpool.execute(tpooled_get_hashes, path,
+                                      recalculate=suffixes)
+        # See tpooled_get_hashes "Hack".
+        if isinstance(hashes, BaseException):
+            raise hashes
         return Response(body=pickle.dumps(hashes))
 
     def __call__(self, env, start_response):
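
A quick, purely illustrative way to exercise the caller-side check is to feed the pattern a call that always times out; force_timeout and tpooled_force_timeout below are hypothetical helpers, not part of this branch:

from eventlet import Timeout, tpool


def force_timeout(path):
    # Hypothetical helper that behaves as if the hashing call timed out.
    raise Timeout()


def tpooled_force_timeout(*args, **kwargs):
    # Same smuggling pattern as tpooled_get_hashes in the diff above.
    try:
        return force_timeout(*args, **kwargs)
    except Timeout as err:
        return err, err


try:
    hashed, hashes = tpool.execute(tpooled_force_timeout, '/srv/node/sda1')
    if isinstance(hashed, BaseException):
        raise hashed
except Timeout:
    print('Timeout was reraised in the calling green thread')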