Merge lp:~djfroofy/txaws/921419-uploadpart into lp:txaws
- 921419-uploadpart
- Merge into trunk
Proposed by
Drew Smathers
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Duncan McGreggor | ||||
Approved revision: | 145 | ||||
Merged at revision: | 147 | ||||
Proposed branch: | lp:~djfroofy/txaws/921419-uploadpart | ||||
Merge into: | lp:txaws | ||||
Diff against target: |
1301 lines (+648/-82) 9 files modified
txaws/client/_producers.py (+122/-0) txaws/client/base.py (+159/-14) txaws/client/tests/test_base.py (+53/-11) txaws/client/tests/test_ssl.py (+20/-4) txaws/s3/client.py (+85/-26) txaws/s3/model.py (+29/-0) txaws/s3/tests/test_client.py (+148/-27) txaws/testing/payload.py (+9/-0) txaws/testing/producers.py (+23/-0) |
||||
To merge this branch: | bzr merge lp:~djfroofy/txaws/921419-uploadpart | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Duncan McGreggor | Approve | ||
Review via email: mp+92583@code.launchpad.net |
Commit message
Description of the change
This depends on initmultipart branch which in turn has other branch deps.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'txaws/client/_producers.py' | |||
2 | --- txaws/client/_producers.py 1970-01-01 00:00:00 +0000 | |||
3 | +++ txaws/client/_producers.py 2012-02-10 21:24:20 +0000 | |||
4 | @@ -0,0 +1,122 @@ | |||
5 | 1 | import os | ||
6 | 2 | |||
7 | 3 | from zope.interface import implements | ||
8 | 4 | |||
9 | 5 | from twisted.internet import defer, task | ||
10 | 6 | from twisted.web.iweb import UNKNOWN_LENGTH, IBodyProducer | ||
11 | 7 | |||
12 | 8 | |||
13 | 9 | # Code below for FileBodyProducer cut-and-paste from twisted source. | ||
14 | 10 | # Currently this is not released so here temporarily for forward compat. | ||
15 | 11 | |||
16 | 12 | |||
17 | 13 | class FileBodyProducer(object): | ||
18 | 14 | """ | ||
19 | 15 | L{FileBodyProducer} produces bytes from an input file object incrementally | ||
20 | 16 | and writes them to a consumer. | ||
21 | 17 | |||
22 | 18 | Since file-like objects cannot be read from in an event-driven manner, | ||
23 | 19 | L{FileBodyProducer} uses a L{Cooperator} instance to schedule reads from | ||
24 | 20 | the file. This process is also paused and resumed based on notifications | ||
25 | 21 | from the L{IConsumer} provider being written to. | ||
26 | 22 | |||
27 | 23 | The file is closed after it has been read, or if the producer is stopped | ||
28 | 24 | early. | ||
29 | 25 | |||
30 | 26 | @ivar _inputFile: Any file-like object, bytes read from which will be | ||
31 | 27 | written to a consumer. | ||
32 | 28 | |||
33 | 29 | @ivar _cooperate: A method like L{Cooperator.cooperate} which is used to | ||
34 | 30 | schedule all reads. | ||
35 | 31 | |||
36 | 32 | @ivar _readSize: The number of bytes to read from C{_inputFile} at a time. | ||
37 | 33 | """ | ||
38 | 34 | implements(IBodyProducer) | ||
39 | 35 | |||
40 | 36 | # Python 2.4 doesn't have these symbolic constants | ||
41 | 37 | _SEEK_SET = getattr(os, 'SEEK_SET', 0) | ||
42 | 38 | _SEEK_END = getattr(os, 'SEEK_END', 2) | ||
43 | 39 | |||
44 | 40 | def __init__(self, inputFile, cooperator=task, readSize=2 ** 16): | ||
45 | 41 | self._inputFile = inputFile | ||
46 | 42 | self._cooperate = cooperator.cooperate | ||
47 | 43 | self._readSize = readSize | ||
48 | 44 | self.length = self._determineLength(inputFile) | ||
49 | 45 | |||
50 | 46 | |||
51 | 47 | def _determineLength(self, fObj): | ||
52 | 48 | """ | ||
53 | 49 | Determine how many bytes can be read out of C{fObj} (assuming it is not | ||
54 | 50 | modified from this point on). If the determination cannot be made, | ||
55 | 51 | return C{UNKNOWN_LENGTH}. | ||
56 | 52 | """ | ||
57 | 53 | try: | ||
58 | 54 | seek = fObj.seek | ||
59 | 55 | tell = fObj.tell | ||
60 | 56 | except AttributeError: | ||
61 | 57 | return UNKNOWN_LENGTH | ||
62 | 58 | originalPosition = tell() | ||
63 | 59 | seek(0, self._SEEK_END) | ||
64 | 60 | end = tell() | ||
65 | 61 | seek(originalPosition, self._SEEK_SET) | ||
66 | 62 | return end - originalPosition | ||
67 | 63 | |||
68 | 64 | |||
69 | 65 | def stopProducing(self): | ||
70 | 66 | """ | ||
71 | 67 | Permanently stop writing bytes from the file to the consumer by | ||
72 | 68 | stopping the underlying L{CooperativeTask}. | ||
73 | 69 | """ | ||
74 | 70 | self._inputFile.close() | ||
75 | 71 | self._task.stop() | ||
76 | 72 | |||
77 | 73 | |||
78 | 74 | def startProducing(self, consumer): | ||
79 | 75 | """ | ||
80 | 76 | Start a cooperative task which will read bytes from the input file and | ||
81 | 77 | write them to C{consumer}. Return a L{Deferred} which fires after all | ||
82 | 78 | bytes have been written. | ||
83 | 79 | |||
84 | 80 | @param consumer: Any L{IConsumer} provider | ||
85 | 81 | """ | ||
86 | 82 | self._task = self._cooperate(self._writeloop(consumer)) | ||
87 | 83 | d = self._task.whenDone() | ||
88 | 84 | def maybeStopped(reason): | ||
89 | 85 | # IBodyProducer.startProducing's Deferred isn't supposed to fire if | ||
90 | 86 | # stopProducing is called. | ||
91 | 87 | reason.trap(task.TaskStopped) | ||
92 | 88 | return defer.Deferred() | ||
93 | 89 | d.addCallbacks(lambda ignored: None, maybeStopped) | ||
94 | 90 | return d | ||
95 | 91 | |||
96 | 92 | |||
97 | 93 | def _writeloop(self, consumer): | ||
98 | 94 | """ | ||
99 | 95 | Return an iterator which reads one chunk of bytes from the input file | ||
100 | 96 | and writes them to the consumer for each time it is iterated. | ||
101 | 97 | """ | ||
102 | 98 | while True: | ||
103 | 99 | bytes = self._inputFile.read(self._readSize) | ||
104 | 100 | if not bytes: | ||
105 | 101 | self._inputFile.close() | ||
106 | 102 | break | ||
107 | 103 | consumer.write(bytes) | ||
108 | 104 | yield None | ||
109 | 105 | |||
110 | 106 | |||
111 | 107 | def pauseProducing(self): | ||
112 | 108 | """ | ||
113 | 109 | Temporarily suspend copying bytes from the input file to the consumer | ||
114 | 110 | by pausing the L{CooperativeTask} which drives that activity. | ||
115 | 111 | """ | ||
116 | 112 | self._task.pause() | ||
117 | 113 | |||
118 | 114 | |||
119 | 115 | def resumeProducing(self): | ||
120 | 116 | """ | ||
121 | 117 | Undo the effects of a previous C{pauseProducing} and resume copying | ||
122 | 118 | bytes to the consumer by resuming the L{CooperativeTask} which drives | ||
123 | 119 | the write activity. | ||
124 | 120 | """ | ||
125 | 121 | self._task.resume() | ||
126 | 122 | |||
127 | 0 | 123 | ||
128 | === modified file 'txaws/client/base.py' | |||
129 | --- txaws/client/base.py 2011-11-29 08:17:54 +0000 | |||
130 | +++ txaws/client/base.py 2012-02-10 21:24:20 +0000 | |||
131 | @@ -3,10 +3,25 @@ | |||
132 | 3 | except ImportError: | 3 | except ImportError: |
133 | 4 | from xml.parsers.expat import ExpatError as ParseError | 4 | from xml.parsers.expat import ExpatError as ParseError |
134 | 5 | 5 | ||
135 | 6 | import warnings | ||
136 | 7 | from StringIO import StringIO | ||
137 | 8 | |||
138 | 6 | from twisted.internet.ssl import ClientContextFactory | 9 | from twisted.internet.ssl import ClientContextFactory |
139 | 10 | from twisted.internet.protocol import Protocol | ||
140 | 11 | from twisted.internet.defer import Deferred, succeed | ||
141 | 12 | from twisted.python import failure | ||
142 | 7 | from twisted.web import http | 13 | from twisted.web import http |
143 | 14 | from twisted.web.iweb import UNKNOWN_LENGTH | ||
144 | 8 | from twisted.web.client import HTTPClientFactory | 15 | from twisted.web.client import HTTPClientFactory |
145 | 16 | from twisted.web.client import Agent | ||
146 | 17 | from twisted.web.client import ResponseDone | ||
147 | 18 | from twisted.web.http import NO_CONTENT | ||
148 | 19 | from twisted.web.http_headers import Headers | ||
149 | 9 | from twisted.web.error import Error as TwistedWebError | 20 | from twisted.web.error import Error as TwistedWebError |
150 | 21 | try: | ||
151 | 22 | from twisted.web.client import FileBodyProducer | ||
152 | 23 | except ImportError: | ||
153 | 24 | from txaws.client._producers import FileBodyProducer | ||
154 | 10 | 25 | ||
155 | 11 | from txaws.util import parse | 26 | from txaws.util import parse |
156 | 12 | from txaws.credentials import AWSCredentials | 27 | from txaws.credentials import AWSCredentials |
157 | @@ -59,9 +74,10 @@ | |||
158 | 59 | @param query_factory: The class or function that produces a query | 74 | @param query_factory: The class or function that produces a query |
159 | 60 | object for making requests to the EC2 service. | 75 | object for making requests to the EC2 service. |
160 | 61 | @param parser: A parser object for parsing responses from the EC2 service. | 76 | @param parser: A parser object for parsing responses from the EC2 service. |
161 | 77 | @param receiver_factory: Factory for receiving responses from EC2 service. | ||
162 | 62 | """ | 78 | """ |
163 | 63 | def __init__(self, creds=None, endpoint=None, query_factory=None, | 79 | def __init__(self, creds=None, endpoint=None, query_factory=None, |
165 | 64 | parser=None): | 80 | parser=None, receiver_factory=None): |
166 | 65 | if creds is None: | 81 | if creds is None: |
167 | 66 | creds = AWSCredentials() | 82 | creds = AWSCredentials() |
168 | 67 | if endpoint is None: | 83 | if endpoint is None: |
169 | @@ -69,22 +85,109 @@ | |||
170 | 69 | self.creds = creds | 85 | self.creds = creds |
171 | 70 | self.endpoint = endpoint | 86 | self.endpoint = endpoint |
172 | 71 | self.query_factory = query_factory | 87 | self.query_factory = query_factory |
173 | 88 | self.receiver_factory = receiver_factory | ||
174 | 72 | self.parser = parser | 89 | self.parser = parser |
175 | 73 | 90 | ||
176 | 91 | class StreamingError(Exception): | ||
177 | 92 | """ | ||
178 | 93 | Raised if more data or less data is received than expected. | ||
179 | 94 | """ | ||
180 | 95 | |||
181 | 96 | |||
182 | 97 | class StringIOBodyReceiver(Protocol): | ||
183 | 98 | """ | ||
184 | 99 | Simple StringIO-based HTTP response body receiver. | ||
185 | 100 | |||
186 | 101 | TODO: perhaps there should be an interface specifying why | ||
187 | 102 | finished (Deferred) and content_length are necessary and | ||
188 | 103 | how to use them; eg. callback/errback finished on completion. | ||
189 | 104 | """ | ||
190 | 105 | finished = None | ||
191 | 106 | content_length = None | ||
192 | 107 | |||
193 | 108 | def __init__(self): | ||
194 | 109 | self._buffer = StringIO() | ||
195 | 110 | self._received = 0 | ||
196 | 111 | |||
197 | 112 | def dataReceived(self, bytes): | ||
198 | 113 | streaming = self.content_length is UNKNOWN_LENGTH | ||
199 | 114 | if not streaming and (self._received > self.content_length): | ||
200 | 115 | self.transport.loseConnection() | ||
201 | 116 | raise StreamingError( | ||
202 | 117 | "Buffer overflow - received more data than " | ||
203 | 118 | "Content-Length dictated: %d" % self.content_length) | ||
204 | 119 | # TODO should be some limit on how much we receive | ||
205 | 120 | self._buffer.write(bytes) | ||
206 | 121 | self._received += len(bytes) | ||
207 | 122 | |||
208 | 123 | def connectionLost(self, reason): | ||
209 | 124 | reason.trap(ResponseDone) | ||
210 | 125 | d = self.finished | ||
211 | 126 | self.finished = None | ||
212 | 127 | streaming = self.content_length is UNKNOWN_LENGTH | ||
213 | 128 | if streaming or (self._received == self.content_length): | ||
214 | 129 | d.callback(self._buffer.getvalue()) | ||
215 | 130 | else: | ||
216 | 131 | f = failure.Failure(StreamingError("Connection lost before " | ||
217 | 132 | "receiving all data")) | ||
218 | 133 | d.errback(f) | ||
219 | 134 | |||
220 | 135 | |||
221 | 136 | class WebClientContextFactory(ClientContextFactory): | ||
222 | 137 | |||
223 | 138 | def getContext(self, hostname, port): | ||
224 | 139 | return ClientContextFactory.getContext(self) | ||
225 | 140 | |||
226 | 141 | |||
227 | 142 | class WebVerifyingContextFactory(VerifyingContextFactory): | ||
228 | 143 | |||
229 | 144 | def getContext(self, hostname, port): | ||
230 | 145 | return VerifyingContextFactory.getContext(self) | ||
231 | 146 | |||
232 | 147 | |||
233 | 148 | class FakeClient(object): | ||
234 | 149 | """ | ||
235 | 150 | XXX | ||
236 | 151 | A fake client object for some degree of backwards compatability for | ||
237 | 152 | code using the client attibute on BaseQuery to check url, status | ||
238 | 153 | etc. | ||
239 | 154 | """ | ||
240 | 155 | url = None | ||
241 | 156 | status = None | ||
242 | 74 | 157 | ||
243 | 75 | class BaseQuery(object): | 158 | class BaseQuery(object): |
244 | 76 | 159 | ||
246 | 77 | def __init__(self, action=None, creds=None, endpoint=None, reactor=None): | 160 | def __init__(self, action=None, creds=None, endpoint=None, reactor=None, |
247 | 161 | body_producer=None, receiver_factory=None): | ||
248 | 78 | if not action: | 162 | if not action: |
249 | 79 | raise TypeError("The query requires an action parameter.") | 163 | raise TypeError("The query requires an action parameter.") |
250 | 80 | self.factory = HTTPClientFactory | ||
251 | 81 | self.action = action | 164 | self.action = action |
252 | 82 | self.creds = creds | 165 | self.creds = creds |
253 | 83 | self.endpoint = endpoint | 166 | self.endpoint = endpoint |
254 | 84 | if reactor is None: | 167 | if reactor is None: |
255 | 85 | from twisted.internet import reactor | 168 | from twisted.internet import reactor |
256 | 86 | self.reactor = reactor | 169 | self.reactor = reactor |
258 | 87 | self.client = None | 170 | self._client = None |
259 | 171 | self.request_headers = None | ||
260 | 172 | self.response_headers = None | ||
261 | 173 | self.body_producer = body_producer | ||
262 | 174 | self.receiver_factory = receiver_factory or StringIOBodyReceiver | ||
263 | 175 | |||
264 | 176 | @property | ||
265 | 177 | def client(self): | ||
266 | 178 | if self._client is None: | ||
267 | 179 | self._client_deprecation_warning() | ||
268 | 180 | self._client = FakeClient() | ||
269 | 181 | return self._client | ||
270 | 182 | |||
271 | 183 | @client.setter | ||
272 | 184 | def client(self, value): | ||
273 | 185 | self._client_deprecation_warning() | ||
274 | 186 | self._client = value | ||
275 | 187 | |||
276 | 188 | def _client_deprecation_warning(self): | ||
277 | 189 | warnings.warn('The client attribute on BaseQuery is deprecated and' | ||
278 | 190 | ' will go away in future release.') | ||
279 | 88 | 191 | ||
280 | 89 | def get_page(self, url, *args, **kwds): | 192 | def get_page(self, url, *args, **kwds): |
281 | 90 | """ | 193 | """ |
282 | @@ -95,16 +198,39 @@ | |||
283 | 95 | """ | 198 | """ |
284 | 96 | contextFactory = None | 199 | contextFactory = None |
285 | 97 | scheme, host, port, path = parse(url) | 200 | scheme, host, port, path = parse(url) |
287 | 98 | self.client = self.factory(url, *args, **kwds) | 201 | data = kwds.get('postdata', None) |
288 | 202 | self._method = method = kwds.get('method', 'GET') | ||
289 | 203 | self.request_headers = self._headers(kwds.get('headers', {})) | ||
290 | 204 | if (self.body_producer is None) and (data is not None): | ||
291 | 205 | self.body_producer = FileBodyProducer(StringIO(data)) | ||
292 | 99 | if scheme == "https": | 206 | if scheme == "https": |
293 | 100 | if self.endpoint.ssl_hostname_verification: | 207 | if self.endpoint.ssl_hostname_verification: |
295 | 101 | contextFactory = VerifyingContextFactory(host) | 208 | contextFactory = WebVerifyingContextFactory(host) |
296 | 102 | else: | 209 | else: |
299 | 103 | contextFactory = ClientContextFactory() | 210 | contextFactory = WebClientContextFactory() |
300 | 104 | self.reactor.connectSSL(host, port, self.client, contextFactory) | 211 | agent = Agent(self.reactor, contextFactory) |
301 | 212 | self.client.url = url | ||
302 | 213 | d = agent.request(method, url, self.request_headers, | ||
303 | 214 | self.body_producer) | ||
304 | 105 | else: | 215 | else: |
307 | 106 | self.reactor.connectTCP(host, port, self.client) | 216 | agent = Agent(self.reactor) |
308 | 107 | return self.client.deferred | 217 | d = agent.request(method, url, self.request_headers, |
309 | 218 | self.body_producer) | ||
310 | 219 | d.addCallback(self._handle_response) | ||
311 | 220 | return d | ||
312 | 221 | |||
313 | 222 | def _headers(self, headers_dict): | ||
314 | 223 | """ | ||
315 | 224 | Convert dictionary of headers into twisted.web.client.Headers object. | ||
316 | 225 | """ | ||
317 | 226 | return Headers(dict((k,[v]) for (k,v) in headers_dict.items())) | ||
318 | 227 | |||
319 | 228 | def _unpack_headers(self, headers): | ||
320 | 229 | """ | ||
321 | 230 | Unpack twisted.web.client.Headers object to dict. This is to provide | ||
322 | 231 | backwards compatibility. | ||
323 | 232 | """ | ||
324 | 233 | return dict((k,v[0]) for (k,v) in headers.getAllRawHeaders()) | ||
325 | 108 | 234 | ||
326 | 109 | def get_request_headers(self, *args, **kwds): | 235 | def get_request_headers(self, *args, **kwds): |
327 | 110 | """ | 236 | """ |
328 | @@ -114,8 +240,26 @@ | |||
329 | 114 | The AWS S3 API depends upon setting headers. This method is provided as | 240 | The AWS S3 API depends upon setting headers. This method is provided as |
330 | 115 | a convenience for debugging issues with the S3 communications. | 241 | a convenience for debugging issues with the S3 communications. |
331 | 116 | """ | 242 | """ |
334 | 117 | if self.client: | 243 | if self.request_headers: |
335 | 118 | return self.client.headers | 244 | return self._unpack_headers(self.request_headers) |
336 | 245 | |||
337 | 246 | def _handle_response(self, response): | ||
338 | 247 | """ | ||
339 | 248 | Handle the HTTP response by memoing the headers and then delivering | ||
340 | 249 | bytes. | ||
341 | 250 | """ | ||
342 | 251 | self.client.status = response.code | ||
343 | 252 | self.response_headers = headers = response.headers | ||
344 | 253 | # XXX This workaround (which needs to be improved at that) for possible | ||
345 | 254 | # bug in Twisted with new client: | ||
346 | 255 | # http://twistedmatrix.com/trac/ticket/5476 | ||
347 | 256 | if self._method.upper() == 'HEAD' or response.code == NO_CONTENT: | ||
348 | 257 | return succeed('') | ||
349 | 258 | receiver = self.receiver_factory() | ||
350 | 259 | receiver.finished = d = Deferred() | ||
351 | 260 | receiver.content_length = response.length | ||
352 | 261 | response.deliverBody(receiver) | ||
353 | 262 | return d | ||
354 | 119 | 263 | ||
355 | 120 | def get_response_headers(self, *args, **kwargs): | 264 | def get_response_headers(self, *args, **kwargs): |
356 | 121 | """ | 265 | """ |
357 | @@ -125,5 +269,6 @@ | |||
358 | 125 | The AWS S3 API depends upon setting headers. This method is used by the | 269 | The AWS S3 API depends upon setting headers. This method is used by the |
359 | 126 | head_object API call for getting a S3 object's metadata. | 270 | head_object API call for getting a S3 object's metadata. |
360 | 127 | """ | 271 | """ |
363 | 128 | if self.client: | 272 | if self.response_headers: |
364 | 129 | return self.client.response_headers | 273 | return self._unpack_headers(self.response_headers) |
365 | 274 | |||
366 | 130 | 275 | ||
367 | === modified file 'txaws/client/tests/test_base.py' | |||
368 | --- txaws/client/tests/test_base.py 2012-01-26 18:43:48 +0000 | |||
369 | +++ txaws/client/tests/test_base.py 2012-02-10 21:24:20 +0000 | |||
370 | @@ -1,6 +1,9 @@ | |||
371 | 1 | import os | 1 | import os |
372 | 2 | 2 | ||
373 | 3 | from zope.interface import implements | ||
374 | 4 | |||
375 | 3 | from twisted.internet import reactor | 5 | from twisted.internet import reactor |
376 | 6 | from twisted.internet.defer import succeed | ||
377 | 4 | from twisted.internet.error import ConnectionRefusedError | 7 | from twisted.internet.error import ConnectionRefusedError |
378 | 5 | from twisted.protocols.policies import WrappingFactory | 8 | from twisted.protocols.policies import WrappingFactory |
379 | 6 | from twisted.python import log | 9 | from twisted.python import log |
380 | @@ -8,14 +11,16 @@ | |||
381 | 8 | from twisted.python.failure import Failure | 11 | from twisted.python.failure import Failure |
382 | 9 | from twisted.test.test_sslverify import makeCertificate | 12 | from twisted.test.test_sslverify import makeCertificate |
383 | 10 | from twisted.web import server, static | 13 | from twisted.web import server, static |
384 | 14 | from twisted.web.iweb import IBodyProducer | ||
385 | 11 | from twisted.web.client import HTTPClientFactory | 15 | from twisted.web.client import HTTPClientFactory |
386 | 12 | from twisted.web.error import Error as TwistedWebError | 16 | from twisted.web.error import Error as TwistedWebError |
387 | 13 | 17 | ||
388 | 14 | from txaws.client import ssl | 18 | from txaws.client import ssl |
389 | 15 | from txaws.client.base import BaseClient, BaseQuery, error_wrapper | 19 | from txaws.client.base import BaseClient, BaseQuery, error_wrapper |
390 | 20 | from txaws.client.base import StringIOBodyReceiver | ||
391 | 16 | from txaws.service import AWSServiceEndpoint | 21 | from txaws.service import AWSServiceEndpoint |
392 | 17 | from txaws.testing.base import TXAWSTestCase | 22 | from txaws.testing.base import TXAWSTestCase |
394 | 18 | 23 | from txaws.testing.producers import StringBodyProducer | |
395 | 19 | 24 | ||
396 | 20 | class ErrorWrapperTestCase(TXAWSTestCase): | 25 | class ErrorWrapperTestCase(TXAWSTestCase): |
397 | 21 | 26 | ||
398 | @@ -99,7 +104,6 @@ | |||
399 | 99 | 104 | ||
400 | 100 | def test_creation(self): | 105 | def test_creation(self): |
401 | 101 | query = BaseQuery("an action", "creds", "http://endpoint") | 106 | query = BaseQuery("an action", "creds", "http://endpoint") |
402 | 102 | self.assertEquals(query.factory, HTTPClientFactory) | ||
403 | 103 | self.assertEquals(query.action, "an action") | 107 | self.assertEquals(query.action, "an action") |
404 | 104 | self.assertEquals(query.creds, "creds") | 108 | self.assertEquals(query.creds, "creds") |
405 | 105 | self.assertEquals(query.endpoint, "http://endpoint") | 109 | self.assertEquals(query.endpoint, "http://endpoint") |
406 | @@ -142,16 +146,52 @@ | |||
407 | 142 | def test_get_response_headers_with_client(self): | 146 | def test_get_response_headers_with_client(self): |
408 | 143 | 147 | ||
409 | 144 | def check_results(results): | 148 | def check_results(results): |
410 | 149 | #self.assertEquals(sorted(results.keys()), [ | ||
411 | 150 | # "accept-ranges", "content-length", "content-type", "date", | ||
412 | 151 | # "last-modified", "server"]) | ||
413 | 152 | # XXX I think newclient excludes content-length from headers? | ||
414 | 153 | # Also the header names are capitalized ... do we need to worry | ||
415 | 154 | # about backwards compat? | ||
416 | 145 | self.assertEquals(sorted(results.keys()), [ | 155 | self.assertEquals(sorted(results.keys()), [ |
420 | 146 | "accept-ranges", "content-length", "content-type", "date", | 156 | "Accept-Ranges", "Content-Type", "Date", |
421 | 147 | "last-modified", "server"]) | 157 | "Last-Modified", "Server"]) |
422 | 148 | self.assertEquals(len(results.values()), 6) | 158 | self.assertEquals(len(results.values()), 5) |
423 | 149 | 159 | ||
424 | 150 | query = BaseQuery("an action", "creds", "http://endpoint") | 160 | query = BaseQuery("an action", "creds", "http://endpoint") |
425 | 151 | d = query.get_page(self._get_url("file")) | 161 | d = query.get_page(self._get_url("file")) |
426 | 152 | d.addCallback(query.get_response_headers) | 162 | d.addCallback(query.get_response_headers) |
427 | 153 | return d.addCallback(check_results) | 163 | return d.addCallback(check_results) |
428 | 154 | 164 | ||
429 | 165 | def test_custom_body_producer(self): | ||
430 | 166 | |||
431 | 167 | def check_producer_was_used(ignore): | ||
432 | 168 | self.assertEqual(producer.written, 'test data') | ||
433 | 169 | |||
434 | 170 | producer = StringBodyProducer('test data') | ||
435 | 171 | query = BaseQuery("an action", "creds", "http://endpoint", | ||
436 | 172 | body_producer=producer) | ||
437 | 173 | d = query.get_page(self._get_url("file"), method='PUT') | ||
438 | 174 | return d.addCallback(check_producer_was_used) | ||
439 | 175 | |||
440 | 176 | def test_custom_receiver_factory(self): | ||
441 | 177 | |||
442 | 178 | class TestReceiverProtocol(StringIOBodyReceiver): | ||
443 | 179 | used = False | ||
444 | 180 | |||
445 | 181 | def __init__(self): | ||
446 | 182 | StringIOBodyReceiver.__init__(self) | ||
447 | 183 | TestReceiverProtocol.used = True | ||
448 | 184 | |||
449 | 185 | def check_used(ignore): | ||
450 | 186 | self.assert_(TestReceiverProtocol.used) | ||
451 | 187 | |||
452 | 188 | query = BaseQuery("an action", "creds", "http://endpoint", | ||
453 | 189 | receiver_factory=TestReceiverProtocol) | ||
454 | 190 | d = query.get_page(self._get_url("file")) | ||
455 | 191 | d.addCallback(self.assertEquals, "0123456789") | ||
456 | 192 | d.addCallback(check_used) | ||
457 | 193 | return d | ||
458 | 194 | |||
459 | 155 | # XXX for systems that don't have certs in the DEFAULT_CERT_PATH, this test | 195 | # XXX for systems that don't have certs in the DEFAULT_CERT_PATH, this test |
460 | 156 | # will fail; instead, let's create some certs in a temp directory and set | 196 | # will fail; instead, let's create some certs in a temp directory and set |
461 | 157 | # the DEFAULT_CERT_PATH to point there. | 197 | # the DEFAULT_CERT_PATH to point there. |
462 | @@ -167,8 +207,9 @@ | |||
463 | 167 | def __init__(self): | 207 | def __init__(self): |
464 | 168 | self.connects = [] | 208 | self.connects = [] |
465 | 169 | 209 | ||
468 | 170 | def connectSSL(self, host, port, client, factory): | 210 | def connectSSL(self, host, port, factory, contextFactory, timeout, |
469 | 171 | self.connects.append((host, port, client, factory)) | 211 | bindAddress): |
470 | 212 | self.connects.append((host, port, factory, contextFactory)) | ||
471 | 172 | 213 | ||
472 | 173 | certs = makeCertificate(O="Test Certificate", CN="something")[1] | 214 | certs = makeCertificate(O="Test Certificate", CN="something")[1] |
473 | 174 | self.patch(ssl, "_ca_certs", certs) | 215 | self.patch(ssl, "_ca_certs", certs) |
474 | @@ -176,9 +217,10 @@ | |||
475 | 176 | endpoint = AWSServiceEndpoint(ssl_hostname_verification=True) | 217 | endpoint = AWSServiceEndpoint(ssl_hostname_verification=True) |
476 | 177 | query = BaseQuery("an action", "creds", endpoint, fake_reactor) | 218 | query = BaseQuery("an action", "creds", endpoint, fake_reactor) |
477 | 178 | query.get_page("https://example.com/file") | 219 | query.get_page("https://example.com/file") |
479 | 179 | [(host, port, client, factory)] = fake_reactor.connects | 220 | [(host, port, factory, contextFactory)] = fake_reactor.connects |
480 | 180 | self.assertEqual("example.com", host) | 221 | self.assertEqual("example.com", host) |
481 | 181 | self.assertEqual(443, port) | 222 | self.assertEqual(443, port) |
485 | 182 | self.assertTrue(isinstance(factory, ssl.VerifyingContextFactory)) | 223 | wrappedFactory = contextFactory._webContext |
486 | 183 | self.assertEqual("example.com", factory.host) | 224 | self.assertTrue(isinstance(wrappedFactory, ssl.VerifyingContextFactory)) |
487 | 184 | self.assertNotEqual([], factory.caCerts) | 225 | self.assertEqual("example.com", wrappedFactory.host) |
488 | 226 | self.assertNotEqual([], wrappedFactory.caCerts) | ||
489 | 185 | 227 | ||
490 | === modified file 'txaws/client/tests/test_ssl.py' | |||
491 | --- txaws/client/tests/test_ssl.py 2012-01-26 22:54:44 +0000 | |||
492 | +++ txaws/client/tests/test_ssl.py 2012-02-10 21:24:20 +0000 | |||
493 | @@ -12,6 +12,10 @@ | |||
494 | 12 | from twisted.python.filepath import FilePath | 12 | from twisted.python.filepath import FilePath |
495 | 13 | from twisted.test.test_sslverify import makeCertificate | 13 | from twisted.test.test_sslverify import makeCertificate |
496 | 14 | from twisted.web import server, static | 14 | from twisted.web import server, static |
497 | 15 | try: | ||
498 | 16 | from twisted.web.client import ResponseFailed | ||
499 | 17 | except ImportError: | ||
500 | 18 | from twisted.web._newclient import ResponseFailed | ||
501 | 15 | 19 | ||
502 | 16 | from txaws import exception | 20 | from txaws import exception |
503 | 17 | from txaws.client import ssl | 21 | from txaws.client import ssl |
504 | @@ -32,6 +36,11 @@ | |||
505 | 32 | PUBSANKEY = sibpath("public_san.ssl") | 36 | PUBSANKEY = sibpath("public_san.ssl") |
506 | 33 | 37 | ||
507 | 34 | 38 | ||
508 | 39 | class WebDefaultOpenSSLContextFactory(DefaultOpenSSLContextFactory): | ||
509 | 40 | def getContext(self, hostname=None, port=None): | ||
510 | 41 | return DefaultOpenSSLContextFactory.getContext(self) | ||
511 | 42 | |||
512 | 43 | |||
513 | 35 | class BaseQuerySSLTestCase(TXAWSTestCase): | 44 | class BaseQuerySSLTestCase(TXAWSTestCase): |
514 | 36 | 45 | ||
515 | 37 | def setUp(self): | 46 | def setUp(self): |
516 | @@ -75,7 +84,7 @@ | |||
517 | 75 | The L{VerifyingContextFactory} properly allows to connect to the | 84 | The L{VerifyingContextFactory} properly allows to connect to the |
518 | 76 | endpoint if the certificates match. | 85 | endpoint if the certificates match. |
519 | 77 | """ | 86 | """ |
521 | 78 | context_factory = DefaultOpenSSLContextFactory(PRIVKEY, PUBKEY) | 87 | context_factory = WebDefaultOpenSSLContextFactory(PRIVKEY, PUBKEY) |
522 | 79 | self.port = reactor.listenSSL( | 88 | self.port = reactor.listenSSL( |
523 | 80 | 0, self.site, context_factory, interface="127.0.0.1") | 89 | 0, self.site, context_factory, interface="127.0.0.1") |
524 | 81 | self.portno = self.port.getHost().port | 90 | self.portno = self.port.getHost().port |
525 | @@ -90,7 +99,7 @@ | |||
526 | 90 | The L{VerifyingContextFactory} fails with a SSL error the certificates | 99 | The L{VerifyingContextFactory} fails with a SSL error the certificates |
527 | 91 | can't be checked. | 100 | can't be checked. |
528 | 92 | """ | 101 | """ |
530 | 93 | context_factory = DefaultOpenSSLContextFactory(BADPRIVKEY, BADPUBKEY) | 102 | context_factory = WebDefaultOpenSSLContextFactory(BADPRIVKEY, BADPUBKEY) |
531 | 94 | self.port = reactor.listenSSL( | 103 | self.port = reactor.listenSSL( |
532 | 95 | 0, self.site, context_factory, interface="127.0.0.1") | 104 | 0, self.site, context_factory, interface="127.0.0.1") |
533 | 96 | self.portno = self.port.getHost().port | 105 | self.portno = self.port.getHost().port |
534 | @@ -98,7 +107,14 @@ | |||
535 | 98 | endpoint = AWSServiceEndpoint(ssl_hostname_verification=True) | 107 | endpoint = AWSServiceEndpoint(ssl_hostname_verification=True) |
536 | 99 | query = BaseQuery("an action", "creds", endpoint) | 108 | query = BaseQuery("an action", "creds", endpoint) |
537 | 100 | d = query.get_page(self._get_url("file")) | 109 | d = query.get_page(self._get_url("file")) |
539 | 101 | return self.assertFailure(d, SSLError) | 110 | def fail(ignore): |
540 | 111 | self.fail('Expected SSLError') | ||
541 | 112 | def check_exception(why): | ||
542 | 113 | # XXX kind of a mess here ... need to unwrap the | ||
543 | 114 | # exception and check | ||
544 | 115 | root_exc = why.value[0][0].value | ||
545 | 116 | self.assert_(isinstance(root_exc, SSLError)) | ||
546 | 117 | return d.addCallbacks(fail, check_exception) | ||
547 | 102 | 118 | ||
548 | 103 | def test_ssl_verification_bypassed(self): | 119 | def test_ssl_verification_bypassed(self): |
549 | 104 | """ | 120 | """ |
550 | @@ -121,7 +137,7 @@ | |||
551 | 121 | L{VerifyingContextFactory} supports checking C{subjectAltName} in the | 137 | L{VerifyingContextFactory} supports checking C{subjectAltName} in the |
552 | 122 | certificate if it's available. | 138 | certificate if it's available. |
553 | 123 | """ | 139 | """ |
555 | 124 | context_factory = DefaultOpenSSLContextFactory(PRIVSANKEY, PUBSANKEY) | 140 | context_factory = WebDefaultOpenSSLContextFactory(PRIVSANKEY, PUBSANKEY) |
556 | 125 | self.port = reactor.listenSSL( | 141 | self.port = reactor.listenSSL( |
557 | 126 | 0, self.site, context_factory, interface="127.0.0.1") | 142 | 0, self.site, context_factory, interface="127.0.0.1") |
558 | 127 | self.portno = self.port.getHost().port | 143 | self.portno = self.port.getHost().port |
559 | 128 | 144 | ||
560 | === modified file 'txaws/s3/client.py' | |||
561 | --- txaws/s3/client.py 2012-01-28 00:39:00 +0000 | |||
562 | +++ txaws/s3/client.py 2012-02-10 21:24:20 +0000 | |||
563 | @@ -23,7 +23,7 @@ | |||
564 | 23 | from txaws.s3.model import ( | 23 | from txaws.s3.model import ( |
565 | 24 | Bucket, BucketItem, BucketListing, ItemOwner, LifecycleConfiguration, | 24 | Bucket, BucketItem, BucketListing, ItemOwner, LifecycleConfiguration, |
566 | 25 | LifecycleConfigurationRule, NotificationConfiguration, RequestPayment, | 25 | LifecycleConfigurationRule, NotificationConfiguration, RequestPayment, |
568 | 26 | VersioningConfiguration, WebsiteConfiguration) | 26 | VersioningConfiguration, WebsiteConfiguration, MultipartInitiationResponse) |
569 | 27 | from txaws.s3.exception import S3Error | 27 | from txaws.s3.exception import S3Error |
570 | 28 | from txaws.service import AWSServiceEndpoint, S3_ENDPOINT | 28 | from txaws.service import AWSServiceEndpoint, S3_ENDPOINT |
571 | 29 | from txaws.util import XML, calculate_md5 | 29 | from txaws.util import XML, calculate_md5 |
572 | @@ -74,10 +74,12 @@ | |||
573 | 74 | class S3Client(BaseClient): | 74 | class S3Client(BaseClient): |
574 | 75 | """A client for S3.""" | 75 | """A client for S3.""" |
575 | 76 | 76 | ||
577 | 77 | def __init__(self, creds=None, endpoint=None, query_factory=None): | 77 | def __init__(self, creds=None, endpoint=None, query_factory=None, |
578 | 78 | receiver_factory=None): | ||
579 | 78 | if query_factory is None: | 79 | if query_factory is None: |
580 | 79 | query_factory = Query | 80 | query_factory = Query |
582 | 80 | super(S3Client, self).__init__(creds, endpoint, query_factory) | 81 | super(S3Client, self).__init__(creds, endpoint, query_factory, |
583 | 82 | receiver_factory=receiver_factory) | ||
584 | 81 | 83 | ||
585 | 82 | def list_buckets(self): | 84 | def list_buckets(self): |
586 | 83 | """ | 85 | """ |
587 | @@ -87,7 +89,8 @@ | |||
588 | 87 | the request. | 89 | the request. |
589 | 88 | """ | 90 | """ |
590 | 89 | query = self.query_factory( | 91 | query = self.query_factory( |
592 | 90 | action="GET", creds=self.creds, endpoint=self.endpoint) | 92 | action="GET", creds=self.creds, endpoint=self.endpoint, |
593 | 93 | receiver_factory=self.receiver_factory) | ||
594 | 91 | d = query.submit() | 94 | d = query.submit() |
595 | 92 | return d.addCallback(self._parse_list_buckets) | 95 | return d.addCallback(self._parse_list_buckets) |
596 | 93 | 96 | ||
597 | @@ -131,7 +134,7 @@ | |||
598 | 131 | """ | 134 | """ |
599 | 132 | query = self.query_factory( | 135 | query = self.query_factory( |
600 | 133 | action="GET", creds=self.creds, endpoint=self.endpoint, | 136 | action="GET", creds=self.creds, endpoint=self.endpoint, |
602 | 134 | bucket=bucket) | 137 | bucket=bucket, receiver_factory=self.receiver_factory) |
603 | 135 | d = query.submit() | 138 | d = query.submit() |
604 | 136 | return d.addCallback(self._parse_get_bucket) | 139 | return d.addCallback(self._parse_get_bucket) |
605 | 137 | 140 | ||
606 | @@ -174,7 +177,8 @@ | |||
607 | 174 | """ | 177 | """ |
608 | 175 | query = self.query_factory(action="GET", creds=self.creds, | 178 | query = self.query_factory(action="GET", creds=self.creds, |
609 | 176 | endpoint=self.endpoint, bucket=bucket, | 179 | endpoint=self.endpoint, bucket=bucket, |
611 | 177 | object_name="?location") | 180 | object_name="?location", |
612 | 181 | receiver_factory=self.receiver_factory) | ||
613 | 178 | d = query.submit() | 182 | d = query.submit() |
614 | 179 | return d.addCallback(self._parse_bucket_location) | 183 | return d.addCallback(self._parse_bucket_location) |
615 | 180 | 184 | ||
616 | @@ -193,7 +197,8 @@ | |||
617 | 193 | """ | 197 | """ |
618 | 194 | query = self.query_factory( | 198 | query = self.query_factory( |
619 | 195 | action='GET', creds=self.creds, endpoint=self.endpoint, | 199 | action='GET', creds=self.creds, endpoint=self.endpoint, |
621 | 196 | bucket=bucket, object_name='?lifecycle') | 200 | bucket=bucket, object_name='?lifecycle', |
622 | 201 | receiver_factory=self.receiver_factory) | ||
623 | 197 | return query.submit().addCallback(self._parse_lifecycle_config) | 202 | return query.submit().addCallback(self._parse_lifecycle_config) |
624 | 198 | 203 | ||
625 | 199 | def _parse_lifecycle_config(self, xml_bytes): | 204 | def _parse_lifecycle_config(self, xml_bytes): |
626 | @@ -221,7 +226,8 @@ | |||
627 | 221 | """ | 226 | """ |
628 | 222 | query = self.query_factory( | 227 | query = self.query_factory( |
629 | 223 | action='GET', creds=self.creds, endpoint=self.endpoint, | 228 | action='GET', creds=self.creds, endpoint=self.endpoint, |
631 | 224 | bucket=bucket, object_name='?website') | 229 | bucket=bucket, object_name='?website', |
632 | 230 | receiver_factory=self.receiver_factory) | ||
633 | 225 | return query.submit().addCallback(self._parse_website_config) | 231 | return query.submit().addCallback(self._parse_website_config) |
634 | 226 | 232 | ||
635 | 227 | def _parse_website_config(self, xml_bytes): | 233 | def _parse_website_config(self, xml_bytes): |
636 | @@ -242,7 +248,8 @@ | |||
637 | 242 | """ | 248 | """ |
638 | 243 | query = self.query_factory( | 249 | query = self.query_factory( |
639 | 244 | action='GET', creds=self.creds, endpoint=self.endpoint, | 250 | action='GET', creds=self.creds, endpoint=self.endpoint, |
641 | 245 | bucket=bucket, object_name='?notification') | 251 | bucket=bucket, object_name='?notification', |
642 | 252 | receiver_factory=self.receiver_factory) | ||
643 | 246 | return query.submit().addCallback(self._parse_notification_config) | 253 | return query.submit().addCallback(self._parse_notification_config) |
644 | 247 | 254 | ||
645 | 248 | def _parse_notification_config(self, xml_bytes): | 255 | def _parse_notification_config(self, xml_bytes): |
646 | @@ -262,7 +269,8 @@ | |||
647 | 262 | """ | 269 | """ |
648 | 263 | query = self.query_factory( | 270 | query = self.query_factory( |
649 | 264 | action='GET', creds=self.creds, endpoint=self.endpoint, | 271 | action='GET', creds=self.creds, endpoint=self.endpoint, |
651 | 265 | bucket=bucket, object_name='?versioning') | 272 | bucket=bucket, object_name='?versioning', |
652 | 273 | receiver_factory=self.receiver_factory) | ||
653 | 266 | return query.submit().addCallback(self._parse_versioning_config) | 274 | return query.submit().addCallback(self._parse_versioning_config) |
654 | 267 | 275 | ||
655 | 268 | def _parse_versioning_config(self, xml_bytes): | 276 | def _parse_versioning_config(self, xml_bytes): |
656 | @@ -279,7 +287,8 @@ | |||
657 | 279 | """ | 287 | """ |
658 | 280 | query = self.query_factory( | 288 | query = self.query_factory( |
659 | 281 | action='GET', creds=self.creds, endpoint=self.endpoint, | 289 | action='GET', creds=self.creds, endpoint=self.endpoint, |
661 | 282 | bucket=bucket, object_name='?acl') | 290 | bucket=bucket, object_name='?acl', |
662 | 291 | receiver_factory=self.receiver_factory) | ||
663 | 283 | return query.submit().addCallback(self._parse_acl) | 292 | return query.submit().addCallback(self._parse_acl) |
664 | 284 | 293 | ||
665 | 285 | def put_bucket_acl(self, bucket, access_control_policy): | 294 | def put_bucket_acl(self, bucket, access_control_policy): |
666 | @@ -289,7 +298,8 @@ | |||
667 | 289 | data = access_control_policy.to_xml() | 298 | data = access_control_policy.to_xml() |
668 | 290 | query = self.query_factory( | 299 | query = self.query_factory( |
669 | 291 | action='PUT', creds=self.creds, endpoint=self.endpoint, | 300 | action='PUT', creds=self.creds, endpoint=self.endpoint, |
671 | 292 | bucket=bucket, object_name='?acl', data=data) | 301 | bucket=bucket, object_name='?acl', data=data, |
672 | 302 | receiver_factory=self.receiver_factory) | ||
673 | 293 | return query.submit().addCallback(self._parse_acl) | 303 | return query.submit().addCallback(self._parse_acl) |
674 | 294 | 304 | ||
675 | 295 | def _parse_acl(self, xml_bytes): | 305 | def _parse_acl(self, xml_bytes): |
676 | @@ -299,8 +309,8 @@ | |||
677 | 299 | """ | 309 | """ |
678 | 300 | return AccessControlPolicy.from_xml(xml_bytes) | 310 | return AccessControlPolicy.from_xml(xml_bytes) |
679 | 301 | 311 | ||
682 | 302 | def put_object(self, bucket, object_name, data, content_type=None, | 312 | def put_object(self, bucket, object_name, data=None, content_type=None, |
683 | 303 | metadata={}, amz_headers={}): | 313 | metadata={}, amz_headers={}, body_producer=None): |
684 | 304 | """ | 314 | """ |
685 | 305 | Put an object in a bucket. | 315 | Put an object in a bucket. |
686 | 306 | 316 | ||
687 | @@ -318,7 +328,8 @@ | |||
688 | 318 | action="PUT", creds=self.creds, endpoint=self.endpoint, | 328 | action="PUT", creds=self.creds, endpoint=self.endpoint, |
689 | 319 | bucket=bucket, object_name=object_name, data=data, | 329 | bucket=bucket, object_name=object_name, data=data, |
690 | 320 | content_type=content_type, metadata=metadata, | 330 | content_type=content_type, metadata=metadata, |
692 | 321 | amz_headers=amz_headers) | 331 | amz_headers=amz_headers, body_producer=body_producer, |
693 | 332 | receiver_factory=self.receiver_factory) | ||
694 | 322 | return query.submit() | 333 | return query.submit() |
695 | 323 | 334 | ||
696 | 324 | def copy_object(self, source_bucket, source_object_name, dest_bucket=None, | 335 | def copy_object(self, source_bucket, source_object_name, dest_bucket=None, |
697 | @@ -344,7 +355,8 @@ | |||
698 | 344 | query = self.query_factory( | 355 | query = self.query_factory( |
699 | 345 | action="PUT", creds=self.creds, endpoint=self.endpoint, | 356 | action="PUT", creds=self.creds, endpoint=self.endpoint, |
700 | 346 | bucket=dest_bucket, object_name=dest_object_name, | 357 | bucket=dest_bucket, object_name=dest_object_name, |
702 | 347 | metadata=metadata, amz_headers=amz_headers) | 358 | metadata=metadata, amz_headers=amz_headers, |
703 | 359 | receiver_factory=self.receiver_factory) | ||
704 | 348 | return query.submit() | 360 | return query.submit() |
705 | 349 | 361 | ||
706 | 350 | def get_object(self, bucket, object_name): | 362 | def get_object(self, bucket, object_name): |
707 | @@ -353,7 +365,8 @@ | |||
708 | 353 | """ | 365 | """ |
709 | 354 | query = self.query_factory( | 366 | query = self.query_factory( |
710 | 355 | action="GET", creds=self.creds, endpoint=self.endpoint, | 367 | action="GET", creds=self.creds, endpoint=self.endpoint, |
712 | 356 | bucket=bucket, object_name=object_name) | 368 | bucket=bucket, object_name=object_name, |
713 | 369 | receiver_factory=self.receiver_factory) | ||
714 | 357 | return query.submit() | 370 | return query.submit() |
715 | 358 | 371 | ||
716 | 359 | def head_object(self, bucket, object_name): | 372 | def head_object(self, bucket, object_name): |
717 | @@ -384,7 +397,8 @@ | |||
718 | 384 | data = access_control_policy.to_xml() | 397 | data = access_control_policy.to_xml() |
719 | 385 | query = self.query_factory( | 398 | query = self.query_factory( |
720 | 386 | action='PUT', creds=self.creds, endpoint=self.endpoint, | 399 | action='PUT', creds=self.creds, endpoint=self.endpoint, |
722 | 387 | bucket=bucket, object_name='%s?acl' % object_name, data=data) | 400 | bucket=bucket, object_name='%s?acl' % object_name, data=data, |
723 | 401 | receiver_factory=self.receiver_factory) | ||
724 | 388 | return query.submit().addCallback(self._parse_acl) | 402 | return query.submit().addCallback(self._parse_acl) |
725 | 389 | 403 | ||
726 | 390 | def get_object_acl(self, bucket, object_name): | 404 | def get_object_acl(self, bucket, object_name): |
727 | @@ -393,7 +407,8 @@ | |||
728 | 393 | """ | 407 | """ |
729 | 394 | query = self.query_factory( | 408 | query = self.query_factory( |
730 | 395 | action='GET', creds=self.creds, endpoint=self.endpoint, | 409 | action='GET', creds=self.creds, endpoint=self.endpoint, |
732 | 396 | bucket=bucket, object_name='%s?acl' % object_name) | 410 | bucket=bucket, object_name='%s?acl' % object_name, |
733 | 411 | receiver_factory=self.receiver_factory) | ||
734 | 397 | return query.submit().addCallback(self._parse_acl) | 412 | return query.submit().addCallback(self._parse_acl) |
735 | 398 | 413 | ||
736 | 399 | def put_request_payment(self, bucket, payer): | 414 | def put_request_payment(self, bucket, payer): |
737 | @@ -407,7 +422,8 @@ | |||
738 | 407 | data = RequestPayment(payer).to_xml() | 422 | data = RequestPayment(payer).to_xml() |
739 | 408 | query = self.query_factory( | 423 | query = self.query_factory( |
740 | 409 | action="PUT", creds=self.creds, endpoint=self.endpoint, | 424 | action="PUT", creds=self.creds, endpoint=self.endpoint, |
742 | 410 | bucket=bucket, object_name="?requestPayment", data=data) | 425 | bucket=bucket, object_name="?requestPayment", data=data, |
743 | 426 | receiver_factory=self.receiver_factory) | ||
744 | 411 | return query.submit() | 427 | return query.submit() |
745 | 412 | 428 | ||
746 | 413 | def get_request_payment(self, bucket): | 429 | def get_request_payment(self, bucket): |
747 | @@ -419,7 +435,8 @@ | |||
748 | 419 | """ | 435 | """ |
749 | 420 | query = self.query_factory( | 436 | query = self.query_factory( |
750 | 421 | action="GET", creds=self.creds, endpoint=self.endpoint, | 437 | action="GET", creds=self.creds, endpoint=self.endpoint, |
752 | 422 | bucket=bucket, object_name="?requestPayment") | 438 | bucket=bucket, object_name="?requestPayment", |
753 | 439 | receiver_factory=self.receiver_factory) | ||
754 | 423 | return query.submit().addCallback(self._parse_get_request_payment) | 440 | return query.submit().addCallback(self._parse_get_request_payment) |
755 | 424 | 441 | ||
756 | 425 | def _parse_get_request_payment(self, xml_bytes): | 442 | def _parse_get_request_payment(self, xml_bytes): |
757 | @@ -429,17 +446,53 @@ | |||
758 | 429 | """ | 446 | """ |
759 | 430 | return RequestPayment.from_xml(xml_bytes).payer | 447 | return RequestPayment.from_xml(xml_bytes).payer |
760 | 431 | 448 | ||
761 | 449 | def init_multipart_upload(self, bucket, object_name, content_type=None, | ||
762 | 450 | metadata={}): | ||
763 | 451 | """ | ||
764 | 452 | Initiate a multipart upload to a bucket. | ||
765 | 453 | |||
766 | 454 | @param bucket: The name of the bucket | ||
767 | 455 | @param object_name: The object name | ||
768 | 456 | @param content_type: The Content-Type for the object | ||
769 | 457 | @param metadata: C{dict} containing additional metadata | ||
770 | 458 | @return: C{str} upload_id | ||
771 | 459 | """ | ||
772 | 460 | objectname_plus = '%s?uploads' % object_name | ||
773 | 461 | query = self.query_factory( | ||
774 | 462 | action="POST", creds=self.creds, endpoint=self.endpoint, | ||
775 | 463 | bucket=bucket, object_name=objectname_plus, data='', | ||
776 | 464 | content_type=content_type, metadata=metadata) | ||
777 | 465 | d = query.submit() | ||
778 | 466 | return d.addCallback(MultipartInitiationResponse.from_xml) | ||
779 | 467 | |||
780 | 468 | def upload_part(self, bucket, object_name, upload_id, part_number, data=None, | ||
781 | 469 | content_type=None, metadata={}, body_producer=None): | ||
782 | 470 | """ | ||
783 | 471 | Upload a part of data corresponding to a multipart upload. | ||
784 | 472 | |||
785 | 473 | @return: the C{Deferred} from underlying query.submit() call | ||
786 | 474 | """ | ||
787 | 475 | parms = 'partNumber=%s&uploadId=%s' % (str(part_number), upload_id) | ||
788 | 476 | objectname_plus = '%s?%s' % (object_name, parms) | ||
789 | 477 | query = self.query_factory( | ||
790 | 478 | action="PUT", creds=self.creds, endpoint=self.endpoint, | ||
791 | 479 | bucket=bucket, object_name=objectname_plus, data=data, | ||
792 | 480 | content_type=content_type, metadata=metadata, body_producer=body_producer, | ||
793 | 481 | receiver_factory=self.receiver_factory) | ||
794 | 482 | d = query.submit() | ||
795 | 483 | return d.addCallback(query.get_response_headers) | ||
796 | 432 | 484 | ||
797 | 433 | class Query(BaseQuery): | 485 | class Query(BaseQuery): |
798 | 434 | """A query for submission to the S3 service.""" | 486 | """A query for submission to the S3 service.""" |
799 | 435 | 487 | ||
800 | 436 | def __init__(self, bucket=None, object_name=None, data="", | 488 | def __init__(self, bucket=None, object_name=None, data="", |
803 | 437 | content_type=None, metadata={}, amz_headers={}, *args, | 489 | content_type=None, metadata={}, amz_headers={}, |
804 | 438 | **kwargs): | 490 | body_producer=None, *args, **kwargs): |
805 | 439 | super(Query, self).__init__(*args, **kwargs) | 491 | super(Query, self).__init__(*args, **kwargs) |
806 | 440 | self.bucket = bucket | 492 | self.bucket = bucket |
807 | 441 | self.object_name = object_name | 493 | self.object_name = object_name |
808 | 442 | self.data = data | 494 | self.data = data |
809 | 495 | self.body_producer = body_producer | ||
810 | 443 | self.content_type = content_type | 496 | self.content_type = content_type |
811 | 444 | self.metadata = metadata | 497 | self.metadata = metadata |
812 | 445 | self.amz_headers = amz_headers | 498 | self.amz_headers = amz_headers |
813 | @@ -463,9 +516,14 @@ | |||
814 | 463 | """ | 516 | """ |
815 | 464 | Build the list of headers needed in order to perform S3 operations. | 517 | Build the list of headers needed in order to perform S3 operations. |
816 | 465 | """ | 518 | """ |
819 | 466 | headers = {"Content-Length": len(self.data), | 519 | if self.body_producer: |
820 | 467 | "Content-MD5": calculate_md5(self.data), | 520 | content_length = self.body_producer.length |
821 | 521 | else: | ||
822 | 522 | content_length = len(self.data) | ||
823 | 523 | headers = {"Content-Length": content_length, | ||
824 | 468 | "Date": self.date} | 524 | "Date": self.date} |
825 | 525 | if self.body_producer is None: | ||
826 | 526 | headers["Content-MD5"] = calculate_md5(self.data) | ||
827 | 469 | for key, value in self.metadata.iteritems(): | 527 | for key, value in self.metadata.iteritems(): |
828 | 470 | headers["x-amz-meta-" + key] = value | 528 | headers["x-amz-meta-" + key] = value |
829 | 471 | for key, value in self.amz_headers.iteritems(): | 529 | for key, value in self.amz_headers.iteritems(): |
830 | @@ -529,5 +587,6 @@ | |||
831 | 529 | self.endpoint, self.bucket, self.object_name) | 587 | self.endpoint, self.bucket, self.object_name) |
832 | 530 | d = self.get_page( | 588 | d = self.get_page( |
833 | 531 | url_context.get_url(), method=self.action, postdata=self.data, | 589 | url_context.get_url(), method=self.action, postdata=self.data, |
835 | 532 | headers=self.get_headers()) | 590 | headers=self.get_headers(), body_producer=self.body_producer, |
836 | 591 | receiver_factory=self.receiver_factory) | ||
837 | 533 | return d.addErrback(s3_error_wrapper) | 592 | return d.addErrback(s3_error_wrapper) |
838 | 534 | 593 | ||
839 | === modified file 'txaws/s3/model.py' | |||
840 | --- txaws/s3/model.py 2012-01-28 00:42:38 +0000 | |||
841 | +++ txaws/s3/model.py 2012-02-10 21:24:20 +0000 | |||
842 | @@ -150,3 +150,32 @@ | |||
843 | 150 | """ | 150 | """ |
844 | 151 | root = XML(xml_bytes) | 151 | root = XML(xml_bytes) |
845 | 152 | return cls(root.findtext("Payer")) | 152 | return cls(root.findtext("Payer")) |
846 | 153 | |||
847 | 154 | |||
848 | 155 | class MultipartInitiationResponse(object): | ||
849 | 156 | """ | ||
850 | 157 | A response to Initiate Multipart Upload | ||
851 | 158 | """ | ||
852 | 159 | |||
853 | 160 | def __init__(self, bucket, object_name, upload_id): | ||
854 | 161 | """ | ||
855 | 162 | @param bucket: The bucket name | ||
856 | 163 | @param object_name: The object name | ||
857 | 164 | @param upload_id: The upload id | ||
858 | 165 | """ | ||
859 | 166 | self.bucket = bucket | ||
860 | 167 | self.object_name = object_name | ||
861 | 168 | self.upload_id = upload_id | ||
862 | 169 | |||
863 | 170 | @classmethod | ||
864 | 171 | def from_xml(cls, xml_bytes): | ||
865 | 172 | """ | ||
866 | 173 | Create an instance of this from XML bytes. | ||
867 | 174 | |||
868 | 175 | @param xml_bytes: C{str} bytes of XML to parse | ||
869 | 176 | @return: and instance of L{MultipartInitiationResponse} | ||
870 | 177 | """ | ||
871 | 178 | root = XML(xml_bytes) | ||
872 | 179 | return cls(root.findtext('Bucket'), | ||
873 | 180 | root.findtext('Key'), | ||
874 | 181 | root.findtext('UploadId')) | ||
875 | 153 | 182 | ||
876 | === modified file 'txaws/s3/tests/test_client.py' | |||
877 | --- txaws/s3/tests/test_client.py 2012-01-28 00:44:53 +0000 | |||
878 | +++ txaws/s3/tests/test_client.py 2012-02-10 21:24:20 +0000 | |||
879 | @@ -9,7 +9,8 @@ | |||
880 | 9 | else: | 9 | else: |
881 | 10 | s3clientSkip = None | 10 | s3clientSkip = None |
882 | 11 | from txaws.s3.acls import AccessControlPolicy | 11 | from txaws.s3.acls import AccessControlPolicy |
884 | 12 | from txaws.s3.model import RequestPayment | 12 | from txaws.s3.model import RequestPayment, MultipartInitiationResponse |
885 | 13 | from txaws.testing.producers import StringBodyProducer | ||
886 | 13 | from txaws.service import AWSServiceEndpoint | 14 | from txaws.service import AWSServiceEndpoint |
887 | 14 | from txaws.testing import payload | 15 | from txaws.testing import payload |
888 | 15 | from txaws.testing.base import TXAWSTestCase | 16 | from txaws.testing.base import TXAWSTestCase |
889 | @@ -100,7 +101,8 @@ | |||
890 | 100 | 101 | ||
891 | 101 | class StubQuery(client.Query): | 102 | class StubQuery(client.Query): |
892 | 102 | 103 | ||
894 | 103 | def __init__(query, action, creds, endpoint): | 104 | def __init__(query, action, creds, endpoint, |
895 | 105 | body_producer=None, receiver_factory=None): | ||
896 | 104 | super(StubQuery, query).__init__( | 106 | super(StubQuery, query).__init__( |
897 | 105 | action=action, creds=creds) | 107 | action=action, creds=creds) |
898 | 106 | self.assertEquals(action, "GET") | 108 | self.assertEquals(action, "GET") |
899 | @@ -134,7 +136,8 @@ | |||
900 | 134 | 136 | ||
901 | 135 | class StubQuery(client.Query): | 137 | class StubQuery(client.Query): |
902 | 136 | 138 | ||
904 | 137 | def __init__(query, action, creds, endpoint, bucket=None): | 139 | def __init__(query, action, creds, endpoint, bucket=None, |
905 | 140 | body_producer=None, receiver_factory=None): | ||
906 | 138 | super(StubQuery, query).__init__( | 141 | super(StubQuery, query).__init__( |
907 | 139 | action=action, creds=creds, bucket=bucket) | 142 | action=action, creds=creds, bucket=bucket) |
908 | 140 | self.assertEquals(action, "PUT") | 143 | self.assertEquals(action, "PUT") |
909 | @@ -156,7 +159,8 @@ | |||
910 | 156 | 159 | ||
911 | 157 | class StubQuery(client.Query): | 160 | class StubQuery(client.Query): |
912 | 158 | 161 | ||
914 | 159 | def __init__(query, action, creds, endpoint, bucket=None): | 162 | def __init__(query, action, creds, endpoint, bucket=None, |
915 | 163 | body_producer=None, receiver_factory=None): | ||
916 | 160 | super(StubQuery, query).__init__( | 164 | super(StubQuery, query).__init__( |
917 | 161 | action=action, creds=creds, bucket=bucket) | 165 | action=action, creds=creds, bucket=bucket) |
918 | 162 | self.assertEquals(action, "GET") | 166 | self.assertEquals(action, "GET") |
919 | @@ -208,7 +212,8 @@ | |||
920 | 208 | class StubQuery(client.Query): | 212 | class StubQuery(client.Query): |
921 | 209 | 213 | ||
922 | 210 | def __init__(query, action, creds, endpoint, bucket=None, | 214 | def __init__(query, action, creds, endpoint, bucket=None, |
924 | 211 | object_name=None): | 215 | object_name=None, body_producer=None, |
925 | 216 | receiver_factory=None): | ||
926 | 212 | super(StubQuery, query).__init__(action=action, creds=creds, | 217 | super(StubQuery, query).__init__(action=action, creds=creds, |
927 | 213 | bucket=bucket, | 218 | bucket=bucket, |
928 | 214 | object_name=object_name) | 219 | object_name=object_name) |
929 | @@ -243,7 +248,8 @@ | |||
930 | 243 | class StubQuery(client.Query): | 248 | class StubQuery(client.Query): |
931 | 244 | 249 | ||
932 | 245 | def __init__(query, action, creds, endpoint, bucket=None, | 250 | def __init__(query, action, creds, endpoint, bucket=None, |
934 | 246 | object_name=None): | 251 | object_name=None, body_producer=None, |
935 | 252 | receiver_factory=None): | ||
936 | 247 | super(StubQuery, query).__init__(action=action, creds=creds, | 253 | super(StubQuery, query).__init__(action=action, creds=creds, |
937 | 248 | bucket=bucket, | 254 | bucket=bucket, |
938 | 249 | object_name=object_name) | 255 | object_name=object_name) |
939 | @@ -284,7 +290,8 @@ | |||
940 | 284 | class StubQuery(client.Query): | 290 | class StubQuery(client.Query): |
941 | 285 | 291 | ||
942 | 286 | def __init__(query, action, creds, endpoint, bucket=None, | 292 | def __init__(query, action, creds, endpoint, bucket=None, |
944 | 287 | object_name=None): | 293 | object_name=None, body_producer=None, |
945 | 294 | receiver_factory=None): | ||
946 | 288 | super(StubQuery, query).__init__(action=action, creds=creds, | 295 | super(StubQuery, query).__init__(action=action, creds=creds, |
947 | 289 | bucket=bucket, | 296 | bucket=bucket, |
948 | 290 | object_name=object_name) | 297 | object_name=object_name) |
949 | @@ -323,7 +330,8 @@ | |||
950 | 323 | class StubQuery(client.Query): | 330 | class StubQuery(client.Query): |
951 | 324 | 331 | ||
952 | 325 | def __init__(query, action, creds, endpoint, bucket=None, | 332 | def __init__(query, action, creds, endpoint, bucket=None, |
954 | 326 | object_name=None): | 333 | object_name=None, body_producer=None, |
955 | 334 | receiver_factory=None): | ||
956 | 327 | super(StubQuery, query).__init__(action=action, creds=creds, | 335 | super(StubQuery, query).__init__(action=action, creds=creds, |
957 | 328 | bucket=bucket, | 336 | bucket=bucket, |
958 | 329 | object_name=object_name) | 337 | object_name=object_name) |
959 | @@ -360,7 +368,8 @@ | |||
960 | 360 | class StubQuery(client.Query): | 368 | class StubQuery(client.Query): |
961 | 361 | 369 | ||
962 | 362 | def __init__(query, action, creds, endpoint, bucket=None, | 370 | def __init__(query, action, creds, endpoint, bucket=None, |
964 | 363 | object_name=None): | 371 | object_name=None, body_producer=None, |
965 | 372 | receiver_factory=None): | ||
966 | 364 | super(StubQuery, query).__init__(action=action, creds=creds, | 373 | super(StubQuery, query).__init__(action=action, creds=creds, |
967 | 365 | bucket=bucket, | 374 | bucket=bucket, |
968 | 366 | object_name=object_name) | 375 | object_name=object_name) |
969 | @@ -396,7 +405,8 @@ | |||
970 | 396 | class StubQuery(client.Query): | 405 | class StubQuery(client.Query): |
971 | 397 | 406 | ||
972 | 398 | def __init__(query, action, creds, endpoint, bucket=None, | 407 | def __init__(query, action, creds, endpoint, bucket=None, |
974 | 399 | object_name=None): | 408 | object_name=None, body_producer=None, |
975 | 409 | receiver_factory=None): | ||
976 | 400 | super(StubQuery, query).__init__(action=action, creds=creds, | 410 | super(StubQuery, query).__init__(action=action, creds=creds, |
977 | 401 | bucket=bucket, | 411 | bucket=bucket, |
978 | 402 | object_name=object_name) | 412 | object_name=object_name) |
979 | @@ -433,7 +443,8 @@ | |||
980 | 433 | class StubQuery(client.Query): | 443 | class StubQuery(client.Query): |
981 | 434 | 444 | ||
982 | 435 | def __init__(query, action, creds, endpoint, bucket=None, | 445 | def __init__(query, action, creds, endpoint, bucket=None, |
984 | 436 | object_name=None): | 446 | object_name=None, body_producer=None, |
985 | 447 | receiver_factory=None): | ||
986 | 437 | super(StubQuery, query).__init__(action=action, creds=creds, | 448 | super(StubQuery, query).__init__(action=action, creds=creds, |
987 | 438 | bucket=bucket, | 449 | bucket=bucket, |
988 | 439 | object_name=object_name) | 450 | object_name=object_name) |
989 | @@ -473,7 +484,8 @@ | |||
990 | 473 | class StubQuery(client.Query): | 484 | class StubQuery(client.Query): |
991 | 474 | 485 | ||
992 | 475 | def __init__(query, action, creds, endpoint, bucket=None, | 486 | def __init__(query, action, creds, endpoint, bucket=None, |
994 | 476 | object_name=None): | 487 | object_name=None, body_producer=None, |
995 | 488 | receiver_factory=None): | ||
996 | 477 | super(StubQuery, query).__init__(action=action, creds=creds, | 489 | super(StubQuery, query).__init__(action=action, creds=creds, |
997 | 478 | bucket=bucket, | 490 | bucket=bucket, |
998 | 479 | object_name=object_name) | 491 | object_name=object_name) |
999 | @@ -509,7 +521,8 @@ | |||
1000 | 509 | class StubQuery(client.Query): | 521 | class StubQuery(client.Query): |
1001 | 510 | 522 | ||
1002 | 511 | def __init__(query, action, creds, endpoint, bucket=None, | 523 | def __init__(query, action, creds, endpoint, bucket=None, |
1004 | 512 | object_name=None): | 524 | object_name=None, body_producer=None, |
1005 | 525 | receiver_factory=None): | ||
1006 | 513 | super(StubQuery, query).__init__(action=action, creds=creds, | 526 | super(StubQuery, query).__init__(action=action, creds=creds, |
1007 | 514 | bucket=bucket, | 527 | bucket=bucket, |
1008 | 515 | object_name=object_name) | 528 | object_name=object_name) |
1009 | @@ -546,7 +559,8 @@ | |||
1010 | 546 | class StubQuery(client.Query): | 559 | class StubQuery(client.Query): |
1011 | 547 | 560 | ||
1012 | 548 | def __init__(query, action, creds, endpoint, bucket=None, | 561 | def __init__(query, action, creds, endpoint, bucket=None, |
1014 | 549 | object_name=None): | 562 | object_name=None, body_producer=None, |
1015 | 563 | receiver_factory=None): | ||
1016 | 550 | super(StubQuery, query).__init__(action=action, creds=creds, | 564 | super(StubQuery, query).__init__(action=action, creds=creds, |
1017 | 551 | bucket=bucket, | 565 | bucket=bucket, |
1018 | 552 | object_name=object_name) | 566 | object_name=object_name) |
1019 | @@ -576,7 +590,8 @@ | |||
1020 | 576 | 590 | ||
1021 | 577 | class StubQuery(client.Query): | 591 | class StubQuery(client.Query): |
1022 | 578 | 592 | ||
1024 | 579 | def __init__(query, action, creds, endpoint, bucket=None): | 593 | def __init__(query, action, creds, endpoint, bucket=None, |
1025 | 594 | body_producer=None, receiver_factory=None): | ||
1026 | 580 | super(StubQuery, query).__init__( | 595 | super(StubQuery, query).__init__( |
1027 | 581 | action=action, creds=creds, bucket=bucket) | 596 | action=action, creds=creds, bucket=bucket) |
1028 | 582 | self.assertEquals(action, "DELETE") | 597 | self.assertEquals(action, "DELETE") |
1029 | @@ -599,7 +614,8 @@ | |||
1030 | 599 | class StubQuery(client.Query): | 614 | class StubQuery(client.Query): |
1031 | 600 | 615 | ||
1032 | 601 | def __init__(query, action, creds, endpoint, bucket=None, | 616 | def __init__(query, action, creds, endpoint, bucket=None, |
1034 | 602 | object_name=None, data=""): | 617 | object_name=None, data="", body_producer=None, |
1035 | 618 | receiver_factory=None): | ||
1036 | 603 | super(StubQuery, query).__init__(action=action, creds=creds, | 619 | super(StubQuery, query).__init__(action=action, creds=creds, |
1037 | 604 | bucket=bucket, | 620 | bucket=bucket, |
1038 | 605 | object_name=object_name, | 621 | object_name=object_name, |
1039 | @@ -630,7 +646,8 @@ | |||
1040 | 630 | class StubQuery(client.Query): | 646 | class StubQuery(client.Query): |
1041 | 631 | 647 | ||
1042 | 632 | def __init__(query, action, creds, endpoint, bucket=None, | 648 | def __init__(query, action, creds, endpoint, bucket=None, |
1044 | 633 | object_name=None, data=""): | 649 | object_name=None, data="", receiver_factory=None, |
1045 | 650 | body_producer=None): | ||
1046 | 634 | super(StubQuery, query).__init__(action=action, creds=creds, | 651 | super(StubQuery, query).__init__(action=action, creds=creds, |
1047 | 635 | bucket=bucket, | 652 | bucket=bucket, |
1048 | 636 | object_name=object_name, | 653 | object_name=object_name, |
1049 | @@ -665,7 +682,7 @@ | |||
1050 | 665 | 682 | ||
1051 | 666 | def __init__(query, action, creds, endpoint, bucket=None, | 683 | def __init__(query, action, creds, endpoint, bucket=None, |
1052 | 667 | object_name=None, data=None, content_type=None, | 684 | object_name=None, data=None, content_type=None, |
1054 | 668 | metadata=None): | 685 | metadata=None, body_producer=None, receiver_factory=None): |
1055 | 669 | super(StubQuery, query).__init__( | 686 | super(StubQuery, query).__init__( |
1056 | 670 | action=action, creds=creds, bucket=bucket, | 687 | action=action, creds=creds, bucket=bucket, |
1057 | 671 | object_name=object_name, data=data, | 688 | object_name=object_name, data=data, |
1058 | @@ -701,7 +718,7 @@ | |||
1059 | 701 | 718 | ||
1060 | 702 | def __init__(query, action, creds, endpoint, bucket=None, | 719 | def __init__(query, action, creds, endpoint, bucket=None, |
1061 | 703 | object_name=None, data=None, content_type=None, | 720 | object_name=None, data=None, content_type=None, |
1063 | 704 | metadata=None): | 721 | metadata=None, body_producer=None, receiver_factory=None): |
1064 | 705 | super(StubQuery, query).__init__( | 722 | super(StubQuery, query).__init__( |
1065 | 706 | action=action, creds=creds, bucket=bucket, | 723 | action=action, creds=creds, bucket=bucket, |
1066 | 707 | object_name=object_name, data=data, | 724 | object_name=object_name, data=data, |
1067 | @@ -730,7 +747,8 @@ | |||
1068 | 730 | 747 | ||
1069 | 731 | def __init__(query, action, creds, endpoint, bucket=None, | 748 | def __init__(query, action, creds, endpoint, bucket=None, |
1070 | 732 | object_name=None, data=None, content_type=None, | 749 | object_name=None, data=None, content_type=None, |
1072 | 733 | metadata=None, amz_headers=None): | 750 | metadata=None, amz_headers=None, body_producer=None, |
1073 | 751 | receiver_factory=None): | ||
1074 | 734 | super(StubQuery, query).__init__( | 752 | super(StubQuery, query).__init__( |
1075 | 735 | action=action, creds=creds, bucket=bucket, | 753 | action=action, creds=creds, bucket=bucket, |
1076 | 736 | object_name=object_name, data=data, | 754 | object_name=object_name, data=data, |
1077 | @@ -756,6 +774,42 @@ | |||
1078 | 756 | metadata={"key": "some meta data"}, | 774 | metadata={"key": "some meta data"}, |
1079 | 757 | amz_headers={"acl": "public-read"}) | 775 | amz_headers={"acl": "public-read"}) |
1080 | 758 | 776 | ||
1081 | 777 | def test_put_object_with_custom_body_producer(self): | ||
1082 | 778 | |||
1083 | 779 | class StubQuery(client.Query): | ||
1084 | 780 | |||
1085 | 781 | def __init__(query, action, creds, endpoint, bucket=None, | ||
1086 | 782 | object_name=None, data=None, content_type=None, | ||
1087 | 783 | metadata=None, amz_headers=None, body_producer=None, | ||
1088 | 784 | receiver_factory=None): | ||
1089 | 785 | super(StubQuery, query).__init__( | ||
1090 | 786 | action=action, creds=creds, bucket=bucket, | ||
1091 | 787 | object_name=object_name, data=data, | ||
1092 | 788 | content_type=content_type, metadata=metadata, | ||
1093 | 789 | amz_headers=amz_headers, body_producer=body_producer) | ||
1094 | 790 | self.assertEqual(action, "PUT") | ||
1095 | 791 | self.assertEqual(creds.access_key, "foo") | ||
1096 | 792 | self.assertEqual(creds.secret_key, "bar") | ||
1097 | 793 | self.assertEqual(query.bucket, "mybucket") | ||
1098 | 794 | self.assertEqual(query.object_name, "objectname") | ||
1099 | 795 | self.assertEqual(query.content_type, "text/plain") | ||
1100 | 796 | self.assertEqual(query.metadata, {"key": "some meta data"}) | ||
1101 | 797 | self.assertEqual(query.amz_headers, {"acl": "public-read"}) | ||
1102 | 798 | self.assertIdentical(body_producer, string_producer) | ||
1103 | 799 | |||
1104 | 800 | def submit(query): | ||
1105 | 801 | return succeed(None) | ||
1106 | 802 | |||
1107 | 803 | |||
1108 | 804 | string_producer = StringBodyProducer("some data") | ||
1109 | 805 | creds = AWSCredentials("foo", "bar") | ||
1110 | 806 | s3 = client.S3Client(creds, query_factory=StubQuery) | ||
1111 | 807 | return s3.put_object("mybucket", "objectname", | ||
1112 | 808 | content_type="text/plain", | ||
1113 | 809 | metadata={"key": "some meta data"}, | ||
1114 | 810 | amz_headers={"acl": "public-read"}, | ||
1115 | 811 | body_producer=string_producer) | ||
1116 | 812 | |||
1117 | 759 | def test_copy_object(self): | 813 | def test_copy_object(self): |
1118 | 760 | """ | 814 | """ |
1119 | 761 | L{S3Client.copy_object} creates a L{Query} to copy an object from one | 815 | L{S3Client.copy_object} creates a L{Query} to copy an object from one |
1120 | @@ -766,7 +820,8 @@ | |||
1121 | 766 | 820 | ||
1122 | 767 | def __init__(query, action, creds, endpoint, bucket=None, | 821 | def __init__(query, action, creds, endpoint, bucket=None, |
1123 | 768 | object_name=None, data=None, content_type=None, | 822 | object_name=None, data=None, content_type=None, |
1125 | 769 | metadata=None, amz_headers=None): | 823 | metadata=None, amz_headers=None, body_producer=None, |
1126 | 824 | receiver_factory=None): | ||
1127 | 770 | super(StubQuery, query).__init__( | 825 | super(StubQuery, query).__init__( |
1128 | 771 | action=action, creds=creds, bucket=bucket, | 826 | action=action, creds=creds, bucket=bucket, |
1129 | 772 | object_name=object_name, data=data, | 827 | object_name=object_name, data=data, |
1130 | @@ -798,7 +853,8 @@ | |||
1131 | 798 | 853 | ||
1132 | 799 | def __init__(query, action, creds, endpoint, bucket=None, | 854 | def __init__(query, action, creds, endpoint, bucket=None, |
1133 | 800 | object_name=None, data=None, content_type=None, | 855 | object_name=None, data=None, content_type=None, |
1135 | 801 | metadata=None, amz_headers=None): | 856 | metadata=None, amz_headers=None, body_producer=None, |
1136 | 857 | receiver_factory=None): | ||
1137 | 802 | super(StubQuery, query).__init__( | 858 | super(StubQuery, query).__init__( |
1138 | 803 | action=action, creds=creds, bucket=bucket, | 859 | action=action, creds=creds, bucket=bucket, |
1139 | 804 | object_name=object_name, data=data, | 860 | object_name=object_name, data=data, |
1140 | @@ -822,7 +878,7 @@ | |||
1141 | 822 | 878 | ||
1142 | 823 | def __init__(query, action, creds, endpoint, bucket=None, | 879 | def __init__(query, action, creds, endpoint, bucket=None, |
1143 | 824 | object_name=None, data=None, content_type=None, | 880 | object_name=None, data=None, content_type=None, |
1145 | 825 | metadata=None): | 881 | metadata=None, body_producer=None, receiver_factory=None): |
1146 | 826 | super(StubQuery, query).__init__( | 882 | super(StubQuery, query).__init__( |
1147 | 827 | action=action, creds=creds, bucket=bucket, | 883 | action=action, creds=creds, bucket=bucket, |
1148 | 828 | object_name=object_name, data=data, | 884 | object_name=object_name, data=data, |
1149 | @@ -846,7 +902,7 @@ | |||
1150 | 846 | 902 | ||
1151 | 847 | def __init__(query, action, creds, endpoint, bucket=None, | 903 | def __init__(query, action, creds, endpoint, bucket=None, |
1152 | 848 | object_name=None, data=None, content_type=None, | 904 | object_name=None, data=None, content_type=None, |
1154 | 849 | metadata=None): | 905 | metadata=None, body_producer=None, receiver_factory=None): |
1155 | 850 | super(StubQuery, query).__init__( | 906 | super(StubQuery, query).__init__( |
1156 | 851 | action=action, creds=creds, bucket=bucket, | 907 | action=action, creds=creds, bucket=bucket, |
1157 | 852 | object_name=object_name, data=data, | 908 | object_name=object_name, data=data, |
1158 | @@ -869,7 +925,8 @@ | |||
1159 | 869 | class StubQuery(client.Query): | 925 | class StubQuery(client.Query): |
1160 | 870 | 926 | ||
1161 | 871 | def __init__(query, action, creds, endpoint, bucket=None, | 927 | def __init__(query, action, creds, endpoint, bucket=None, |
1163 | 872 | object_name=None, data=""): | 928 | object_name=None, data="", body_producer=None, |
1164 | 929 | receiver_factory=None): | ||
1165 | 873 | super(StubQuery, query).__init__(action=action, creds=creds, | 930 | super(StubQuery, query).__init__(action=action, creds=creds, |
1166 | 874 | bucket=bucket, | 931 | bucket=bucket, |
1167 | 875 | object_name=object_name, | 932 | object_name=object_name, |
1168 | @@ -902,7 +959,8 @@ | |||
1169 | 902 | class StubQuery(client.Query): | 959 | class StubQuery(client.Query): |
1170 | 903 | 960 | ||
1171 | 904 | def __init__(query, action, creds, endpoint, bucket=None, | 961 | def __init__(query, action, creds, endpoint, bucket=None, |
1173 | 905 | object_name=None, data=""): | 962 | object_name=None, data="", body_producer=None, |
1174 | 963 | receiver_factory=None): | ||
1175 | 906 | super(StubQuery, query).__init__(action=action, creds=creds, | 964 | super(StubQuery, query).__init__(action=action, creds=creds, |
1176 | 907 | bucket=bucket, | 965 | bucket=bucket, |
1177 | 908 | object_name=object_name, | 966 | object_name=object_name, |
1178 | @@ -926,6 +984,68 @@ | |||
1179 | 926 | deferred = s3.get_object_acl("mybucket", "myobject") | 984 | deferred = s3.get_object_acl("mybucket", "myobject") |
1180 | 927 | return deferred.addCallback(check_result) | 985 | return deferred.addCallback(check_result) |
1181 | 928 | 986 | ||
1182 | 987 | def test_init_multipart_upload(self): | ||
1183 | 988 | |||
1184 | 989 | class StubQuery(client.Query): | ||
1185 | 990 | |||
1186 | 991 | def __init__(query, action, creds, endpoint, bucket=None, | ||
1187 | 992 | object_name=None, data="", body_producer=None, | ||
1188 | 993 | content_type=None, receiver_factory=None, metadata={}): | ||
1189 | 994 | super(StubQuery, query).__init__(action=action, creds=creds, | ||
1190 | 995 | bucket=bucket, | ||
1191 | 996 | object_name=object_name, | ||
1192 | 997 | data=data) | ||
1193 | 998 | self.assertEquals(action, "POST") | ||
1194 | 999 | self.assertEqual(creds.access_key, "foo") | ||
1195 | 1000 | self.assertEqual(creds.secret_key, "bar") | ||
1196 | 1001 | self.assertEqual(query.bucket, "example-bucket") | ||
1197 | 1002 | self.assertEqual(query.object_name, "example-object?uploads") | ||
1198 | 1003 | self.assertEqual(query.data, "") | ||
1199 | 1004 | self.assertEqual(query.metadata, {}) | ||
1200 | 1005 | |||
1201 | 1006 | def submit(query, url_context=None): | ||
1202 | 1007 | return succeed(payload.sample_s3_init_multipart_upload_result) | ||
1203 | 1008 | |||
1204 | 1009 | |||
1205 | 1010 | def check_result(result): | ||
1206 | 1011 | self.assert_(isinstance(result, MultipartInitiationResponse)) | ||
1207 | 1012 | self.assertEqual(result.bucket, "example-bucket") | ||
1208 | 1013 | self.assertEqual(result.object_name, "example-object") | ||
1209 | 1014 | self.assertEqual(result.upload_id, "deadbeef") | ||
1210 | 1015 | |||
1211 | 1016 | creds = AWSCredentials("foo", "bar") | ||
1212 | 1017 | s3 = client.S3Client(creds, query_factory=StubQuery) | ||
1213 | 1018 | deferred = s3.init_multipart_upload("example-bucket", "example-object") | ||
1214 | 1019 | return deferred.addCallback(check_result) | ||
1215 | 1020 | |||
1216 | 1021 | def test_upload_part(self): | ||
1217 | 1022 | |||
1218 | 1023 | class StubQuery(client.Query): | ||
1219 | 1024 | |||
1220 | 1025 | def __init__(query, action, creds, endpoint, bucket=None, | ||
1221 | 1026 | object_name=None, data="", body_producer=None, | ||
1222 | 1027 | content_type=None, receiver_factory=None, metadata={}): | ||
1223 | 1028 | super(StubQuery, query).__init__(action=action, creds=creds, | ||
1224 | 1029 | bucket=bucket, | ||
1225 | 1030 | object_name=object_name, | ||
1226 | 1031 | data=data) | ||
1227 | 1032 | self.assertEquals(action, "PUT") | ||
1228 | 1033 | self.assertEqual(creds.access_key, "foo") | ||
1229 | 1034 | self.assertEqual(creds.secret_key, "bar") | ||
1230 | 1035 | self.assertEqual(query.bucket, "example-bucket") | ||
1231 | 1036 | self.assertEqual(query.object_name, | ||
1232 | 1037 | "example-object?partNumber=3&uploadId=testid") | ||
1233 | 1038 | self.assertEqual(query.data, "some data") | ||
1234 | 1039 | self.assertEqual(query.metadata, {}) | ||
1235 | 1040 | |||
1236 | 1041 | def submit(query, url_context=None): | ||
1237 | 1042 | return succeed(None) | ||
1238 | 1043 | |||
1239 | 1044 | creds = AWSCredentials("foo", "bar") | ||
1240 | 1045 | s3 = client.S3Client(creds, query_factory=StubQuery) | ||
1241 | 1046 | return s3.upload_part("example-bucket", "example-object", "testid", 3, | ||
1242 | 1047 | "some data") | ||
1243 | 1048 | |||
1244 | 929 | S3ClientTestCase.skip = s3clientSkip | 1049 | S3ClientTestCase.skip = s3clientSkip |
1245 | 930 | 1050 | ||
1246 | 931 | 1051 | ||
1247 | @@ -1077,7 +1197,8 @@ | |||
1248 | 1077 | """ | 1197 | """ |
1249 | 1078 | class StubQuery(client.Query): | 1198 | class StubQuery(client.Query): |
1250 | 1079 | 1199 | ||
1252 | 1080 | def __init__(query, action, creds, endpoint, bucket): | 1200 | def __init__(query, action, creds, endpoint, bucket, |
1253 | 1201 | body_producer=None, receiver_factory=None): | ||
1254 | 1081 | super(StubQuery, query).__init__( | 1202 | super(StubQuery, query).__init__( |
1255 | 1082 | action=action, creds=creds, bucket=bucket) | 1203 | action=action, creds=creds, bucket=bucket) |
1256 | 1083 | self.assertEquals(action, "GET") | 1204 | self.assertEquals(action, "GET") |
1257 | 1084 | 1205 | ||
1258 | === modified file 'txaws/testing/payload.py' | |||
1259 | --- txaws/testing/payload.py 2012-01-28 00:39:00 +0000 | |||
1260 | +++ txaws/testing/payload.py 2012-02-10 21:24:20 +0000 | |||
1261 | @@ -1085,3 +1085,12 @@ | |||
1262 | 1085 | <Status>Enabled</Status> | 1085 | <Status>Enabled</Status> |
1263 | 1086 | <MfaDelete>Disabled</MfaDelete> | 1086 | <MfaDelete>Disabled</MfaDelete> |
1264 | 1087 | </VersioningConfiguration>""" | 1087 | </VersioningConfiguration>""" |
1265 | 1088 | |||
1266 | 1089 | sample_s3_init_multipart_upload_result = """\ | ||
1267 | 1090 | <InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> | ||
1268 | 1091 | <Bucket>example-bucket</Bucket> | ||
1269 | 1092 | <Key>example-object</Key> | ||
1270 | 1093 | <UploadId>deadbeef</UploadId> | ||
1271 | 1094 | </InitiateMultipartUploadResult>""" | ||
1272 | 1095 | |||
1273 | 1096 | |||
1274 | 1088 | 1097 | ||
1275 | === added file 'txaws/testing/producers.py' | |||
1276 | --- txaws/testing/producers.py 1970-01-01 00:00:00 +0000 | |||
1277 | +++ txaws/testing/producers.py 2012-02-10 21:24:20 +0000 | |||
1278 | @@ -0,0 +1,23 @@ | |||
1279 | 1 | from zope.interface import implements | ||
1280 | 2 | |||
1281 | 3 | from twisted.internet.defer import succeed | ||
1282 | 4 | from twisted.web.iweb import IBodyProducer | ||
1283 | 5 | |||
1284 | 6 | class StringBodyProducer(object): | ||
1285 | 7 | implements(IBodyProducer) | ||
1286 | 8 | |||
1287 | 9 | def __init__(self, data): | ||
1288 | 10 | self.data = data | ||
1289 | 11 | self.length = len(data) | ||
1290 | 12 | self.written = None | ||
1291 | 13 | |||
1292 | 14 | def startProducing(self, consumer): | ||
1293 | 15 | consumer.write(self.data) | ||
1294 | 16 | self.written = self.data | ||
1295 | 17 | return succeed(None) | ||
1296 | 18 | |||
1297 | 19 | def pauseProducing(self): | ||
1298 | 20 | pass | ||
1299 | 21 | |||
1300 | 22 | def stopProducing(self): | ||
1301 | 23 | pass |
Merge away!