Merge ~adam-collard/simplestreams:remove-requests-read-timeout into simplestreams:master

Proposed by Adam Collard
Status: Merged
Approved by: Paride Legovini
Approved revision: 10bf9cc60f5c35a996f32a0300d831b40526e00e
Merge reported by: Server Team CI bot
Merged at revision: not available
Proposed branch: ~adam-collard/simplestreams:remove-requests-read-timeout
Merge into: simplestreams:master
Diff against target: 110 lines (+29/-10)
2 files modified
simplestreams/contentsource.py (+18/-8)
tests/unittests/test_contentsource.py (+11/-2)
Reviewer Review Type Date Requested Status
Server Team CI bot continuous-integration Approve
Paride Legovini Approve
Review via email: mp+409434@code.launchpad.net

Commit message

Remove requests read timeout, whilst retaining connect timeout

Drive-by: make the negative tests actually test negative sizes

Fix for bug introduced in f37d2ed - the intention there was only
for a connect timeout and not a read timeout.

Description of the change

Changed the timeout to just over a multiple of 3s to match advice in https://docs.python-requests.org/en/master/user/advanced/#timeouts

To post a comment you must log in.
Revision history for this message
Server Team CI bot (server-team-bot) wrote :
review: Approve (continuous-integration)
Revision history for this message
Paride Legovini (paride) wrote :

LGTM, thanks for the pointer on why 9.05s, TIL!

review: Approve
Revision history for this message
Server Team CI bot (server-team-bot) wrote :

Commit message lints:
- Line #4 has 36 too many characters. Line starts with: "Fix for bug introduced"...

review: Needs Fixing
Revision history for this message
Paride Legovini (paride) wrote (last edit ):

Rewrapped

review: Approve
Revision history for this message
Server Team CI bot (server-team-bot) :
review: Approve (continuous-integration)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
diff --git a/simplestreams/contentsource.py b/simplestreams/contentsource.py
index b54e898..55028a7 100644
--- a/simplestreams/contentsource.py
+++ b/simplestreams/contentsource.py
@@ -32,7 +32,7 @@ else:
     urllib_error = urllib_request
 
 READ_BUFFER_SIZE = 1024 * 10
-TIMEOUT = 10
+TIMEOUT = 9.05
 
 try:
     # We try to use requests because we can do gzip encoding with it.
@@ -322,6 +322,9 @@ class FileReader(UrlReader):
 
 
 class Urllib2UrlReader(UrlReader):
+
+    timeout = TIMEOUT
+
     def __init__(self, url, offset=None, user_agent=None):
         (url, username, password) = parse_url_auth(url)
         self.url = url
@@ -339,7 +342,7 @@ class Urllib2UrlReader(UrlReader):
                 req.add_header('User-Agent', user_agent)
             if offset is not None:
                 req.add_header('Range', 'bytes=%d-' % offset)
-            self.req = opener(req, timeout=TIMEOUT)
+            self.req = opener(req, timeout=self.timeout)
         except urllib_error.HTTPError as e:
             if e.code == 404:
                 myerr = IOError("Unable to open %s" % url)
@@ -355,11 +358,18 @@ class Urllib2UrlReader(UrlReader):
 
 
 class RequestsUrlReader(UrlReader):
-    # This provides a url reader that supports deflate/gzip encoding
-    # but still implements 'read'.
-    # r = RequestsUrlReader(http://example.com)
-    # r.read(10)
-    # r.close()
+    """
+    This provides a url reader that supports deflate/gzip encoding
+    but still implements 'read'.
+    r = RequestsUrlReader('http://example.com')
+    r.read(10)
+    r.close()
+    """
+
+    # requests takes a 2-tuple for timeout, the first being a
+    # connect timeout, the second being a read timeout.
+    timeout = (TIMEOUT, None)
+
     def __init__(self, url, buflen=None, offset=None, user_agent=None):
         if requests is None:
             raise ImportError("Attempt to use RequestsUrlReader "
@@ -380,7 +390,7 @@ class RequestsUrlReader(UrlReader):
             headers = None
 
         self.req = requests.get(
-            url, stream=True, auth=auth, headers=headers, timeout=TIMEOUT
+            url, stream=True, auth=auth, headers=headers, timeout=self.timeout
         )
         self.r_iter = None
         if buflen is None:
diff --git a/tests/unittests/test_contentsource.py b/tests/unittests/test_contentsource.py
index 6487494..77a566d 100644
--- a/tests/unittests/test_contentsource.py
+++ b/tests/unittests/test_contentsource.py
@@ -170,6 +170,11 @@ class TestUrlContentSource(BaseDirUsingTestCase):
         self.assertEqual(data, self.fdata)
 
     @skipIf(contentsource.requests is None, "requests not available")
+    def test_requests_default_timeout(self):
+        self.assertEqual(contentsource.RequestsUrlReader.timeout,
+                         (contentsource.TIMEOUT, None))
+
+    @skipIf(contentsource.requests is None, "requests not available")
     def test_requests_url_read_handles_None(self):
         scs = self.getcs(self.fpath, contentsource.RequestsUrlReader)
         data = scs.read(None)
@@ -178,7 +183,7 @@ class TestUrlContentSource(BaseDirUsingTestCase):
     @skipIf(contentsource.requests is None, "requests not available")
     def test_requests_url_read_handles_negative_size(self):
         scs = self.getcs(self.fpath, contentsource.RequestsUrlReader)
-        data = scs.read(None)
+        data = scs.read(-2)
         self.assertEqual(data, self.fdata)
 
     @skipIf(contentsource.requests is None, "requests not available")
@@ -193,6 +198,10 @@ class TestUrlContentSource(BaseDirUsingTestCase):
         data = scs.read(3)
         self.assertEqual(data, self.fdata[0:3])
 
+    def test_urllib_default_timeout(self):
+        self.assertEqual(contentsource.Urllib2UrlReader.timeout,
+                         contentsource.TIMEOUT)
+
     def test_urllib_url_read_handles_None(self):
         scs = self.getcs(self.fpath, contentsource.Urllib2UrlReader)
         data = scs.read(None)
@@ -200,7 +209,7 @@ class TestUrlContentSource(BaseDirUsingTestCase):
 
     def test_urllib_url_read_handles_negative_size(self):
         scs = self.getcs(self.fpath, contentsource.Urllib2UrlReader)
-        data = scs.read(None)
+        data = scs.read(-2)
         self.assertEqual(data, self.fdata)
 
     def test_urllib_url_read_handles_no_size(self):

Subscribers

People subscribed via source and target branches