Merge lp:~cjwatson/launchpad/explicit-proxy-mirror-prober into lp:launchpad
- explicit-proxy-mirror-prober
- Merge into devel
Proposed by
Colin Watson
Status: Merged
Merged at revision: 18712
Proposed branch: | lp:~cjwatson/launchpad/explicit-proxy-mirror-prober |
Merge into: | lp:launchpad |
Prerequisite: | lp:~cjwatson/launchpad/no-default-http-proxy |
Diff against target: |
585 lines (+167/-110) 5 files modified
cronscripts/distributionmirror-prober.py (+4/-9) lib/lp/registry/scripts/distributionmirror_prober.py (+24/-29) lib/lp/registry/tests/test_distributionmirror_prober.py (+81/-50) lib/lp/services/tests/test_timeout.py (+50/-20) lib/lp/services/timeout.py (+8/-2) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/explicit-proxy-mirror-prober |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+348543@code.launchpad.net |
Commit message
Convert the mirror prober to use explicit proxy configuration.
Description of the change
To post a comment you must log in.
Revision history for this message
William Grant (wgrant):
review: Approve (code)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'cronscripts/distributionmirror-prober.py' |
2 | --- cronscripts/distributionmirror-prober.py 2013-01-07 02:40:55 +0000 |
3 | +++ cronscripts/distributionmirror-prober.py 2018-07-02 11:30:55 +0000 |
4 | @@ -1,14 +1,12 @@ |
5 | #!/usr/bin/python -S |
6 | # |
7 | -# Copyright 2009-2011 Canonical Ltd. This software is licensed under the |
8 | +# Copyright 2009-2018 Canonical Ltd. This software is licensed under the |
9 | # GNU Affero General Public License version 3 (see the file LICENSE). |
10 | |
11 | """Script to probe distribution mirrors and check how up-to-date they are.""" |
12 | |
13 | import _pythonpath |
14 | |
15 | -import os |
16 | - |
17 | from lp.registry.interfaces.distributionmirror import MirrorContent |
18 | from lp.registry.scripts.distributionmirror_prober import DistroMirrorProber |
19 | from lp.services.config import config |
20 | @@ -16,6 +14,7 @@ |
21 | LaunchpadCronScript, |
22 | LaunchpadScriptFailure, |
23 | ) |
24 | +from lp.services.timeout import set_default_timeout_function |
25 | |
26 | |
27 | class DistroMirrorProberScript(LaunchpadCronScript): |
28 | @@ -49,12 +48,8 @@ |
29 | 'Wrong value for argument --content-type: %s' |
30 | % self.options.content_type) |
31 | |
32 | - if config.distributionmirrorprober.use_proxy: |
33 | - os.environ['http_proxy'] = config.launchpad.http_proxy |
34 | - self.logger.debug("Using %s as proxy." % os.environ['http_proxy']) |
35 | - else: |
36 | - self.logger.debug("Not using any proxy.") |
37 | - |
38 | + set_default_timeout_function( |
39 | + lambda: config.distributionmirrorprober.timeout) |
40 | DistroMirrorProber(self.txn, self.logger).probe( |
41 | content_type, self.options.no_remote_hosts, self.options.force, |
42 | self.options.max_mirrors, not self.options.no_owner_notification) |
43 | |
44 | === modified file 'lib/lp/registry/scripts/distributionmirror_prober.py' |
45 | --- lib/lp/registry/scripts/distributionmirror_prober.py 2017-05-22 09:33:45 +0000 |
46 | +++ lib/lp/registry/scripts/distributionmirror_prober.py 2018-07-02 11:30:55 +0000 |
47 | @@ -1,4 +1,4 @@ |
48 | -# Copyright 2009-2017 Canonical Ltd. This software is licensed under the |
49 | +# Copyright 2009-2018 Canonical Ltd. This software is licensed under the |
50 | # GNU Affero General Public License version 3 (see the file LICENSE). |
51 | |
52 | __metaclass__ = type |
53 | @@ -10,12 +10,12 @@ |
54 | import httplib |
55 | import itertools |
56 | import logging |
57 | -import os |
58 | +import os.path |
59 | from StringIO import StringIO |
60 | import urllib |
61 | -import urllib2 |
62 | import urlparse |
63 | |
64 | +import requests |
65 | from twisted.internet import ( |
66 | defer, |
67 | protocol, |
68 | @@ -36,6 +36,7 @@ |
69 | from lp.registry.interfaces.distroseries import IDistroSeries |
70 | from lp.services.config import config |
71 | from lp.services.librarian.interfaces import ILibraryFileAliasSet |
72 | +from lp.services.timeout import urlfetch |
73 | from lp.services.webapp import canonical_url |
74 | from lp.soyuz.interfaces.distroarchseries import IDistroArchSeries |
75 | |
76 | @@ -202,9 +203,9 @@ |
77 | request_path = None |
78 | |
79 | # Details of the URL of the host in which we'll connect, which will only |
80 | - # be different from request_* in case we have an http_proxy environment |
81 | - # variable --in that case the scheme, host and port will be the ones |
82 | - # extracted from http_proxy and the path will be self.url |
83 | + # be different from request_* in case we have a configured http_proxy -- |
84 | + # in that case the scheme, host and port will be the ones extracted from |
85 | + # http_proxy and the path will be self.url. |
86 | connect_scheme = None |
87 | connect_host = None |
88 | connect_port = None |
89 | @@ -279,7 +280,7 @@ |
90 | # XXX Guilherme Salgado 2006-09-19: |
91 | # We don't actually know how to handle FTP responses, but we |
92 | # expect to be behind a squid HTTP proxy with the patch at |
93 | - # http://www.squid-cache.org/bugs/show_bug.cgi?id=1758 applied. So, if |
94 | + # https://bugs.squid-cache.org/show_bug.cgi?id=1758 applied. So, if |
95 | # you encounter any problems with FTP URLs you'll probably have to nag |
96 | # the sysadmins to fix squid for you. |
97 | if scheme not in ('http', 'ftp'): |
98 | @@ -296,9 +297,9 @@ |
99 | if self.request_host not in host_timeouts: |
100 | host_timeouts[self.request_host] = 0 |
101 | |
102 | - # If the http_proxy variable is set, we want to use it as the host |
103 | - # we're going to connect to. |
104 | - proxy = os.getenv('http_proxy') |
105 | + # If launchpad.http_proxy is set in our configuration, we want to |
106 | + # use it as the host we're going to connect to. |
107 | + proxy = config.launchpad.http_proxy |
108 | if proxy: |
109 | scheme, host, port, dummy = _parse(proxy) |
110 | path = url |
111 | @@ -612,31 +613,25 @@ |
112 | return failure |
113 | |
114 | |
115 | -def _build_request_for_cdimage_file_list(url): |
116 | - headers = {'Pragma': 'no-cache', 'Cache-control': 'no-cache'} |
117 | - return urllib2.Request(url, headers=headers) |
118 | - |
119 | - |
120 | def _get_cdimage_file_list(): |
121 | url = config.distributionmirrorprober.cdimage_file_list_url |
122 | + # In test environments, this may be a file: URL. Adjust it to be in a |
123 | + # form that requests can cope with (i.e. using an absolute path). |
124 | + parsed_url = urlparse.urlparse(url) |
125 | + if parsed_url.scheme == 'file' and not os.path.isabs(parsed_url.path): |
126 | + assert parsed_url.path == parsed_url[2] |
127 | + parsed_url = list(parsed_url) |
128 | + parsed_url[2] = os.path.join(config.root, parsed_url[2]) |
129 | + url = urlparse.urlunparse(parsed_url) |
130 | try: |
131 | - return urllib2.urlopen(_build_request_for_cdimage_file_list(url)) |
132 | - except urllib2.URLError as e: |
133 | + return urlfetch( |
134 | + url, headers={'Pragma': 'no-cache', 'Cache-control': 'no-cache'}, |
135 | + trust_env=False, use_proxy=True, allow_file=True) |
136 | + except requests.RequestException as e: |
137 | raise UnableToFetchCDImageFileList( |
138 | 'Unable to fetch %s: %s' % (url, e)) |
139 | |
140 | |
141 | -def restore_http_proxy(http_proxy): |
142 | - """Restore the http_proxy environment variable to the given value.""" |
143 | - if http_proxy is None: |
144 | - try: |
145 | - del os.environ['http_proxy'] |
146 | - except KeyError: |
147 | - pass |
148 | - else: |
149 | - os.environ['http_proxy'] = http_proxy |
150 | - |
151 | - |
152 | def get_expected_cdimage_paths(): |
153 | """Get all paths where we can find CD image files on a cdimage mirror. |
154 | |
155 | @@ -648,7 +643,7 @@ |
156 | UnableToFetchCDImageFileList exception will be raised. |
157 | """ |
158 | d = {} |
159 | - for line in _get_cdimage_file_list().readlines(): |
160 | + for line in _get_cdimage_file_list().iter_lines(): |
161 | flavour, seriesname, path, size = line.split('\t') |
162 | paths = d.setdefault((flavour, seriesname), []) |
163 | paths.append(path.lstrip('/')) |
164 | |
165 | === modified file 'lib/lp/registry/tests/test_distributionmirror_prober.py' |
166 | --- lib/lp/registry/tests/test_distributionmirror_prober.py 2018-01-02 16:10:26 +0000 |
167 | +++ lib/lp/registry/tests/test_distributionmirror_prober.py 2018-07-02 11:30:55 +0000 |
168 | @@ -1,4 +1,4 @@ |
169 | -# Copyright 2009-2016 Canonical Ltd. This software is licensed under the |
170 | +# Copyright 2009-2018 Canonical Ltd. This software is licensed under the |
171 | # GNU Affero General Public License version 3 (see the file LICENSE). |
172 | |
173 | """distributionmirror-prober tests.""" |
174 | @@ -13,13 +13,23 @@ |
175 | from StringIO import StringIO |
176 | |
177 | from lazr.uri import URI |
178 | +import responses |
179 | from sqlobject import SQLObjectNotFound |
180 | +from testtools.matchers import ( |
181 | + ContainsDict, |
182 | + Equals, |
183 | + MatchesStructure, |
184 | + ) |
185 | +from testtools.twistedsupport import ( |
186 | + assert_fails_with, |
187 | + AsynchronousDeferredRunTest, |
188 | + AsynchronousDeferredRunTestForBrokenTwisted, |
189 | + ) |
190 | from twisted.internet import ( |
191 | defer, |
192 | reactor, |
193 | ) |
194 | from twisted.python.failure import Failure |
195 | -from twisted.trial.unittest import TestCase as TrialTestCase |
196 | from twisted.web import server |
197 | from zope.component import getUtility |
198 | from zope.security.proxy import removeSecurityProxy |
199 | @@ -29,7 +39,7 @@ |
200 | from lp.registry.model.distributionmirror import DistributionMirror |
201 | from lp.registry.scripts import distributionmirror_prober |
202 | from lp.registry.scripts.distributionmirror_prober import ( |
203 | - _build_request_for_cdimage_file_list, |
204 | + _get_cdimage_file_list, |
205 | ArchiveMirrorProberCallbacks, |
206 | BadResponseCode, |
207 | ConnectionSkipped, |
208 | @@ -50,7 +60,6 @@ |
209 | RedirectAwareProberProtocol, |
210 | RedirectToDifferentFile, |
211 | RequestManager, |
212 | - restore_http_proxy, |
213 | should_skip_host, |
214 | UnknownURLSchemeAfterRedirect, |
215 | ) |
216 | @@ -59,6 +68,7 @@ |
217 | ) |
218 | from lp.services.config import config |
219 | from lp.services.daemons.tachandler import TacTestSetup |
220 | +from lp.services.timeout import default_timeout |
221 | from lp.testing import ( |
222 | clean_up_reactor, |
223 | TestCase, |
224 | @@ -93,39 +103,54 @@ |
225 | return os.path.join(self.root, 'distributionmirror_http_server.log') |
226 | |
227 | |
228 | -class TestProberProtocolAndFactory(TrialTestCase): |
229 | +class TestProberProtocolAndFactory(TestCase): |
230 | |
231 | layer = TwistedLayer |
232 | + run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory( |
233 | + timeout=30) |
234 | |
235 | def setUp(self): |
236 | - self.orig_proxy = os.getenv('http_proxy') |
237 | + super(TestProberProtocolAndFactory, self).setUp() |
238 | root = DistributionMirrorTestHTTPServer() |
239 | site = server.Site(root) |
240 | site.displayTracebacks = False |
241 | self.listening_port = reactor.listenTCP(0, site) |
242 | + self.addCleanup(self.listening_port.stopListening) |
243 | self.port = self.listening_port.getHost().port |
244 | self.urls = {'timeout': u'http://localhost:%s/timeout' % self.port, |
245 | '200': u'http://localhost:%s/valid-mirror' % self.port, |
246 | '500': u'http://localhost:%s/error' % self.port, |
247 | '404': u'http://localhost:%s/invalid-mirror' % self.port} |
248 | - |
249 | - def tearDown(self): |
250 | - restore_http_proxy(self.orig_proxy) |
251 | - return self.listening_port.stopListening() |
252 | + self.pushConfig('launchpad', http_proxy=None) |
253 | |
254 | def _createProberAndProbe(self, url): |
255 | prober = ProberFactory(url) |
256 | return prober.probe() |
257 | |
258 | - def test_environment_http_proxy_is_handled_correctly(self): |
259 | - os.environ['http_proxy'] = 'http://squid.internal:3128' |
260 | - prober = ProberFactory(self.urls['200']) |
261 | - self.assertEqual(prober.request_host, 'localhost') |
262 | - self.assertEqual(prober.request_port, self.port) |
263 | - self.assertEqual(prober.request_path, '/valid-mirror') |
264 | - self.assertEqual(prober.connect_host, 'squid.internal') |
265 | - self.assertEqual(prober.connect_port, 3128) |
266 | - self.assertEqual(prober.connect_path, self.urls['200']) |
267 | + def test_config_no_http_proxy(self): |
268 | + prober = ProberFactory(self.urls['200']) |
269 | + self.assertThat(prober, MatchesStructure.byEquality( |
270 | + request_scheme='http', |
271 | + request_host='localhost', |
272 | + request_port=self.port, |
273 | + request_path='/valid-mirror', |
274 | + connect_scheme='http', |
275 | + connect_host='localhost', |
276 | + connect_port=self.port, |
277 | + connect_path='/valid-mirror')) |
278 | + |
279 | + def test_config_http_proxy(self): |
280 | + self.pushConfig('launchpad', http_proxy='http://squid.internal:3128') |
281 | + prober = ProberFactory(self.urls['200']) |
282 | + self.assertThat(prober, MatchesStructure.byEquality( |
283 | + request_scheme='http', |
284 | + request_host='localhost', |
285 | + request_port=self.port, |
286 | + request_path='/valid-mirror', |
287 | + connect_scheme='http', |
288 | + connect_host='squid.internal', |
289 | + connect_port=3128, |
290 | + connect_path=self.urls['200'])) |
291 | |
292 | def test_connect_cancels_existing_timeout_call(self): |
293 | prober = ProberFactory(self.urls['200']) |
294 | @@ -161,14 +186,10 @@ |
295 | return deferred.addCallback(restore_connect, orig_connect) |
296 | |
297 | def test_connect_to_proxy_when_http_proxy_exists(self): |
298 | - os.environ['http_proxy'] = 'http://squid.internal:3128' |
299 | + self.pushConfig('launchpad', http_proxy='http://squid.internal:3128') |
300 | self._test_connect_to_host(self.urls['200'], 'squid.internal') |
301 | |
302 | def test_connect_to_host_when_http_proxy_does_not_exist(self): |
303 | - try: |
304 | - del os.environ['http_proxy'] |
305 | - except KeyError: |
306 | - pass |
307 | self._test_connect_to_host(self.urls['200'], 'localhost') |
308 | |
309 | def test_probe_sets_up_timeout_call(self): |
310 | @@ -197,13 +218,13 @@ |
311 | prober = RedirectAwareProberFactory( |
312 | 'http://localhost:%s/redirect-infinite-loop' % self.port) |
313 | deferred = prober.probe() |
314 | - return self.assertFailure(deferred, InfiniteLoopDetected) |
315 | + return assert_fails_with(deferred, InfiniteLoopDetected) |
316 | |
317 | def test_redirectawareprober_fail_on_unknown_scheme(self): |
318 | prober = RedirectAwareProberFactory( |
319 | 'http://localhost:%s/redirect-unknown-url-scheme' % self.port) |
320 | deferred = prober.probe() |
321 | - return self.assertFailure(deferred, UnknownURLSchemeAfterRedirect) |
322 | + return assert_fails_with(deferred, UnknownURLSchemeAfterRedirect) |
323 | |
324 | def test_200(self): |
325 | d = self._createProberAndProbe(self.urls['200']) |
326 | @@ -237,15 +258,15 @@ |
327 | |
328 | def test_notfound(self): |
329 | d = self._createProberAndProbe(self.urls['404']) |
330 | - return self.assertFailure(d, BadResponseCode) |
331 | + return assert_fails_with(d, BadResponseCode) |
332 | |
333 | def test_500(self): |
334 | d = self._createProberAndProbe(self.urls['500']) |
335 | - return self.assertFailure(d, BadResponseCode) |
336 | + return assert_fails_with(d, BadResponseCode) |
337 | |
338 | def test_timeout(self): |
339 | d = self._createProberAndProbe(self.urls['timeout']) |
340 | - return self.assertFailure(d, ProberTimeout) |
341 | + return assert_fails_with(d, ProberTimeout) |
342 | |
343 | def test_prober_user_agent(self): |
344 | protocol = RedirectAwareProberProtocol() |
345 | @@ -380,7 +401,7 @@ |
346 | self.assertFalse(should_skip_host(self.host)) |
347 | |
348 | |
349 | -class TestProberFactoryRequestTimeoutRatioWithTwisted(TrialTestCase): |
350 | +class TestProberFactoryRequestTimeoutRatioWithTwisted(TestCase): |
351 | """Tests to ensure we stop issuing requests on a given host if the |
352 | requests/timeouts ratio on that host is too low. |
353 | |
354 | @@ -392,25 +413,29 @@ |
355 | """ |
356 | |
357 | layer = TwistedLayer |
358 | + run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30) |
359 | |
360 | def setUp(self): |
361 | - self.orig_host_requests = dict( |
362 | - distributionmirror_prober.host_requests) |
363 | - self.orig_host_timeouts = dict( |
364 | - distributionmirror_prober.host_timeouts) |
365 | + super(TestProberFactoryRequestTimeoutRatioWithTwisted, self).setUp() |
366 | + orig_host_requests = dict(distributionmirror_prober.host_requests) |
367 | + orig_host_timeouts = dict(distributionmirror_prober.host_timeouts) |
368 | distributionmirror_prober.host_requests = {} |
369 | distributionmirror_prober.host_timeouts = {} |
370 | + |
371 | + def restore_prober_globals(): |
372 | + # Restore the globals that our tests fiddle with. |
373 | + distributionmirror_prober.host_requests = orig_host_requests |
374 | + distributionmirror_prober.host_timeouts = orig_host_timeouts |
375 | + |
376 | + self.addCleanup(restore_prober_globals) |
377 | + |
378 | root = DistributionMirrorTestHTTPServer() |
379 | site = server.Site(root) |
380 | site.displayTracebacks = False |
381 | self.listening_port = reactor.listenTCP(0, site) |
382 | + self.addCleanup(self.listening_port.stopListening) |
383 | self.port = self.listening_port.getHost().port |
384 | - |
385 | - def tearDown(self): |
386 | - # Restore the globals that our tests fiddle with. |
387 | - distributionmirror_prober.host_requests = self.orig_host_requests |
388 | - distributionmirror_prober.host_timeouts = self.orig_host_timeouts |
389 | - return self.listening_port.stopListening() |
390 | + self.pushConfig('launchpad', http_proxy=None) |
391 | |
392 | def _createProberAndProbe(self, url): |
393 | prober = ProberFactory(url) |
394 | @@ -455,7 +480,7 @@ |
395 | |
396 | d = self._createProberAndProbe( |
397 | u'http://%s:%s/timeout' % (host, self.port)) |
398 | - return self.assertFailure(d, ConnectionSkipped) |
399 | + return assert_fails_with(d, ConnectionSkipped) |
400 | |
401 | |
402 | class TestMultiLock(TestCase): |
403 | @@ -842,7 +867,8 @@ |
404 | self.assertNotEqual(0, len(mirror.cdimage_series)) |
405 | # Note that calling this function won't actually probe any mirrors; we |
406 | # need to call reactor.run() to actually start the probing. |
407 | - probe_cdimage_mirror(mirror, StringIO(), [], logging) |
408 | + with default_timeout(15.0): |
409 | + probe_cdimage_mirror(mirror, StringIO(), [], logging) |
410 | self.assertEqual(0, len(mirror.cdimage_series)) |
411 | |
412 | def test_archive_mirror_probe_function(self): |
413 | @@ -856,8 +882,9 @@ |
414 | mirror1 = DistributionMirror.byName('releases-mirror') |
415 | mirror2 = DistributionMirror.byName('releases-mirror2') |
416 | mirror3 = DistributionMirror.byName('canonical-releases') |
417 | - self._test_one_semaphore_for_each_host( |
418 | - mirror1, mirror2, mirror3, probe_cdimage_mirror) |
419 | + with default_timeout(15.0): |
420 | + self._test_one_semaphore_for_each_host( |
421 | + mirror1, mirror2, mirror3, probe_cdimage_mirror) |
422 | |
423 | def _test_one_semaphore_for_each_host( |
424 | self, mirror1, mirror2, mirror3, probe_function): |
425 | @@ -905,20 +932,24 @@ |
426 | # When using an http_proxy, even though we'll actually connect to the |
427 | # proxy, we'll use the mirror's host as the key to find the semaphore |
428 | # that should be used |
429 | - orig_proxy = os.getenv('http_proxy') |
430 | - os.environ['http_proxy'] = 'http://squid.internal:3128/' |
431 | + self.pushConfig('launchpad', http_proxy='http://squid.internal:3128/') |
432 | probe_function(mirror3, StringIO(), [], logging) |
433 | self.assertEqual(len(request_manager.host_locks), 2) |
434 | - restore_http_proxy(orig_proxy) |
435 | |
436 | |
437 | class TestCDImageFileListFetching(TestCase): |
438 | |
439 | + @responses.activate |
440 | def test_no_cache(self): |
441 | url = 'http://releases.ubuntu.com/.manifest' |
442 | - request = _build_request_for_cdimage_file_list(url) |
443 | - self.assertEqual(request.headers['Pragma'], 'no-cache') |
444 | - self.assertEqual(request.headers['Cache-control'], 'no-cache') |
445 | + self.pushConfig('distributionmirrorprober', cdimage_file_list_url=url) |
446 | + responses.add('GET', url) |
447 | + with default_timeout(1.0): |
448 | + _get_cdimage_file_list() |
449 | + self.assertThat(responses.calls[0].request.headers, ContainsDict({ |
450 | + 'Pragma': Equals('no-cache'), |
451 | + 'Cache-control': Equals('no-cache'), |
452 | + })) |
453 | |
454 | |
455 | class TestLoggingMixin(TestCase): |
456 | |
457 | === modified file 'lib/lp/services/tests/test_timeout.py' |
458 | --- lib/lp/services/tests/test_timeout.py 2018-06-26 19:17:19 +0000 |
459 | +++ lib/lp/services/tests/test_timeout.py 2018-07-02 11:30:55 +0000 |
460 | @@ -367,26 +367,6 @@ |
461 | MatchesStructure.byEquality(status_code=200, content='Success.')) |
462 | t.join() |
463 | |
464 | - def test_urlfetch_does_not_support_ftp_urls(self): |
465 | - """urlfetch() does not support ftp urls.""" |
466 | - set_default_timeout_function(lambda: 1) |
467 | - self.addCleanup(set_default_timeout_function, None) |
468 | - url = 'ftp://localhost/' |
469 | - e = self.assertRaises(InvalidSchema, urlfetch, url) |
470 | - self.assertEqual( |
471 | - "No connection adapters were found for '%s'" % url, str(e)) |
472 | - |
473 | - def test_urlfetch_does_not_support_file_urls_by_default(self): |
474 | - """urlfetch() does not support file urls by default.""" |
475 | - set_default_timeout_function(lambda: 1) |
476 | - self.addCleanup(set_default_timeout_function, None) |
477 | - test_path = self.useFixture(TempDir()).join('file') |
478 | - write_file(test_path, '') |
479 | - url = 'file://' + test_path |
480 | - e = self.assertRaises(InvalidSchema, urlfetch, url) |
481 | - self.assertEqual( |
482 | - "No connection adapters were found for '%s'" % url, str(e)) |
483 | - |
484 | def test_urlfetch_no_proxy_by_default(self): |
485 | """urlfetch does not use a proxy by default.""" |
486 | self.pushConfig('launchpad', http_proxy='http://proxy.example:3128/') |
487 | @@ -418,6 +398,56 @@ |
488 | {scheme: proxy for scheme in ('http', 'https')}, |
489 | fake_send.calls[0][1]['proxies']) |
490 | |
491 | + def test_urlfetch_does_not_support_ftp_urls_by_default(self): |
492 | + """urlfetch() does not support ftp urls by default.""" |
493 | + set_default_timeout_function(lambda: 1) |
494 | + self.addCleanup(set_default_timeout_function, None) |
495 | + url = 'ftp://localhost/' |
496 | + e = self.assertRaises(InvalidSchema, urlfetch, url) |
497 | + self.assertEqual( |
498 | + "No connection adapters were found for '%s'" % url, str(e)) |
499 | + |
500 | + def test_urlfetch_supports_ftp_urls_if_allow_ftp(self): |
501 | + """urlfetch() supports ftp urls via a proxy if explicitly asked.""" |
502 | + sock, http_server_url = self.make_test_socket() |
503 | + sock.listen(1) |
504 | + |
505 | + def success_result(): |
506 | + (client_sock, client_addr) = sock.accept() |
507 | + # We only provide a test HTTP proxy, not anything beyond that. |
508 | + client_sock.sendall(dedent("""\ |
509 | + HTTP/1.0 200 Ok |
510 | + Content-Type: text/plain |
511 | + Content-Length: 8 |
512 | + |
513 | + Success.""")) |
514 | + client_sock.close() |
515 | + |
516 | + self.pushConfig('launchpad', http_proxy=http_server_url) |
517 | + t = threading.Thread(target=success_result) |
518 | + t.start() |
519 | + set_default_timeout_function(lambda: 1) |
520 | + self.addCleanup(set_default_timeout_function, None) |
521 | + response = urlfetch( |
522 | + 'ftp://example.com/', trust_env=False, use_proxy=True, |
523 | + allow_ftp=True) |
524 | + self.assertThat(response, MatchesStructure( |
525 | + status_code=Equals(200), |
526 | + headers=ContainsDict({'content-length': Equals('8')}), |
527 | + content=Equals('Success.'))) |
528 | + t.join() |
529 | + |
530 | + def test_urlfetch_does_not_support_file_urls_by_default(self): |
531 | + """urlfetch() does not support file urls by default.""" |
532 | + set_default_timeout_function(lambda: 1) |
533 | + self.addCleanup(set_default_timeout_function, None) |
534 | + test_path = self.useFixture(TempDir()).join('file') |
535 | + write_file(test_path, '') |
536 | + url = 'file://' + test_path |
537 | + e = self.assertRaises(InvalidSchema, urlfetch, url) |
538 | + self.assertEqual( |
539 | + "No connection adapters were found for '%s'" % url, str(e)) |
540 | + |
541 | def test_urlfetch_supports_file_urls_if_allow_file(self): |
542 | """urlfetch() supports file urls if explicitly asked to do so.""" |
543 | set_default_timeout_function(lambda: 1) |
544 | |
545 | === modified file 'lib/lp/services/timeout.py' |
546 | --- lib/lp/services/timeout.py 2018-06-26 19:17:19 +0000 |
547 | +++ lib/lp/services/timeout.py 2018-07-02 11:30:55 +0000 |
548 | @@ -324,8 +324,8 @@ |
549 | self.session = None |
550 | |
551 | @with_timeout(cleanup='cleanup') |
552 | - def fetch(self, url, trust_env=None, use_proxy=False, allow_file=False, |
553 | - output_file=None, **request_kwargs): |
554 | + def fetch(self, url, trust_env=None, use_proxy=False, allow_ftp=False, |
555 | + allow_file=False, output_file=None, **request_kwargs): |
556 | """Fetch the URL using a custom HTTP handler supporting timeout. |
557 | |
558 | :param url: The URL to fetch. |
559 | @@ -333,6 +333,7 @@ |
560 | to determine whether it fetches proxy configuration from the |
561 | environment. |
562 | :param use_proxy: If True, use Launchpad's configured proxy. |
563 | + :param allow_ftp: If True, allow ftp:// URLs. |
564 | :param allow_file: If True, allow file:// URLs. (Be careful to only |
565 | pass this if the URL is trusted.) |
566 | :param output_file: If not None, download the response content to |
567 | @@ -346,6 +347,9 @@ |
568 | # Mount our custom adapters. |
569 | self.session.mount("https://", CleanableHTTPAdapter()) |
570 | self.session.mount("http://", CleanableHTTPAdapter()) |
571 | + # We can do FTP, but currently only via an HTTP proxy. |
572 | + if allow_ftp and use_proxy: |
573 | + self.session.mount("ftp://", CleanableHTTPAdapter()) |
574 | if allow_file: |
575 | self.session.mount("file://", FileAdapter()) |
576 | |
577 | @@ -354,6 +358,8 @@ |
578 | request_kwargs.setdefault("proxies", {}) |
579 | request_kwargs["proxies"]["http"] = config.launchpad.http_proxy |
580 | request_kwargs["proxies"]["https"] = config.launchpad.http_proxy |
581 | + if allow_ftp: |
582 | + request_kwargs["proxies"]["ftp"] = config.launchpad.http_proxy |
583 | if output_file is not None: |
584 | request_kwargs["stream"] = True |
585 | response = self.session.request(url=url, **request_kwargs) |