Merge lp:~cjwatson/launchpad/bugs-remote-finders-requests into lp:launchpad

Proposed by Colin Watson
Status: Merged
Merged at revision: 18681
Proposed branch: lp:~cjwatson/launchpad/bugs-remote-finders-requests
Merge into: lp:launchpad
Prerequisite: lp:~cjwatson/launchpad/responses
Diff against target: 692 lines (+210/-160)
9 files modified
cronscripts/nightly.sh (+1/-4)
lib/lp/bugs/doc/sourceforge-remote-products.txt (+62/-19)
lib/lp/bugs/scripts/bzremotecomponentfinder.py (+14/-20)
lib/lp/bugs/scripts/sfremoteproductfinder.py (+11/-8)
lib/lp/bugs/tests/sfremoteproductfinder.py (+0/-50)
lib/lp/bugs/tests/test_bzremotecomponentfinder.py (+48/-57)
lib/lp/services/config/schema-lazr.conf (+6/-0)
lib/lp/services/tests/test_timeout.py (+45/-1)
lib/lp/services/timeout.py (+23/-1)
To merge this branch: bzr merge lp:~cjwatson/launchpad/bugs-remote-finders-requests
Reviewer: William Grant
Review type: code
Review status: Approve
Review via email: mp+347198@code.launchpad.net

Commit message

Convert update-bugzilla-remote-components and update-sourceforge-remote-products to urlfetch with explicit proxy configuration.

William Grant (wgrant):
review: Approve (code)

Preview Diff

1=== modified file 'cronscripts/nightly.sh'
2--- cronscripts/nightly.sh 2012-12-05 17:02:44 +0000
3+++ cronscripts/nightly.sh 2018-06-05 01:33:44 +0000
4@@ -1,15 +1,12 @@
5 #!/bin/sh
6 #
7-# Copyright 2009 Canonical Ltd. This software is licensed under the
8+# Copyright 2009-2018 Canonical Ltd. This software is licensed under the
9 # GNU Affero General Public License version 3 (see the file LICENSE).
10
11 # This script performs nightly chores. It should be run from
12 # cron as the launchpad user once a day. Typically the output
13 # will be sent to an email address for inspection.
14
15-# Note that http/ftp proxies are needed by the product
16-# release finder
17-
18
19 LOGDIR=$1
20 LOGFILE=$LOGDIR/nightly.log
21
22=== modified file 'lib/lp/bugs/doc/sourceforge-remote-products.txt'
23--- lib/lp/bugs/doc/sourceforge-remote-products.txt 2012-06-06 13:44:50 +0000
24+++ lib/lp/bugs/doc/sourceforge-remote-products.txt 2018-06-05 01:33:44 +0000
25@@ -37,14 +37,51 @@
26 ... print product.name, product.sourceforgeproject
27 my-first-product fronobulator
28
29-We'll use a test version of SourceForgeRemoteProductFinder that won't
30-try to access SourceForge.
31-
32+Define some request mocks so that we don't try to access SourceForge.
33+
34+ >>> import os.path
35+ >>> import re
36+ >>> import responses
37+ >>> from six.moves.urllib_parse import urlsplit
38+
39+ >>> def project_callback(request):
40+ ... url = urlsplit(request.url)
41+ ... project = re.match(r'.*/projects/([a-z]+)', url.path).group(1)
42+ ... file_path = os.path.join(
43+ ... os.path.dirname(__file__), os.pardir, 'tests', 'testfiles',
44+ ... 'sourceforge-project-%s.html' % project)
45+ ... with open(file_path) as test_file:
46+ ... return (200, {}, test_file.read())
47+ >>> def add_project_response(requests_mock):
48+ ... requests_mock.add_callback(
49+ ... 'GET', re.compile(r'.*/projects/[a-z]+'),
50+ ... callback=project_callback)
51+
52+ >>> def tracker_callback(request):
53+ ... url = urlsplit(request.url)
54+ ... group_id = re.match(r'group_id=([0-9]+)', url.query).group(1)
55+ ... file_path = os.path.join(
56+ ... os.path.dirname(__file__), os.pardir, 'tests', 'testfiles',
57+ ... 'sourceforge-tracker-%s.html' % group_id)
58+ ... with open(file_path) as test_file:
59+ ... return (200, {}, test_file.read())
60+ >>> def add_tracker_response(requests_mock):
61+ ... requests_mock.add_callback(
62+ ... 'GET', re.compile(r'.*/tracker/\?group_id=[0-9]+'),
63+ ... match_querystring=True, callback=tracker_callback)
64+
65+ >>> def print_calls(calls):
66+ ... for call in calls:
67+ ... url = urlsplit(call.request.url)
68+ ... print('Got page %s%s' % (
69+ ... url.path, '?%s' % url.query if url.query else ''))
70+
71+ >>> from lp.bugs.scripts.sfremoteproductfinder import (
72+ ... SourceForgeRemoteProductFinder,
73+ ... )
74 >>> from lp.services.log.logger import FakeLogger
75- >>> from lp.bugs.tests.sfremoteproductfinder import (
76- ... TestSFRemoteProductFinder)
77 >>> from lp.testing.layers import LaunchpadZopelessLayer
78- >>> finder = TestSFRemoteProductFinder(
79+ >>> finder = SourceForgeRemoteProductFinder(
80 ... txn=LaunchpadZopelessLayer.txn, logger=FakeLogger())
81
82 SourceForgeRemoteProductFinder has a method,
83@@ -55,10 +92,14 @@
84 extracts the URL of the project's bug tracker and returns the group_id and
85 atid therein as an ampersand-separated string.
86
87- >>> remote_product = finder.getRemoteProductFromSourceForge(
88- ... 'fronobulator')
89- DEBUG...Getting page projects/fronobulator
90- DEBUG...Getting page tracker/?group_id=5570
91+ >>> with responses.RequestsMock() as requests_mock:
92+ ... add_project_response(requests_mock)
93+ ... add_tracker_response(requests_mock)
94+ ... remote_product = finder.getRemoteProductFromSourceForge(
95+ ... 'fronobulator')
96+ ... print_calls(requests_mock.calls)
97+ Got page /projects/fronobulator
98+ Got page /tracker/?group_id=5570
99
100 >>> print remote_product
101 5570&105570
102@@ -66,23 +107,25 @@
103 If an error is raised when trying to fetch the project pages from the
104 remote server, it will be logged.
105
106- >>> from lp.bugs.tests.sfremoteproductfinder import (
107- ... TestBrokenSFRemoteProductFinder)
108- >>> broken_finder = TestBrokenSFRemoteProductFinder(
109- ... txn=LaunchpadZopelessLayer.txn, logger=FakeLogger())
110- >>> broken_finder.getRemoteProductFromSourceForge('fronobulator')
111- ERROR...Error fetching project...: HTTP Error 500: This is an error
112+ >>> with responses.RequestsMock() as requests_mock:
113+ ... requests_mock.add('GET', re.compile(r'.*'), status=500)
114+ ... finder.getRemoteProductFromSourceForge('fronobulator')
115+ ERROR...Error fetching project...: 500 Server Error: Internal Server Error
116
117 SourceForgeRemoteProductFinder.setRemoteProductsFromSourceForge()
118 iterates over the list of products returned by
119 getSFLinkedProductsWithNoneRemoteProduct() and then calls
120 getRemoteProductFromSourceForge() to fetch their remote products.
121
122- >>> finder.setRemoteProductsFromSourceForge()
123+ >>> with responses.RequestsMock() as requests_mock:
124+ ... add_project_response(requests_mock)
125+ ... add_tracker_response(requests_mock)
126+ ... finder.setRemoteProductsFromSourceForge()
127+ ... print_calls(requests_mock.calls)
128 INFO...Updating 1 Products using SourceForge project data
129 DEBUG...Updating remote_product for Product 'my-first-product'
130- DEBUG...Getting page projects/fronobulator
131- DEBUG...Getting page tracker/?group_id=5570
132+ Got page /projects/fronobulator
133+ Got page /tracker/?group_id=5570
134
135 The product that was linked to SourceForge without a remote_product now has
136 its remote_product set.
137
138=== modified file 'lib/lp/bugs/scripts/bzremotecomponentfinder.py'
139--- lib/lp/bugs/scripts/bzremotecomponentfinder.py 2017-10-21 18:14:14 +0000
140+++ lib/lp/bugs/scripts/bzremotecomponentfinder.py 2018-06-05 01:33:44 +0000
141@@ -1,4 +1,4 @@
142-# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
143+# Copyright 2009-2018 Canonical Ltd. This software is licensed under the
144 # GNU Affero General Public License version 3 (see the file LICENSE).
145
146 """Utilities for the update-bugzilla-remote-components cronscript"""
147@@ -10,11 +10,8 @@
148 ]
149
150 import re
151-from urllib2 import (
152- HTTPError,
153- urlopen,
154- )
155
156+import requests
157 import transaction
158 from zope.component import getUtility
159
160@@ -24,9 +21,14 @@
161 )
162 from lp.bugs.model.bugtracker import BugTrackerComponent
163 from lp.services.beautifulsoup import BeautifulSoup
164+from lp.services.config import config
165 from lp.services.database import bulk
166 from lp.services.database.interfaces import IStore
167 from lp.services.scripts.logger import log as default_log
168+from lp.services.timeout import (
169+ override_timeout,
170+ urlfetch,
171+ )
172
173
174 def dictFromCSV(line):
175@@ -54,7 +56,8 @@
176
177 def getPage(self):
178 """Download and return content from the Bugzilla page"""
179- return urlopen(self.url).read()
180+ with override_timeout(config.updatebugzillaremotecomponents.timeout):
181+ return urlfetch(self.url, trust_env=False, use_proxy=True).content
182
183 def parsePage(self, page_text):
184 """Builds self.product using HTML content in page_text"""
185@@ -106,20 +109,14 @@
186 u"mozilla.org",
187 ]
188
189- def __init__(self, logger=None, static_bugzilla_scraper=None):
190+ def __init__(self, logger=None):
191 """Instantiates object, without performing any parsing.
192
193 :param logger: A logger object
194- :param static_bugzilla_scraper: Substitute this custom bugzilla
195- scraper object instead of constructing a new
196- BugzillaRemoteComponentScraper for each bugtracker's URL. This
197- is intended for testing purposes to avoid needing to make remote
198- web connections.
199 """
200 self.logger = logger
201 if logger is None:
202 self.logger = default_log
203- self.static_bugzilla_scraper = static_bugzilla_scraper
204
205 def getRemoteProductsAndComponents(self, bugtracker_name=None):
206 """Retrieves, parses, and stores component data for each bugtracker"""
207@@ -141,20 +138,17 @@
208 self.logger.info("%s: %s" % (
209 lp_bugtracker.name, lp_bugtracker.baseurl))
210
211- if self.static_bugzilla_scraper is not None:
212- bz_bugtracker = self.static_bugzilla_scraper
213- else:
214- bz_bugtracker = BugzillaRemoteComponentScraper(
215- base_url=lp_bugtracker.baseurl)
216+ bz_bugtracker = BugzillaRemoteComponentScraper(
217+ base_url=lp_bugtracker.baseurl)
218
219 try:
220 self.logger.debug("...Fetching page")
221 page_text = bz_bugtracker.getPage()
222- except HTTPError as error:
223+ except requests.HTTPError as error:
224 self.logger.warning("Could not fetch %s: %s" % (
225 lp_bugtracker.baseurl, error))
226 continue
227- except:
228+ except Exception:
229 self.logger.warning("Failed to access %s" % (
230 lp_bugtracker.baseurl))
231 continue
232
233=== modified file 'lib/lp/bugs/scripts/sfremoteproductfinder.py'
234--- lib/lp/bugs/scripts/sfremoteproductfinder.py 2017-10-21 18:14:14 +0000
235+++ lib/lp/bugs/scripts/sfremoteproductfinder.py 2018-06-05 01:33:44 +0000
236@@ -1,4 +1,4 @@
237-# Copyright 2009 Canonical Ltd. This software is licensed under the
238+# Copyright 2009-2018 Canonical Ltd. This software is licensed under the
239 # GNU Affero General Public License version 3 (see the file LICENSE).
240
241 """Utilities for the sfremoteproductfinder cronscript"""
242@@ -9,17 +9,19 @@
243 ]
244
245 import urllib
246-from urllib2 import (
247- HTTPError,
248- urlopen,
249- )
250
251+import requests
252 from zope.component import getUtility
253
254 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
255 from lp.registry.interfaces.product import IProductSet
256 from lp.services.beautifulsoup import BeautifulSoup
257+from lp.services.config import config
258 from lp.services.scripts.logger import log as default_log
259+from lp.services.timeout import (
260+ override_timeout,
261+ urlfetch,
262+ )
263 from lp.services.webapp import (
264 urlappend,
265 urlsplit,
266@@ -43,7 +45,8 @@
267 def _getPage(self, page):
268 """GET the specified page on the remote HTTP server."""
269 page_url = urlappend(self.sourceforge_baseurl, page)
270- return urlopen(page_url).read()
271+ with override_timeout(config.updatesourceforgeremoteproduct.timeout):
272+ return urlfetch(page_url, trust_env=False, use_proxy=True).content
273
274 def getRemoteProductFromSourceForge(self, sf_project):
275 """Return the remote product of a SourceForge project.
276@@ -55,7 +58,7 @@
277 # First, fetch the project page.
278 try:
279 soup = BeautifulSoup(self._getPage("projects/%s" % sf_project))
280- except HTTPError as error:
281+ except requests.HTTPError as error:
282 self.logger.error(
283 "Error fetching project %s: %s" %
284 (sf_project, error))
285@@ -75,7 +78,7 @@
286 tracker_url = tracker_url.lstrip('/')
287 try:
288 soup = BeautifulSoup(self._getPage(tracker_url))
289- except HTTPError as error:
290+ except requests.HTTPError as error:
291 self.logger.error(
292 "Error fetching project %s: %s" %
293 (sf_project, error))
294
295=== removed file 'lib/lp/bugs/tests/sfremoteproductfinder.py'
296--- lib/lp/bugs/tests/sfremoteproductfinder.py 2011-12-19 15:09:08 +0000
297+++ lib/lp/bugs/tests/sfremoteproductfinder.py 1970-01-01 00:00:00 +0000
298@@ -1,50 +0,0 @@
299-# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
300-# GNU Affero General Public License version 3 (see the file LICENSE).
301-
302-"""Testing helpers for sfremoteproductfinder."""
303-
304-__metaclass__ = type
305-__all__ = ['TestSFRemoteProductFinder']
306-
307-import os
308-import re
309-from urllib2 import HTTPError
310-
311-from lp.bugs.scripts.sfremoteproductfinder import (
312- SourceForgeRemoteProductFinder,
313- )
314-
315-
316-class TestSFRemoteProductFinder(SourceForgeRemoteProductFinder):
317-
318- def _getPage(self, page):
319- self.logger.debug("Getting page %s" % page)
320-
321- project_re = re.compile('projects/([a-z]+)')
322- tracker_re = re.compile('/?tracker/\?group_id=([0-9]+)')
323-
324- project_match = project_re.match(page)
325- tracker_match = tracker_re.match(page)
326-
327- if project_match is not None:
328- project = project_match.groups()[0]
329- file_path = os.path.join(
330- os.path.dirname(__file__), 'testfiles',
331- 'sourceforge-project-%s.html' % project)
332- elif tracker_match is not None:
333- group_id = tracker_match.groups()[0]
334- file_path = os.path.join(
335- os.path.dirname(__file__), 'testfiles',
336- 'sourceforge-tracker-%s.html' % group_id)
337- else:
338- raise AssertionError(
339- "The requested page '%s' isn't a project or tracker page."
340- % page)
341-
342- return open(file_path, 'r').read()
343-
344-
345-class TestBrokenSFRemoteProductFinder(SourceForgeRemoteProductFinder):
346-
347- def _getPage(self, page):
348- raise HTTPError(page, 500, "This is an error", None, None)
349
350=== modified file 'lib/lp/bugs/tests/test_bzremotecomponentfinder.py'
351--- lib/lp/bugs/tests/test_bzremotecomponentfinder.py 2015-10-06 06:48:01 +0000
352+++ lib/lp/bugs/tests/test_bzremotecomponentfinder.py 2018-06-05 01:33:44 +0000
353@@ -1,4 +1,4 @@
354-# Copyright 2010-2014 Canonical Ltd. This software is licensed under the
355+# Copyright 2010-2018 Canonical Ltd. This software is licensed under the
356 # GNU Affero General Public License version 3 (see the file LICENSE).
357
358 """Tests cronscript for retriving components from remote Bugzillas"""
359@@ -8,8 +8,9 @@
360 __all__ = []
361
362 import os
363-from urllib2 import HTTPError
364+import re
365
366+import responses
367 import transaction
368
369 from lp.bugs.scripts.bzremotecomponentfinder import (
370@@ -32,33 +33,8 @@
371 Test files are located in lib/canonical/launchpad/ftests/testfiles
372 """
373 file_path = os.path.join(os.path.dirname(__file__), 'testfiles', name)
374- test_file = open(file_path, 'r')
375- return test_file.read()
376-
377-
378-class StaticTextBugzillaRemoteComponentScraper(
379- BugzillaRemoteComponentScraper):
380- """A scraper that just returns static text for getPage()"""
381- def __init__(self):
382- BugzillaRemoteComponentScraper.__init__(
383- self, "http://www.example.com")
384-
385- def getPage(self):
386- return read_test_file("bugzilla-fdo-advanced-query.html")
387-
388-
389-class FaultyBugzillaRemoteComponentScraper(
390- BugzillaRemoteComponentScraper):
391- """A scraper that trips asserts when getPage() is called"""
392-
393- def __init__(self, error=None):
394- BugzillaRemoteComponentScraper.__init__(
395- self, "http://www.example.com")
396- self.error = error
397-
398- def getPage(self):
399- raise self.error
400- return None
401+ with open(file_path, 'r') as test_file:
402+ return test_file.read()
403
404
405 class TestBugzillaRemoteComponentScraper(TestCaseWithFactory):
406@@ -130,6 +106,7 @@
407 asserted = e
408 self.assertIs(None, asserted)
409
410+ @responses.activate
411 def test_store(self):
412 """Check that already-parsed data gets stored to database"""
413 lp_bugtracker = self.factory.makeBugTracker()
414@@ -173,17 +150,19 @@
415 comp = comp_group.getComponent(u'four')
416 self.assertEqual(u'four', comp.name)
417
418+ @responses.activate
419 def test_get_remote_products_and_components(self):
420 """Does a full retrieve and storing of data."""
421 lp_bugtracker = self.factory.makeBugTracker(
422 title="fdo-example",
423 name="fdo-example")
424 transaction.commit()
425- bz_scraper = StaticTextBugzillaRemoteComponentScraper()
426
427- finder = BugzillaRemoteComponentFinder(
428- logger=BufferLogger(),
429- static_bugzilla_scraper=bz_scraper)
430+ finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
431+ responses.add(
432+ "GET", re.compile(r".*/query\.cgi\?format=advanced"),
433+ match_querystring=True, content_type="text/html",
434+ body=read_test_file("bugzilla-fdo-advanced-query.html"))
435 finder.getRemoteProductsAndComponents(bugtracker_name="fdo-example")
436
437 self.assertEqual(
438@@ -195,48 +174,60 @@
439 self.assertIsNot(None, comp)
440 self.assertEqual(u'Driver/Radeon', comp.name)
441
442+ @responses.activate
443 def test_get_remote_products_and_components_encounters_301(self):
444- self.factory.makeBugTracker()
445+ def redirect_callback(request):
446+ new_url = request.url.replace("query.cgi", "newquery.cgi")
447+ return (301, {"Location": new_url}, "")
448+
449+ lp_bugtracker = self.factory.makeBugTracker(
450+ title="fdo-example",
451+ name="fdo-example")
452 transaction.commit()
453- bz_scraper = FaultyBugzillaRemoteComponentScraper(
454- error=HTTPError("http://bugzilla.example.com",
455- 301, 'Moved Permanently', {}, None))
456- finder = BugzillaRemoteComponentFinder(
457- logger=BufferLogger(), static_bugzilla_scraper=bz_scraper)
458-
459- self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
460-
461+
462+ finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
463+ responses.add_callback(
464+ "GET", re.compile(r".*/query\.cgi"), callback=redirect_callback)
465+ responses.add(
466+ "GET", re.compile(r".*/newquery\.cgi\?format=advanced"),
467+ match_querystring=True, content_type="text/html",
468+ body=read_test_file("bugzilla-fdo-advanced-query.html"))
469+ finder.getRemoteProductsAndComponents(bugtracker_name="fdo-example")
470+
471+ self.assertEqual(
472+ 109, len(list(lp_bugtracker.getAllRemoteComponentGroups())))
473+ comp_group = lp_bugtracker.getRemoteComponentGroup(u'xorg')
474+ self.assertIsNot(None, comp_group)
475+ self.assertEqual(146, len(list(comp_group.components)))
476+ comp = comp_group.getComponent(u'Driver/Radeon')
477+ self.assertIsNot(None, comp)
478+ self.assertEqual(u'Driver/Radeon', comp.name)
479+
480+ @responses.activate
481 def test_get_remote_products_and_components_encounters_400(self):
482 self.factory.makeBugTracker()
483 transaction.commit()
484- bz_scraper = FaultyBugzillaRemoteComponentScraper(
485- error=HTTPError("http://bugzilla.example.com",
486- 400, 'Bad Request', {}, None))
487- finder = BugzillaRemoteComponentFinder(
488- logger=BufferLogger(), static_bugzilla_scraper=bz_scraper)
489+ finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
490
491+ responses.add("GET", re.compile(r".*/query\.cgi"), status=400)
492 self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
493
494+ @responses.activate
495 def test_get_remote_products_and_components_encounters_404(self):
496 self.factory.makeBugTracker()
497 transaction.commit()
498- bz_scraper = FaultyBugzillaRemoteComponentScraper(
499- error=HTTPError("http://bugzilla.example.com",
500- 404, 'Not Found', {}, None))
501- finder = BugzillaRemoteComponentFinder(
502- logger=BufferLogger(), static_bugzilla_scraper=bz_scraper)
503+ finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
504
505+ responses.add("GET", re.compile(r".*/query\.cgi"), status=404)
506 self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
507
508+ @responses.activate
509 def test_get_remote_products_and_components_encounters_500(self):
510 self.factory.makeBugTracker()
511 transaction.commit()
512- bz_scraper = FaultyBugzillaRemoteComponentScraper(
513- error=HTTPError("http://bugzilla.example.com",
514- 500, 'Internal Server Error', {}, None))
515- finder = BugzillaRemoteComponentFinder(
516- logger=BufferLogger(), static_bugzilla_scraper=bz_scraper)
517+ finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
518
519+ responses.add("GET", re.compile(r".*/query\.cgi"), status=500)
520 self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
521
522 # FIXME: This takes ~9 sec to run, but mars says new testsuites need to
523
524=== modified file 'lib/lp/services/config/schema-lazr.conf'
525--- lib/lp/services/config/schema-lazr.conf 2018-05-21 20:30:16 +0000
526+++ lib/lp/services/config/schema-lazr.conf 2018-06-05 01:33:44 +0000
527@@ -1667,12 +1667,18 @@
528 # datatype: string
529 dbuser: updatesourceforgeremoteproduct
530
531+# datatype: integer
532+timeout: 30
533+
534
535 [updatebugzillaremotecomponents]
536 # The database user to run this process as.
537 # datatype: string
538 dbuser: updatebugzillaremotecomponents
539
540+# datatype: integer
541+timeout: 30
542+
543
544 [uploader]
545 # The database user which will be used by this process.
546
547=== modified file 'lib/lp/services/tests/test_timeout.py'
548--- lib/lp/services/tests/test_timeout.py 2017-04-29 23:51:28 +0000
549+++ lib/lp/services/tests/test_timeout.py 2018-06-05 01:33:44 +0000
550@@ -1,4 +1,4 @@
551-# Copyright 2012-2016 Canonical Ltd. This software is licensed under the
552+# Copyright 2012-2018 Canonical Ltd. This software is licensed under the
553 # GNU Affero General Public License version 3 (see the file LICENSE).
554
555 """timeout.py tests.
556@@ -15,6 +15,8 @@
557 import threading
558 import xmlrpclib
559
560+from fixtures import MonkeyPatch
561+from requests import Response
562 from requests.exceptions import (
563 ConnectionError,
564 InvalidSchema,
565@@ -24,6 +26,7 @@
566 from lp.services.timeout import (
567 default_timeout,
568 get_default_timeout_function,
569+ override_timeout,
570 reduced_timeout,
571 set_default_timeout_function,
572 TimeoutError,
573@@ -37,6 +40,7 @@
574 )
575 from lp.services.webapp.servers import LaunchpadTestRequest
576 from lp.testing import TestCase
577+from lp.testing.fakemethod import FakeMethod
578
579
580 @with_timeout()
581@@ -247,6 +251,15 @@
582 finally:
583 clear_request_started()
584
585+ def test_override_timeout(self):
586+ """override_timeout temporarily overrides the default timeout."""
587+ self.addCleanup(set_default_timeout_function, None)
588+ with override_timeout(1.0):
589+ self.assertEqual(1.0, get_default_timeout_function()())
590+ set_default_timeout_function(lambda: 5.0)
591+ with override_timeout(1.0):
592+ self.assertEqual(1.0, get_default_timeout_function()())
593+
594 def make_test_socket(self):
595 """One common use case for timing out is when making an HTTP request
596 to an external site to fetch content. To this end, the timeout
597@@ -355,6 +368,37 @@
598 self.assertEqual(
599 "No connection adapters were found for '%s'" % url, str(e))
600
601+ def test_urlfetch_no_proxy_by_default(self):
602+ """urlfetch does not use a proxy by default."""
603+ self.pushConfig('launchpad', http_proxy='http://proxy.example:3128/')
604+ set_default_timeout_function(lambda: 1)
605+ self.addCleanup(set_default_timeout_function, None)
606+ fake_send = FakeMethod(result=Response())
607+ self.useFixture(
608+ MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
609+ # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
610+ # everywhere, but for now we just do that as part of the test in
611+ # order to avoid environment variation.
612+ urlfetch('http://example.com/', trust_env=False)
613+ self.assertEqual({}, fake_send.calls[0][1]['proxies'])
614+
615+ def test_urlfetch_uses_proxies_if_requested(self):
616+ """urlfetch uses proxies if explicitly requested."""
617+ proxy = 'http://proxy.example:3128/'
618+ self.pushConfig('launchpad', http_proxy=proxy)
619+ set_default_timeout_function(lambda: 1)
620+ self.addCleanup(set_default_timeout_function, None)
621+ fake_send = FakeMethod(result=Response())
622+ self.useFixture(
623+ MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
624+ # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
625+ # everywhere, but for now we just do that as part of the test in
626+ # order to avoid environment variation.
627+ urlfetch('http://example.com/', trust_env=False, use_proxy=True)
628+ self.assertEqual(
629+ {scheme: proxy for scheme in ('http', 'https')},
630+ fake_send.calls[0][1]['proxies'])
631+
632 def test_xmlrpc_transport(self):
633 """ Another use case for timeouts is communicating with external
634 systems using XMLRPC. In order to allow timeouts using XMLRPC we
635
636=== modified file 'lib/lp/services/timeout.py'
637--- lib/lp/services/timeout.py 2018-03-31 16:02:54 +0000
638+++ lib/lp/services/timeout.py 2018-06-05 01:33:44 +0000
639@@ -7,6 +7,7 @@
640 __all__ = [
641 "default_timeout",
642 "get_default_timeout_function",
643+ "override_timeout",
644 "reduced_timeout",
645 "SafeTransportWithTimeout",
646 "set_default_timeout_function",
647@@ -41,6 +42,8 @@
648 from requests.packages.urllib3.poolmanager import PoolManager
649 from six import reraise
650
651+from lp.services.config import config
652+
653
654 default_timeout_function = None
655
656@@ -110,6 +113,21 @@
657 set_default_timeout_function(original_timeout_function)
658
659
660+@contextmanager
661+def override_timeout(timeout):
662+ """A context manager that temporarily overrides the default timeout.
663+
664+ :param timeout: The new timeout to use.
665+ """
666+ original_timeout_function = get_default_timeout_function()
667+
668+ set_default_timeout_function(lambda: timeout)
669+ try:
670+ yield
671+ finally:
672+ set_default_timeout_function(original_timeout_function)
673+
674+
675 class TimeoutError(Exception):
676 """Exception raised when a function doesn't complete within time."""
677
678@@ -311,9 +329,13 @@
679 return session
680
681 @with_timeout(cleanup='cleanup')
682- def fetch(self, url, trust_env=None, **request_kwargs):
683+ def fetch(self, url, trust_env=None, use_proxy=False, **request_kwargs):
684 """Fetch the URL using a custom HTTP handler supporting timeout."""
685 request_kwargs.setdefault("method", "GET")
686+ if use_proxy and config.launchpad.http_proxy:
687+ request_kwargs.setdefault("proxies", {})
688+ request_kwargs["proxies"]["http"] = config.launchpad.http_proxy
689+ request_kwargs["proxies"]["https"] = config.launchpad.http_proxy
690 self.session = self._makeSession(trust_env=trust_env)
691 response = self.session.request(url=url, **request_kwargs)
692 response.raise_for_status()