Merge lp:~cjwatson/launchpad/explicit-proxy-ignore-env into lp:launchpad

Proposed by Colin Watson on 2018-07-13
Status: Merged
Merged at revision: 18733
Proposed branch: lp:~cjwatson/launchpad/explicit-proxy-ignore-env
Merge into: lp:launchpad
Diff against target: 244 lines (+22/-37)
14 files modified
lib/lp/bugs/externalbugtracker/base.py (+1/-2)
lib/lp/bugs/externalbugtracker/github.py (+1/-2)
lib/lp/bugs/externalbugtracker/trac.py (+1/-1)
lib/lp/bugs/externalbugtracker/xmlrpc.py (+1/-1)
lib/lp/bugs/scripts/bzremotecomponentfinder.py (+1/-1)
lib/lp/bugs/scripts/sfremoteproductfinder.py (+1/-1)
lib/lp/code/model/branchhosting.py (+1/-2)
lib/lp/code/model/githosting.py (+1/-2)
lib/lp/registry/scripts/distributionmirror_prober.py (+1/-1)
lib/lp/registry/scripts/productreleasefinder/finder.py (+1/-2)
lib/lp/registry/scripts/productreleasefinder/walker.py (+1/-1)
lib/lp/services/sitesearch/__init__.py (+1/-2)
lib/lp/services/tests/test_timeout.py (+3/-10)
lib/lp/services/timeout.py (+7/-9)
To merge this branch: bzr merge lp:~cjwatson/launchpad/explicit-proxy-ignore-env
Reviewer: William Grant — review type: code; requested 2018-07-13; status: Approve on 2018-07-20
Review via email: mp+349470@code.launchpad.net

Commit message

Make urlfetch always ignore proxy settings from the environment.

Description of the change

All callers now either pass use_proxy=True or are talking to another Canonical service to which they have direct firewall access.

To post a comment you must log in.
William Grant (wgrant) :
review: Approve (code)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'lib/lp/bugs/externalbugtracker/base.py'
2--- lib/lp/bugs/externalbugtracker/base.py 2018-06-23 09:46:28 +0000
3+++ lib/lp/bugs/externalbugtracker/base.py 2018-07-13 13:07:49 +0000
4@@ -259,8 +259,7 @@
5 :raises requests.RequestException: if the request fails.
6 """
7 with override_timeout(self.timeout):
8- return urlfetch(
9- url, method=method, trust_env=False, use_proxy=True, **kwargs)
10+ return urlfetch(url, method=method, use_proxy=True, **kwargs)
11
12 def _getPage(self, page, **kwargs):
13 """GET the specified page on the remote HTTP server.
14
15=== modified file 'lib/lp/bugs/externalbugtracker/github.py'
16--- lib/lp/bugs/externalbugtracker/github.py 2018-06-23 09:46:28 +0000
17+++ lib/lp/bugs/externalbugtracker/github.py 2018-07-13 13:07:49 +0000
18@@ -91,8 +91,7 @@
19 url = "https://%s/rate_limit" % host
20 try:
21 with override_timeout(timeout):
22- response = urlfetch(
23- url, headers=headers, trust_env=False, use_proxy=True)
24+ response = urlfetch(url, headers=headers, use_proxy=True)
25 return response.json()["resources"]["core"]
26 except requests.RequestException as e:
27 raise BugTrackerConnectError(url, e)
28
29=== modified file 'lib/lp/bugs/externalbugtracker/trac.py'
30--- lib/lp/bugs/externalbugtracker/trac.py 2018-06-23 09:46:28 +0000
31+++ lib/lp/bugs/externalbugtracker/trac.py 2018-07-13 13:07:49 +0000
32@@ -74,7 +74,7 @@
33 auth_url = urlappend(base_auth_url, 'check')
34 try:
35 with override_timeout(config.checkwatches.default_socket_timeout):
36- response = urlfetch(auth_url, trust_env=False, use_proxy=True)
37+ response = urlfetch(auth_url, use_proxy=True)
38 except requests.HTTPError as e:
39 # If the error is HTTP 401 Unauthorized then we're
40 # probably talking to the LP plugin.
41
42=== modified file 'lib/lp/bugs/externalbugtracker/xmlrpc.py'
43--- lib/lp/bugs/externalbugtracker/xmlrpc.py 2018-06-23 02:16:04 +0000
44+++ lib/lp/bugs/externalbugtracker/xmlrpc.py 2018-07-13 13:07:49 +0000
45@@ -79,7 +79,7 @@
46 url, method='POST', headers={'Content-Type': 'text/xml'},
47 data=request_body, cookies=self.cookie_jar,
48 hooks={'response': repost_on_redirect_hook},
49- trust_env=False, use_proxy=True)
50+ use_proxy=True)
51 except requests.HTTPError as e:
52 raise ProtocolError(
53 url.decode('utf-8'), e.response.status_code, e.response.reason,
54
55=== modified file 'lib/lp/bugs/scripts/bzremotecomponentfinder.py'
56--- lib/lp/bugs/scripts/bzremotecomponentfinder.py 2018-06-05 01:31:47 +0000
57+++ lib/lp/bugs/scripts/bzremotecomponentfinder.py 2018-07-13 13:07:49 +0000
58@@ -57,7 +57,7 @@
59 def getPage(self):
60 """Download and return content from the Bugzilla page"""
61 with override_timeout(config.updatebugzillaremotecomponents.timeout):
62- return urlfetch(self.url, trust_env=False, use_proxy=True).content
63+ return urlfetch(self.url, use_proxy=True).content
64
65 def parsePage(self, page_text):
66 """Builds self.product using HTML content in page_text"""
67
68=== modified file 'lib/lp/bugs/scripts/sfremoteproductfinder.py'
69--- lib/lp/bugs/scripts/sfremoteproductfinder.py 2018-06-05 01:31:47 +0000
70+++ lib/lp/bugs/scripts/sfremoteproductfinder.py 2018-07-13 13:07:49 +0000
71@@ -46,7 +46,7 @@
72 """GET the specified page on the remote HTTP server."""
73 page_url = urlappend(self.sourceforge_baseurl, page)
74 with override_timeout(config.updatesourceforgeremoteproduct.timeout):
75- return urlfetch(page_url, trust_env=False, use_proxy=True).content
76+ return urlfetch(page_url, use_proxy=True).content
77
78 def getRemoteProductFromSourceForge(self, sf_project):
79 """Return the remote product of a SourceForge project.
80
81=== modified file 'lib/lp/code/model/branchhosting.py'
82--- lib/lp/code/model/branchhosting.py 2018-06-21 14:56:36 +0000
83+++ lib/lp/code/model/branchhosting.py 2018-07-13 13:07:49 +0000
84@@ -65,8 +65,7 @@
85 "branch-hosting-%s" % method, "%s %s" % (path, json.dumps(kwargs)))
86 try:
87 response = urlfetch(
88- urljoin(self.endpoint, path), trust_env=False, method=method,
89- **kwargs)
90+ urljoin(self.endpoint, path), method=method, **kwargs)
91 except TimeoutError:
92 # Re-raise this directly so that it can be handled specially by
93 # callers.
94
95=== modified file 'lib/lp/code/model/githosting.py'
96--- lib/lp/code/model/githosting.py 2018-06-15 14:37:26 +0000
97+++ lib/lp/code/model/githosting.py 2018-07-13 13:07:49 +0000
98@@ -57,8 +57,7 @@
99 "git-hosting-%s" % method, "%s %s" % (path, json.dumps(kwargs)))
100 try:
101 response = urlfetch(
102- urljoin(self.endpoint, path), trust_env=False, method=method,
103- **kwargs)
104+ urljoin(self.endpoint, path), method=method, **kwargs)
105 except TimeoutError:
106 # Re-raise this directly so that it can be handled specially by
107 # callers.
108
109=== modified file 'lib/lp/registry/scripts/distributionmirror_prober.py'
110--- lib/lp/registry/scripts/distributionmirror_prober.py 2018-06-26 15:07:50 +0000
111+++ lib/lp/registry/scripts/distributionmirror_prober.py 2018-07-13 13:07:49 +0000
112@@ -626,7 +626,7 @@
113 try:
114 return urlfetch(
115 url, headers={'Pragma': 'no-cache', 'Cache-control': 'no-cache'},
116- trust_env=False, use_proxy=True, allow_file=True)
117+ use_proxy=True, allow_file=True)
118 except requests.RequestException as e:
119 raise UnableToFetchCDImageFileList(
120 'Unable to fetch %s: %s' % (url, e))
121
122=== modified file 'lib/lp/registry/scripts/productreleasefinder/finder.py'
123--- lib/lp/registry/scripts/productreleasefinder/finder.py 2018-06-26 19:17:19 +0000
124+++ lib/lp/registry/scripts/productreleasefinder/finder.py 2018-07-13 13:07:49 +0000
125@@ -229,8 +229,7 @@
126 self.log.info("Downloading %s", url)
127 with tempfile.TemporaryFile(prefix="product-release-finder") as fp:
128 try:
129- response = urlfetch(
130- url, trust_env=False, use_proxy=True, output_file=fp)
131+ response = urlfetch(url, use_proxy=True, output_file=fp)
132 # XXX cjwatson 2018-06-26: This will all change with
133 # requests 3.x. See:
134 # https://blog.petrzemek.net/2018/04/22/
135
136=== modified file 'lib/lp/registry/scripts/productreleasefinder/walker.py'
137--- lib/lp/registry/scripts/productreleasefinder/walker.py 2018-07-09 11:45:41 +0000
138+++ lib/lp/registry/scripts/productreleasefinder/walker.py 2018-07-13 13:07:49 +0000
139@@ -285,7 +285,7 @@
140 self.log.debug("Requesting %s with method %s", path, method)
141 return urlfetch(
142 urljoin(self.base, path), method=method, allow_redirects=False,
143- trust_env=False, use_proxy=True, allow_ftp=True)
144+ use_proxy=True, allow_ftp=True)
145
146 def isDirectory(self, path):
147 """Return whether the path is a directory.
148
149=== modified file 'lib/lp/services/sitesearch/__init__.py'
150--- lib/lp/services/sitesearch/__init__.py 2018-06-23 12:35:33 +0000
151+++ lib/lp/services/sitesearch/__init__.py 2018-07-13 13:07:49 +0000
152@@ -213,8 +213,7 @@
153 action = timeline.start("bing-search-api", search_url)
154 try:
155 response = urlfetch(
156- search_url, headers=search_headers,
157- trust_env=False, use_proxy=True)
158+ search_url, headers=search_headers, use_proxy=True)
159 except (TimeoutError, requests.RequestException) as error:
160 raise SiteSearchResponseError(
161 "The response errored: %s" % str(error))
162
163=== modified file 'lib/lp/services/tests/test_timeout.py'
164--- lib/lp/services/tests/test_timeout.py 2018-07-02 11:28:52 +0000
165+++ lib/lp/services/tests/test_timeout.py 2018-07-13 13:07:49 +0000
166@@ -375,10 +375,7 @@
167 fake_send = FakeMethod(result=Response())
168 self.useFixture(
169 MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
170- # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
171- # everywhere, but for now we just do that as part of the test in
172- # order to avoid environment variation.
173- urlfetch('http://example.com/', trust_env=False)
174+ urlfetch('http://example.com/')
175 self.assertEqual({}, fake_send.calls[0][1]['proxies'])
176
177 def test_urlfetch_uses_proxies_if_requested(self):
178@@ -390,10 +387,7 @@
179 fake_send = FakeMethod(result=Response())
180 self.useFixture(
181 MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
182- # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
183- # everywhere, but for now we just do that as part of the test in
184- # order to avoid environment variation.
185- urlfetch('http://example.com/', trust_env=False, use_proxy=True)
186+ urlfetch('http://example.com/', use_proxy=True)
187 self.assertEqual(
188 {scheme: proxy for scheme in ('http', 'https')},
189 fake_send.calls[0][1]['proxies'])
190@@ -429,8 +423,7 @@
191 set_default_timeout_function(lambda: 1)
192 self.addCleanup(set_default_timeout_function, None)
193 response = urlfetch(
194- 'ftp://example.com/', trust_env=False, use_proxy=True,
195- allow_ftp=True)
196+ 'ftp://example.com/', use_proxy=True, allow_ftp=True)
197 self.assertThat(response, MatchesStructure(
198 status_code=Equals(200),
199 headers=ContainsDict({'content-length': Equals('8')}),
200
201=== modified file 'lib/lp/services/timeout.py'
202--- lib/lp/services/timeout.py 2018-07-02 11:28:52 +0000
203+++ lib/lp/services/timeout.py 2018-07-13 13:07:49 +0000
204@@ -324,14 +324,11 @@
205 self.session = None
206
207 @with_timeout(cleanup='cleanup')
208- def fetch(self, url, trust_env=None, use_proxy=False, allow_ftp=False,
209- allow_file=False, output_file=None, **request_kwargs):
210+ def fetch(self, url, use_proxy=False, allow_ftp=False, allow_file=False,
211+ output_file=None, **request_kwargs):
212 """Fetch the URL using a custom HTTP handler supporting timeout.
213
214 :param url: The URL to fetch.
215- :param trust_env: If not None, set the session's trust_env to this
216- to determine whether it fetches proxy configuration from the
217- environment.
218 :param use_proxy: If True, use Launchpad's configured proxy.
219 :param allow_ftp: If True, allow ftp:// URLs.
220 :param allow_file: If True, allow file:// URLs. (Be careful to only
221@@ -342,8 +339,9 @@
222 `Session.request`.
223 """
224 self.session = Session()
225- if trust_env is not None:
226- self.session.trust_env = trust_env
227+ # Always ignore proxy/authentication settings in the environment; we
228+ # configure that sort of thing explicitly.
229+ self.session.trust_env = False
230 # Mount our custom adapters.
231 self.session.mount("https://", CleanableHTTPAdapter())
232 self.session.mount("http://", CleanableHTTPAdapter())
233@@ -385,9 +383,9 @@
234 self.session = None
235
236
237-def urlfetch(url, trust_env=None, **request_kwargs):
238+def urlfetch(url, **request_kwargs):
239 """Wrapper for `requests.get()` that times out."""
240- return URLFetcher().fetch(url, trust_env=trust_env, **request_kwargs)
241+ return URLFetcher().fetch(url, **request_kwargs)
242
243
244 class TransportWithTimeout(Transport):