[Merge] lp:~cjwatson/launchpad/explicit-proxy-ignore-env into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/explicit-proxy-ignore-env into lp:launchpad.
Commit message:
Make urlfetch always ignore proxy settings from the environment.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/explicit-proxy-ignore-env/+merge/349470
All callers now either pass use_proxy=True or are talking to another Canonical service to which they have direct firewall access.
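As a sketch of what this means at call sites (drawn from the diff below; no new API, just the simplified calling convention):

    # Before: every caller had to opt out of environment proxy settings.
    response = urlfetch(url, trust_env=False, use_proxy=True)

    # After: urlfetch always sets session.trust_env = False itself, so
    # callers only state whether to use Launchpad's configured proxy.
    response = urlfetch(url, use_proxy=True)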
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/explicit-proxy-ignore-env into lp:launchpad.
=== modified file 'lib/lp/bugs/externalbugtracker/base.py'
--- lib/lp/bugs/externalbugtracker/base.py 2018-06-23 09:46:28 +0000
+++ lib/lp/bugs/externalbugtracker/base.py 2018-07-13 13:07:49 +0000
@@ -259,8 +259,7 @@
:raises requests.RequestException: if the request fails.
"""
with override_timeout(self.timeout):
- return urlfetch(
- url, method=method, trust_env=False, use_proxy=True, **kwargs)
+ return urlfetch(url, method=method, use_proxy=True, **kwargs)
def _getPage(self, page, **kwargs):
"""GET the specified page on the remote HTTP server.
=== modified file 'lib/lp/bugs/externalbugtracker/github.py'
--- lib/lp/bugs/externalbugtracker/github.py 2018-06-23 09:46:28 +0000
+++ lib/lp/bugs/externalbugtracker/github.py 2018-07-13 13:07:49 +0000
@@ -91,8 +91,7 @@
url = "https://%s/rate_limit" % host
try:
with override_timeout(timeout):
- response = urlfetch(
- url, headers=headers, trust_env=False, use_proxy=True)
+ response = urlfetch(url, headers=headers, use_proxy=True)
return response.json()["resources"]["core"]
except requests.RequestException as e:
raise BugTrackerConnectError(url, e)
=== modified file 'lib/lp/bugs/externalbugtracker/trac.py'
--- lib/lp/bugs/externalbugtracker/trac.py 2018-06-23 09:46:28 +0000
+++ lib/lp/bugs/externalbugtracker/trac.py 2018-07-13 13:07:49 +0000
@@ -74,7 +74,7 @@
auth_url = urlappend(base_auth_url, 'check')
try:
with override_timeout(config.checkwatches.default_socket_timeout):
- response = urlfetch(auth_url, trust_env=False, use_proxy=True)
+ response = urlfetch(auth_url, use_proxy=True)
except requests.HTTPError as e:
# If the error is HTTP 401 Unauthorized then we're
# probably talking to the LP plugin.
=== modified file 'lib/lp/bugs/externalbugtracker/xmlrpc.py'
--- lib/lp/bugs/externalbugtracker/xmlrpc.py 2018-06-23 02:16:04 +0000
+++ lib/lp/bugs/externalbugtracker/xmlrpc.py 2018-07-13 13:07:49 +0000
@@ -79,7 +79,7 @@
url, method='POST', headers={'Content-Type': 'text/xml'},
data=request_body, cookies=self.cookie_jar,
hooks={'response': repost_on_redirect_hook},
- trust_env=False, use_proxy=True)
+ use_proxy=True)
except requests.HTTPError as e:
raise ProtocolError(
url.decode('utf-8'), e.response.status_code, e.response.reason,
=== modified file 'lib/lp/bugs/scripts/bzremotecomponentfinder.py'
--- lib/lp/bugs/scripts/bzremotecomponentfinder.py 2018-06-05 01:31:47 +0000
+++ lib/lp/bugs/scripts/bzremotecomponentfinder.py 2018-07-13 13:07:49 +0000
@@ -57,7 +57,7 @@
def getPage(self):
"""Download and return content from the Bugzilla page"""
with override_timeout(config.updatebugzillaremotecomponents.timeout):
- return urlfetch(self.url, trust_env=False, use_proxy=True).content
+ return urlfetch(self.url, use_proxy=True).content
def parsePage(self, page_text):
"""Builds self.product using HTML content in page_text"""
=== modified file 'lib/lp/bugs/scripts/sfremoteproductfinder.py'
--- lib/lp/bugs/scripts/sfremoteproductfinder.py 2018-06-05 01:31:47 +0000
+++ lib/lp/bugs/scripts/sfremoteproductfinder.py 2018-07-13 13:07:49 +0000
@@ -46,7 +46,7 @@
"""GET the specified page on the remote HTTP server."""
page_url = urlappend(self.sourceforge_baseurl, page)
with override_timeout(config.updatesourceforgeremoteproduct.timeout):
- return urlfetch(page_url, trust_env=False, use_proxy=True).content
+ return urlfetch(page_url, use_proxy=True).content
def getRemoteProductFromSourceForge(self, sf_project):
"""Return the remote product of a SourceForge project.
=== modified file 'lib/lp/code/model/branchhosting.py'
--- lib/lp/code/model/branchhosting.py 2018-06-21 14:56:36 +0000
+++ lib/lp/code/model/branchhosting.py 2018-07-13 13:07:49 +0000
@@ -65,8 +65,7 @@
"branch-hosting-%s" % method, "%s %s" % (path, json.dumps(kwargs)))
try:
response = urlfetch(
- urljoin(self.endpoint, path), trust_env=False, method=method,
- **kwargs)
+ urljoin(self.endpoint, path), method=method, **kwargs)
except TimeoutError:
# Re-raise this directly so that it can be handled specially by
# callers.
=== modified file 'lib/lp/code/model/githosting.py'
--- lib/lp/code/model/githosting.py 2018-06-15 14:37:26 +0000
+++ lib/lp/code/model/githosting.py 2018-07-13 13:07:49 +0000
@@ -57,8 +57,7 @@
"git-hosting-%s" % method, "%s %s" % (path, json.dumps(kwargs)))
try:
response = urlfetch(
- urljoin(self.endpoint, path), trust_env=False, method=method,
- **kwargs)
+ urljoin(self.endpoint, path), method=method, **kwargs)
except TimeoutError:
# Re-raise this directly so that it can be handled specially by
# callers.
=== modified file 'lib/lp/registry/scripts/distributionmirror_prober.py'
--- lib/lp/registry/scripts/distributionmirror_prober.py 2018-06-26 15:07:50 +0000
+++ lib/lp/registry/scripts/distributionmirror_prober.py 2018-07-13 13:07:49 +0000
@@ -626,7 +626,7 @@
try:
return urlfetch(
url, headers={'Pragma': 'no-cache', 'Cache-control': 'no-cache'},
- trust_env=False, use_proxy=True, allow_file=True)
+ use_proxy=True, allow_file=True)
except requests.RequestException as e:
raise UnableToFetchCDImageFileList(
'Unable to fetch %s: %s' % (url, e))
=== modified file 'lib/lp/registry/scripts/productreleasefinder/finder.py'
--- lib/lp/registry/scripts/productreleasefinder/finder.py 2018-06-26 19:17:19 +0000
+++ lib/lp/registry/scripts/productreleasefinder/finder.py 2018-07-13 13:07:49 +0000
@@ -229,8 +229,7 @@
self.log.info("Downloading %s", url)
with tempfile.TemporaryFile(prefix="product-release-finder") as fp:
try:
- response = urlfetch(
- url, trust_env=False, use_proxy=True, output_file=fp)
+ response = urlfetch(url, use_proxy=True, output_file=fp)
# XXX cjwatson 2018-06-26: This will all change with
# requests 3.x. See:
# https://blog.petrzemek.net/2018/04/22/
=== modified file 'lib/lp/registry/scripts/productreleasefinder/walker.py'
--- lib/lp/registry/scripts/productreleasefinder/walker.py 2018-07-09 11:45:41 +0000
+++ lib/lp/registry/scripts/productreleasefinder/walker.py 2018-07-13 13:07:49 +0000
@@ -285,7 +285,7 @@
self.log.debug("Requesting %s with method %s", path, method)
return urlfetch(
urljoin(self.base, path), method=method, allow_redirects=False,
- trust_env=False, use_proxy=True, allow_ftp=True)
+ use_proxy=True, allow_ftp=True)
def isDirectory(self, path):
"""Return whether the path is a directory.
=== modified file 'lib/lp/services/sitesearch/__init__.py'
--- lib/lp/services/sitesearch/__init__.py 2018-06-23 12:35:33 +0000
+++ lib/lp/services/sitesearch/__init__.py 2018-07-13 13:07:49 +0000
@@ -213,8 +213,7 @@
action = timeline.start("bing-search-api", search_url)
try:
response = urlfetch(
- search_url, headers=search_headers,
- trust_env=False, use_proxy=True)
+ search_url, headers=search_headers, use_proxy=True)
except (TimeoutError, requests.RequestException) as error:
raise SiteSearchResponseError(
"The response errored: %s" % str(error))
=== modified file 'lib/lp/services/tests/test_timeout.py'
--- lib/lp/services/tests/test_timeout.py 2018-07-02 11:28:52 +0000
+++ lib/lp/services/tests/test_timeout.py 2018-07-13 13:07:49 +0000
@@ -375,10 +375,7 @@
fake_send = FakeMethod(result=Response())
self.useFixture(
MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
- # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
- # everywhere, but for now we just do that as part of the test in
- # order to avoid environment variation.
- urlfetch('http://example.com/', trust_env=False)
+ urlfetch('http://example.com/')
self.assertEqual({}, fake_send.calls[0][1]['proxies'])
def test_urlfetch_uses_proxies_if_requested(self):
@@ -390,10 +387,7 @@
fake_send = FakeMethod(result=Response())
self.useFixture(
MonkeyPatch('requests.adapters.HTTPAdapter.send', fake_send))
- # XXX cjwatson 2018-06-04: Eventually we'll set trust_env=False
- # everywhere, but for now we just do that as part of the test in
- # order to avoid environment variation.
- urlfetch('http://example.com/', trust_env=False, use_proxy=True)
+ urlfetch('http://example.com/', use_proxy=True)
self.assertEqual(
{scheme: proxy for scheme in ('http', 'https')},
fake_send.calls[0][1]['proxies'])
@@ -429,8 +423,7 @@
set_default_timeout_function(lambda: 1)
self.addCleanup(set_default_timeout_function, None)
response = urlfetch(
- 'ftp://example.com/', trust_env=False, use_proxy=True,
- allow_ftp=True)
+ 'ftp://example.com/', use_proxy=True, allow_ftp=True)
self.assertThat(response, MatchesStructure(
status_code=Equals(200),
headers=ContainsDict({'content-length': Equals('8')}),
=== modified file 'lib/lp/services/timeout.py'
--- lib/lp/services/timeout.py 2018-07-02 11:28:52 +0000
+++ lib/lp/services/timeout.py 2018-07-13 13:07:49 +0000
@@ -324,14 +324,11 @@
self.session = None
@with_timeout(cleanup='cleanup')
- def fetch(self, url, trust_env=None, use_proxy=False, allow_ftp=False,
- allow_file=False, output_file=None, **request_kwargs):
+ def fetch(self, url, use_proxy=False, allow_ftp=False, allow_file=False,
+ output_file=None, **request_kwargs):
"""Fetch the URL using a custom HTTP handler supporting timeout.
:param url: The URL to fetch.
- :param trust_env: If not None, set the session's trust_env to this
- to determine whether it fetches proxy configuration from the
- environment.
:param use_proxy: If True, use Launchpad's configured proxy.
:param allow_ftp: If True, allow ftp:// URLs.
:param allow_file: If True, allow file:// URLs. (Be careful to only
@@ -342,8 +339,9 @@
`Session.request`.
"""
self.session = Session()
- if trust_env is not None:
- self.session.trust_env = trust_env
+ # Always ignore proxy/authentication settings in the environment; we
+ # configure that sort of thing explicitly.
+ self.session.trust_env = False
# Mount our custom adapters.
self.session.mount("https://", CleanableHTTPAdapter())
self.session.mount("http://", CleanableHTTPAdapter())
@@ -385,9 +383,9 @@
self.session = None
-def urlfetch(url, trust_env=None, **request_kwargs):
+def urlfetch(url, **request_kwargs):
"""Wrapper for `requests.get()` that times out."""
- return URLFetcher().fetch(url, trust_env=trust_env, **request_kwargs)
+ return URLFetcher().fetch(url, **request_kwargs)
class TransportWithTimeout(Transport):
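A hedged usage sketch following this change (the URLs are illustrative, and a default timeout function must be configured first, as in the modified test above):

    from lp.services.timeout import set_default_timeout_function, urlfetch

    set_default_timeout_function(lambda: 30.0)  # required by @with_timeout

    # Internal Canonical service: fetched directly, with no proxy argument.
    # Proxy settings in the environment are ignored either way, since
    # fetch() now unconditionally sets session.trust_env = False.
    response = urlfetch('http://git.launchpad.test/repo/1/refs', method='GET')

    # External site: opt in to Launchpad's configured proxy explicitly.
    response = urlfetch('https://api.github.com/rate_limit', use_proxy=True)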