← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:requests-session-cleanup into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:requests-session-cleanup into launchpad:master.

Commit message:
Close requests sessions more promptly

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/433633

This fixes a number of particularly obscure-looking `ResourceWarning`s from the test suite that look something like this:

  ResourceWarning: unclosed <socket.socket fd=8, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6, laddr=('127.0.0.1', 46726), raddr=('127.0.0.1', 39003)>

(I finally got curious enough about this to put a breakpoint at the top of `lp.scripts.utilities.warninghandler.launchpad_showwarning` so that I could track it down.)

I haven't fixed the case in `ArtifactoryPool` because the disk pool API makes that difficult; fixing it will need more extensive refactoring, so I've just left an XXX comment for now.

This looks long, but it's almost entirely reindentation due to using sessions as context managers.
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:requests-session-cleanup into launchpad:master.
diff --git a/lib/lp/archivepublisher/artifactory.py b/lib/lp/archivepublisher/artifactory.py
index 2df8e2c..9af731b 100644
--- a/lib/lp/archivepublisher/artifactory.py
+++ b/lib/lp/archivepublisher/artifactory.py
@@ -412,6 +412,8 @@ class ArtifactoryPool:
         # Artifactory bindings can't be told to use
         # lp.services.timeout.urlfetch directly, but only given a substitute
         # session.)
+        # XXX cjwatson 2022-11-25: Nothing ever closes this session
+        # explicitly.
         session = requests.Session()
         session.trust_env = False
         if config.launchpad.http_proxy:
diff --git a/lib/lp/buildmaster/downloader.py b/lib/lp/buildmaster/downloader.py
index d5b60ac..527842d 100644
--- a/lib/lp/buildmaster/downloader.py
+++ b/lib/lp/buildmaster/downloader.py
@@ -59,39 +59,39 @@ class RequestProcess(AMPChild):
 
     @DownloadCommand.responder
     def downloadCommand(self, file_url, path_to_write, timeout):
-        session = Session()
-        session.trust_env = False
-        response = session.get(file_url, timeout=timeout, stream=True)
-        response.raise_for_status()
-        try:
-            os.makedirs(os.path.dirname(path_to_write))
-        except FileExistsError:
-            pass
-        f = tempfile.NamedTemporaryFile(
-            mode="wb",
-            prefix=os.path.basename(path_to_write) + "_",
-            dir=os.path.dirname(path_to_write),
-            delete=False,
-        )
-        try:
-            stream.stream_response_to_file(response, path=f)
-        except Exception:
-            f.close()
-            os.unlink(f.name)
-            raise
-        else:
-            f.close()
-            os.rename(f.name, path_to_write)
-        return {}
+        with Session() as session:
+            session.trust_env = False
+            response = session.get(file_url, timeout=timeout, stream=True)
+            response.raise_for_status()
+            try:
+                os.makedirs(os.path.dirname(path_to_write))
+            except FileExistsError:
+                pass
+            f = tempfile.NamedTemporaryFile(
+                mode="wb",
+                prefix=os.path.basename(path_to_write) + "_",
+                dir=os.path.dirname(path_to_write),
+                delete=False,
+            )
+            try:
+                stream.stream_response_to_file(response, path=f)
+            except Exception:
+                f.close()
+                os.unlink(f.name)
+                raise
+            else:
+                f.close()
+                os.rename(f.name, path_to_write)
+            return {}
 
     @RequestProxyTokenCommand.responder
     def requestProxyTokenCommand(self, url, auth_header, proxy_username):
-        session = Session()
-        session.trust_env = False
-        response = session.post(
-            url,
-            headers={"Authorization": auth_header},
-            json={"username": proxy_username},
-        )
-        response.raise_for_status()
-        return response.json()
+        with Session() as session:
+            session.trust_env = False
+            response = session.post(
+                url,
+                headers={"Authorization": auth_header},
+                json={"username": proxy_username},
+            )
+            response.raise_for_status()
+            return response.json()
diff --git a/lib/lp/services/webhooks/client.py b/lib/lp/services/webhooks/client.py
index 9e8f393..0058817 100644
--- a/lib/lp/services/webhooks/client.py
+++ b/lib/lp/services/webhooks/client.py
@@ -76,50 +76,57 @@ class WebhookClient:
             url.startswith("%s://" % scheme) for scheme in proxies.keys()
         ):
             raise Exception("Unproxied scheme!")
-        session = requests.Session()
-        session.trust_env = False
-        session.headers = {}
+        with requests.Session() as session:
+            session.trust_env = False
+            session.headers = {}
 
-        body, headers = create_request(
-            user_agent, secret, delivery_id, event_type, payload
-        )
-        preq = session.prepare_request(
-            requests.Request("POST", url, data=body, headers=headers)
-        )
-
-        result = {
-            "request": {
-                "url": url,
-                "method": "POST",
-                "headers": dict(preq.headers),
-                "body": preq.body,
-            },
-        }
-        connection_error = None
-        try:
-            resp = session.send(preq, proxies=proxies, timeout=timeout)
-        except (requests.ConnectionError, requests.exceptions.ProxyError) as e:
-            connection_error = str(e)
-        except requests.exceptions.ReadTimeout:
-            connection_error = "Request timeout"
-        if connection_error is not None:
-            result["connection_error"] = connection_error
-            return result
-        # If there was a request error, try to interpret any Squid
-        # error.
-        squid_error = resp.headers.get("X-Squid-Error")
-        if (resp.status_code < 200 or resp.status_code > 299) and squid_error:
-            human_readable = SQUID_ERROR_MESSAGES.get(
-                squid_error.split(" ", 1)[0]
+            body, headers = create_request(
+                user_agent, secret, delivery_id, event_type, payload
             )
-            if human_readable:
-                result["connection_error"] = human_readable
-            else:
-                result["connection_error"] = "Proxy error: %s" % squid_error
-        else:
-            result["response"] = {
-                "status_code": resp.status_code,
-                "headers": dict(resp.headers),
-                "body": resp.content,
+            preq = session.prepare_request(
+                requests.Request("POST", url, data=body, headers=headers)
+            )
+
+            result = {
+                "request": {
+                    "url": url,
+                    "method": "POST",
+                    "headers": dict(preq.headers),
+                    "body": preq.body,
+                },
             }
-        return result
+            connection_error = None
+            try:
+                resp = session.send(preq, proxies=proxies, timeout=timeout)
+            except (
+                requests.ConnectionError,
+                requests.exceptions.ProxyError,
+            ) as e:
+                connection_error = str(e)
+            except requests.exceptions.ReadTimeout:
+                connection_error = "Request timeout"
+            if connection_error is not None:
+                result["connection_error"] = connection_error
+                return result
+            # If there was a request error, try to interpret any Squid
+            # error.
+            squid_error = resp.headers.get("X-Squid-Error")
+            if (
+                resp.status_code < 200 or resp.status_code > 299
+            ) and squid_error:
+                human_readable = SQUID_ERROR_MESSAGES.get(
+                    squid_error.split(" ", 1)[0]
+                )
+                if human_readable:
+                    result["connection_error"] = human_readable
+                else:
+                    result["connection_error"] = (
+                        "Proxy error: %s" % squid_error
+                    )
+            else:
+                result["response"] = {
+                    "status_code": resp.status_code,
+                    "headers": dict(resp.headers),
+                    "body": resp.content,
+                }
+            return result
diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py
index a6fae41..eca5535 100644
--- a/lib/lp/testing/layers.py
+++ b/lib/lp/testing/layers.py
@@ -821,11 +821,11 @@ class LibrarianLayer(DatabaseLayer):
     def _check_and_reset(cls):
         """Raise an exception if the Librarian has been killed, else reset."""
         try:
-            session = Session()
-            session.mount(
-                config.librarian.download_url, HTTPAdapter(max_retries=3)
-            )
-            session.get(config.librarian.download_url).content
+            with Session() as session:
+                session.mount(
+                    config.librarian.download_url, HTTPAdapter(max_retries=3)
+                )
+                session.get(config.librarian.download_url).content
         except Exception as e:
             raise LayerIsolationError(
                 "Librarian has been killed or has hung."