[Merge] ~cjwatson/launchpad:black-remaining into launchpad:master

Colin Watson has proposed merging ~cjwatson/launchpad:black-remaining into launchpad:master.

Commit message:
Apply black to remaining files

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/427352

As with `pyupgrade`, the handful of files that still need to work on Python 2 or that are copied from external sources are excluded.
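
For reference, a sketch of what the `black` hook entry in `.pre-commit-config.yaml` reduces to after this change, reconstructed from the first hunk below (the enclosing repo stanza sits outside the hunk and is unchanged); only the handful of excluded paths remain:

    rev: 22.3.0
    hooks:
      - id: black
        exclude: |
          (?x)^(
            lib/contrib/.*
            |lib/devscripts/.*
            |utilities/community-contributions\.py
            |utilities/update-sourcecode
          )$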
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-remaining into launchpad:master.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7421d57..ddb984f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -39,122 +39,19 @@ repos:
     rev: 22.3.0
     hooks:
       - id: black
-        files: |
-          (?x)^(
-            cronscripts
-            |database
-            |lib/lp/(
-              answers
-              |app
-              |archivepublisher
-              |archiveuploader
-              |blueprints
-              |bugs
-              |buildmaster
-              |charms
-              |code
-              |codehosting
-              |coop
-              |oci
-              |registry
-              |scripts
-              |services
-              |snappy
-              |soyuz
-              |testing
-              |testopenid
-              |tests
-              |translations
-              |xmlrpc
-            )
-            |scripts
-            |utilities
-          )/
         exclude: |
           (?x)^(
-            utilities/community-contributions\.py
+            lib/contrib/.*
+            |lib/devscripts/.*
+            |utilities/community-contributions\.py
             |utilities/update-sourcecode
           )$
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
     hooks:
     -   id: isort
-        name: isort (old-style)
-        args:
-        -   --combine-as
-        -   --force-grid-wrap=2
-        -   --force-sort-within-sections
-        -   --trailing-comma
-        -   --line-length=78
-        -   --lines-after-imports=2
-        -   --multi-line=8
-        -   --dont-order-by-type
-        exclude: |
-          (?x)^(
-            cronscripts
-            |database
-            |lib/lp/(
-              answers
-              |app
-              |archivepublisher
-              |archiveuploader
-              |blueprints
-              |bugs
-              |buildmaster
-              |charms
-              |code
-              |codehosting
-              |coop
-              |oci
-              |registry
-              |scripts
-              |services
-              |snappy
-              |soyuz
-              |testing
-              |testopenid
-              |tests
-              |translations
-              |xmlrpc
-            )
-            |scripts
-            |utilities
-          )/
-    -   id: isort
-        alias: isort-black
-        name: isort (black)
+        name: isort
         args: [--profile, black]
-        files: |
-          (?x)^(
-            cronscripts
-            |database
-            |lib/lp/(
-              answers
-              |app
-              |archivepublisher
-              |archiveuploader
-              |blueprints
-              |bugs
-              |buildmaster
-              |charms
-              |code
-              |codehosting
-              |coop
-              |oci
-              |registry
-              |scripts
-              |services
-              |snappy
-              |soyuz
-              |testing
-              |testopenid
-              |tests
-              |translations
-              |xmlrpc
-            )
-            |scripts
-            |utilities
-          )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
     hooks:
diff --git a/_pythonpath.py b/_pythonpath.py
index 1773f89..b8d2eaa 100644
--- a/_pythonpath.py
+++ b/_pythonpath.py
@@ -4,13 +4,12 @@
 # This file works if the Python has been started with -S, or if bin/py
 # has been used.
 
-from importlib.util import find_spec
 import os.path
 import sys
-
+from importlib.util import find_spec
 
 # Get path to this file.
-if __name__ == '__main__':
+if __name__ == "__main__":
     filename = __file__
 else:
     # If this is an imported module, we want the location of the .py
@@ -20,46 +19,49 @@ else:
 # project root.
 top = os.path.dirname(os.path.abspath(os.path.realpath(filename)))
 
-env = os.path.join(top, 'env')
-python_version = '%s.%s' % sys.version_info[:2]
-stdlib_dir = os.path.join(env, 'lib', 'python%s' % python_version)
+env = os.path.join(top, "env")
+python_version = "%s.%s" % sys.version_info[:2]
+stdlib_dir = os.path.join(env, "lib", "python%s" % python_version)
 
-if ('site' in sys.modules and
-        'lp_sitecustomize' not in sys.modules):
+if "site" in sys.modules and "lp_sitecustomize" not in sys.modules:
     # Site initialization has been run but lp_sitecustomize was not loaded,
     # so something is set up incorrectly.  We blow up, with a hopefully
     # helpful error message.
     raise RuntimeError(
-        'Python was invoked incorrectly.  Scripts should usually be '
+        "Python was invoked incorrectly.  Scripts should usually be "
         "started with Launchpad's bin/py, or with a Python invoked with "
-        'the -S flag.')
+        "the -S flag."
+    )
 
 # Ensure that the virtualenv's standard library directory is in sys.path;
 # activate_this will not put it there.
 if stdlib_dir not in sys.path and (stdlib_dir + os.sep) not in sys.path:
     sys.path.insert(0, stdlib_dir)
 
-if not sys.executable.startswith(top + os.sep) or 'site' not in sys.modules:
+if not sys.executable.startswith(top + os.sep) or "site" not in sys.modules:
     # Activate the virtualenv.  Avoid importing lp_sitecustomize here, as
     # activate_this imports site before it's finished setting up sys.path.
-    orig_disable_sitecustomize = os.environ.get('LP_DISABLE_SITECUSTOMIZE')
-    os.environ['LP_DISABLE_SITECUSTOMIZE'] = '1'
+    orig_disable_sitecustomize = os.environ.get("LP_DISABLE_SITECUSTOMIZE")
+    os.environ["LP_DISABLE_SITECUSTOMIZE"] = "1"
     # This is a bit like env/bin/activate_this.py, but to help namespace
     # packages work properly we change sys.prefix before importing site
     # rather than after.
     sys.real_prefix = sys.prefix
     sys.prefix = env
-    os.environ['PATH'] = (
-        os.path.join(env, 'bin') + os.pathsep + os.environ.get('PATH', ''))
-    os.environ['VIRTUAL_ENV'] = env
+    os.environ["PATH"] = (
+        os.path.join(env, "bin") + os.pathsep + os.environ.get("PATH", "")
+    )
+    os.environ["VIRTUAL_ENV"] = env
     site_packages = os.path.join(
-        env, 'lib', 'python%s' % python_version, 'site-packages')
+        env, "lib", "python%s" % python_version, "site-packages"
+    )
     import site
+
     site.addsitedir(site_packages)
     if orig_disable_sitecustomize is not None:
-        os.environ['LP_DISABLE_SITECUSTOMIZE'] = orig_disable_sitecustomize
+        os.environ["LP_DISABLE_SITECUSTOMIZE"] = orig_disable_sitecustomize
     else:
-        del os.environ['LP_DISABLE_SITECUSTOMIZE']
+        del os.environ["LP_DISABLE_SITECUSTOMIZE"]
 
 # Move all our own directories to the front of the path.
 new_sys_path = []
@@ -70,7 +72,8 @@ for item in list(sys.path):
 sys.path[:0] = new_sys_path
 
 # Initialise the Launchpad environment.
-if 'LP_DISABLE_SITECUSTOMIZE' not in os.environ:
-    if 'lp_sitecustomize' not in sys.modules:
+if "LP_DISABLE_SITECUSTOMIZE" not in os.environ:
+    if "lp_sitecustomize" not in sys.modules:
         import lp_sitecustomize
+
         lp_sitecustomize.main()
diff --git a/brzplugins/lpserve/__init__.py b/brzplugins/lpserve/__init__.py
index 6b47fde..c97a8ef 100644
--- a/brzplugins/lpserve/__init__.py
+++ b/brzplugins/lpserve/__init__.py
@@ -7,28 +7,16 @@ Cribbed from bzrlib.builtins.cmd_serve from Bazaar 0.16.
 """
 
 __all__ = [
-    'cmd_launchpad_server',
-    ]
+    "cmd_launchpad_server",
+]
 
 
 import resource
 
-from breezy import (
-    lockdir,
-    ui,
-    )
-from breezy.commands import (
-    Command,
-    register_command,
-    )
-from breezy.option import (
-    Option,
-    RegistryOption,
-    )
-from breezy.transport import (
-    get_transport,
-    transport_server_registry,
-    )
+from breezy import lockdir, ui
+from breezy.commands import Command, register_command
+from breezy.option import Option, RegistryOption
+from breezy.transport import get_transport, transport_server_registry
 
 
 class cmd_launchpad_server(Command):
@@ -36,44 +24,52 @@ class cmd_launchpad_server(Command):
     file-system format.
     """
 
-    aliases = ['lp-serve']
+    aliases = ["lp-serve"]
 
     takes_options = [
+        Option("inet", help="serve on stdin/out for use from inetd or sshd"),
         Option(
-            'inet',
-            help="serve on stdin/out for use from inetd or sshd"),
-        Option(
-            'port',
+            "port",
             help=(
                 "listen for connections on nominated port of the form "
                 "[hostname:]portnumber. Passing 0 as the port number will "
                 "result in a dynamically allocated port. Default port is "
-                " 4155."),
-            type=str),
+                " 4155."
+            ),
+            type=str,
+        ),
         Option(
-            'upload-directory',
+            "upload-directory",
             help=(
                 "upload branches to this directory. Defaults to "
-                "config.codehosting.hosted_branches_root."),
-            type=str),
+                "config.codehosting.hosted_branches_root."
+            ),
+            type=str,
+        ),
         Option(
-            'mirror-directory',
+            "mirror-directory",
             help=(
                 "serve branches from this directory. Defaults to "
-                "config.codehosting.mirrored_branches_root.")),
+                "config.codehosting.mirrored_branches_root."
+            ),
+        ),
         Option(
-            'codehosting-endpoint',
+            "codehosting-endpoint",
             help=(
                 "the url of the internal XML-RPC server. Defaults to "
-                "config.codehosting.codehosting_endpoint."),
-            type=str),
+                "config.codehosting.codehosting_endpoint."
+            ),
+            type=str,
+        ),
         RegistryOption(
-            'protocol', help="Protocol to serve.",
-            lazy_registry=('breezy.transport', 'transport_server_registry'),
-            value_switches=True),
-        ]
+            "protocol",
+            help="Protocol to serve.",
+            lazy_registry=("breezy.transport", "transport_server_registry"),
+            value_switches=True,
+        ),
+    ]
 
-    takes_args = ['user_id']
+    takes_args = ["user_id"]
 
     def run_server(self, smart_server):
         """Run the given smart server."""
@@ -102,18 +98,23 @@ class cmd_launchpad_server(Command):
         """
         host = None
         if port is not None:
-            if ':' in port:
-                host, port = port.split(':')
+            if ":" in port:
+                host, port = port.split(":")
             port = int(port)
         return host, port
 
-    def run(self, user_id, port=None, branch_directory=None,
-            codehosting_endpoint_url=None, inet=False, protocol=None):
+    def run(
+        self,
+        user_id,
+        port=None,
+        branch_directory=None,
+        codehosting_endpoint_url=None,
+        inet=False,
+        protocol=None,
+    ):
         from lp.codehosting.bzrutils import install_oops_handler
-        from lp.codehosting.vfs import (
-            get_lp_server,
-            hooks,
-            )
+        from lp.codehosting.vfs import get_lp_server, hooks
+
         install_oops_handler(user_id)
         four_gig = int(4e9)
         resource.setrlimit(resource.RLIMIT_AS, (four_gig, four_gig))
@@ -121,8 +122,11 @@ class cmd_launchpad_server(Command):
         if protocol is None:
             protocol = transport_server_registry.get()
         lp_server = get_lp_server(
-            int(user_id), codehosting_endpoint_url, branch_directory,
-            seen_new_branch.seen)
+            int(user_id),
+            codehosting_endpoint_url,
+            branch_directory,
+            seen_new_branch.seen,
+        )
         lp_server.start_server()
         try:
             old_lockdir_timeout = lockdir._DEFAULT_TIMEOUT_SECONDS
@@ -141,8 +145,14 @@ register_command(cmd_launchpad_server)
 
 
 def load_tests(standard_tests, module, loader):
-    standard_tests.addTests(loader.loadTestsFromModuleNames(
-        [__name__ + '.' + x for x in [
-            'test_lpserve',
-        ]]))
+    standard_tests.addTests(
+        loader.loadTestsFromModuleNames(
+            [
+                __name__ + "." + x
+                for x in [
+                    "test_lpserve",
+                ]
+            ]
+        )
+    )
     return standard_tests
diff --git a/brzplugins/lpserve/test_lpserve.py b/brzplugins/lpserve/test_lpserve.py
index 1ce8641..2a12593 100644
--- a/brzplugins/lpserve/test_lpserve.py
+++ b/brzplugins/lpserve/test_lpserve.py
@@ -4,15 +4,9 @@
 import os
 import subprocess
 
-from breezy import (
-    osutils,
-    tests,
-    )
+from breezy import osutils, tests
 
-from lp.codehosting import (
-    get_brz_path,
-    get_BRZ_PLUGIN_PATH_for_subprocess,
-    )
+from lp.codehosting import get_brz_path, get_BRZ_PLUGIN_PATH_for_subprocess
 
 
 class TestCaseWithSubprocess(tests.TestCaseWithTransport):
@@ -24,8 +18,9 @@ class TestCaseWithSubprocess(tests.TestCaseWithTransport):
     same as the breezy.tests.TestCase version.
     """
 
-    def start_bzr_subprocess(self, process_args, env_changes=None,
-                             working_dir=None):
+    def start_bzr_subprocess(
+        self, process_args, env_changes=None, working_dir=None
+    ):
         """Start bzr in a subprocess for testing.
 
         Copied and modified from `breezy.tests.TestCase.start_bzr_subprocess`.
@@ -38,7 +33,7 @@ class TestCaseWithSubprocess(tests.TestCaseWithTransport):
         """
         if env_changes is None:
             env_changes = {}
-        env_changes['BRZ_PLUGIN_PATH'] = get_BRZ_PLUGIN_PATH_for_subprocess()
+        env_changes["BRZ_PLUGIN_PATH"] = get_BRZ_PLUGIN_PATH_for_subprocess()
         old_env = {}
 
         def cleanup_environment():
@@ -62,8 +57,11 @@ class TestCaseWithSubprocess(tests.TestCaseWithTransport):
             command = [brz_path]
             command.extend(process_args)
             process = self._popen(
-                command, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
+                command,
+                stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+            )
         finally:
             restore_environment()
             if cwd is not None:
diff --git a/charm/launchpad/reactive/launchpad.py b/charm/launchpad/reactive/launchpad.py
index 7cdf698..076a8c9 100644
--- a/charm/launchpad/reactive/launchpad.py
+++ b/charm/launchpad/reactive/launchpad.py
@@ -4,44 +4,41 @@
 import subprocess
 
 from charmhelpers.core import hookenv
-from charms.reactive import (
-    remove_state,
-    set_state,
-    when,
-    when_not,
-    )
+from charms.reactive import remove_state, set_state, when, when_not
 from ols import base
 
 
 # Monkey-patch layer:ols.
 def create_virtualenv(wheels_dir, codedir, python_exe):
     subprocess.run(
-        ['make', 'compile', 'PYTHON={}'.format(python_exe)],
-        cwd=codedir, check=True)
+        ["make", "compile", "PYTHON={}".format(python_exe)],
+        cwd=codedir,
+        check=True,
+    )
 
 
 base.create_virtualenv = create_virtualenv
 
 
-@when('ols.configured')
-@when_not('service.configured')
+@when("ols.configured")
+@when_not("service.configured")
 def configure():
-    hookenv.log('Hello world!')
-    set_state('service.configured')
+    hookenv.log("Hello world!")
+    set_state("service.configured")
 
 
-@when('service.configured')
+@when("service.configured")
 def check_is_running():
-    hookenv.status_set('active', 'Ready')
+    hookenv.status_set("active", "Ready")
 
 
-@when('config.changed.build_label')
+@when("config.changed.build_label")
 def build_label_changed():
-    remove_state('ols.service.installed')
-    remove_state('ols.configured')
-    remove_state('service.configured')
+    remove_state("ols.service.installed")
+    remove_state("ols.configured")
+    remove_state("service.configured")
 
 
-@when('config.changed')
+@when("config.changed")
 def config_changed():
-    remove_state('service.configured')
+    remove_state("service.configured")
diff --git a/configs/development/gunicorn.conf.py b/configs/development/gunicorn.conf.py
index 2c914aa..e2c2ed5 100644
--- a/configs/development/gunicorn.conf.py
+++ b/configs/development/gunicorn.conf.py
@@ -1,15 +1,14 @@
-from fnmatch import fnmatch
 import os
-
+from fnmatch import fnmatch
 
 BASE_DIR = os.path.realpath(
-    os.path.join(os.path.dirname(__file__), '..', '..'))
+    os.path.join(os.path.dirname(__file__), "..", "..")
+)
 CONFIG_DIR = os.path.dirname(__file__)
 
 
 def find_files(directory, pattern):
-    """Find files in `directory` matching `pattern`.
-    """
+    """Find files in `directory` matching `pattern`."""
     result = []
     for root, dirs, files in os.walk(directory):
         for basename in files:
@@ -33,4 +32,4 @@ reload = True
 # Watch config files changes from the source tree.
 reload_extra_files = find_files(CONFIG_DIR, "*")
 for pattern in ["*.zcml", "*.conf"]:
-    reload_extra_files += find_files(os.path.join(BASE_DIR, 'lib'), pattern)
+    reload_extra_files += find_files(os.path.join(BASE_DIR, "lib"), pattern)
diff --git a/configs/test-playground/gunicorn.conf.py b/configs/test-playground/gunicorn.conf.py
index 85fc029..12b5af6 100644
--- a/configs/test-playground/gunicorn.conf.py
+++ b/configs/test-playground/gunicorn.conf.py
@@ -1,14 +1,13 @@
 import os
 
-
 config_dir = os.path.dirname(__file__)
-log_dir = os.path.join(config_dir, '..', '..', 'logs')
+log_dir = os.path.join(config_dir, "..", "..", "logs")
 
 bind = [":8085", ":8087"]
 workers = 1
 threads = 10
 log_level = "DEBUG"
 
-log_file = os.path.join(log_dir, 'gunicorn.log')
-error_logfile = os.path.join(log_dir, 'gunicorn-error.log')
-access_logfile = os.path.join(log_dir, 'gunicorn-access.log')
+log_file = os.path.join(log_dir, "gunicorn.log")
+error_logfile = os.path.join(log_dir, "gunicorn-error.log")
+access_logfile = os.path.join(log_dir, "gunicorn-access.log")
diff --git a/configs/testrunner-appserver/gunicorn.conf.py b/configs/testrunner-appserver/gunicorn.conf.py
index 85fc029..12b5af6 100644
--- a/configs/testrunner-appserver/gunicorn.conf.py
+++ b/configs/testrunner-appserver/gunicorn.conf.py
@@ -1,14 +1,13 @@
 import os
 
-
 config_dir = os.path.dirname(__file__)
-log_dir = os.path.join(config_dir, '..', '..', 'logs')
+log_dir = os.path.join(config_dir, "..", "..", "logs")
 
 bind = [":8085", ":8087"]
 workers = 1
 threads = 10
 log_level = "DEBUG"
 
-log_file = os.path.join(log_dir, 'gunicorn.log')
-error_logfile = os.path.join(log_dir, 'gunicorn-error.log')
-access_logfile = os.path.join(log_dir, 'gunicorn-access.log')
+log_file = os.path.join(log_dir, "gunicorn.log")
+error_logfile = os.path.join(log_dir, "gunicorn-error.log")
+access_logfile = os.path.join(log_dir, "gunicorn-access.log")
diff --git a/configs/testrunner/gunicorn.conf.py b/configs/testrunner/gunicorn.conf.py
index 85fc029..12b5af6 100644
--- a/configs/testrunner/gunicorn.conf.py
+++ b/configs/testrunner/gunicorn.conf.py
@@ -1,14 +1,13 @@
 import os
 
-
 config_dir = os.path.dirname(__file__)
-log_dir = os.path.join(config_dir, '..', '..', 'logs')
+log_dir = os.path.join(config_dir, "..", "..", "logs")
 
 bind = [":8085", ":8087"]
 workers = 1
 threads = 10
 log_level = "DEBUG"
 
-log_file = os.path.join(log_dir, 'gunicorn.log')
-error_logfile = os.path.join(log_dir, 'gunicorn-error.log')
-access_logfile = os.path.join(log_dir, 'gunicorn-access.log')
+log_file = os.path.join(log_dir, "gunicorn.log")
+error_logfile = os.path.join(log_dir, "gunicorn-error.log")
+access_logfile = os.path.join(log_dir, "gunicorn-access.log")
diff --git a/daemons/buildd-manager.tac b/daemons/buildd-manager.tac
index bd87b03..234c976 100644
--- a/daemons/buildd-manager.tac
+++ b/daemons/buildd-manager.tac
@@ -10,19 +10,15 @@ from twisted.application import service
 from twisted.scripts.twistd import ServerOptions
 
 from lp.buildmaster.manager import BuilddManager
-from lp.services.config import (
-    config,
-    dbconfig,
-    )
+from lp.services.config import config, dbconfig
 from lp.services.daemons import readyservice
 from lp.services.mail.sendmail import set_immediate_mail_delivery
 from lp.services.scripts import execute_zcml_for_scripts
 from lp.services.twistedsupport.features import setup_feature_controller
 from lp.services.twistedsupport.loggingsupport import RotatableFileLogObserver
 
-
 execute_zcml_for_scripts()
-dbconfig.override(dbuser='buildd_manager', isolation_level='read_committed')
+dbconfig.override(dbuser="buildd_manager", isolation_level="read_committed")
 # XXX wgrant 2011-09-24 bug=29744: initZopeless used to do this.
 # Should be removed from callsites verified to not need it.
 set_immediate_mail_delivery(True)
@@ -39,9 +35,10 @@ resource.setrlimit(resource.RLIMIT_NOFILE, (soft_nofile, hard_nofile))
 options = ServerOptions()
 options.parseOptions()
 
-application = service.Application('BuilddManager')
+application = service.Application("BuilddManager")
 application.addComponent(
-    RotatableFileLogObserver(options.get('logfile')), ignoreClass=1)
+    RotatableFileLogObserver(options.get("logfile")), ignoreClass=1
+)
 
 # Service that announces when the daemon is ready.
 readyservice.ReadyService().setServiceParent(application)
@@ -51,4 +48,4 @@ service = BuilddManager()
 service.setServiceParent(application)
 
 # Allow use of feature flags.
-setup_feature_controller('buildd-manager')
+setup_feature_controller("buildd-manager")
diff --git a/daemons/distributionmirror_http_server.tac b/daemons/distributionmirror_http_server.tac
index 191bed0..f42590b 100644
--- a/daemons/distributionmirror_http_server.tac
+++ b/daemons/distributionmirror_http_server.tac
@@ -4,19 +4,15 @@
 # Twisted Application Configuration file.
 # Use with "twistd2.4 -y <file.tac>", e.g. "twistd -noy server.tac"
 
-from twisted.application import (
-    service,
-    strports,
-    )
+from twisted.application import service, strports
 from twisted.web import server
 
 from lp.registry.tests.distributionmirror_http_server import (
     DistributionMirrorTestHTTPServer,
-    )
+)
 from lp.services.daemons import readyservice
 
-
-application = service.Application('DistributionMirrorTestHTTPServer')
+application = service.Application("DistributionMirrorTestHTTPServer")
 httpserverService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
diff --git a/daemons/librarian.tac b/daemons/librarian.tac
index bb071f1..062ef54 100644
--- a/daemons/librarian.tac
+++ b/daemons/librarian.tac
@@ -7,75 +7,66 @@
 import os
 import signal
 
-
 # Turn off the http_proxy environment variable if it is set. We
 # don't need it, but we do need to contact Keystone & Swift directly.
 # We could use no_proxy, but this requires keeping it in sync with
 # reality on dev, staging & production servers.
-if 'http_proxy' in os.environ:
-    del os.environ['http_proxy']
-if 'HTTP_PROXY' in os.environ:
-    del os.environ['HTTP_PROXY']
+if "http_proxy" in os.environ:
+    del os.environ["http_proxy"]
+if "HTTP_PROXY" in os.environ:
+    del os.environ["HTTP_PROXY"]
 
 from meliae import scanner
-from twisted.application import (
-    service,
-    strports,
-    )
+from twisted.application import service, strports
 from twisted.internet import reactor
 from twisted.python import log
 from twisted.scripts.twistd import ServerOptions
 from twisted.web import server
 
-from lp.services.config import (
-    config,
-    dbconfig,
-    )
+from lp.services.config import config, dbconfig
 from lp.services.daemons import readyservice
-from lp.services.librarian.interfaces.client import (
-    DUMP_FILE,
-    SIGDUMPMEM,
-    )
-from lp.services.librarianserver import (
-    db,
-    storage,
-    web as fatweb,
-    )
+from lp.services.librarian.interfaces.client import DUMP_FILE, SIGDUMPMEM
+from lp.services.librarianserver import db, storage
+from lp.services.librarianserver import web as fatweb
 from lp.services.librarianserver.libraryprotocol import FileUploadFactory
 from lp.services.scripts import execute_zcml_for_scripts
 from lp.services.twistedsupport.features import setup_feature_controller
 from lp.services.twistedsupport.loggingsupport import set_up_oops_reporting
 
-
 # Connect to database
 dbconfig.override(
     dbuser=config.librarian.dbuser,
-    isolation_level=config.librarian.isolation_level)
+    isolation_level=config.librarian.isolation_level,
+)
 # Note that this doesn't include *-configure-testing.zcml.  That doesn't
 # matter today, but if it does at some point then we'll need to use a
 # different ZCML file if config.isTestRunner() is true.
 execute_zcml_for_scripts(
-    scriptzcmlfilename='librarian.zcml', setup_interaction=False)
+    scriptzcmlfilename="librarian.zcml", setup_interaction=False
+)
 
-if os.environ.get('LP_TEST_INSTANCE'):
+if os.environ.get("LP_TEST_INSTANCE"):
     # Running in ephemeral mode: get the root dir from the environment and
     # dynamically allocate ports.
-    path = os.environ['LP_LIBRARIAN_ROOT']
+    path = os.environ["LP_LIBRARIAN_ROOT"]
 else:
     path = config.librarian_server.root
 if config.librarian_server.upstream_host:
     upstreamHost = config.librarian_server.upstream_host
     upstreamPort = config.librarian_server.upstream_port
     reactor.addSystemEventTrigger(
-        'before', 'startup', log.msg,
-        'Using upstream librarian http://%s:%d' %
-        (upstreamHost, upstreamPort))
+        "before",
+        "startup",
+        log.msg,
+        "Using upstream librarian http://%s:%d" % (upstreamHost, upstreamPort),
+    )
 else:
     upstreamHost = upstreamPort = None
     reactor.addSystemEventTrigger(
-        'before', 'startup', log.msg, 'Not using upstream librarian')
+        "before", "startup", log.msg, "Not using upstream librarian"
+    )
 
-application = service.Application('Librarian')
+application = service.Application("Librarian")
 librarianService = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
@@ -91,20 +82,25 @@ def setUpListener(uploadPort, webPort, restricted):
         set.
     """
     librarian_storage = storage.LibrarianStorage(
-        path, db.Library(restricted=restricted))
+        path, db.Library(restricted=restricted)
+    )
     upload_factory = FileUploadFactory(librarian_storage)
     strports.service("tcp:%d" % uploadPort, upload_factory).setServiceParent(
-        librarianService)
+        librarianService
+    )
     root = fatweb.LibraryFileResource(
-        librarian_storage, upstreamHost, upstreamPort)
-    root.putChild(b'search', fatweb.DigestSearchResource(librarian_storage))
-    root.putChild(b'robots.txt', fatweb.robotsTxt)
+        librarian_storage, upstreamHost, upstreamPort
+    )
+    root.putChild(b"search", fatweb.DigestSearchResource(librarian_storage))
+    root.putChild(b"robots.txt", fatweb.robotsTxt)
     site = server.Site(root)
     site.displayTracebacks = False
     strports.service("tcp:%d" % webPort, site).setServiceParent(
-        librarianService)
+        librarianService
+    )
+
 
-if os.environ.get('LP_TEST_INSTANCE'):
+if os.environ.get("LP_TEST_INSTANCE"):
     # Running in ephemeral mode: allocate ports on demand.
     setUpListener(0, 0, restricted=False)
     setUpListener(0, 0, restricted=True)
@@ -122,15 +118,16 @@ else:
 options = ServerOptions()
 options.parseOptions()
 logfile = options.get("logfile")
-observer = set_up_oops_reporting('librarian', 'librarian', logfile)
+observer = set_up_oops_reporting("librarian", "librarian", logfile)
 application.addComponent(observer, ignoreClass=1)
 
 # Allow use of feature flags.
-setup_feature_controller('librarian')
+setup_feature_controller("librarian")
 
 
 # Setup a signal handler to dump the process' memory upon 'kill -44'.
 def sigdumpmem_handler(signum, frame):
     scanner.dump_all_objects(DUMP_FILE)
 
+
 signal.signal(SIGDUMPMEM, sigdumpmem_handler)
diff --git a/daemons/numbercruncher.tac b/daemons/numbercruncher.tac
index 212c7f3..8efe410 100644
--- a/daemons/numbercruncher.tac
+++ b/daemons/numbercruncher.tac
@@ -14,15 +14,15 @@ from lp.services.statsd.numbercruncher import NumberCruncher
 from lp.services.twistedsupport.features import setup_feature_controller
 from lp.services.twistedsupport.loggingsupport import RotatableFileLogObserver
 
-
 execute_zcml_for_scripts()
 
 options = ServerOptions()
 options.parseOptions()
 
-application = service.Application('BuilddManager')
+application = service.Application("BuilddManager")
 application.addComponent(
-    RotatableFileLogObserver(options.get('logfile')), ignoreClass=1)
+    RotatableFileLogObserver(options.get("logfile")), ignoreClass=1
+)
 
 # Service that announces when the daemon is ready.
 readyservice.ReadyService().setServiceParent(application)
@@ -33,4 +33,4 @@ service = NumberCruncher()
 service.setServiceParent(application)
 
 # Allow use of feature flags.
-setup_feature_controller('number-cruncher')
+setup_feature_controller("number-cruncher")
diff --git a/daemons/sftp.tac b/daemons/sftp.tac
index 354a55c..f9e6a5f 100644
--- a/daemons/sftp.tac
+++ b/daemons/sftp.tac
@@ -11,24 +11,23 @@ from twisted.protocols.policies import TimeoutFactory
 
 from lp.codehosting.sshserver.daemon import (
     ACCESS_LOG_NAME,
-    get_key_path,
     LOG_NAME,
-    make_portal,
     PRIVATE_KEY_FILE,
     PUBLIC_KEY_FILE,
-    )
+    get_key_path,
+    make_portal,
+)
 from lp.services.config import config
 from lp.services.daemons import readyservice
 from lp.services.twistedsupport.gracefulshutdown import (
     ConnTrackingFactoryWrapper,
-    make_web_status_service,
     OrderedMultiService,
     ShutdownCleanlyService,
-    )
-
+    make_web_status_service,
+)
 
 # Construct an Application that has the codehosting SSH server.
-application = service.Application('sftponly')
+application = service.Application("sftponly")
 
 ordered_services = OrderedMultiService()
 ordered_services.setServiceParent(application)
@@ -36,12 +35,14 @@ ordered_services.setServiceParent(application)
 tracked_factories = set()
 
 web_svc = make_web_status_service(
-    config.codehosting.web_status_port, tracked_factories)
+    config.codehosting.web_status_port, tracked_factories
+)
 web_svc.setServiceParent(ordered_services)
 
 shutdown_cleanly_svc = ShutdownCleanlyService(tracked_factories)
 shutdown_cleanly_svc.setServiceParent(ordered_services)
 
+
 def ssh_factory_decorator(factory):
     """Add idle timeouts and connection tracking to a factory."""
     f = TimeoutFactory(factory, timeoutPeriod=config.codehosting.idle_timeout)
@@ -49,6 +50,7 @@ def ssh_factory_decorator(factory):
     tracked_factories.add(f)
     return f
 
+
 svc = SSHService(
     portal=make_portal(),
     private_key_path=get_key_path(PRIVATE_KEY_FILE),
@@ -59,7 +61,8 @@ svc = SSHService(
     strport=config.codehosting.port,
     factory_decorator=ssh_factory_decorator,
     banner=config.codehosting.banner,
-    moduli_path=config.codehosting.moduli_path)
+    moduli_path=config.codehosting.moduli_path,
+)
 svc.setServiceParent(shutdown_cleanly_svc)
 
 # Service that announces when the daemon is ready
diff --git a/doc/conf.py b/doc/conf.py
index 5460048..f43f4fd 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -13,107 +13,107 @@
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))
 
 # -- General configuration ----------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo']
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo"]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = 'Launchpad'
-copyright = '2004-2022, Canonical Ltd.'
+project = "Launchpad"
+copyright = "2004-2022, Canonical Ltd."
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = 'dev'
+version = "dev"
 # The full version, including alpha/beta/rc tags.
-release = 'dev'
+release = "dev"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
 
 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
 
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
 
 
 # -- Options for HTML output --------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
-html_theme = 'vanilla'
+html_theme = "vanilla"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}
 
 # Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['.']
+html_theme_path = ["."]
 
 # The name for this set of Sphinx documents.  If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None
 
 # A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
 
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-html_logo = 'images/logo.png'
+html_logo = "images/logo.png"
 
 # The name of an image file (within the static path) to use as favicon of the
 # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-html_favicon = 'images/favicon.png'
+html_favicon = "images/favicon.png"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
@@ -122,60 +122,60 @@ html_static_path = []
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
 
 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
 html_sidebars = {
-    '**': [
-        'globaltoc.html',
+    "**": [
+        "globaltoc.html",
     ]
 }
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
 
 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
 
 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
 
 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
 
 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
 
 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
 
 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
 
 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it.  The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
 
 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Launchpaddoc'
+htmlhelp_basename = "Launchpaddoc"
 
 
 # -- Options for LaTeX output -------------------------------------------------
 
 # The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
+# latex_paper_size = 'letter'
 
 # The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
+# latex_font_size = '10pt'
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author,
@@ -183,26 +183,26 @@ htmlhelp_basename = 'Launchpaddoc'
 
 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-#latex_logo = None
+# latex_logo = None
 
 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
 
 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
 
 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
 
 # Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
+# latex_preamble = ''
 
 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
 
 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
 
 
 # -- Options for manual page output -------------------------------------------
@@ -211,4 +211,4 @@ htmlhelp_basename = 'Launchpaddoc'
 # (source start file, name, description, authors, manual section).
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None}
+intersphinx_mapping = {"http://docs.python.org/": None}
diff --git a/lib/contrib/glock.py b/lib/contrib/glock.py
index 542363e..91fe155 100644
--- a/lib/contrib/glock.py
+++ b/lib/contrib/glock.py
@@ -35,7 +35,6 @@ import re
 import string
 import sys
 
-
 # System-dependent imports for locking implementation:
 _windows = (sys.platform == 'win32')
 
diff --git a/lib/devscripts/sourcecode.py b/lib/devscripts/sourcecode.py
index 2dd528a..f088b1a 100644
--- a/lib/devscripts/sourcecode.py
+++ b/lib/devscripts/sourcecode.py
@@ -3,11 +3,7 @@
 
 """Tools for maintaining the Launchpad source code."""
 
-from __future__ import (
-    absolute_import,
-    print_function,
-    )
-
+from __future__ import absolute_import, print_function
 
 __all__ = [
     'interpret_config',
@@ -22,7 +18,6 @@ import os
 import shutil
 import sys
 
-
 try:
     from breezy import ui
     from breezy.branch import Branch
@@ -30,13 +25,10 @@ try:
         BzrError,
         IncompatibleRepositories,
         NotBranchError,
-        )
+    )
     from breezy.plugin import load_plugins
     from breezy.revisionspec import RevisionSpec
-    from breezy.trace import (
-        enable_default_logging,
-        report_exception,
-        )
+    from breezy.trace import enable_default_logging, report_exception
     from breezy.upgrade import upgrade
     from breezy.workingtree import WorkingTree
 except ImportError:
diff --git a/lib/devscripts/tests/test_sourcecode.py b/lib/devscripts/tests/test_sourcecode.py
index 40f5916..e4c122c 100644
--- a/lib/devscripts/tests/test_sourcecode.py
+++ b/lib/devscripts/tests/test_sourcecode.py
@@ -3,10 +3,7 @@
 
 """Module docstring goes here."""
 
-from __future__ import (
-    absolute_import,
-    print_function,
-    )
+from __future__ import absolute_import, print_function
 
 import io
 import os
@@ -14,7 +11,6 @@ import shutil
 import tempfile
 import unittest
 
-
 try:
     from breezy.bzr.bzrdir import BzrDir
     from breezy.tests import TestCase
@@ -30,7 +26,7 @@ from devscripts.sourcecode import (
     interpret_config,
     parse_config_file,
     plan_update,
-    )
+)
 
 
 class TestParseConfigFile(unittest.TestCase):
diff --git a/lib/launchpad_loggerhead/app.py b/lib/launchpad_loggerhead/app.py
index 7697cc9..8326a12 100644
--- a/lib/launchpad_loggerhead/app.py
+++ b/lib/launchpad_loggerhead/app.py
@@ -4,65 +4,42 @@
 import logging
 import os
 import threading
-from urllib.parse import (
-    urlencode,
-    urljoin,
-    )
 import xmlrpc.client
+from urllib.parse import urlencode, urljoin
 
-from breezy import (
-    errors,
-    lru_cache,
-    urlutils,
-    )
+import oops_wsgi
+from breezy import errors, lru_cache, urlutils
 from breezy.transport import get_transport
 from breezy.url_policy_open import open_only_scheme
-from loggerhead.apps import (
-    favicon_app,
-    static_app,
-    )
+from loggerhead.apps import favicon_app, static_app
 from loggerhead.apps.branch import BranchWSGIApp
-import oops_wsgi
-from openid.consumer.consumer import (
-    CANCEL,
-    Consumer,
-    FAILURE,
-    SUCCESS,
-    )
-from openid.extensions.sreg import (
-    SRegRequest,
-    SRegResponse,
-    )
+from openid.consumer.consumer import CANCEL, FAILURE, SUCCESS, Consumer
+from openid.extensions.sreg import SRegRequest, SRegResponse
 from paste.fileapp import DataApp
 from paste.httpexceptions import (
     HTTPMovedPermanently,
     HTTPNotFound,
     HTTPUnauthorized,
-    )
-from paste.request import (
-    construct_url,
-    parse_querystring,
-    path_info_pop,
-    )
+)
+from paste.request import construct_url, parse_querystring, path_info_pop
 
 from lp.code.interfaces.codehosting import (
     BRANCH_TRANSPORT,
     LAUNCHPAD_ANONYMOUS,
     LAUNCHPAD_SERVICES,
-    )
+)
 from lp.codehosting.vfs import get_lp_server
 from lp.services.config import config
 from lp.services.webapp.errorlog import ErrorReportingUtility
 from lp.services.webapp.vhosts import allvhosts
 from lp.xmlrpc import faults
 
-
-robots_txt = '''\
+robots_txt = """\
 User-agent: *
 Disallow: /
-'''
+"""
 
-robots_app = DataApp(robots_txt, content_type='text/plain')
+robots_app = DataApp(robots_txt, content_type="text/plain")
 
 
 thread_locals = threading.local()
@@ -81,24 +58,25 @@ def check_fault(fault, *fault_classes):
 
 
 class RootApp:
-
     def __init__(self, session_var):
         self.graph_cache = lru_cache.LRUCache(10)
         self.session_var = session_var
-        self.log = logging.getLogger('lp-loggerhead')
+        self.log = logging.getLogger("lp-loggerhead")
 
     def get_transport(self):
-        t = getattr(thread_locals, 'transport', None)
+        t = getattr(thread_locals, "transport", None)
         if t is None:
             thread_locals.transport = get_transport(
-                config.codehosting.internal_branch_by_id_root)
+                config.codehosting.internal_branch_by_id_root
+            )
         return thread_locals.transport
 
     def get_branchfs(self):
-        t = getattr(thread_locals, 'branchfs', None)
+        t = getattr(thread_locals, "branchfs", None)
         if t is None:
             thread_locals.branchfs = xmlrpc.client.ServerProxy(
-                config.codehosting.codehosting_endpoint)
+                config.codehosting.codehosting_endpoint
+            )
         return thread_locals.branchfs
 
     def _make_consumer(self, environ):
@@ -118,13 +96,18 @@ class RootApp:
         username.
         """
         openid_request = self._make_consumer(environ).begin(
-            config.launchpad.openid_provider_root)
-        openid_request.addExtension(SRegRequest(required=['nickname']))
+            config.launchpad.openid_provider_root
+        )
+        openid_request.addExtension(SRegRequest(required=["nickname"]))
         back_to = construct_url(environ)
-        raise HTTPMovedPermanently(openid_request.redirectURL(
-            config.codehosting.secure_codebrowse_root,
-            config.codehosting.secure_codebrowse_root + '+login/?'
-            + urlencode({'back_to': back_to})))
+        raise HTTPMovedPermanently(
+            openid_request.redirectURL(
+                config.codehosting.secure_codebrowse_root,
+                config.codehosting.secure_codebrowse_root
+                + "+login/?"
+                + urlencode({"back_to": back_to}),
+            )
+        )
 
     def _complete_login(self, environ, start_response):
         """Complete the OpenID authentication process.
@@ -139,33 +122,35 @@ class RootApp:
         # Passing query['openid.return_to'] here is massive cheating, but
         # given we control the endpoint who cares.
         response = self._make_consumer(environ).complete(
-            query, query['openid.return_to'])
+            query, query["openid.return_to"]
+        )
         if response.status == SUCCESS:
-            self.log.error('open id response: SUCCESS')
+            self.log.error("open id response: SUCCESS")
             sreg_info = SRegResponse.fromSuccessResponse(response)
             if not sreg_info:
-                self.log.error('sreg_info is None.')
+                self.log.error("sreg_info is None.")
                 exc = HTTPUnauthorized()
                 exc.explanation = (
-                  "You don't have a Launchpad account. Check that you're "
-                  "logged in as the right user, or log into Launchpad and try "
-                  "again.")
+                    "You don't have a Launchpad account. Check that you're "
+                    "logged in as the right user, or log into Launchpad and "
+                    "try again."
+                )
                 raise exc
-            environ[self.session_var]['identity_url'] = response.identity_url
-            environ[self.session_var]['user'] = sreg_info['nickname']
-            raise HTTPMovedPermanently(query['back_to'])
+            environ[self.session_var]["identity_url"] = response.identity_url
+            environ[self.session_var]["user"] = sreg_info["nickname"]
+            raise HTTPMovedPermanently(query["back_to"])
         elif response.status == FAILURE:
-            self.log.error('open id response: FAILURE: %s', response.message)
+            self.log.error("open id response: FAILURE: %s", response.message)
             exc = HTTPUnauthorized()
             exc.explanation = response.message
             raise exc
         elif response.status == CANCEL:
-            self.log.error('open id response: CANCEL')
+            self.log.error("open id response: CANCEL")
             exc = HTTPUnauthorized()
             exc.explanation = "Authentication cancelled."
             raise exc
         else:
-            self.log.error('open id response: UNKNOWN')
+            self.log.error("open id response: UNKNOWN")
             exc = HTTPUnauthorized()
             exc.explanation = "Unknown OpenID response."
             raise exc
@@ -177,29 +162,30 @@ class RootApp:
         """
         environ[self.session_var].clear()
         query = dict(parse_querystring(environ))
-        next_url = query.get('next_to')
+        next_url = query.get("next_to")
         if next_url is None:
-            next_url = allvhosts.configs['mainsite'].rooturl
+            next_url = allvhosts.configs["mainsite"].rooturl
         raise HTTPMovedPermanently(next_url)
 
     def __call__(self, environ, start_response):
-        request_is_private = (
-            environ['SERVER_PORT'] == str(config.codebrowse.private_port))
-        environ['loggerhead.static.url'] = environ['SCRIPT_NAME']
-        if environ['PATH_INFO'].startswith('/static/'):
+        request_is_private = environ["SERVER_PORT"] == str(
+            config.codebrowse.private_port
+        )
+        environ["loggerhead.static.url"] = environ["SCRIPT_NAME"]
+        if environ["PATH_INFO"].startswith("/static/"):
             path_info_pop(environ)
             return static_app(environ, start_response)
-        elif environ['PATH_INFO'] == '/favicon.ico':
+        elif environ["PATH_INFO"] == "/favicon.ico":
             return favicon_app(environ, start_response)
-        elif environ['PATH_INFO'] == '/robots.txt':
+        elif environ["PATH_INFO"] == "/robots.txt":
             return robots_app(environ, start_response)
         elif not request_is_private:
-            if environ['PATH_INFO'].startswith('/+login'):
+            if environ["PATH_INFO"].startswith("/+login"):
                 return self._complete_login(environ, start_response)
-            elif environ['PATH_INFO'].startswith('/+logout'):
+            elif environ["PATH_INFO"].startswith("/+logout"):
                 return self._logout(environ, start_response)
-        path = environ['PATH_INFO']
-        trailingSlashCount = len(path) - len(path.rstrip('/'))
+        path = environ["PATH_INFO"]
+        trailingSlashCount = len(path) - len(path.rstrip("/"))
         if request_is_private:
             # Requests on the private port are internal API requests from
             # something that has already performed security checks.  As
@@ -208,30 +194,33 @@ class RootApp:
             user = LAUNCHPAD_SERVICES
         else:
             identity_url = environ[self.session_var].get(
-                'identity_url', LAUNCHPAD_ANONYMOUS)
-            user = environ[self.session_var].get('user', LAUNCHPAD_ANONYMOUS)
+                "identity_url", LAUNCHPAD_ANONYMOUS
+            )
+            user = environ[self.session_var].get("user", LAUNCHPAD_ANONYMOUS)
         lp_server = get_lp_server(
-            identity_url, branch_transport=self.get_transport())
+            identity_url, branch_transport=self.get_transport()
+        )
         lp_server.start_server()
         try:
 
             try:
                 branchfs = self.get_branchfs()
                 transport_type, info, trail = branchfs.translatePath(
-                    identity_url, urlutils.escape(path))
+                    identity_url, urlutils.escape(path)
+                )
             except xmlrpc.client.Fault as f:
                 if check_fault(f, faults.PathTranslationError):
                     raise HTTPNotFound()
                 elif check_fault(f, faults.PermissionDenied):
                     # If we're not allowed to see the branch...
-                    if environ['wsgi.url_scheme'] != 'https':
+                    if environ["wsgi.url_scheme"] != "https":
                         # ... the request shouldn't have come in over http, as
                         # requests for private branches over http should be
                         # redirected to https by the dynamic rewrite script we
                         # use (which runs before this code is reached), but
                         # just in case...
                         env_copy = environ.copy()
-                        env_copy['wsgi.url_scheme'] = 'https'
+                        env_copy["wsgi.url_scheme"] = "https"
                         raise HTTPMovedPermanently(construct_url(env_copy))
                     elif user != LAUNCHPAD_ANONYMOUS:
                         # ... if the user is already logged in and still can't
@@ -248,24 +237,26 @@ class RootApp:
             if transport_type != BRANCH_TRANSPORT:
                 raise HTTPNotFound()
             trail = urlutils.unescape(trail)
-            trail += trailingSlashCount * '/'
+            trail += trailingSlashCount * "/"
             amount_consumed = len(path) - len(trail)
             consumed = path[:amount_consumed]
-            branch_name = consumed.strip('/')
-            self.log.info('Using branch: %s', branch_name)
-            if trail and not trail.startswith('/'):
-                trail = '/' + trail
-            environ['PATH_INFO'] = trail
-            environ['SCRIPT_NAME'] += consumed.rstrip('/')
+            branch_name = consumed.strip("/")
+            self.log.info("Using branch: %s", branch_name)
+            if trail and not trail.startswith("/"):
+                trail = "/" + trail
+            environ["PATH_INFO"] = trail
+            environ["SCRIPT_NAME"] += consumed.rstrip("/")
             branch_url = lp_server.get_url() + branch_name
             branch_link = urljoin(
-                config.codebrowse.launchpad_root, branch_name)
+                config.codebrowse.launchpad_root, branch_name
+            )
             cachepath = os.path.join(
-                config.codebrowse.cachepath, branch_name[1:])
+                config.codebrowse.cachepath, branch_name[1:]
+            )
             if not os.path.isdir(cachepath):
                 os.makedirs(cachepath)
-            self.log.info('branch_url: %s', branch_url)
-            private = info['private']
+            self.log.info("branch_url: %s", branch_url)
+            private = info["private"]
             if private:
                 self.log.info("Branch is private")
             else:
@@ -273,16 +264,22 @@ class RootApp:
 
             try:
                 bzr_branch = open_only_scheme(
-                    lp_server.get_url().strip(':/'), branch_url)
+                    lp_server.get_url().strip(":/"), branch_url
+                )
             except errors.NotBranchError as err:
-                self.log.warning('Not a branch: %s', err)
+                self.log.warning("Not a branch: %s", err)
                 raise HTTPNotFound()
             bzr_branch.lock_read()
             try:
                 view = BranchWSGIApp(
-                    bzr_branch, branch_name, {'cachepath': cachepath},
-                    self.graph_cache, branch_link=branch_link,
-                    served_url=None, private=private)
+                    bzr_branch,
+                    branch_name,
+                    {"cachepath": cachepath},
+                    self.graph_cache,
+                    branch_link=branch_link,
+                    served_url=None,
+                    private=private,
+                )
                 return view.app(environ, start_response)
             finally:
                 bzr_branch.repository.revisions.clear_cache()
@@ -299,7 +296,7 @@ class RootApp:
 def make_error_utility():
     """Make an error utility for logging errors from codebrowse."""
     error_utility = ErrorReportingUtility()
-    error_utility.configure('codebrowse')
+    error_utility.configure("codebrowse")
     return error_utility
 
 
@@ -307,7 +304,7 @@ def make_error_utility():
 # with the same one that lpnet uses for reporting OOPSes to users, or at
 # least something that looks similar.  But even this is better than the
 # "Internal Server Error" you'd get otherwise.
-_oops_html_template = '''\
+_oops_html_template = """\
 <html>
 <head><title>Oops! %(id)s</title></head>
 <body>
@@ -317,7 +314,7 @@ Please try again in a few minutes, and if the problem persists file a bug at
 <a href="https://bugs.launchpad.net/launchpad"
 >https://bugs.launchpad.net/launchpad</a>
 and quote OOPS-ID <strong>%(id)s</strong>
-</p></body></html>'''
+</p></body></html>"""
 
 
 def oops_middleware(app):
@@ -328,5 +325,9 @@ def oops_middleware(app):
     code 500).
     """
     error_utility = make_error_utility()
-    return oops_wsgi.make_app(app, error_utility._oops_config,
-            template=_oops_html_template, soft_start_timeout=7000)
+    return oops_wsgi.make_app(
+        app,
+        error_utility._oops_config,
+        template=_oops_html_template,
+        soft_start_timeout=7000,
+    )
diff --git a/lib/launchpad_loggerhead/revision.py b/lib/launchpad_loggerhead/revision.py
index 54e38c0..5504780 100644
--- a/lib/launchpad_loggerhead/revision.py
+++ b/lib/launchpad_loggerhead/revision.py
@@ -3,7 +3,7 @@
 
 """WSGI Middleware to add Launchpad revision headers to loggerhead."""
 
-__all__ = ['RevisionHeaderHandler']
+__all__ = ["RevisionHeaderHandler"]
 
 from lp.app import versioninfo
 
@@ -24,8 +24,11 @@ class RevisionHeaderHandler:
 
         Add the appropriate revision numbers in the response headers.
         """
+
         def response_hook(status, response_headers, exc_info=None):
             response_headers.append(
-                ('X-Launchpad-Revision', versioninfo.revision))
+                ("X-Launchpad-Revision", versioninfo.revision)
+            )
             return start_response(status, response_headers, exc_info)
+
         return self.application(environ, response_hook)
diff --git a/lib/launchpad_loggerhead/session.py b/lib/launchpad_loggerhead/session.py
index ce214f7..48d213d 100644
--- a/lib/launchpad_loggerhead/session.py
+++ b/lib/launchpad_loggerhead/session.py
@@ -7,10 +7,7 @@ import hashlib
 import pickle
 
 from secure_cookie.cookie import SecureCookie
-from werkzeug.http import (
-    dump_cookie,
-    parse_cookie,
-    )
+from werkzeug.http import dump_cookie, parse_cookie
 
 from lp.services.config import config
 
@@ -22,7 +19,6 @@ class LaunchpadSecureCookie(SecureCookie):
 
     # The OpenID consumer stores non-JSON-encodable objects in the session.
     class serialization_method:
-
         @classmethod
         def dumps(cls, value):
             # Use protocol 2 for Python 2 compatibility.
@@ -70,15 +66,17 @@ class SessionHandler:
                 "path": "/",
                 "httponly": True,
                 "secure": environ["wsgi.url_scheme"] == "https",
-                }
+            }
             if session:
                 cookie = dump_cookie(
-                    self.cookie_name, session.serialize(), **cookie_kwargs)
+                    self.cookie_name, session.serialize(), **cookie_kwargs
+                )
                 response_headers.append(("Set-Cookie", cookie))
             elif existed:
                 # Delete the cookie.
                 cookie = dump_cookie(
-                    self.cookie_name, "", expires=0, **cookie_kwargs)
+                    self.cookie_name, "", expires=0, **cookie_kwargs
+                )
                 response_headers.append(("Set-Cookie", cookie))
             return start_response(status, response_headers, exc_info)
 
diff --git a/lib/launchpad_loggerhead/testing.py b/lib/launchpad_loggerhead/testing.py
index 9f63a62..0847bf1 100644
--- a/lib/launchpad_loggerhead/testing.py
+++ b/lib/launchpad_loggerhead/testing.py
@@ -2,8 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'LoggerheadFixture',
-    ]
+    "LoggerheadFixture",
+]
 
 import os.path
 import time
@@ -16,13 +16,10 @@ from lp.services.osutils import (
     get_pid_from_file,
     kill_by_pidfile,
     remove_if_exists,
-    )
+)
 from lp.services.pidfile import pidfile_path
 from lp.services.scripts.tests import run_script
-from lp.testing.layers import (
-    BaseLayer,
-    LayerProcessController,
-    )
+from lp.testing.layers import BaseLayer, LayerProcessController
 
 
 class LoggerheadFixtureException(Exception):
@@ -34,22 +31,26 @@ class LoggerheadFixture(Fixture):
 
     def _setUp(self):
         pidfile = pidfile_path(
-            "codebrowse", use_config=LayerProcessController.appserver_config)
+            "codebrowse", use_config=LayerProcessController.appserver_config
+        )
         pid = get_pid_from_file(pidfile)
         if pid is not None:
             warnings.warn(
                 "Attempt to start LoggerheadFixture with an existing "
-                "instance (%d) running in %s." % (pid, pidfile))
+                "instance (%d) running in %s." % (pid, pidfile)
+            )
             kill_by_pidfile(pidfile)
         self.logfile = os.path.join(config.codebrowse.log_folder, "debug.log")
         remove_if_exists(self.logfile)
         self.addCleanup(kill_by_pidfile, pidfile)
         run_script(
-            os.path.join("scripts", "start-loggerhead.py"), ["--daemon"],
+            os.path.join("scripts", "start-loggerhead.py"),
+            ["--daemon"],
             # The testrunner-appserver config provides the correct
             # openid_provider_root URL.
             extra_env={"LPCONFIG": BaseLayer.appserver_config_name},
-            universal_newlines=False)
+            universal_newlines=False,
+        )
         self._waitForStartup()
 
     def _hasStarted(self):
diff --git a/lib/launchpad_loggerhead/tests.py b/lib/launchpad_loggerhead/tests.py
index 52cd884..560042c 100644
--- a/lib/launchpad_loggerhead/tests.py
+++ b/lib/launchpad_loggerhead/tests.py
@@ -1,14 +1,11 @@
 # Copyright 2010-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from urllib.parse import (
-    urlencode,
-    urlsplit,
-    )
+from urllib.parse import urlencode, urlsplit
 
-from paste.httpexceptions import HTTPExceptionHandler
 import requests
 import soupmatchers
+from paste.httpexceptions import HTTPExceptionHandler
 from testtools.content import Content
 from testtools.content_type import UTF8_TEXT
 from zope.security.proxy import removeSecurityProxy
@@ -21,47 +18,44 @@ from lp.app import versioninfo
 from lp.app.enums import InformationType
 from lp.services.config import config
 from lp.services.webapp.vhosts import allvhosts
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    AppServerLayer,
-    DatabaseFunctionalLayer,
-    )
+from lp.testing import TestCase, TestCaseWithFactory
+from lp.testing.layers import AppServerLayer, DatabaseFunctionalLayer
 
-
-SESSION_VAR = 'lh.session'
+SESSION_VAR = "lh.session"
 
 # See lib/launchpad_loggerhead/wsgi.py for the production mechanism for
 # getting the secret.
-SECRET = b'secret'
+SECRET = b"secret"
 
 
 def session_scribbler(app, test):
     """Squirrel away the session variable."""
+
     def scribble(environ, start_response):
         test.session = environ[SESSION_VAR]  # Yay for mutables.
         return app(environ, start_response)
+
     return scribble
 
 
 class SimpleLogInRootApp(RootApp):
     """A mock root app that doesn't require open id."""
+
     def _complete_login(self, environ, start_response):
-        environ[SESSION_VAR]['user'] = 'bob'
-        start_response('200 OK', [('Content-type', 'text/plain')])
-        return [b'\n']
+        environ[SESSION_VAR]["user"] = "bob"
+        start_response("200 OK", [("Content-type", "text/plain")])
+        return [b"\n"]
 
     def __call__(self, environ, start_response):
         codebrowse_netloc = urlsplit(
-            config.codehosting.secure_codebrowse_root).netloc
-        if environ['HTTP_HOST'] == codebrowse_netloc:
+            config.codehosting.secure_codebrowse_root
+        ).netloc
+        if environ["HTTP_HOST"] == codebrowse_netloc:
             return RootApp.__call__(self, environ, start_response)
         else:
             # Return a fake response.
-            start_response('200 OK', [('Content-type', 'text/plain')])
-            return [b'This is a dummy destination.\n']
+            start_response("200 OK", [("Content-type", "text/plain")])
+            return [b"This is a dummy destination.\n"]
 
 
 class TestLogout(TestCase):
@@ -76,29 +70,32 @@ class TestLogout(TestCase):
         app = SessionHandler(app, SESSION_VAR, SECRET)
         self.cookie_name = app.cookie_name
         self.browser = Browser(wsgi_app=app)
-        self.browser.open(
-            config.codehosting.secure_codebrowse_root + '+login')
+        self.browser.open(config.codehosting.secure_codebrowse_root + "+login")
 
     def testLoggerheadLogout(self):
         # We start logged in as 'bob'.
-        self.assertEqual(self.session['user'], 'bob')
+        self.assertEqual(self.session["user"], "bob")
         self.browser.open(
-            config.codehosting.secure_codebrowse_root + 'favicon.ico')
-        self.assertEqual(self.session['user'], 'bob')
+            config.codehosting.secure_codebrowse_root + "favicon.ico"
+        )
+        self.assertEqual(self.session["user"], "bob")
         self.assertTrue(self.browser.cookies.get(self.cookie_name))
 
         # When we visit +logout, our session is gone.
         self.browser.open(
-            config.codehosting.secure_codebrowse_root + '+logout')
+            config.codehosting.secure_codebrowse_root + "+logout"
+        )
         self.assertEqual(self.session, {})
 
         # By default, we have been redirected to the Launchpad root.
         self.assertEqual(
-            self.browser.url, allvhosts.configs['mainsite'].rooturl)
+            self.browser.url, allvhosts.configs["mainsite"].rooturl
+        )
 
         # The user has an empty session now.
         self.browser.open(
-            config.codehosting.secure_codebrowse_root + 'favicon.ico')
+            config.codehosting.secure_codebrowse_root + "favicon.ico"
+        )
         self.assertEqual(self.session, {})
 
     def testLoggerheadLogoutRedirect(self):
@@ -109,18 +106,21 @@ class TestLogout(TestCase):
         # TestLoginAndLogout.test_CookieLogoutPage).
 
         # Here, we will have a more useless example of the basic machinery.
-        dummy_root = 'http://dummy.test/'
+            dummy_root = "http://dummy.test/"
         self.browser.open(
-            config.codehosting.secure_codebrowse_root +
-            '+logout?' + urlencode(dict(next_to=dummy_root + '+logout')))
+            config.codehosting.secure_codebrowse_root
+            + "+logout?"
+            + urlencode(dict(next_to=dummy_root + "+logout"))
+        )
 
         # We are logged out, as before.
         self.assertEqual(self.session, {})
 
         # Now, though, we are redirected to the ``next_to`` destination.
-        self.assertEqual(self.browser.url, dummy_root + '+logout')
-        self.assertEqual(self.browser.contents,
-                         b'This is a dummy destination.\n')
+        self.assertEqual(self.browser.url, dummy_root + "+logout")
+        self.assertEqual(
+            self.browser.contents, b"This is a dummy destination.\n"
+        )
 
 
 class TestWSGI(TestCaseWithFactory):
@@ -141,87 +141,112 @@ class TestWSGI(TestCaseWithFactory):
                 return [b""]
 
         self.addDetail(
-            "loggerhead-debug", Content(UTF8_TEXT, get_debug_log_bytes))
+            "loggerhead-debug", Content(UTF8_TEXT, get_debug_log_bytes)
+        )
 
     def test_public_port_public_branch(self):
         # Requests for public branches on the public port are allowed.
         db_branch, _ = self.create_branch_and_tree()
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.port, db_branch.unique_name)
+            config.codebrowse.port,
+            db_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
         title_tag = soupmatchers.Tag(
-            "page title", "title", text="%s : changes" % db_branch.unique_name)
+            "page title", "title", text="%s : changes" % db_branch.unique_name
+        )
         self.assertThat(response.text, soupmatchers.HTMLContains(title_tag))
 
     def test_public_port_private_branch(self):
         # Requests for private branches on the public port send the user
         # through the login workflow.
         db_branch, _ = self.create_branch_and_tree(
-            information_type=InformationType.USERDATA)
+            information_type=InformationType.USERDATA
+        )
         naked_branch = removeSecurityProxy(db_branch)
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.port, naked_branch.unique_name)
+            config.codebrowse.port,
+            naked_branch.unique_name,
+        )
         response = requests.get(
-            branch_url, headers={"X-Forwarded-Scheme": "https"},
-            allow_redirects=False)
+            branch_url,
+            headers={"X-Forwarded-Scheme": "https"},
+            allow_redirects=False,
+        )
         self.assertEqual(301, response.status_code)
         self.assertEqual(
             "testopenid.test:8085",
-            urlsplit(response.headers["Location"]).netloc)
+            urlsplit(response.headers["Location"]).netloc,
+        )
 
     def test_private_port_public_branch(self):
         # Requests for public branches on the private port are allowed.
         db_branch, _ = self.create_branch_and_tree()
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.private_port, db_branch.unique_name)
+            config.codebrowse.private_port,
+            db_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
         title_tag = soupmatchers.Tag(
-            "page title", "title", text="%s : changes" % db_branch.unique_name)
+            "page title", "title", text="%s : changes" % db_branch.unique_name
+        )
         self.assertThat(response.text, soupmatchers.HTMLContains(title_tag))
 
     def test_private_port_private_branch(self):
         # Requests for private branches on the private port are allowed.
         db_branch, _ = self.create_branch_and_tree(
-            information_type=InformationType.USERDATA)
+            information_type=InformationType.USERDATA
+        )
         naked_branch = removeSecurityProxy(db_branch)
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.private_port, naked_branch.unique_name)
+            config.codebrowse.private_port,
+            naked_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
         title_tag = soupmatchers.Tag(
-            "page title", "title",
-            text="%s : changes" % naked_branch.unique_name)
+            "page title",
+            "title",
+            text="%s : changes" % naked_branch.unique_name,
+        )
         self.assertThat(response.text, soupmatchers.HTMLContains(title_tag))
 
     def test_revision_header_present(self):
         db_branch, _ = self.create_branch_and_tree()
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.port, db_branch.unique_name)
+            config.codebrowse.port,
+            db_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
         self.assertEqual(
-            versioninfo.revision,
-            response.headers['X-Launchpad-Revision'])
+            versioninfo.revision, response.headers["X-Launchpad-Revision"]
+        )
 
     def test_vary_header_present(self):
         db_branch, _ = self.create_branch_and_tree()
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.port, db_branch.unique_name)
+            config.codebrowse.port,
+            db_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
-        self.assertEqual('Cookie, Authorization', response.headers['Vary'])
+        self.assertEqual("Cookie, Authorization", response.headers["Vary"])
 
     def test_security_headers_present(self):
         db_branch, _ = self.create_branch_and_tree()
         branch_url = "http://127.0.0.1:%d/%s"; % (
-            config.codebrowse.port, db_branch.unique_name)
+            config.codebrowse.port,
+            db_branch.unique_name,
+        )
         response = requests.get(branch_url)
         self.assertEqual(200, response.status_code)
         self.assertEqual(
             "frame-ancestors 'self';",
-            response.headers['Content-Security-Policy'])
-        self.assertEqual('SAMEORIGIN', response.headers['X-Frame-Options'])
-        self.assertEqual('nosniff', response.headers['X-Content-Type-Options'])
-        self.assertEqual('1; mode=block', response.headers['X-XSS-Protection'])
+            response.headers["Content-Security-Policy"],
+        )
+        self.assertEqual("SAMEORIGIN", response.headers["X-Frame-Options"])
+        self.assertEqual("nosniff", response.headers["X-Content-Type-Options"])
+        self.assertEqual("1; mode=block", response.headers["X-XSS-Protection"])
diff --git a/lib/launchpad_loggerhead/wsgi.py b/lib/launchpad_loggerhead/wsgi.py
index eefe119..00defe8 100644
--- a/lib/launchpad_loggerhead/wsgi.py
+++ b/lib/launchpad_loggerhead/wsgi.py
@@ -2,17 +2,17 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'LoggerheadApplication',
-    ]
+    "LoggerheadApplication",
+]
 
 import atexit
 import logging
-from optparse import OptionParser
 import os.path
 import signal
 import sys
 import time
 import traceback
+from optparse import OptionParser
 
 from gunicorn.app.base import Application
 from gunicorn.glogging import Logger
@@ -21,25 +21,15 @@ from paste.httpexceptions import HTTPExceptionHandler
 from paste.request import construct_url
 from paste.wsgilib import catch_errors
 
-from launchpad_loggerhead.app import (
-    oops_middleware,
-    RootApp,
-    )
+import lp.codehosting  # noqa: F401
+from launchpad_loggerhead.app import RootApp, oops_middleware
 from launchpad_loggerhead.revision import RevisionHeaderHandler
 from launchpad_loggerhead.session import SessionHandler
-import lp.codehosting  # noqa: F401
 from lp.services.config import config
-from lp.services.pidfile import (
-    pidfile_path,
-    remove_pidfile,
-    )
-from lp.services.scripts import (
-    logger,
-    logger_options,
-    )
+from lp.services.pidfile import pidfile_path, remove_pidfile
+from lp.services.scripts import logger, logger_options
 from lp.services.scripts.logger import LaunchpadFormatter
 
-
 log = logging.getLogger("loggerhead")
 
 
@@ -49,16 +39,17 @@ SESSION_VAR = "lh.session"
 def set_standard_headers(app):
     def wrapped(environ, start_response):
         def response_hook(status, response_headers, exc_info=None):
-            response_headers.extend([
-                # Our response always varies based on authentication.
-                ('Vary', 'Cookie, Authorization'),
-
-                # Prevent clickjacking and content sniffing attacks.
-                ('Content-Security-Policy', "frame-ancestors 'self';"),
-                ('X-Frame-Options', 'SAMEORIGIN'),
-                ('X-Content-Type-Options', 'nosniff'),
-                ('X-XSS-Protection', '1; mode=block'),
-                ])
+            response_headers.extend(
+                [
+                    # Our response always varies based on authentication.
+                    ("Vary", "Cookie, Authorization"),
+                    # Prevent clickjacking and content sniffing attacks.
+                    ("Content-Security-Policy", "frame-ancestors 'self';"),
+                    ("X-Frame-Options", "SAMEORIGIN"),
+                    ("X-Content-Type-Options", "nosniff"),
+                    ("X-XSS-Protection", "1; mode=block"),
+                ]
+            )
             return start_response(status, response_headers, exc_info)
 
         return app(environ, response_hook)
@@ -75,22 +66,26 @@ def log_request_start_and_stop(app):
         def request_done_ok():
             log.info(
                 "Processed ok %s [%0.3f seconds]",
-                url, time.time() - start_time)
+                url,
+                time.time() - start_time,
+            )
 
         def request_done_err(exc_info):
             log.info(
                 "Processed err %s [%0.3f seconds]: %s",
-                url, time.time() - start_time,
-                traceback.format_exception_only(*exc_info[:2]))
+                url,
+                time.time() - start_time,
+                traceback.format_exception_only(*exc_info[:2]),
+            )
 
         return catch_errors(
-            app, environ, start_response, request_done_err, request_done_ok)
+            app, environ, start_response, request_done_err, request_done_ok
+        )
 
     return wrapped
 
 
 class LoggerheadLogger(Logger):
-
     def setup(self, cfg):
         super().setup(cfg)
         formatter = LaunchpadFormatter(datefmt=None)
@@ -102,7 +97,8 @@ class LoggerheadLogger(Logger):
         parser = OptionParser()
         logger_options(parser)
         log_options, _ = parser.parse_args(
-            ['-q', '--ms', '--log-file=DEBUG:%s' % cfg.errorlog])
+            ["-q", "--ms", "--log-file=DEBUG:%s" % cfg.errorlog]
+        )
         logger(log_options)
 
 
@@ -113,18 +109,19 @@ def _on_starting_hook(arbiter):
     # Register a trivial SIGTERM handler so that the atexit hook is called
     # on SIGTERM.
     signal.signal(
-        signal.SIGTERM, lambda signum, frame: sys.exit(-signal.SIGTERM))
+        signal.SIGTERM, lambda signum, frame: sys.exit(-signal.SIGTERM)
+    )
 
 
 class LoggerheadApplication(Application):
-
     def __init__(self, **kwargs):
         self.options = kwargs
         super().__init__()
 
     def init(self, parser, opts, args):
-        top = os.path.abspath(os.path.join(
-            os.path.dirname(__file__), os.pardir, os.pardir))
+        top = os.path.abspath(
+            os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
+        )
         listen_host = config.codebrowse.listen_host
         log_folder = config.codebrowse.log_folder or os.path.join(top, "logs")
         if not os.path.exists(log_folder):
@@ -135,7 +132,7 @@ class LoggerheadApplication(Application):
             "bind": [
                 "%s:%s" % (listen_host, config.codebrowse.port),
                 "%s:%s" % (listen_host, config.codebrowse.private_port),
-                ],
+            ],
             "capture_output": True,
             "errorlog": os.path.join(log_folder, "debug.log"),
             # Trust that firewalls only permit sending requests to
@@ -156,30 +153,32 @@ class LoggerheadApplication(Application):
                 "X-FORWARDED-PROTO": "https",
                 "X-FORWARDED-SCHEME": "https",
                 "X-FORWARDED-SSL": "on",
-                },
+            },
             # Kill threads after 300 seconds of inactivity.  This is
             # insanely high, but loggerhead is often pretty slow.
             "timeout": 300,
             "threads": 10,
             "worker_class": "gthread",
-            }
+        }
         cfg.update(self.options)
         return cfg
 
     def _load_brz_plugins(self):
         from breezy.plugin import load_plugins
+
         load_plugins()
 
         import breezy.plugins
+
         if getattr(breezy.plugins, "loom", None) is None:
             log.error("Loom plugin loading failed.")
 
     def load(self):
         self._load_brz_plugins()
 
-        with open(os.path.join(
-                config.root, config.codebrowse.secret_path),
-                "rb") as secret_file:
+        with open(
+            os.path.join(config.root, config.codebrowse.secret_path), "rb"
+        ) as secret_file:
             secret = secret_file.read()
 
         app = RootApp(SESSION_VAR)
diff --git a/lib/lp/__init__.py b/lib/lp/__init__.py
index 824c98e..1a13c25 100644
--- a/lib/lp/__init__.py
+++ b/lib/lp/__init__.py
@@ -46,5 +46,4 @@ Here are these layers:
 # own MessageFactory
 from zope.i18nmessageid import MessageFactory
 
-
 _ = MessageFactory("launchpad")
diff --git a/lib/lp/_schema_circular_imports.py b/lib/lp/_schema_circular_imports.py
index ba34be6..4165c4b 100644
--- a/lib/lp/_schema_circular_imports.py
+++ b/lib/lp/_schema_circular_imports.py
@@ -24,40 +24,40 @@ from lp.code.interfaces.gitrepository import IGitRepository
 from lp.code.interfaces.sourcepackagerecipe import ISourcePackageRecipe
 from lp.registry.interfaces.commercialsubscription import (
     ICommercialSubscription,
-    )
+)
 from lp.registry.interfaces.distribution import IDistribution
 from lp.registry.interfaces.distributionmirror import IDistributionMirror
 from lp.registry.interfaces.distributionsourcepackage import (
     IDistributionSourcePackage,
-    )
+)
 from lp.registry.interfaces.distroseries import IDistroSeries
 from lp.registry.interfaces.distroseriesdifference import (
     IDistroSeriesDifference,
-    )
+)
 from lp.registry.interfaces.distroseriesdifferencecomment import (
     IDistroSeriesDifferenceComment,
-    )
+)
 from lp.registry.interfaces.ociproject import IOCIProject
 from lp.registry.interfaces.person import (
     IPerson,
     IPersonEditRestricted,
     IPersonLimitedView,
     IPersonViewRestricted,
-    )
+)
 from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.productseries import IProductSeries
 from lp.registry.interfaces.sourcepackage import (
     ISourcePackage,
     ISourcePackageEdit,
     ISourcePackagePublic,
-    )
+)
 from lp.services.auth.interfaces import IAccessToken
 from lp.services.comments.interfaces.conversation import IComment
 from lp.services.messages.interfaces.message import (
     IIndexedMessage,
     IMessage,
     IUserToUserEmail,
-    )
+)
 from lp.services.messages.interfaces.messagerevision import IMessageRevision
 from lp.services.webservice.apihelpers import (
     patch_collection_property,
@@ -66,129 +66,148 @@ from lp.services.webservice.apihelpers import (
     patch_list_parameter_type,
     patch_plain_parameter_type,
     patch_reference_property,
-    )
+)
 from lp.soyuz.interfaces.archive import IArchive
 from lp.soyuz.interfaces.archivesubscriber import IArchiveSubscriber
 from lp.soyuz.interfaces.distroarchseries import IDistroArchSeries
 from lp.soyuz.interfaces.queue import IPackageUpload
 
-
 # IBuilder
-patch_reference_property(IBuilder, 'current_build', IBuildFarmJob)
+patch_reference_property(IBuilder, "current_build", IBuildFarmJob)
 
-patch_reference_property(IPersonViewRestricted, 'archive', IArchive)
-patch_collection_property(IPersonViewRestricted, 'ppas', IArchive)
+patch_reference_property(IPersonViewRestricted, "archive", IArchive)
+patch_collection_property(IPersonViewRestricted, "ppas", IArchive)
 patch_plain_parameter_type(
-    IPersonLimitedView, 'getPPAByName', 'distribution', IDistribution)
-patch_entry_return_type(IPersonLimitedView, 'getPPAByName', IArchive)
+    IPersonLimitedView, "getPPAByName", "distribution", IDistribution
+)
+patch_entry_return_type(IPersonLimitedView, "getPPAByName", IArchive)
 patch_plain_parameter_type(
-    IPersonEditRestricted, 'createPPA', 'distribution', IDistribution)
-patch_entry_return_type(IPersonEditRestricted, 'createPPA', IArchive)
+    IPersonEditRestricted, "createPPA", "distribution", IDistribution
+)
+patch_entry_return_type(IPersonEditRestricted, "createPPA", IArchive)
 
-patch_reference_property(ISourcePackagePublic, 'distroseries', IDistroSeries)
-patch_reference_property(ISourcePackagePublic, 'productseries', IProductSeries)
-patch_entry_return_type(ISourcePackagePublic, 'getBranch', IBranch)
-patch_plain_parameter_type(ISourcePackageEdit, 'setBranch', 'branch', IBranch)
-patch_reference_property(ISourcePackage, 'distribution', IDistribution)
+patch_reference_property(ISourcePackagePublic, "distroseries", IDistroSeries)
+patch_reference_property(ISourcePackagePublic, "productseries", IProductSeries)
+patch_entry_return_type(ISourcePackagePublic, "getBranch", IBranch)
+patch_plain_parameter_type(ISourcePackageEdit, "setBranch", "branch", IBranch)
+patch_reference_property(ISourcePackage, "distribution", IDistribution)
 
 # IPerson
-patch_entry_return_type(IPerson, 'createRecipe', ISourcePackageRecipe)
-patch_list_parameter_type(IPerson, 'createRecipe', 'distroseries',
-                          Reference(schema=IDistroSeries))
-patch_plain_parameter_type(IPerson, 'createRecipe', 'daily_build_archive',
-                           IArchive)
-patch_plain_parameter_type(IPerson, 'getArchiveSubscriptionURL', 'archive',
-                           IArchive)
+patch_entry_return_type(IPerson, "createRecipe", ISourcePackageRecipe)
+patch_list_parameter_type(
+    IPerson, "createRecipe", "distroseries", Reference(schema=IDistroSeries)
+)
+patch_plain_parameter_type(
+    IPerson, "createRecipe", "daily_build_archive", IArchive
+)
+patch_plain_parameter_type(
+    IPerson, "getArchiveSubscriptionURL", "archive", IArchive
+)
 patch_collection_return_type(
-    IPerson, 'getArchiveSubscriptions', IArchiveSubscriber)
-patch_entry_return_type(IPerson, 'getRecipe', ISourcePackageRecipe)
-patch_collection_return_type(IPerson, 'getOwnedProjects', IProduct)
+    IPerson, "getArchiveSubscriptions", IArchiveSubscriber
+)
+patch_entry_return_type(IPerson, "getRecipe", ISourcePackageRecipe)
+patch_collection_return_type(IPerson, "getOwnedProjects", IProduct)
 
 # IBuildFarmJob
-patch_reference_property(IBuildFarmJob, 'buildqueue_record', IBuildQueue)
+patch_reference_property(IBuildFarmJob, "buildqueue_record", IBuildQueue)
 
 # IComment
-patch_reference_property(IComment, 'comment_author', IPerson)
+patch_reference_property(IComment, "comment_author", IPerson)
 
 # ICommercialSubscription
-patch_reference_property(ICommercialSubscription, 'product', IProduct)
+patch_reference_property(ICommercialSubscription, "product", IProduct)
 patch_reference_property(
-    ICommercialSubscription, 'distribution', IDistribution)
+    ICommercialSubscription, "distribution", IDistribution
+)
 
 # IDistribution
-patch_collection_property(IDistribution, 'series', IDistroSeries)
-patch_collection_property(IDistribution, 'derivatives', IDistroSeries)
-patch_reference_property(IDistribution, 'currentseries', IDistroSeries)
-patch_entry_return_type(IDistribution, 'getArchive', IArchive)
-patch_entry_return_type(IDistribution, 'getSeries', IDistroSeries)
+patch_collection_property(IDistribution, "series", IDistroSeries)
+patch_collection_property(IDistribution, "derivatives", IDistroSeries)
+patch_reference_property(IDistribution, "currentseries", IDistroSeries)
+patch_entry_return_type(IDistribution, "getArchive", IArchive)
+patch_entry_return_type(IDistribution, "getSeries", IDistroSeries)
 patch_collection_return_type(
-    IDistribution, 'getDevelopmentSeries', IDistroSeries)
+    IDistribution, "getDevelopmentSeries", IDistroSeries
+)
 patch_entry_return_type(
-    IDistribution, 'getSourcePackage', IDistributionSourcePackage)
-patch_entry_return_type(IDistribution, 'getOCIProject', IOCIProject)
-patch_collection_return_type(
-    IDistribution, 'searchSourcePackages', IDistributionSourcePackage)
-patch_reference_property(IDistribution, 'main_archive', IArchive)
-patch_collection_property(IDistribution, 'all_distro_archives', IArchive)
-patch_entry_return_type(IDistribution, 'newOCIProject', IOCIProject)
+    IDistribution, "getSourcePackage", IDistributionSourcePackage
+)
+patch_entry_return_type(IDistribution, "getOCIProject", IOCIProject)
 patch_collection_return_type(
-    IDistribution, 'searchOCIProjects', IOCIProject)
-patch_collection_property(IDistribution, 'vulnerabilities', IVulnerability)
+    IDistribution, "searchSourcePackages", IDistributionSourcePackage
+)
+patch_reference_property(IDistribution, "main_archive", IArchive)
+patch_collection_property(IDistribution, "all_distro_archives", IArchive)
+patch_entry_return_type(IDistribution, "newOCIProject", IOCIProject)
+patch_collection_return_type(IDistribution, "searchOCIProjects", IOCIProject)
+patch_collection_property(IDistribution, "vulnerabilities", IVulnerability)
 
 
 # IDistributionMirror
-patch_reference_property(IDistributionMirror, 'distribution', IDistribution)
+patch_reference_property(IDistributionMirror, "distribution", IDistribution)
 
 
 # IDistroSeries
 patch_entry_return_type(
-    IDistroSeries, 'getDistroArchSeries', IDistroArchSeries)
-patch_reference_property(IDistroSeries, 'main_archive', IArchive)
+    IDistroSeries, "getDistroArchSeries", IDistroArchSeries
+)
+patch_reference_property(IDistroSeries, "main_archive", IArchive)
 patch_collection_property(
-    IDistroSeries, 'enabled_architectures', IDistroArchSeries)
-patch_reference_property(IDistroSeries, 'distribution', IDistribution)
+    IDistroSeries, "enabled_architectures", IDistroArchSeries
+)
+patch_reference_property(IDistroSeries, "distribution", IDistribution)
 patch_plain_parameter_type(
-    IDistroSeries, 'getPackageUploads', 'archive', IArchive)
+    IDistroSeries, "getPackageUploads", "archive", IArchive
+)
 patch_collection_return_type(
-    IDistroSeries, 'getPackageUploads', IPackageUpload)
-patch_reference_property(IDistroSeries, 'previous_series', IDistroSeries)
+    IDistroSeries, "getPackageUploads", IPackageUpload
+)
+patch_reference_property(IDistroSeries, "previous_series", IDistroSeries)
 patch_reference_property(
-    IDistroSeries, 'nominatedarchindep', IDistroArchSeries)
-patch_collection_return_type(IDistroSeries, 'getDerivedSeries', IDistroSeries)
-patch_collection_return_type(IDistroSeries, 'getParentSeries', IDistroSeries)
+    IDistroSeries, "nominatedarchindep", IDistroArchSeries
+)
+patch_collection_return_type(IDistroSeries, "getDerivedSeries", IDistroSeries)
+patch_collection_return_type(IDistroSeries, "getParentSeries", IDistroSeries)
 patch_plain_parameter_type(
-    IDistroSeries, 'getDifferencesTo', 'parent_series', IDistroSeries)
+    IDistroSeries, "getDifferencesTo", "parent_series", IDistroSeries
+)
 patch_collection_return_type(
-    IDistroSeries, 'getDifferencesTo', IDistroSeriesDifference)
+    IDistroSeries, "getDifferencesTo", IDistroSeriesDifference
+)
 patch_collection_return_type(
-    IDistroSeries, 'getDifferenceComments', IDistroSeriesDifferenceComment)
+    IDistroSeries, "getDifferenceComments", IDistroSeriesDifferenceComment
+)
 
 
 # IDistroSeriesDifference
 patch_reference_property(
-    IDistroSeriesDifference, 'latest_comment', IDistroSeriesDifferenceComment)
+    IDistroSeriesDifference, "latest_comment", IDistroSeriesDifferenceComment
+)
 
 # IDistroSeriesDifferenceComment
 patch_reference_property(
-    IDistroSeriesDifferenceComment, 'comment_author', IPerson)
+    IDistroSeriesDifferenceComment, "comment_author", IPerson
+)
 
 # IIndexedMessage
-patch_reference_property(IIndexedMessage, 'inside', IBugTask)
+patch_reference_property(IIndexedMessage, "inside", IBugTask)
 
 # IMessage
-patch_reference_property(IMessage, 'owner', IPerson)
-patch_collection_property(IMessage, 'revisions', IMessageRevision)
+patch_reference_property(IMessage, "owner", IPerson)
+patch_collection_property(IMessage, "revisions", IMessageRevision)
 
 # IUserToUserEmail
-patch_reference_property(IUserToUserEmail, 'sender', IPerson)
-patch_reference_property(IUserToUserEmail, 'recipient', IPerson)
+patch_reference_property(IUserToUserEmail, "sender", IPerson)
+patch_reference_property(IUserToUserEmail, "recipient", IPerson)
 
 # IPerson
 patch_collection_return_type(
-    IPerson, 'getBugSubscriberPackages', IDistributionSourcePackage)
+    IPerson, "getBugSubscriberPackages", IDistributionSourcePackage
+)
 
 # IProductSeries
-patch_reference_property(IProductSeries, 'product', IProduct)
+patch_reference_property(IProductSeries, "product", IProduct)
 
 # IAccessToken
-patch_reference_property(IAccessToken, 'git_repository', IGitRepository)
+patch_reference_property(IAccessToken, "git_repository", IGitRepository)
diff --git a/lib/lp/layers.py b/lib/lp/layers.py
index 8066614..e25de52 100644
--- a/lib/lp/layers.py
+++ b/lib/lp/layers.py
@@ -8,16 +8,16 @@ Also define utilities that manipulate layers.
 
 from lazr.restful.interfaces import IWebServiceLayer
 from zope.interface import (
+    Interface,
     alsoProvides,
     directlyProvidedBy,
     directlyProvides,
-    Interface,
-    )
+)
 from zope.publisher.interfaces.browser import (
     IBrowserRequest,
     IBrowserSkinType,
     IDefaultBrowserLayer,
-    )
+)
 
 
 def setAdditionalLayer(request, layer):
@@ -58,6 +58,8 @@ class PageTestLayer(LaunchpadLayer):
     The SystemErrorView base class looks at the request to see if it provides
     this interface.  If so, it renders tracebacks as plain text.
     """
+
+
 # A few tests register this interface directly as a layer, bypassing the zcml
 # machinery.  This means that they don't get the proper SkinType interface
 # applied to them.  We add it here for convenience.
diff --git a/lib/lp/patchwebservice.py b/lib/lp/patchwebservice.py
index 2008f99..20c84fb 100644
--- a/lib/lp/patchwebservice.py
+++ b/lib/lp/patchwebservice.py
@@ -13,5 +13,4 @@ which tells `lazr.restful` that it should look for webservice exports here.
 # import bugs.  Break this up into a per-package thing.
 from lp import _schema_circular_imports
 
-
 _schema_circular_imports
diff --git a/lib/lp/security.py b/lib/lp/security.py
index ae9d99f..7b0a560 100644
--- a/lib/lp/security.py
+++ b/lib/lp/security.py
@@ -8,19 +8,16 @@ module of the corresponding application package, e.g. `lp.registry.security`.
 """
 
 __all__ = [
-    'AdminByAdminsTeam',
-    'AdminByBuilddAdmin',
-    'AdminByCommercialTeamOrAdmins',
-    'ModerateByRegistryExpertsOrAdmins',
-    'OnlyBazaarExpertsAndAdmins',
-    'OnlyRosettaExpertsAndAdmins',
-    'OnlyVcsImportsAndAdmins',
-    ]
-
-from datetime import (
-    datetime,
-    timedelta,
-    )
+    "AdminByAdminsTeam",
+    "AdminByBuilddAdmin",
+    "AdminByCommercialTeamOrAdmins",
+    "ModerateByRegistryExpertsOrAdmins",
+    "OnlyBazaarExpertsAndAdmins",
+    "OnlyRosettaExpertsAndAdmins",
+    "OnlyVcsImportsAndAdmins",
+]
+
+from datetime import datetime, timedelta
 
 import pytz
 from zope.interface import Interface
@@ -35,7 +32,8 @@ class ViewByLoggedInUser(AuthorizationBase):
     By default, any logged-in user can see anything. More restrictive
     rulesets are defined in other IAuthorization implementations.
     """
-    permission = 'launchpad.View'
+
+    permission = "launchpad.View"
     usedfor = Interface
 
     def checkAuthenticated(self, user):
@@ -49,14 +47,15 @@ class AnyAllowedPersonDeferredToView(AuthorizationBase):
     An authenticated user is delegated to the View security adapter. Since
     anonymous users are not logged in, they are denied.
     """
-    permission = 'launchpad.AnyAllowedPerson'
+
+    permission = "launchpad.AnyAllowedPerson"
     usedfor = Interface
 
     def checkUnauthenticated(self):
         return False
 
     def checkAuthenticated(self, user):
-        yield self.obj, 'launchpad.View'
+        yield self.obj, "launchpad.View"
 
 
 class AnyLegitimatePerson(AuthorizationBase):
@@ -65,7 +64,8 @@ class AnyLegitimatePerson(AuthorizationBase):
     Some operations are open to Launchpad users in general, but we still don't
     want drive-by vandalism.
     """
-    permission = 'launchpad.AnyLegitimatePerson'
+
+    permission = "launchpad.AnyLegitimatePerson"
     usedfor = Interface
 
     def checkUnauthenticated(self):
@@ -75,15 +75,18 @@ class AnyLegitimatePerson(AuthorizationBase):
         return user.person.karma >= config.launchpad.min_legitimate_karma
 
     def _isOldEnough(self, user):
-        return (
-            datetime.now(pytz.UTC) - user.person.account.date_created >=
-            timedelta(days=config.launchpad.min_legitimate_account_age))
+        return datetime.now(
+            pytz.UTC
+        ) - user.person.account.date_created >= timedelta(
+            days=config.launchpad.min_legitimate_account_age
+        )
 
     def checkAuthenticated(self, user):
         if not self._hasEnoughKarma(user) and not self._isOldEnough(user):
             return False
         return self.forwardCheckAuthenticated(
-            user, self.obj, 'launchpad.AnyAllowedPerson')
+            user, self.obj, "launchpad.AnyAllowedPerson"
+        )
 
 
 class LimitedViewDeferredToView(AuthorizationBase):
@@ -95,18 +98,19 @@ class LimitedViewDeferredToView(AuthorizationBase):
     private objects must define their own launchpad.LimitedView checker to
     truly check the permission.
     """
-    permission = 'launchpad.LimitedView'
+
+    permission = "launchpad.LimitedView"
     usedfor = Interface
 
     def checkUnauthenticated(self):
-        yield self.obj, 'launchpad.View'
+        yield self.obj, "launchpad.View"
 
     def checkAuthenticated(self, user):
-        yield self.obj, 'launchpad.View'
+        yield self.obj, "launchpad.View"
 
 
 class AdminByAdminsTeam(AuthorizationBase):
-    permission = 'launchpad.Admin'
+    permission = "launchpad.Admin"
     usedfor = Interface
 
     def checkAuthenticated(self, user):
@@ -114,7 +118,7 @@ class AdminByAdminsTeam(AuthorizationBase):
 
 
 class AdminByCommercialTeamOrAdmins(AuthorizationBase):
-    permission = 'launchpad.Commercial'
+    permission = "launchpad.Commercial"
     usedfor = Interface
 
     def checkAuthenticated(self, user):
@@ -122,7 +126,7 @@ class AdminByCommercialTeamOrAdmins(AuthorizationBase):
 
 
 class ModerateByRegistryExpertsOrAdmins(AuthorizationBase):
-    permission = 'launchpad.Moderate'
+    permission = "launchpad.Moderate"
     usedfor = None
 
     def checkAuthenticated(self, user):
@@ -130,8 +134,7 @@ class ModerateByRegistryExpertsOrAdmins(AuthorizationBase):
 
 
 class OnlyRosettaExpertsAndAdmins(AuthorizationBase):
-    """Base class that allow access to Rosetta experts and Launchpad admins.
-    """
+    """Base class that allow access to Rosetta experts and Launchpad admins."""
 
     def checkAuthenticated(self, user):
         """Allow Launchpad's admins and Rosetta experts edit all fields."""
@@ -155,7 +158,7 @@ class OnlyVcsImportsAndAdmins(AuthorizationBase):
 
 
 class AdminByBuilddAdmin(AuthorizationBase):
-    permission = 'launchpad.Admin'
+    permission = "launchpad.Admin"
 
     def checkAuthenticated(self, user):
         """Allow admins and buildd_admins."""
diff --git a/lib/lp/startwsgi.py b/lib/lp/startwsgi.py
index 918a90f..d60c12d 100644
--- a/lib/lp/startwsgi.py
+++ b/lib/lp/startwsgi.py
@@ -5,12 +5,11 @@
 
 __all__ = []
 
-from zope.event import notify
 import zope.processlifetime
+from zope.event import notify
 
 from lp.services.webapp.wsgi import get_wsgi_application
 
-
 application = get_wsgi_application()
 
 notify(zope.processlifetime.ProcessStarting())
diff --git a/lib/lp/systemhomes.py b/lib/lp/systemhomes.py
index 9df2372..12e0358 100644
--- a/lib/lp/systemhomes.py
+++ b/lib/lp/systemhomes.py
@@ -4,15 +4,15 @@
 """Content classes for the 'home pages' of the subsystems of Launchpad."""
 
 __all__ = [
-    'BazaarApplication',
-    'CodeImportSchedulerApplication',
-    'FeedsApplication',
-    'MailingListApplication',
-    'MaloneApplication',
-    'PrivateMaloneApplication',
-    'RosettaApplication',
-    'TestOpenIDApplication',
-    ]
+    "BazaarApplication",
+    "CodeImportSchedulerApplication",
+    "FeedsApplication",
+    "MailingListApplication",
+    "MaloneApplication",
+    "PrivateMaloneApplication",
+    "RosettaApplication",
+    "TestOpenIDApplication",
+]
 
 import codecs
 import os
@@ -24,10 +24,7 @@ from zope.interface import implementer
 
 from lp.app.enums import PRIVATE_INFORMATION_TYPES
 from lp.bugs.adapters.bug import convert_to_information_type
-from lp.bugs.interfaces.bug import (
-    CreateBugParams,
-    IBugSet,
-    )
+from lp.bugs.interfaces.bug import CreateBugParams, IBugSet
 from lp.bugs.interfaces.bugtask import IBugTaskSet
 from lp.bugs.interfaces.bugtasksearch import BugTaskSearchParams
 from lp.bugs.interfaces.bugtracker import IBugTrackerSet
@@ -35,16 +32,16 @@ from lp.bugs.interfaces.bugwatch import IBugWatchSet
 from lp.bugs.interfaces.malone import (
     IMaloneApplication,
     IPrivateMaloneApplication,
-    )
+)
 from lp.bugs.model.bug import Bug
 from lp.bugs.model.bugtarget import HasBugsBase
 from lp.code.interfaces.codehosting import (
     IBazaarApplication,
     ICodehostingApplication,
-    )
+)
 from lp.code.interfaces.codeimportscheduler import (
     ICodeImportSchedulerApplication,
-    )
+)
 from lp.code.interfaces.gitapi import IGitApplication
 from lp.registry.interfaces.distroseries import IDistroSeriesSet
 from lp.registry.interfaces.mailinglist import IMailingListApplication
@@ -53,10 +50,7 @@ from lp.services.config import config
 from lp.services.database.interfaces import IStore
 from lp.services.feeds.interfaces.application import IFeedsApplication
 from lp.services.statistics.interfaces.statistic import ILaunchpadStatisticSet
-from lp.services.webapp.interfaces import (
-    ICanonicalUrlData,
-    ILaunchBag,
-    )
+from lp.services.webapp.interfaces import ICanonicalUrlData, ILaunchBag
 from lp.services.webapp.publisher import canonical_url
 from lp.services.webservice.interfaces import IWebServiceApplication
 from lp.services.worlddata.interfaces.language import ILanguageSet
@@ -66,7 +60,7 @@ from lp.translations.interfaces.translationgroup import ITranslationGroupSet
 from lp.translations.interfaces.translations import IRosettaApplication
 from lp.translations.interfaces.translationsoverview import (
     ITranslationsOverview,
-    )
+)
 
 
 @implementer(IArchiveApplication)
@@ -114,9 +108,8 @@ class FeedsApplication:
 
 @implementer(IMaloneApplication)
 class MaloneApplication(HasBugsBase):
-
     def __init__(self):
-        self.title = 'Malone: the Launchpad bug tracker'
+        self.title = "Malone: the Launchpad bug tracker"
 
     def _customizeSearchParams(self, search_params):
         """See `HasBugsBase`."""
@@ -136,36 +129,60 @@ class MaloneApplication(HasBugsBase):
         data = []
         for bug in bugs:
             bugtask = bug.default_bugtask
-            different_pillars = related_bug and (
-                set(bug.affected_pillars).isdisjoint(
-                    related_bug.affected_pillars)) or False
-            data.append({
-                'id': bug_id,
-                'information_type': bug.information_type.title,
-                'is_private':
-                    bug.information_type in PRIVATE_INFORMATION_TYPES,
-                'importance': bugtask.importance.title,
-                'importance_class': 'importance' + bugtask.importance.name,
-                'status': bugtask.status.title,
-                'status_class': 'status' + bugtask.status.name,
-                'bug_summary': bug.title,
-                'description': bug.description,
-                'bug_url': canonical_url(bugtask),
-                'different_pillars': different_pillars})
+            different_pillars = (
+                related_bug
+                and (
+                    set(bug.affected_pillars).isdisjoint(
+                        related_bug.affected_pillars
+                    )
+                )
+                or False
+            )
+            data.append(
+                {
+                    "id": bug_id,
+                    "information_type": bug.information_type.title,
+                    "is_private": bug.information_type
+                    in PRIVATE_INFORMATION_TYPES,
+                    "importance": bugtask.importance.title,
+                    "importance_class": "importance" + bugtask.importance.name,
+                    "status": bugtask.status.title,
+                    "status_class": "status" + bugtask.status.name,
+                    "bug_summary": bug.title,
+                    "description": bug.description,
+                    "bug_url": canonical_url(bugtask),
+                    "different_pillars": different_pillars,
+                }
+            )
         return data
 
-    def createBug(self, owner, title, description, target,
-                  information_type=None, tags=None,
-                  security_related=None, private=None):
+    def createBug(
+        self,
+        owner,
+        title,
+        description,
+        target,
+        information_type=None,
+        tags=None,
+        security_related=None,
+        private=None,
+    ):
         """See `IMaloneApplication`."""
-        if (information_type is None
-            and (security_related is not None or private is not None)):
+        if information_type is None and (
+            security_related is not None or private is not None
+        ):
             # Adapt the deprecated args to information_type.
             information_type = convert_to_information_type(
-                private, security_related)
+                private, security_related
+            )
         params = CreateBugParams(
-            title=title, comment=description, owner=owner,
-            information_type=information_type, tags=tags, target=target)
+            title=title,
+            comment=description,
+            owner=owner,
+            information_type=information_type,
+            tags=tags,
+            target=target,
+        )
         return getUtility(IBugSet).createBug(params)
 
     @property
@@ -188,11 +205,11 @@ class MaloneApplication(HasBugsBase):
 
     @property
     def projects_with_bugs_count(self):
-        return getUtility(ILaunchpadStatisticSet).value('projects_with_bugs')
+        return getUtility(ILaunchpadStatisticSet).value("projects_with_bugs")
 
     @property
     def shared_bug_count(self):
-        return getUtility(ILaunchpadStatisticSet).value('shared_bug_count')
+        return getUtility(ILaunchpadStatisticSet).value("shared_bug_count")
 
     @property
     def top_bugtrackers(self):
@@ -205,17 +222,15 @@ class MaloneApplication(HasBugsBase):
 
 @implementer(IBazaarApplication)
 class BazaarApplication:
-
     def __init__(self):
-        self.title = 'The Open Source Bazaar'
+        self.title = "The Open Source Bazaar"
 
 
 @implementer(IRosettaApplication)
 class RosettaApplication:
-
     def __init__(self):
-        self.title = 'Rosetta: Translations in the Launchpad'
-        self.name = 'Rosetta'
+        self.title = "Rosetta: Translations in the Launchpad"
+        self.name = "Rosetta"
 
     @property
     def languages(self):
@@ -226,12 +241,12 @@ class RosettaApplication:
     def language_count(self):
         """See `IRosettaApplication`."""
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.value('language_count')
+        return stats.value("language_count")
 
     @property
     def statsdate(self):
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.dateupdated('potemplate_count')
+        return stats.dateupdated("potemplate_count")
 
     @property
     def translation_groups(self):
@@ -248,9 +263,9 @@ class RosettaApplication:
         projects = getUtility(ITranslationsOverview)
         for project in projects.getMostTranslatedPillars():
             yield {
-                'pillar': project['pillar'],
-                'font_size': project['weight'] * 10,
-                }
+                "pillar": project["pillar"],
+                "font_size": project["weight"] * 10,
+            }
 
     def translatable_distroseriess(self):
         """See `IRosettaApplication`."""
@@ -260,22 +275,22 @@ class RosettaApplication:
     def potemplate_count(self):
         """See `IRosettaApplication`."""
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.value('potemplate_count')
+        return stats.value("potemplate_count")
 
     def pofile_count(self):
         """See `IRosettaApplication`."""
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.value('pofile_count')
+        return stats.value("pofile_count")
 
     def pomsgid_count(self):
         """See `IRosettaApplication`."""
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.value('pomsgid_count')
+        return stats.value("pomsgid_count")
 
     def translator_count(self):
         """See `IRosettaApplication`."""
         stats = getUtility(ILaunchpadStatisticSet)
-        return stats.value('translator_count')
+        return stats.value("translator_count")
 
 
 @implementer(IWebServiceApplication, ICanonicalUrlData)
@@ -287,7 +302,7 @@ class WebServiceApplication(ServiceRootResource):
     """
 
     inside = None
-    path = ''
+    path = ""
     rootsite = None
 
     cached_wadl = {}
@@ -299,8 +314,14 @@ class WebServiceApplication(ServiceRootResource):
     def cachedWADLPath(cls, instance_name, version):
         """Helper method to calculate the path to a cached WADL file."""
         return os.path.join(
-            config.root, 'lib', 'canonical', 'launchpad',
-            'apidoc', version, '%s.wadl' % (instance_name,))
+            config.root,
+            "lib",
+            "canonical",
+            "launchpad",
+            "apidoc",
+            version,
+            "%s.wadl" % (instance_name,),
+        )
 
     def toWADL(self):
         """See `IWebServiceApplication`.
@@ -316,13 +337,12 @@ class WebServiceApplication(ServiceRootResource):
             # The cache has been disabled for testing
             # purposes. Generate the WADL.
             return super().toWADL()
-        if  version not in self.__class__.cached_wadl:
+        if version not in self.__class__.cached_wadl:
             # It's not cached. Look for it on disk.
-            _wadl_filename = self.cachedWADLPath(
-                config.instance_name, version)
+            _wadl_filename = self.cachedWADLPath(config.instance_name, version)
             _wadl_fd = None
             try:
-                _wadl_fd = codecs.open(_wadl_filename, encoding='UTF-8')
+                _wadl_fd = codecs.open(_wadl_filename, encoding="UTF-8")
                 try:
                     wadl = _wadl_fd.read()
                 finally:
diff --git a/lib/lp_sitecustomize.py b/lib/lp_sitecustomize.py
index 1f75c6f..16d71fa 100644
--- a/lib/lp_sitecustomize.py
+++ b/lib/lp_sitecustomize.py
@@ -4,7 +4,6 @@
 # This file is imported by _pythonpath.py and by the standard Launchpad
 # script preamble (see LPScriptWriter in setup.py).
 
-from collections import defaultdict
 import itertools
 import linecache
 import logging
@@ -12,11 +11,9 @@ import os
 import sys
 import traceback
 import warnings
+from collections import defaultdict
 
-from twisted.internet.defer import (
-    Deferred,
-    DeferredList,
-    )
+from twisted.internet.defer import Deferred, DeferredList
 from zope.security import checker
 
 from lp.services.log import loglevels
@@ -33,23 +30,20 @@ def add_custom_loglevels():
     # override. BLATHER is between INFO and DEBUG, so we can leave it.
     # TRACE conflicts with DEBUG6, and since we are not using ZEO, we
     # just overwrite the level string by calling addLevelName.
-    from ZODB.loglevels import (
-        BLATHER,
-        TRACE,
-        )
+    from ZODB.loglevels import BLATHER, TRACE
 
     # Confirm our above assumptions, and silence lint at the same time.
     assert BLATHER == 15
     assert TRACE == loglevels.DEBUG6
 
-    logging.addLevelName(loglevels.DEBUG2, 'DEBUG2')
-    logging.addLevelName(loglevels.DEBUG3, 'DEBUG3')
-    logging.addLevelName(loglevels.DEBUG4, 'DEBUG4')
-    logging.addLevelName(loglevels.DEBUG5, 'DEBUG5')
-    logging.addLevelName(loglevels.DEBUG6, 'DEBUG6')
-    logging.addLevelName(loglevels.DEBUG7, 'DEBUG7')
-    logging.addLevelName(loglevels.DEBUG8, 'DEBUG8')
-    logging.addLevelName(loglevels.DEBUG9, 'DEBUG9')
+    logging.addLevelName(loglevels.DEBUG2, "DEBUG2")
+    logging.addLevelName(loglevels.DEBUG3, "DEBUG3")
+    logging.addLevelName(loglevels.DEBUG4, "DEBUG4")
+    logging.addLevelName(loglevels.DEBUG5, "DEBUG5")
+    logging.addLevelName(loglevels.DEBUG6, "DEBUG6")
+    logging.addLevelName(loglevels.DEBUG7, "DEBUG7")
+    logging.addLevelName(loglevels.DEBUG8, "DEBUG8")
+    logging.addLevelName(loglevels.DEBUG9, "DEBUG9")
 
     # Install our customized Logger that provides easy access to our
     # custom loglevels.
@@ -60,7 +54,7 @@ def add_custom_loglevels():
     # the logging module, so our override does not take effect without
     # this manual effort.
     old_root = logging.root
-    new_root = LaunchpadLogger('root', loglevels.WARNING)
+    new_root = LaunchpadLogger("root", loglevels.WARNING)
 
     # Fix globals.
     logging.root = new_root
@@ -72,20 +66,20 @@ def add_custom_loglevels():
 
     # Fix existing Logger instances.
     for logger in manager.loggerDict.values():
-        if getattr(logger, 'parent', None) is old_root:
+        if getattr(logger, "parent", None) is old_root:
             logger.parent = new_root
 
 
 def silence_amqp_logger():
     """Install the NullHandler on the amqp logger to silence logs."""
-    amqp_logger = logging.getLogger('amqp')
+    amqp_logger = logging.getLogger("amqp")
     amqp_logger.addHandler(logging.NullHandler())
     amqp_logger.propagate = False
 
 
 def silence_bzr_loggers():
     """Install the NullHandler on the bzr/brz loggers to silence logs."""
-    for logger_name in ('bzr', 'brz'):
+    for logger_name in ("bzr", "brz"):
         logger = logging.getLogger(logger_name)
         logger.addHandler(logging.NullHandler())
         logger.propagate = False
@@ -101,18 +95,17 @@ def silence_swiftclient_logger():
 
     keystoneclient logs credentials at DEBUG.
     """
-    if not os.environ.get('LP_SWIFTCLIENT_DEBUG'):
-        swiftclient_logger = logging.getLogger('swiftclient')
+    if not os.environ.get("LP_SWIFTCLIENT_DEBUG"):
+        swiftclient_logger = logging.getLogger("swiftclient")
         swiftclient_logger.setLevel(logging.INFO)
-    keystoneclient_logger = logging.getLogger('keystoneclient')
+    keystoneclient_logger = logging.getLogger("keystoneclient")
     keystoneclient_logger.setLevel(logging.INFO)
 
 
 def silence_zcml_logger():
     """Lower level of ZCML parsing DEBUG messages."""
-    config_filter = MappingFilter(
-        {logging.DEBUG: (7, 'DEBUG4')}, 'config')
-    logging.getLogger('config').addFilter(config_filter)
+    config_filter = MappingFilter({logging.DEBUG: (7, "DEBUG4")}, "config")
+    logging.getLogger("config").addFilter(config_filter)
 
 
 class FilterOnlyHandler(logging.Handler):
@@ -133,10 +126,9 @@ def silence_transaction_logger():
     # the logging records get mutated before being propagated up
     # to higher level loggers.
     txn_handler = FilterOnlyHandler()
-    txn_filter = MappingFilter(
-        {logging.DEBUG: (8, 'DEBUG3')}, 'txn')
+    txn_filter = MappingFilter({logging.DEBUG: (8, "DEBUG3")}, "txn")
     txn_handler.addFilter(txn_filter)
-    logging.getLogger('txn').addHandler(txn_handler)
+    logging.getLogger("txn").addHandler(txn_handler)
 
 
 def silence_warnings():
@@ -145,14 +137,14 @@ def silence_warnings():
     #   DeprecationWarning: the sha module is deprecated; use the hashlib
     #   module instead
     warnings.filterwarnings(
-        "ignore",
-        category=DeprecationWarning,
-        module="Crypto")
+        "ignore", category=DeprecationWarning, module="Crypto"
+    )
     # Filter all deprecation warnings for Zope 3.6, which emanate from
     # the zope package.
-    filter_pattern = '.*(Zope 3.6|provide.*global site manager).*'
+    filter_pattern = ".*(Zope 3.6|provide.*global site manager).*"
     warnings.filterwarnings(
-        'ignore', filter_pattern, category=DeprecationWarning)
+        "ignore", filter_pattern, category=DeprecationWarning
+    )
 
 
 def customize_logger():
@@ -191,13 +183,13 @@ def main(instance_name=None):
     # to setup.py from the Makefile, and then written to env/instance_name.
     # We do all actual initialization here, in a more visible place.
     if instance_name is None:
-        instance_name_path = os.path.join(sys.prefix, 'instance_name')
+        instance_name_path = os.path.join(sys.prefix, "instance_name")
         with open(instance_name_path) as instance_name_file:
-            instance_name = instance_name_file.read().rstrip('\n')
-    if instance_name and instance_name != 'development':
+            instance_name = instance_name_file.read().rstrip("\n")
+    if instance_name and instance_name != "development":
         # See bug 656213 for why we do this carefully.
-        os.environ.setdefault('LPCONFIG', instance_name)
-    os.environ['STORM_CEXTENSIONS'] = '1'
+        os.environ.setdefault("LPCONFIG", instance_name)
+    os.environ["STORM_CEXTENSIONS"] = "1"
     add_custom_loglevels()
     customizeMimetypes()
     silence_warnings()
@@ -223,4 +215,5 @@ def main(instance_name=None):
     # formats anyway, just in a less crude way, as we don't want to use them
     # in practice.)
     import types
-    sys.modules['breezy.git'] = types.ModuleType('breezy.git')
+
+    sys.modules["breezy.git"] = types.ModuleType("breezy.git")
diff --git a/setup.py b/setup.py
index e26320d..435edce 100644
--- a/setup.py
+++ b/setup.py
@@ -3,11 +3,11 @@
 # Copyright 2009, 2010 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from distutils.sysconfig import get_python_lib
 import imp
 import os.path
-from string import Template
 import sys
+from distutils.sysconfig import get_python_lib
+from string import Template
 from textwrap import dedent
 
 from setuptools import setup
@@ -30,14 +30,18 @@ class LPScriptWriter(ScriptWriter):
     feature of setuptools then we may want to revisit this.
     """
 
-    template = Template(dedent("""
+    template = Template(
+        dedent(
+            """
         import sys
 
         import ${module_name}
 
         if __name__ == '__main__':
             sys.exit(${module_name}.${attrs}())
-        """))
+        """
+        )
+    )
 
     @classmethod
     def get_args(cls, dist, header=None):
@@ -46,10 +50,12 @@ class LPScriptWriter(ScriptWriter):
             header = cls.get_header()
         for name, ep in dist.get_entry_map("console_scripts").items():
             cls._ensure_safe_name(name)
-            script_text = cls.template.substitute({
-                "attrs": ".".join(ep.attrs),
-                "module_name": ep.module_name,
-                })
+            script_text = cls.template.substitute(
+                {
+                    "attrs": ".".join(ep.attrs),
+                    "module_name": ep.module_name,
+                }
+            )
             args = cls._get_script_args("console", name, header, script_text)
             yield from args
 
@@ -60,11 +66,14 @@ class lp_develop(develop):
     def _get_orig_sitecustomize(self):
         env_top = os.path.join(os.path.dirname(__file__), "env")
         system_paths = [
-            path for path in sys.path
-            if not path.startswith(env_top) and "pip-build-env-" not in path]
+            path
+            for path in sys.path
+            if not path.startswith(env_top) and "pip-build-env-" not in path
+        ]
         try:
-            fp, orig_sitecustomize_path, _ = (
-                imp.find_module("sitecustomize", system_paths))
+            fp, orig_sitecustomize_path, _ = imp.find_module(
+                "sitecustomize", system_paths
+            )
             if fp:
                 fp.close()
         except ImportError:
@@ -72,10 +81,16 @@ class lp_develop(develop):
         if orig_sitecustomize_path.endswith(".py"):
             with open(orig_sitecustomize_path) as orig_sitecustomize_file:
                 orig_sitecustomize = orig_sitecustomize_file.read()
-                return dedent("""
+                return (
+                    dedent(
+                        """
                     # The following is from
                     # %s
-                    """ % orig_sitecustomize_path) + orig_sitecustomize
+                    """
+                        % orig_sitecustomize_path
+                    )
+                    + orig_sitecustomize
+                )
         else:
             return ""
 
@@ -90,12 +105,14 @@ class lp_develop(develop):
             # activated.  We use -S to avoid importing sitecustomize both
             # before and after the execv.
             py_header = LPScriptWriter.get_header("#!python -S")
-            py_script_text = dedent("""\
+            py_script_text = dedent(
+                """\
                 import os
                 import sys
 
                 os.execv(sys.executable, [sys.executable] + sys.argv[1:])
-                """)
+                """
+            )
             self.write_script("py", py_header + py_script_text)
 
             # Install site customizations for this virtualenv.  In principle
@@ -109,9 +126,12 @@ class lp_develop(develop):
             site_packages_dir = get_python_lib(prefix=env_top)
             orig_sitecustomize = self._get_orig_sitecustomize()
             sitecustomize_path = os.path.join(
-                site_packages_dir, "_sitecustomize.py")
+                site_packages_dir, "_sitecustomize.py"
+            )
             with open(sitecustomize_path, "w") as sitecustomize_file:
-                sitecustomize_file.write(dedent("""\
+                sitecustomize_file.write(
+                    dedent(
+                        """\
                     import os
                     import sys
 
@@ -119,13 +139,16 @@ class lp_develop(develop):
                         if "lp_sitecustomize" not in sys.modules:
                             import lp_sitecustomize
                             lp_sitecustomize.main()
-                    """))
+                    """
+                    )
+                )
                 if orig_sitecustomize:
                     sitecustomize_file.write(orig_sitecustomize)
             # Awkward naming; this needs to come lexicographically after any
             # other .pth files.
             sitecustomize_pth_path = os.path.join(
-                site_packages_dir, "zzz_run_venv_sitecustomize.pth")
+                site_packages_dir, "zzz_run_venv_sitecustomize.pth"
+            )
             with open(sitecustomize_pth_path, "w") as sitecustomize_pth_file:
                 sitecustomize_pth_file.write("import _sitecustomize\n")
 
@@ -138,6 +161,6 @@ class lp_develop(develop):
 
 setup(
     cmdclass={
-        'develop': lp_develop,
+        "develop": lp_develop,
     },
 )
diff --git a/test_on_merge.py b/test_on_merge.py
index a86ad0e..63087f0 100755
--- a/test_on_merge.py
+++ b/test_on_merge.py
@@ -10,25 +10,15 @@ import _pythonpath  # noqa: F401
 import errno
 import os
 import select
-from signal import (
-    SIGHUP,
-    SIGINT,
-    SIGKILL,
-    SIGTERM,
-    )
-from subprocess import (
-    PIPE,
-    Popen,
-    STDOUT,
-    )
 import sys
 import time
+from signal import SIGHUP, SIGINT, SIGKILL, SIGTERM
+from subprocess import PIPE, STDOUT, Popen
 
 import psycopg2
 
 from lp.services.database import activity_cols
 
-
 # The TIMEOUT setting (expressed in seconds) affects how long a test will run
 # before it is deemed to be hung, and then appropriately terminated.
 # Its principal use is preventing a PQM job from hanging indefinitely and
@@ -60,73 +50,80 @@ def setup_test_database():
     """
     # Sanity check PostgreSQL version. No point in trying to create a test
     # database when PostgreSQL is too old.
-    con = psycopg2.connect('dbname=template1')
+    con = psycopg2.connect("dbname=template1")
     if con.server_version < 100000:
-        print('Your PostgreSQL version is too old.  You need at least 10.x')
-        print('You have %s' % con.get_parameter_status('server_version'))
+        print("Your PostgreSQL version is too old.  You need at least 10.x")
+        print("You have %s" % con.get_parameter_status("server_version"))
         return 1
 
     # Drop the template database if it exists - the Makefile does this
     # too, but we can explicitly check for errors here
-    con = psycopg2.connect('dbname=template1')
+    con = psycopg2.connect("dbname=template1")
     con.set_isolation_level(0)
     cur = con.cursor()
     try:
-        cur.execute('drop database launchpad_ftest_template')
+        cur.execute("drop database launchpad_ftest_template")
     except psycopg2.ProgrammingError as x:
-        if 'does not exist' not in str(x):
+        if "does not exist" not in str(x):
             raise
 
     # If there are existing database connections, terminate. We have
     # rogue processes still connected to the database.
     for loop in range(2):
-        cur.execute("""
+        cur.execute(
+            """
             SELECT usename, %(query)s
             FROM pg_stat_activity
             WHERE datname IN (
                 'launchpad_dev', 'launchpad_ftest_template', 'launchpad_ftest')
-            """ % activity_cols(cur))
+            """
+            % activity_cols(cur)
+        )
         results = list(cur.fetchall())
         if not results:
             break
         # Rogue processes. Report, sleep for a bit, and try again.
         for usename, query in results:
-            print('!! Open connection %s - %s' % (usename, query))
-        print('Sleeping')
+            print("!! Open connection %s - %s" % (usename, query))
+        print("Sleeping")
         time.sleep(20)
     else:
-        print('Cannot rebuild database. There are open connections.')
+        print("Cannot rebuild database. There are open connections.")
         return 1
 
     cur.close()
     con.close()
 
     # Build the template database. Tests duplicate this.
-    schema_dir = os.path.join(HERE, 'database', 'schema')
-    if os.system('cd %s; make test > /dev/null' % (schema_dir)) != 0:
-        print('Failed to create database or load sampledata.')
+    schema_dir = os.path.join(HERE, "database", "schema")
+    if os.system("cd %s; make test > /dev/null" % (schema_dir)) != 0:
+        print("Failed to create database or load sampledata.")
         return 1
 
     # Sanity check the database. No point running tests if the
     # bedrock is crumbling.
-    con = psycopg2.connect('dbname=launchpad_ftest_template')
+    con = psycopg2.connect("dbname=launchpad_ftest_template")
     cur = con.cursor()
-    cur.execute("""
+    cur.execute(
+        """
         select pg_encoding_to_char(encoding) as encoding from pg_database
         where datname='launchpad_ftest_template'
-        """)
+        """
+    )
     enc = cur.fetchone()[0]
-    if enc not in ('UNICODE', 'UTF8'):
-        print('Database encoding incorrectly set')
+    if enc not in ("UNICODE", "UTF8"):
+        print("Database encoding incorrectly set")
         return 1
-    cur.execute(r"""
+    cur.execute(
+        r"""
         SELECT setting FROM pg_settings
         WHERE context='internal' AND name='lc_ctype'
-        """)
+        """
+    )
     loc = cur.fetchone()[0]
-    #if not (loc.startswith('en_') or loc in ('C', 'en')):
-    if loc != 'C':
-        print('Database locale incorrectly set. Need to rerun initdb.')
+    # if not (loc.startswith('en_') or loc in ('C', 'en')):
+    if loc != "C":
+        print("Database locale incorrectly set. Need to rerun initdb.")
         return 1
 
     # Explicitly close our connections - things will fail if we leave open
@@ -141,17 +138,18 @@ def setup_test_database():
 
 def run_test_process():
     """Start the testrunner process and return its exit code."""
-    print('Running tests.')
+    print("Running tests.")
     os.chdir(HERE)
 
     # We run the test suite under a virtual frame buffer server so that the
     # JavaScript integration test suite can run.
     cmd = [
-        '/usr/bin/xvfb-run',
+        "/usr/bin/xvfb-run",
         "--error-file=/var/tmp/xvfb-errors.log",
         "--server-args='-screen 0 1024x768x24'",
-        os.path.join(HERE, 'bin', 'test')] + sys.argv[1:]
-    command_line = ' '.join(cmd)
+        os.path.join(HERE, "bin", "test"),
+    ] + sys.argv[1:]
+    command_line = " ".join(cmd)
     print("Running command:", command_line)
 
     # Run the test suite.  Make the suite the leader of a new process group
@@ -161,7 +159,8 @@ def run_test_process():
         stdout=PIPE,
         stderr=STDOUT,
         preexec_fn=os.setpgrp,
-        shell=True)
+        shell=True,
+    )
 
     # This code is very similar to what takes place in Popen._communicate(),
     # but this code times out if there is no activity on STDOUT for too long.
@@ -194,10 +193,10 @@ def run_test_process():
 
     if rv == 0:
         print()
-        print('Successfully ran all tests.')
+        print("Successfully ran all tests.")
     else:
         print()
-        print('Tests failed (exit code %d)' % rv)
+        print("Tests failed (exit code %d)" % rv)
 
     return rv
 
@@ -206,8 +205,10 @@ def cleanup_hung_testrunner(process):
     """Kill and clean up the testrunner process and its children."""
     print()
     print()
-    print("WARNING: A test appears to be hung. There has been no "
-        "output for %d seconds." % TIMEOUT)
+    print(
+        "WARNING: A test appears to be hung. There has been no "
+        "output for %d seconds." % TIMEOUT
+    )
     print("Forcibly shutting down the test suite")
 
     # This guarantees the process will die.  In rare cases
@@ -249,5 +250,5 @@ def nice_killpg(pgid):
     print("Process group %d is now empty." % pgid)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())