launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:black-archivepublisher into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:black-archivepublisher into launchpad:master.

Commit message:
lp.archivepublisher: Apply black

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/425019
-- 
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-archivepublisher into launchpad:master.
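
As context for the review, here is a minimal sketch (not part of this proposal) of the kind of mechanical rewrite black applies, using black's Python API; the 79-character line length is an assumption inferred from the wrapping visible in the diff below:

    # Hypothetical illustration only; black.format_str and black.Mode are
    # part of black's public Python API.
    import black

    source = "__all__ = ['HARDCODED_COMPONENT_ORDER']\n"
    mode = black.Mode(line_length=79)  # assumed line length
    print(black.format_str(source, mode=mode), end="")
    # prints: __all__ = ["HARDCODED_COMPONENT_ORDER"]

The first hunk in lib/lp/archivepublisher/__init__.py below shows this same quote normalisation, alongside black's reflowing of longer expressions.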
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index b57c076..7a8022a 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -58,3 +58,5 @@ c606443bdb2f342593c9a7c9437cb70c01f85f29
 7ae201d4e317cc9db665a0edb28c2439797daff6
 # apply black to lp.app
 8fd124775592a33c3d2ce9ef8111a9a5f1a5e089
+# apply black to lp.archivepublisher
+8885e7977012e4f376e23f52125784567aefebe4
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7d5c58e..97d7f4a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,6 +43,7 @@ repos:
           (?x)^lib/lp/(
             answers
             |app
+            |archivepublisher
           )/
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
@@ -62,6 +63,7 @@ repos:
           (?x)^lib/lp/(
             answers
             |app
+            |archivepublisher
           )/
     -   id: isort
         alias: isort-black
@@ -71,6 +73,7 @@ repos:
           (?x)^lib/lp/(
             answers
             |app
+            |archivepublisher
           )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
diff --git a/lib/lp/archivepublisher/__init__.py b/lib/lp/archivepublisher/__init__.py
index 795d6a9..7b6202c 100644
--- a/lib/lp/archivepublisher/__init__.py
+++ b/lib/lp/archivepublisher/__init__.py
@@ -1,10 +1,15 @@
 # Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['HARDCODED_COMPONENT_ORDER']
+__all__ = ["HARDCODED_COMPONENT_ORDER"]
 
 # XXX: kiko 2006-08-23: if people actually start seriously using
 # ComponentSelections this will need to be revisited. For instance, adding
 # new components will break places which use this list.
 HARDCODED_COMPONENT_ORDER = [
-    'main', 'restricted', 'universe', 'multiverse', 'partner']
+    "main",
+    "restricted",
+    "universe",
+    "multiverse",
+    "partner",
+]
diff --git a/lib/lp/archivepublisher/archivegpgsigningkey.py b/lib/lp/archivepublisher/archivegpgsigningkey.py
index c1ada20..4ee076a 100644
--- a/lib/lp/archivepublisher/archivegpgsigningkey.py
+++ b/lib/lp/archivepublisher/archivegpgsigningkey.py
@@ -4,9 +4,9 @@
 """ArchiveGPGSigningKey implementation."""
 
 __all__ = [
-    'ArchiveGPGSigningKey',
-    'SignableArchive',
-    ]
+    "ArchiveGPGSigningKey",
+    "SignableArchive",
+]
 
 
 import os
@@ -16,44 +16,32 @@ from twisted.internet import defer
 from twisted.internet.threads import deferToThread
 from zope.component import getUtility
 from zope.interface import implementer
-from zope.security.proxy import (
-    ProxyFactory,
-    removeSecurityProxy,
-    )
+from zope.security.proxy import ProxyFactory, removeSecurityProxy
 
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
+    PUBLISHER_GPG_USES_SIGNING_SERVICE,
     CannotSignArchive,
     IArchiveGPGSigningKey,
     ISignableArchive,
-    PUBLISHER_GPG_USES_SIGNING_SERVICE,
-    )
-from lp.archivepublisher.run_parts import (
-    find_run_parts_dir,
-    run_parts,
-    )
+)
+from lp.archivepublisher.run_parts import find_run_parts_dir, run_parts
 from lp.registry.interfaces.gpg import IGPGKeySet
 from lp.services.config import config
 from lp.services.features import getFeatureFlag
-from lp.services.gpg.interfaces import (
-    IGPGHandler,
-    IPymeKey,
-    )
+from lp.services.gpg.interfaces import IGPGHandler, IPymeKey
 from lp.services.osutils import remove_if_exists
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
+from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.services.signing.enums import (
     OpenPGPKeyAlgorithm,
     SigningKeyType,
     SigningMode,
-    )
+)
 from lp.services.signing.interfaces.signingkey import (
     ISigningKey,
     ISigningKeySet,
-    )
+)
 
 
 @implementer(ISignableArchive)
@@ -63,7 +51,7 @@ class SignableArchive:
     gpgme_modes = {
         SigningMode.DETACHED: gpgme.SIG_MODE_DETACH,
         SigningMode.CLEAR: gpgme.SIG_MODE_CLEAR,
-        }
+    }
 
     def __init__(self, archive):
         self.archive = archive
@@ -78,8 +66,9 @@ class SignableArchive:
     def can_sign(self):
         """See `ISignableArchive`."""
         return (
-            self.archive.signing_key_fingerprint is not None or
-            self._run_parts_dir is not None)
+            self.archive.signing_key_fingerprint is not None
+            or self._run_parts_dir is not None
+        )
 
     @cachedproperty
     def _signing_key(self):
@@ -88,7 +77,8 @@ class SignableArchive:
             return None
         elif self.archive.signing_key_fingerprint is not None:
             return getUtility(ISigningKeySet).get(
-                SigningKeyType.OPENPGP, self.archive.signing_key_fingerprint)
+                SigningKeyType.OPENPGP, self.archive.signing_key_fingerprint
+            )
         else:
             return None
 
@@ -97,7 +87,8 @@ class SignableArchive:
         """This archive's signing key as a local GPG key."""
         if self.archive.signing_key is not None:
             secret_key_path = self.getPathForSecretKey(
-                self.archive.signing_key)
+                self.archive.signing_key
+            )
             with open(secret_key_path, "rb") as secret_key_file:
                 secret_key_export = secret_key_file.read()
             gpghandler = getUtility(IGPGHandler)
@@ -120,12 +111,13 @@ class SignableArchive:
         """
         if not self.can_sign:
             raise CannotSignArchive(
-                "No signing key available for %s" % self.archive.displayname)
+                "No signing key available for %s" % self.archive.displayname
+            )
 
         output_paths = []
         for input_path, output_path, mode, suite in signatures:
             if mode not in {SigningMode.DETACHED, SigningMode.CLEAR}:
-                raise ValueError('Invalid signature mode for GPG: %s' % mode)
+                raise ValueError("Invalid signature mode for GPG: %s" % mode)
             signed = False
 
             if self._signing_key is not None or self._secret_key is not None:
@@ -134,19 +126,24 @@ class SignableArchive:
                 if self._signing_key is not None:
                     try:
                         signature = self._signing_key.sign(
-                            input_content, os.path.basename(input_path),
-                            mode=mode)
+                            input_content,
+                            os.path.basename(input_path),
+                            mode=mode,
+                        )
                         signed = True
                     except Exception:
                         if log is not None:
                             log.exception(
                                 "Failed to sign archive using signing "
-                                "service; falling back to local key")
+                                "service; falling back to local key"
+                            )
                         get_property_cache(self)._signing_key = None
                 if not signed and self._secret_key is not None:
                     signature = getUtility(IGPGHandler).signContent(
-                        input_content, self._secret_key,
-                        mode=self.gpgme_modes[mode])
+                        input_content,
+                        self._secret_key,
+                        mode=self.gpgme_modes[mode],
+                    )
                     signed = True
                 if signed:
                     with open(output_path, "wb") as output_file:
@@ -162,45 +159,57 @@ class SignableArchive:
                     "MODE": mode.name.lower(),
                     "DISTRIBUTION": self.archive.distribution.name,
                     "SUITE": suite,
-                    }
+                }
                 run_parts(
-                    self.archive.distribution.name, "sign.d",
-                    log=log, env=env)
+                    self.archive.distribution.name, "sign.d", log=log, env=env
+                )
                 signed = True
                 if os.path.exists(output_path):
                     output_paths.append(output_path)
 
             if not signed:
                 raise AssertionError(
-                    "No signing key available for %s" %
-                    self.archive.displayname)
+                    "No signing key available for %s"
+                    % self.archive.displayname
+                )
         return output_paths
 
-    def signRepository(self, suite, pubconf=None, suffix='', log=None):
+    def signRepository(self, suite, pubconf=None, suffix="", log=None):
         """See `ISignableArchive`."""
         if pubconf is None:
             pubconf = self.pubconf
         suite_path = os.path.join(pubconf.distsroot, suite)
-        release_file_path = os.path.join(suite_path, 'Release' + suffix)
+        release_file_path = os.path.join(suite_path, "Release" + suffix)
         if not os.path.exists(release_file_path):
             raise AssertionError(
-                "Release file doesn't exist in the repository: %s" %
-                release_file_path)
+                "Release file doesn't exist in the repository: %s"
+                % release_file_path
+            )
 
         output_names = []
-        for output_path in self._makeSignatures([
-                (release_file_path,
-                 os.path.join(suite_path, 'Release.gpg' + suffix),
-                 SigningMode.DETACHED, suite),
-                (release_file_path,
-                 os.path.join(suite_path, 'InRelease' + suffix),
-                 SigningMode.CLEAR, suite),
-                ], log=log):
+        for output_path in self._makeSignatures(
+            [
+                (
+                    release_file_path,
+                    os.path.join(suite_path, "Release.gpg" + suffix),
+                    SigningMode.DETACHED,
+                    suite,
+                ),
+                (
+                    release_file_path,
+                    os.path.join(suite_path, "InRelease" + suffix),
+                    SigningMode.CLEAR,
+                    suite,
+                ),
+            ],
+            log=log,
+        ):
             output_name = os.path.basename(output_path)
             if suffix:
-                output_name = output_name[:-len(suffix)]
+                output_name = output_name[: -len(suffix)]
             assert (
-                os.path.join(suite_path, output_name + suffix) == output_path)
+                os.path.join(suite_path, output_name + suffix) == output_path
+            )
             output_names.append(output_name)
         return output_names
 
@@ -215,11 +224,13 @@ class SignableArchive:
         archive_root = self.pubconf.archiveroot + os.sep
         if not path.startswith(archive_root):
             raise AssertionError(
-                "Attempting to sign file (%s) outside archive_root for %s" % (
-                    path, self.archive.displayname))
+                "Attempting to sign file (%s) outside archive_root for %s"
+                % (path, self.archive.displayname)
+            )
 
         self._makeSignatures(
-            [(path, "%s.gpg" % path, SigningMode.DETACHED, suite)], log=log)
+            [(path, "%s.gpg" % path, SigningMode.DETACHED, suite)], log=log
+        )
 
 
 @implementer(IArchiveGPGSigningKey)
@@ -230,7 +241,8 @@ class ArchiveGPGSigningKey(SignableArchive):
         """See `IArchiveGPGSigningKey`."""
         return os.path.join(
             config.personalpackagearchive.signing_keys_root,
-            "%s.gpg" % key.fingerprint)
+            "%s.gpg" % key.fingerprint,
+        )
 
     def exportSecretKey(self, key):
         """See `IArchiveGPGSigningKey`."""
@@ -240,30 +252,34 @@ class ArchiveGPGSigningKey(SignableArchive):
         if not os.path.exists(os.path.dirname(export_path)):
             os.makedirs(os.path.dirname(export_path))
 
-        with open(export_path, 'wb') as export_file:
+        with open(export_path, "wb") as export_file:
             export_file.write(key.export())
 
     def generateSigningKey(self, log=None, async_keyserver=False):
         """See `IArchiveGPGSigningKey`."""
-        assert self.archive.signing_key_fingerprint is None, (
-            "Cannot override signing_keys.")
+        assert (
+            self.archive.signing_key_fingerprint is None
+        ), "Cannot override signing_keys."
 
         # Always generate signing keys for the default PPA, even if it
         # was not specifically requested. The default PPA signing key
         # is then propagated to the context named-ppa.
         default_ppa = (
-            self.archive.owner.archive if self.archive.is_ppa
-            else self.archive)
+            self.archive.owner.archive if self.archive.is_ppa else self.archive
+        )
         if self.archive != default_ppa:
+
             def propagate_key(_):
                 self.archive.signing_key_owner = default_ppa.signing_key_owner
                 self.archive.signing_key_fingerprint = (
-                    default_ppa.signing_key_fingerprint)
+                    default_ppa.signing_key_fingerprint
+                )
                 del get_property_cache(self.archive).signing_key
 
             if default_ppa.signing_key_fingerprint is None:
                 d = IArchiveGPGSigningKey(default_ppa).generateSigningKey(
-                    log=log, async_keyserver=async_keyserver)
+                    log=log, async_keyserver=async_keyserver
+                )
             else:
                 d = defer.succeed(None)
             # generateSigningKey is only asynchronous if async_keyserver is
@@ -281,39 +297,48 @@ class ArchiveGPGSigningKey(SignableArchive):
         # perhaps push it down to a property of the archive.
         if self.archive.is_copy:
             key_displayname = (
-                "Launchpad copy archive %s" % self.archive.reference)
+                "Launchpad copy archive %s" % self.archive.reference
+            )
         else:
             key_displayname = (
-                "Launchpad PPA for %s" % self.archive.owner.displayname)
+                "Launchpad PPA for %s" % self.archive.owner.displayname
+            )
         if getFeatureFlag(PUBLISHER_GPG_USES_SIGNING_SERVICE):
             try:
                 signing_key = getUtility(ISigningKeySet).generate(
-                    SigningKeyType.OPENPGP, key_displayname,
-                    openpgp_key_algorithm=OpenPGPKeyAlgorithm.RSA, length=4096)
+                    SigningKeyType.OPENPGP,
+                    key_displayname,
+                    openpgp_key_algorithm=OpenPGPKeyAlgorithm.RSA,
+                    length=4096,
+                )
             except Exception as e:
                 if log is not None:
                     log.exception(
-                        "Error generating signing key for %s: %s %s" %
-                        (self.archive.reference, e.__class__.__name__, e))
+                        "Error generating signing key for %s: %s %s"
+                        % (self.archive.reference, e.__class__.__name__, e)
+                    )
                 raise
         else:
             signing_key = getUtility(IGPGHandler).generateKey(
-                key_displayname, logger=log)
+                key_displayname, logger=log
+            )
         return self._setupSigningKey(
-            signing_key, async_keyserver=async_keyserver)
+            signing_key, async_keyserver=async_keyserver
+        )
 
     def setSigningKey(self, key_path, async_keyserver=False):
         """See `IArchiveGPGSigningKey`."""
-        assert self.archive.signing_key_fingerprint is None, (
-            "Cannot override signing_keys.")
-        assert os.path.exists(key_path), (
-            "%s does not exist" % key_path)
+        assert (
+            self.archive.signing_key_fingerprint is None
+        ), "Cannot override signing_keys."
+        assert os.path.exists(key_path), "%s does not exist" % key_path
 
         with open(key_path, "rb") as key_file:
             secret_key_export = key_file.read()
         secret_key = getUtility(IGPGHandler).importSecretKey(secret_key_export)
         return self._setupSigningKey(
-            secret_key, async_keyserver=async_keyserver)
+            secret_key, async_keyserver=async_keyserver
+        )
 
     def _uploadPublicSigningKey(self, signing_key):
         """Upload the public half of a signing key to the keyserver."""
@@ -335,7 +360,8 @@ class ArchiveGPGSigningKey(SignableArchive):
         key_owner = getUtility(ILaunchpadCelebrities).ppa_key_guard
         if IPymeKey.providedBy(pub_key):
             key, _ = getUtility(IGPGKeySet).activate(
-                key_owner, pub_key, pub_key.can_encrypt)
+                key_owner, pub_key, pub_key.can_encrypt
+            )
         else:
             assert ISigningKey.providedBy(pub_key)
             key = pub_key
@@ -363,7 +389,8 @@ class ArchiveGPGSigningKey(SignableArchive):
             # Since that thread won't have a Zope interaction, we need to
             # unwrap the security proxy for it.
             d = deferToThread(
-                self._uploadPublicSigningKey, removeSecurityProxy(signing_key))
+                self._uploadPublicSigningKey, removeSecurityProxy(signing_key)
+            )
             d.addCallback(ProxyFactory)
             d.addCallback(self._storeSigningKey)
             return d
diff --git a/lib/lp/archivepublisher/artifactory.py b/lib/lp/archivepublisher/artifactory.py
index 8031ef3..3e912ed 100644
--- a/lib/lp/archivepublisher/artifactory.py
+++ b/lib/lp/archivepublisher/artifactory.py
@@ -5,26 +5,20 @@
 
 __all__ = [
     "ArtifactoryPool",
-    ]
+]
 
-from collections import defaultdict
 import logging
 import os
-from pathlib import (
-    Path,
-    PurePath,
-    )
 import tempfile
+from collections import defaultdict
+from pathlib import Path, PurePath
 from typing import Optional
 
+import requests
 from artifactory import ArtifactoryPath
 from dohq_artifactory.auth import XJFrogArtApiAuth
-import requests
 
-from lp.archivepublisher.diskpool import (
-    FileAddActionEnum,
-    poolify,
-    )
+from lp.archivepublisher.diskpool import FileAddActionEnum, poolify
 from lp.services.config import config
 from lp.services.librarian.utils import copy_and_close
 from lp.soyuz.enums import ArchiveRepositoryFormat
@@ -33,44 +27,55 @@ from lp.soyuz.interfaces.files import (
     IBinaryPackageFile,
     IPackageReleaseFile,
     ISourcePackageReleaseFile,
-    )
+)
 from lp.soyuz.interfaces.publishing import (
     IBinaryPackagePublishingHistory,
     NotInPool,
     PoolFileOverwriteError,
-    )
+)
 
 
-def _path_for(archive: IArchive, rootpath: ArtifactoryPath, source_name: str,
-              source_version: str, pub_file: IPackageReleaseFile) -> Path:
+def _path_for(
+    archive: IArchive,
+    rootpath: ArtifactoryPath,
+    source_name: str,
+    source_version: str,
+    pub_file: IPackageReleaseFile,
+) -> Path:
     repository_format = archive.repository_format
     if repository_format == ArchiveRepositoryFormat.DEBIAN:
         path = rootpath / poolify(source_name)
     elif repository_format == ArchiveRepositoryFormat.PYTHON:
         path = rootpath / source_name / source_version
     elif repository_format == ArchiveRepositoryFormat.CONDA:
-        user_defined_fields = (
-            pub_file.binarypackagerelease.user_defined_fields)
+        user_defined_fields = pub_file.binarypackagerelease.user_defined_fields
         subdir = next(
             (value for key, value in user_defined_fields if key == "subdir"),
-            None)
+            None,
+        )
         if subdir is None:
             raise AssertionError(
-                "Cannot publish a Conda package with no subdir")
+                "Cannot publish a Conda package with no subdir"
+            )
         path = rootpath / subdir
     else:
         raise AssertionError(
-            "Unsupported repository format: %r" % repository_format)
+            "Unsupported repository format: %r" % repository_format
+        )
     path = path / pub_file.libraryfile.filename
     return path
 
 
 class ArtifactoryPoolEntry:
-
-    def __init__(self, archive: IArchive, rootpath: ArtifactoryPath,
-                 source_name: str, source_version: str,
-                 pub_file: IPackageReleaseFile,
-                 logger: logging.Logger) -> None:
+    def __init__(
+        self,
+        archive: IArchive,
+        rootpath: ArtifactoryPath,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+        logger: logging.Logger,
+    ) -> None:
         self.archive = archive
         self.rootpath = rootpath
         self.source_name = source_name
@@ -89,8 +94,12 @@ class ArtifactoryPoolEntry:
         # complications in terms of having to keep track of components just
         # in order to update an artifact's properties.
         return _path_for(
-            self.archive, self.rootpath, self.source_name, self.source_version,
-            self.pub_file)
+            self.archive,
+            self.rootpath,
+            self.source_name,
+            self.source_version,
+            self.pub_file,
+        )
 
     def makeReleaseID(self, pub_file: IPackageReleaseFile) -> str:
         """
@@ -113,11 +122,13 @@ class ArtifactoryPoolEntry:
     # Property names outside the "launchpad." namespace that we expect to
     # overwrite.  Any existing property names other than these will be left
     # alone.
-    owned_properties = frozenset({
-        "deb.architecture",
-        "deb.component",
-        "deb.distribution",
-        })
+    owned_properties = frozenset(
+        {
+            "deb.architecture",
+            "deb.component",
+            "deb.distribution",
+        }
+    )
 
     def calculateProperties(self, release_id, publications):
         """Return a dict of Artifactory properties to set for this file.
@@ -165,37 +176,51 @@ class ArtifactoryPoolEntry:
             archives = {publication.archive for publication in publications}
             if len(archives) > 1:
                 raise AssertionError(
-                    "Can't calculate properties across multiple archives: %s" %
-                    archives)
+                    "Can't calculate properties across multiple archives: %s"
+                    % archives
+                )
             repository_format = tuple(archives)[0].repository_format
             if repository_format == ArchiveRepositoryFormat.DEBIAN:
-                properties["deb.distribution"] = sorted({
-                    pub.distroseries.getSuite(pub.pocket)
-                    for pub in publications})
-                properties["deb.component"] = sorted({
-                    pub.component.name for pub in publications})
-                architectures = sorted({
-                    pub.distroarchseries.architecturetag
-                    for pub in publications
-                    if IBinaryPackagePublishingHistory.providedBy(pub)})
+                properties["deb.distribution"] = sorted(
+                    {
+                        pub.distroseries.getSuite(pub.pocket)
+                        for pub in publications
+                    }
+                )
+                properties["deb.component"] = sorted(
+                    {pub.component.name for pub in publications}
+                )
+                architectures = sorted(
+                    {
+                        pub.distroarchseries.architecturetag
+                        for pub in publications
+                        if IBinaryPackagePublishingHistory.providedBy(pub)
+                    }
+                )
                 if architectures:
                     properties["deb.architecture"] = architectures
             else:
-                properties["launchpad.channel"] = sorted({
-                    "%s:%s" % (
-                        pub.distroseries.getSuite(pub.pocket), pub.channel)
-                    for pub in publications})
+                properties["launchpad.channel"] = sorted(
+                    {
+                        "%s:%s"
+                        % (pub.distroseries.getSuite(pub.pocket), pub.channel)
+                        for pub in publications
+                    }
+                )
         # Additional metadata per
         # https://docs.google.com/spreadsheets/d/15Xkdi-CRu2NiQfLoclP5PKW63Zw6syiuao8VJG7zxvw
         # (private).
         if IBinaryPackageFile.providedBy(self.pub_file):
             ci_build = self.pub_file.binarypackagerelease.ci_build
             if ci_build is not None:
-                properties.update({
-                    "soss.source_url": (
-                        ci_build.git_repository.getCodebrowseUrl()),
-                    "soss.commit_id": ci_build.commit_sha1,
-                    })
+                properties.update(
+                    {
+                        "soss.source_url": (
+                            ci_build.git_repository.getCodebrowseUrl()
+                        ),
+                        "soss.commit_id": ci_build.commit_sha1,
+                    }
+                )
         return properties
 
     def addFile(self):
@@ -209,12 +234,14 @@ class ArtifactoryPoolEntry:
             sha1 = lfa.content.sha1
             if sha1 != file_hash:
                 raise PoolFileOverwriteError(
-                    "%s != %s for %s" % (sha1, file_hash, targetpath))
+                    "%s != %s for %s" % (sha1, file_hash, targetpath)
+                )
             return FileAddActionEnum.NONE
 
         self.debug("Deploying %s", targetpath)
         properties = self.calculateProperties(
-            self.makeReleaseID(self.pub_file), [])
+            self.makeReleaseID(self.pub_file), []
+        )
         fd, name = tempfile.mkstemp(prefix="temp-download.")
         f = os.fdopen(fd, "wb")
         try:
@@ -233,13 +260,16 @@ class ArtifactoryPoolEntry:
         release_id = old_properties.get("launchpad.release-id")
         if not release_id:
             raise AssertionError(
-                "Cannot update properties: launchpad.release-id is not in %s" %
-                old_properties)
+                "Cannot update properties: launchpad.release-id is not in %s"
+                % old_properties
+            )
         properties = self.calculateProperties(release_id[0], publications)
         new_properties = {
-            key: value for key, value in old_properties.items()
-            if not key.startswith("launchpad.") and
-               key not in self.owned_properties}
+            key: value
+            for key, value in old_properties.items()
+            if not key.startswith("launchpad.")
+            and key not in self.owned_properties
+        }
         new_properties.update(properties)
         if old_properties != new_properties:
             # We could use the ArtifactoryPath.properties setter, but that
@@ -248,7 +278,8 @@ class ArtifactoryPoolEntry:
             properties_to_remove = set(old_properties) - set(new_properties)
             if properties_to_remove:
                 targetpath.del_properties(
-                    properties_to_remove, recursive=False)
+                    properties_to_remove, recursive=False
+                )
             targetpath.set_properties(new_properties, recursive=False)
 
     def removeFile(self) -> int:
@@ -266,8 +297,9 @@ class ArtifactoryPool:
 
     results = FileAddActionEnum
 
-    def __init__(self, archive: IArchive, rootpath,
-                 logger: logging.Logger) -> None:
+    def __init__(
+        self, archive: IArchive, rootpath, logger: logging.Logger
+    ) -> None:
         self.archive = archive
         if not isinstance(rootpath, ArtifactoryPath):
             rootpath = ArtifactoryPath(rootpath)
@@ -290,7 +322,7 @@ class ArtifactoryPool:
             session.proxies = {
                 "http": config.launchpad.http_proxy,
                 "https": config.launchpad.http_proxy,
-                }
+            }
         if config.launchpad.ca_certificates_path is not None:
             session.verify = config.launchpad.ca_certificates_path
         write_creds = config.artifactory.write_credentials
@@ -300,15 +332,29 @@ class ArtifactoryPool:
             session.auth = XJFrogArtApiAuth(write_creds.split(":", 1)[1])
         return session
 
-    def _getEntry(self, source_name: str, source_version: str,
-                  pub_file: IPackageReleaseFile) -> ArtifactoryPoolEntry:
+    def _getEntry(
+        self,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ) -> ArtifactoryPoolEntry:
         """See `DiskPool._getEntry`."""
         return ArtifactoryPoolEntry(
-            self.archive, self.rootpath, source_name, source_version, pub_file,
-            self.logger)
-
-    def pathFor(self, comp: str, source_name: str, source_version: str,
-                pub_file: Optional[IPackageReleaseFile] = None) -> Path:
+            self.archive,
+            self.rootpath,
+            source_name,
+            source_version,
+            pub_file,
+            self.logger,
+        )
+
+    def pathFor(
+        self,
+        comp: str,
+        source_name: str,
+        source_version: str,
+        pub_file: Optional[IPackageReleaseFile] = None,
+    ) -> Path:
         """Return the path for the given pool file."""
         # For Artifactory publication, we ignore the component.  There's
         # only marginal benefit in having it be explicitly represented in
@@ -316,10 +362,16 @@ class ArtifactoryPool:
         # complications in terms of having to keep track of components just
         # in order to update an artifact's properties.
         return _path_for(
-            self.archive, self.rootpath, source_name, source_version, pub_file)
-
-    def addFile(self, component: str, source_name: str, source_version: str,
-                pub_file: IPackageReleaseFile):
+            self.archive, self.rootpath, source_name, source_version, pub_file
+        )
+
+    def addFile(
+        self,
+        component: str,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ):
         """Add a file with the given contents to the pool.
 
         `source_name`, `source_version`, and `filename` are used to
@@ -346,8 +398,13 @@ class ArtifactoryPool:
         entry = self._getEntry(source_name, source_version, pub_file)
         return entry.addFile()
 
-    def removeFile(self, component: str, source_name: str, source_version: str,
-                   pub_file: IPackageReleaseFile) -> int:
+    def removeFile(
+        self,
+        component: str,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ) -> int:
         """Remove the specified file from the pool.
 
         There are two possible outcomes:
@@ -362,9 +419,14 @@ class ArtifactoryPool:
         entry = self._getEntry(source_name, source_version, pub_file)
         return entry.removeFile()
 
-    def updateProperties(self, source_name: str, source_version: str,
-                         pub_file: IPackageReleaseFile, publications,
-                         old_properties=None):
+    def updateProperties(
+        self,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+        publications,
+        old_properties=None,
+    ):
         """Update a file's properties in Artifactory."""
         entry = self._getEntry(source_name, source_version, pub_file)
         entry.updateProperties(publications, old_properties=old_properties)
@@ -384,7 +446,7 @@ class ArtifactoryPool:
                 "*.dsc",
                 "*.tar.*",
                 "*.udeb",
-                ]
+            ]
         elif repository_format == ArchiveRepositoryFormat.PYTHON:
             return ["*.whl"]
         elif repository_format == ArchiveRepositoryFormat.CONDA:
@@ -394,7 +456,8 @@ class ArtifactoryPool:
             ]
         else:
             raise AssertionError(
-                "Unknown repository format %r" % repository_format)
+                "Unknown repository format %r" % repository_format
+            )
 
     def getAllArtifacts(self, repository_name, repository_format):
         """Get a mapping of all artifacts to their current properties.
@@ -413,13 +476,14 @@ class ArtifactoryPool:
                 "$or": [
                     {"name": {"$match": pattern}}
                     for pattern in self.getArtifactPatterns(repository_format)
-                    ],
-                },
+                ],
+            },
             ".include",
             # We don't use "repo", but the AQL documentation says that
             # non-admin users must include all of "name", "repo", and "path"
             # in the include directive.
-            ["repo", "path", "name", "property"])
+            ["repo", "path", "name", "property"],
+        )
         artifacts_by_path = {}
         for artifact in artifacts:
             path = PurePath(artifact["path"], artifact["name"])
@@ -430,5 +494,6 @@ class ArtifactoryPool:
             # and in an undefined order.  Always sort them to ensure that we
             # can compare properties reliably.
             artifacts_by_path[path] = {
-                key: sorted(values) for key, values in properties.items()}
+                key: sorted(values) for key, values in properties.items()
+            }
         return artifacts_by_path
diff --git a/lib/lp/archivepublisher/config.py b/lib/lp/archivepublisher/config.py
index 06ae2a7..acf5480 100644
--- a/lib/lp/archivepublisher/config.py
+++ b/lib/lp/archivepublisher/config.py
@@ -17,12 +17,11 @@ from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.services.config import config
 from lp.soyuz.enums import (
-    archive_suffixes,
     ArchivePublishingMethod,
     ArchivePurpose,
     ArchiveRepositoryFormat,
-    )
-
+    archive_suffixes,
+)
 
 APT_FTPARCHIVE_PURPOSES = (ArchivePurpose.PRIMARY, ArchivePurpose.COPY)
 
@@ -36,85 +35,100 @@ def getPubConfig(archive):
     """
     pubconf = Config(archive)
     ppa_config = config.personalpackagearchive
-    db_pubconf = getUtility(
-        IPublisherConfigSet).getByDistribution(archive.distribution)
+    db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
+        archive.distribution
+    )
     if db_pubconf is None:
         return None
 
     pubconf.temproot = os.path.join(
-        db_pubconf.root_dir, '%s-temp' % archive.distribution.name)
+        db_pubconf.root_dir, "%s-temp" % archive.distribution.name
+    )
 
     if archive.publishing_method == ArchivePublishingMethod.ARTIFACTORY:
         if config.artifactory.base_url is None:
             raise AssertionError(
                 "Cannot publish to Artifactory because "
-                "config.artifactory.base_url is unset.")
+                "config.artifactory.base_url is unset."
+            )
         pubconf.distroroot = None
         # XXX cjwatson 2022-04-01: This assumes that only admins can
         # configure archives to publish to Artifactory, since Archive.name
         # isn't unique.  We may eventually need to use a new column with a
         # unique constraint, but this is enough to get us going for now.
         pubconf.archiveroot = "%s/%s" % (
-            config.artifactory.base_url.rstrip("/"), archive.name)
+            config.artifactory.base_url.rstrip("/"),
+            archive.name,
+        )
     elif archive.is_ppa:
         if archive.private:
             pubconf.distroroot = ppa_config.private_root
         else:
             pubconf.distroroot = ppa_config.root
         pubconf.archiveroot = os.path.join(
-            pubconf.distroroot, archive.owner.name, archive.name,
-            archive.distribution.name)
+            pubconf.distroroot,
+            archive.owner.name,
+            archive.name,
+            archive.distribution.name,
+        )
     elif archive.is_main:
         pubconf.distroroot = db_pubconf.root_dir
         pubconf.archiveroot = os.path.join(
-            pubconf.distroroot, archive.distribution.name)
+            pubconf.distroroot, archive.distribution.name
+        )
         pubconf.archiveroot += archive_suffixes[archive.purpose]
     elif archive.is_copy:
         pubconf.distroroot = db_pubconf.root_dir
         pubconf.archiveroot = os.path.join(
             pubconf.distroroot,
-            archive.distribution.name + '-' + archive.name,
-            archive.distribution.name)
+            archive.distribution.name + "-" + archive.name,
+            archive.distribution.name,
+        )
     else:
         raise AssertionError(
             "Unknown archive purpose %s when getting publisher config.",
-            archive.purpose)
+            archive.purpose,
+        )
 
     # There can be multiple copy archives, so the temp dir needs to be
     # within the archive.
     if archive.is_copy:
-        pubconf.temproot = pubconf.archiveroot + '-temp'
-
-    if (archive.publishing_method == ArchivePublishingMethod.LOCAL and
-            archive.purpose in APT_FTPARCHIVE_PURPOSES):
-        pubconf.overrideroot = pubconf.archiveroot + '-overrides'
-        pubconf.cacheroot = pubconf.archiveroot + '-cache'
-        pubconf.miscroot = pubconf.archiveroot + '-misc'
+        pubconf.temproot = pubconf.archiveroot + "-temp"
+
+    if (
+        archive.publishing_method == ArchivePublishingMethod.LOCAL
+        and archive.purpose in APT_FTPARCHIVE_PURPOSES
+    ):
+        pubconf.overrideroot = pubconf.archiveroot + "-overrides"
+        pubconf.cacheroot = pubconf.archiveroot + "-cache"
+        pubconf.miscroot = pubconf.archiveroot + "-misc"
     else:
         pubconf.overrideroot = None
         pubconf.cacheroot = None
         pubconf.miscroot = None
 
     if archive.is_main:
-        pubconf.signingroot = pubconf.archiveroot + '-uefi'
+        pubconf.signingroot = pubconf.archiveroot + "-uefi"
         if not os.path.exists(pubconf.signingroot):
-            pubconf.signingroot = pubconf.archiveroot + '-signing'
+            pubconf.signingroot = pubconf.archiveroot + "-signing"
         pubconf.signingautokey = False
     elif archive.is_ppa:
         signing_keys_root = os.path.join(ppa_config.signing_keys_root, "uefi")
         if not os.path.exists(signing_keys_root):
             signing_keys_root = os.path.join(
-                ppa_config.signing_keys_root, "signing")
-        pubconf.signingroot = os.path.join(signing_keys_root,
-            archive.owner.name, archive.name)
+                ppa_config.signing_keys_root, "signing"
+            )
+        pubconf.signingroot = os.path.join(
+            signing_keys_root, archive.owner.name, archive.name
+        )
         pubconf.signingautokey = True
     else:
         pubconf.signingroot = None
         pubconf.signingautokey = False
 
     if archive.repository_format == ArchiveRepositoryFormat.DEBIAN:
-        pubconf.poolroot = os.path.join(pubconf.archiveroot, 'pool')
-        pubconf.distsroot = os.path.join(pubconf.archiveroot, 'dists')
+        pubconf.poolroot = os.path.join(pubconf.archiveroot, "pool")
+        pubconf.distsroot = os.path.join(pubconf.archiveroot, "dists")
     else:
         pubconf.poolroot = pubconf.archiveroot
         pubconf.distsroot = None
@@ -126,13 +140,14 @@ def getPubConfig(archive):
     # for PPAs with the same owner and name. META_DATA uploads are only used
     # by a few PPAs, and only by USC, so we leave metaroot unset and
     # ignore the uploads for anything except Ubuntu PPAs.
-    ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
-    if (archive.publishing_method == ArchivePublishingMethod.LOCAL and
-            archive.is_ppa and archive.distribution == ubuntu):
-        meta_root = os.path.join(
-            pubconf.distroroot, archive.owner.name)
-        pubconf.metaroot = os.path.join(
-            meta_root, "meta", archive.name)
+    ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
+    if (
+        archive.publishing_method == ArchivePublishingMethod.LOCAL
+        and archive.is_ppa
+        and archive.distribution == ubuntu
+    ):
+        meta_root = os.path.join(pubconf.distroroot, archive.owner.name)
+        pubconf.metaroot = os.path.join(meta_root, "meta", archive.name)
     else:
         pubconf.metaroot = None
 
@@ -141,7 +156,7 @@ def getPubConfig(archive):
     # to the publisher (e.g. Contents generation) to publish files in a
     # race-free way.
     if archive.is_main:
-        pubconf.stagingroot = pubconf.archiveroot + '-staging'
+        pubconf.stagingroot = pubconf.archiveroot + "-staging"
     else:
         pubconf.stagingroot = None
 
@@ -161,8 +176,10 @@ class Config:
             # the same filesystem as the pool, since the pool is remote
             # anyway.
             lambda archive, rootpath, temppath, logger: ArtifactoryPool(
-                archive, rootpath, logger)),
-        }
+                archive, rootpath, logger
+            )
+        ),
+    }
 
     def __init__(self, archive):
         self.archive = archive
@@ -182,7 +199,7 @@ class Config:
             self.miscroot,
             self.temproot,
             self.stagingroot,
-            ]
+        ]
 
         for directory in required_directories:
             if directory is None:
diff --git a/lib/lp/archivepublisher/customupload.py b/lib/lp/archivepublisher/customupload.py
index 814ea29..6193ca4 100644
--- a/lib/lp/archivepublisher/customupload.py
+++ b/lib/lp/archivepublisher/customupload.py
@@ -11,7 +11,7 @@ Custom uploads include Debian installer packages, dist upgraders and
 DDTP (Debian Description Translation Project) tarballs.
 """
 
-__all__ = ['CustomUpload']
+__all__ = ["CustomUpload"]
 
 import os
 import shutil
@@ -20,56 +20,65 @@ import tempfile
 
 from zope.interface import implementer
 
-from lp.archivepublisher.debversion import (
-    Version as make_version,
-    VersionError,
-    )
+from lp.archivepublisher.debversion import Version as make_version
+from lp.archivepublisher.debversion import VersionError
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
     ISignableArchive,
-    )
+)
 from lp.services.librarian.utils import copy_and_close
-from lp.soyuz.interfaces.queue import (
-    CustomUploadError,
-    ICustomUploadHandler,
-    )
+from lp.soyuz.interfaces.queue import CustomUploadError, ICustomUploadHandler
 
 
 class CustomUploadTarballTarError(CustomUploadError):
     """The tarfile module raised an exception."""
+
     def __init__(self, tarfile_path, tar_error):
-        message = 'Problem reading tarfile %s: %s' % (tarfile_path, tar_error)
+        message = "Problem reading tarfile %s: %s" % (tarfile_path, tar_error)
         CustomUploadError.__init__(self, message)
 
 
 class CustomUploadTarballInvalidTarfile(CustomUploadError):
     """The supplied tarfile did not contain the expected elements."""
+
     def __init__(self, tarfile_path, expected_dir):
-        message = ('Tarfile %s did not contain expected file %s' %
-                   (tarfile_path, expected_dir))
+        message = "Tarfile %s did not contain expected file %s" % (
+            tarfile_path,
+            expected_dir,
+        )
         CustomUploadError.__init__(self, message)
 
 
 class CustomUploadBadUmask(CustomUploadError):
     """The environment's umask was incorrect."""
+
     def __init__(self, expected_umask, got_umask):
-        message = 'Bad umask; expected %03o, got %03o' % (
-            expected_umask, got_umask)
+        message = "Bad umask; expected %03o, got %03o" % (
+            expected_umask,
+            got_umask,
+        )
         CustomUploadError.__init__(self, message)
 
 
 class CustomUploadTarballInvalidFileType(CustomUploadError):
     """A file of type other than regular or symlink was found."""
+
     def __init__(self, tarfile_path, file_name):
-        message = ("Tarfile %s has file %s which is not a regular file, "
-                   "directory or a symlink" % (tarfile_path, file_name))
+        message = (
+            "Tarfile %s has file %s which is not a regular file, "
+            "directory or a symlink" % (tarfile_path, file_name)
+        )
         CustomUploadError.__init__(self, message)
 
 
 class CustomUploadTarballBadSymLink(CustomUploadError):
     """A symlink was found whose target points outside the immediate tree."""
+
     def __init__(self, tarfile_path, symlink_name, target):
         message = "Tarfile %s has a symlink %s whose target %s is illegal" % (
-            tarfile_path, symlink_name, target)
+            tarfile_path,
+            symlink_name,
+            target,
+        )
         CustomUploadError.__init__(self, message)
 
 
@@ -78,17 +87,24 @@ class CustomUploadTarballBadFile(CustomUploadError):
 
     This can happen if someone embeds ../file in the tar, for example.
     """
+
     def __init__(self, tarfile_path, file_name):
         message = "Tarfile %s has a file %s which is illegal" % (
-            tarfile_path, file_name)
+            tarfile_path,
+            file_name,
+        )
         CustomUploadError.__init__(self, message)
 
 
 class CustomUploadAlreadyExists(CustomUploadError):
     """A build for this type, architecture, and version already exists."""
+
     def __init__(self, custom_type, arch, version):
-        message = ('%s build %s for architecture %s already exists' %
-                   (custom_type, version, arch))
+        message = "%s build %s for architecture %s already exists" % (
+            custom_type,
+            version,
+            arch,
+        )
         CustomUploadError.__init__(self, message)
 
 
@@ -169,7 +185,8 @@ class CustomUpload:
         """Check for conflicts with existing publications in the archive."""
         if os.path.exists(os.path.join(self.targetdir, self.version)):
             raise CustomUploadAlreadyExists(
-                self.custom_type, self.arch, self.version)
+                self.custom_type, self.arch, self.version
+            )
 
     def verifyBeforeExtracting(self, tar):
         """Verify the tarball before extracting it.
@@ -187,7 +204,8 @@ class CustomUpload:
 
             if not (member.isreg() or member.issym() or member.isdir()):
                 raise CustomUploadTarballInvalidFileType(
-                    self.tarfile_path, member.name)
+                    self.tarfile_path, member.name
+                )
 
             # Append os.sep to stop attacks like /var/tmp/../tmpBOGUS
             # This is unlikely since someone would need to guess what
@@ -200,10 +218,13 @@ class CustomUpload:
             # The path can either be the tmpdir (without a trailing
             # separator) or have the tmpdir plus a trailing separator
             # as a prefix.
-            if (member_realpath != self.tmpdir and
-                not member_realpath.startswith(tmpdir_with_sep)):
+            if (
+                member_realpath != self.tmpdir
+                and not member_realpath.startswith(tmpdir_with_sep)
+            ):
                 raise CustomUploadTarballBadFile(
-                    self.tarfile_path, member.name)
+                    self.tarfile_path, member.name
+                )
 
             if member.issym():
                 # This is a bit tricky.  We need to take the dirname of
@@ -212,24 +233,29 @@ class CustomUpload:
                 # get an absolute path for the link target.
                 rel_link_file_location = os.path.dirname(member.name)
                 abs_link_file_location = os.path.join(
-                    self.tmpdir, rel_link_file_location)
+                    self.tmpdir, rel_link_file_location
+                )
                 target_path = os.path.join(
-                    abs_link_file_location, member.linkname)
+                    abs_link_file_location, member.linkname
+                )
                 target_realpath = os.path.realpath(target_path)
 
                 # The same rules apply here as for member_realpath
                 # above.
-                if (target_realpath != self.tmpdir and
-                    not target_realpath.startswith(tmpdir_with_sep)):
+                if (
+                    target_realpath != self.tmpdir
+                    and not target_realpath.startswith(tmpdir_with_sep)
+                ):
                     raise CustomUploadTarballBadSymLink(
-                        self.tarfile_path, member.name, member.linkname)
+                        self.tarfile_path, member.name, member.linkname
+                    )
 
         return True
 
     def extract(self):
         """Extract the custom upload to a temporary directory."""
         assert self.tmpdir is None, "Have already extracted tarfile"
-        self.tmpdir = tempfile.mkdtemp(prefix='customupload_')
+        self.tmpdir = tempfile.mkdtemp(prefix="customupload_")
         try:
             tar = tarfile.open(self.tarfile_path)
             self.verifyBeforeExtracting(tar)
@@ -256,9 +282,10 @@ class CustomUpload:
          * destpath is the absolute path to the target location.
         """
         sourcepath = os.path.join(dirname, basename)
-        assert sourcepath.startswith(self.tmpdir), (
-            "Source path must refer to the extracted location.")
-        basepath = sourcepath[len(self.tmpdir):].lstrip(os.path.sep)
+        assert sourcepath.startswith(
+            self.tmpdir
+        ), "Source path must refer to the extracted location."
+        basepath = sourcepath[len(self.tmpdir) :].lstrip(os.path.sep)
         destpath = os.path.join(self.targetdir, basepath)
 
         return sourcepath, basepath, destpath
@@ -292,7 +319,8 @@ class CustomUpload:
             # Create symbolic links to directories.
             for dirname in dirnames:
                 sourcepath, basepath, destpath = self._buildInstallPaths(
-                    dirname, dirpath)
+                    dirname, dirpath
+                )
 
                 if not self.shouldInstall(basepath):
                     continue
@@ -316,7 +344,8 @@ class CustomUpload:
             # Create/Copy files.
             for filename in filenames:
                 sourcepath, basepath, destpath = self._buildInstallPaths(
-                    filename, dirpath)
+                    filename, dirpath
+                )
 
                 if not self.shouldInstall(basepath):
                     continue
@@ -338,7 +367,8 @@ class CustomUpload:
 
         if not extracted:
             raise CustomUploadTarballInvalidTarfile(
-                self.tarfile_path, self.targetdir)
+                self.tarfile_path, self.targetdir
+            )
 
     def fixCurrentSymlink(self):
         """Update the 'current' symlink and prune old entries.
@@ -358,7 +388,7 @@ class CustomUpload:
         versions = []
         for entry in os.scandir(self.targetdir):
             # Skip the symlink.
-            if entry.name == 'current':
+            if entry.name == "current":
                 continue
             # Skip broken versions.
             try:
@@ -371,9 +401,9 @@ class CustomUpload:
 
         # Make sure the 'current' symlink points to the most recent version
         # The most recent version is in versions[0]
-        current = os.path.join(self.targetdir, 'current')
-        os.symlink(versions[0], '%s.new' % current)
-        os.rename('%s.new' % current, current)
+        current = os.path.join(self.targetdir, "current")
+        os.symlink(versions[0], "%s.new" % current)
+        os.rename("%s.new" % current, current)
 
         # There may be some other unpacked installer directories in
         # the target already. We only keep the three with the highest
diff --git a/lib/lp/archivepublisher/ddtp_tarball.py b/lib/lp/archivepublisher/ddtp_tarball.py
index 78e4e3e..3412ed7 100644
--- a/lib/lp/archivepublisher/ddtp_tarball.py
+++ b/lib/lp/archivepublisher/ddtp_tarball.py
@@ -11,8 +11,8 @@ to enable developers to publish indexes of DDTP contents.
 """
 
 __all__ = [
-    'DdtpTarballUpload',
-    ]
+    "DdtpTarballUpload",
+]
 
 import os
 
@@ -48,6 +48,7 @@ class DdtpTarballUpload(CustomUpload):
 
     Old contents will be preserved.
     """
+
     custom_type = "ddtp-tarball"
 
     @staticmethod
@@ -66,10 +67,12 @@ class DdtpTarballUpload(CustomUpload):
         self.setComponents(tarfile_path)
         self.archive = archive
         self.distro_series, _ = getUtility(IDistroSeriesSet).fromSuite(
-            archive.distribution, suite)
+            archive.distribution, suite
+        )
         pubconf = getPubConfig(archive)
         self.targetdir = os.path.join(
-            pubconf.archiveroot, 'dists', suite, self.component)
+            pubconf.archiveroot, "dists", suite, self.component
+        )
 
     @classmethod
     def getSeriesKey(cls, tarfile_path):
@@ -90,20 +93,24 @@ class DdtpTarballUpload(CustomUpload):
         # conditions depending on the archive purpose) may be configured to
         # create its own Translation-en files.  If so, we must take care not
         # to allow ddtp-tarball custom uploads to collide with those.
-        if (filename == "i18n/Translation-en" or
-                filename.startswith("i18n/Translation-en.")):
+        if filename == "i18n/Translation-en" or filename.startswith(
+            "i18n/Translation-en."
+        ):
             # Compare with the step C condition in
             # PublishDistro.publishArchive.
             if self.archive.purpose in (
-                    ArchivePurpose.PRIMARY, ArchivePurpose.COPY):
+                ArchivePurpose.PRIMARY,
+                ArchivePurpose.COPY,
+            ):
                 # See FTPArchiveHandler.writeAptConfig.
                 if not self.distro_series.include_long_descriptions:
                     return False
             else:
                 # See Publisher._writeComponentIndexes.
-                if (not self.distro_series.include_long_descriptions and
-                        getFeatureFlag(
-                            "soyuz.ppa.separate_long_descriptions")):
+                if (
+                    not self.distro_series.include_long_descriptions
+                    and getFeatureFlag("soyuz.ppa.separate_long_descriptions")
+                ):
                     return False
         return True
 
diff --git a/lib/lp/archivepublisher/deathrow.py b/lib/lp/archivepublisher/deathrow.py
index db73277..2d885e9 100644
--- a/lib/lp/archivepublisher/deathrow.py
+++ b/lib/lp/archivepublisher/deathrow.py
@@ -9,36 +9,25 @@ import datetime
 
 import pytz
 from storm.expr import Exists
-from storm.locals import (
-    And,
-    ClassAlias,
-    Not,
-    Select,
-    )
+from storm.locals import And, ClassAlias, Not, Select
 
 from lp.archivepublisher.config import getPubConfig
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.interfaces import IStore
-from lp.services.librarian.model import (
-    LibraryFileAlias,
-    LibraryFileContent,
-    )
+from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.publishing import (
     IBinaryPackagePublishingHistory,
-    inactive_publishing_status,
     ISourcePackagePublishingHistory,
     MissingSymlinkInPool,
     NotInPool,
-    )
-from lp.soyuz.model.files import (
-    BinaryPackageFile,
-    SourcePackageReleaseFile,
-    )
+    inactive_publishing_status,
+)
+from lp.soyuz.model.files import BinaryPackageFile, SourcePackageReleaseFile
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 
 
 def getDeathRow(archive, log, pool_root_override):
@@ -87,17 +76,25 @@ class DeathRow:
         removed."""
         if dry_run:
             # Don't actually remove the files if we are dry running
-            def _mockRemoveFile(component_name, pool_name, pool_version,
-                                pub_file):
+            def _mockRemoveFile(
+                component_name, pool_name, pool_version, pub_file
+            ):
                 self.logger.debug(
-                    "(Not really!) removing %s %s/%s/%s" %
-                    (component_name, pool_name, pool_version,
-                     pub_file.libraryfile.filename))
+                    "(Not really!) removing %s %s/%s/%s"
+                    % (
+                        component_name,
+                        pool_name,
+                        pool_version,
+                        pub_file.libraryfile.filename,
+                    )
+                )
                 fullpath = self.diskpool.pathFor(
-                    component_name, pool_name, pool_version, pub_file)
+                    component_name, pool_name, pool_version, pub_file
+                )
                 if not fullpath.exists():
                     raise NotInPool
                 return fullpath.lstat().st_size
+
             self._removeFile = _mockRemoveFile
 
         source_files, binary_files = self._collectCondemned()
@@ -115,35 +112,51 @@ class DeathRow:
         Both sources and binaries are lists.
         """
         OtherSPPH = ClassAlias(SourcePackagePublishingHistory)
-        sources = list(IStore(SourcePackagePublishingHistory).find(
-            SourcePackagePublishingHistory,
-            SourcePackagePublishingHistory.archive == self.archive,
-            SourcePackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
-            SourcePackagePublishingHistory.dateremoved == None,
-            Not(Exists(Select(
-                1, tables=[OtherSPPH],
-                where=And(
-                    SourcePackagePublishingHistory.sourcepackagereleaseID ==
-                        OtherSPPH.sourcepackagereleaseID,
-                    OtherSPPH.archiveID == self.archive.id,
-                    Not(OtherSPPH.status.is_in(inactive_publishing_status))),
-                )))).order_by(SourcePackagePublishingHistory.id))
+        other_active_spph = Select(
+            1,
+            tables=[OtherSPPH],
+            where=And(
+                SourcePackagePublishingHistory.sourcepackagereleaseID
+                == OtherSPPH.sourcepackagereleaseID,
+                OtherSPPH.archiveID == self.archive.id,
+                Not(OtherSPPH.status.is_in(inactive_publishing_status)),
+            ),
+        )
+        sources = list(
+            IStore(SourcePackagePublishingHistory)
+            .find(
+                SourcePackagePublishingHistory,
+                SourcePackagePublishingHistory.archive == self.archive,
+                SourcePackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+                SourcePackagePublishingHistory.dateremoved == None,
+                Not(Exists(other_active_spph)),
+            )
+            .order_by(SourcePackagePublishingHistory.id)
+        )
         self.logger.debug("%d Sources" % len(sources))
 
         OtherBPPH = ClassAlias(BinaryPackagePublishingHistory)
-        binaries = list(IStore(BinaryPackagePublishingHistory).find(
-            BinaryPackagePublishingHistory,
-            BinaryPackagePublishingHistory.archive == self.archive,
-            BinaryPackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
-            BinaryPackagePublishingHistory.dateremoved == None,
-            Not(Exists(Select(
-                1, tables=[OtherBPPH],
-                where=And(
-                    BinaryPackagePublishingHistory.binarypackagereleaseID ==
-                        OtherBPPH.binarypackagereleaseID,
-                    OtherBPPH.archiveID == self.archive.id,
-                    Not(OtherBPPH.status.is_in(inactive_publishing_status))),
-                )))).order_by(BinaryPackagePublishingHistory.id))
+        other_active_bpph = Select(
+            1,
+            tables=[OtherBPPH],
+            where=And(
+                BinaryPackagePublishingHistory.binarypackagereleaseID
+                == OtherBPPH.binarypackagereleaseID,
+                OtherBPPH.archiveID == self.archive.id,
+                Not(OtherBPPH.status.is_in(inactive_publishing_status)),
+            ),
+        )
+        binaries = list(
+            IStore(BinaryPackagePublishingHistory)
+            .find(
+                BinaryPackagePublishingHistory,
+                BinaryPackagePublishingHistory.archive == self.archive,
+                BinaryPackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+                BinaryPackagePublishingHistory.dateremoved == None,
+                Not(Exists(other_active_bpph)),
+            )
+            .order_by(BinaryPackagePublishingHistory.id)
+        )
         self.logger.debug("%d Binaries" % len(binaries))
 
         return (sources, binaries)
@@ -160,34 +173,42 @@ class DeathRow:
         clauses = []
 
         if ISourcePackagePublishingHistory.implementedBy(publication_class):
-            clauses.extend([
-                SourcePackagePublishingHistory.archive == self.archive,
-                SourcePackagePublishingHistory.dateremoved == None,
-                SourcePackagePublishingHistory.sourcepackagerelease ==
-                    SourcePackageReleaseFile.sourcepackagereleaseID,
-                SourcePackageReleaseFile.libraryfile == LibraryFileAlias.id,
-                ])
+            clauses.extend(
+                [
+                    SourcePackagePublishingHistory.archive == self.archive,
+                    SourcePackagePublishingHistory.dateremoved == None,
+                    SourcePackagePublishingHistory.sourcepackagerelease
+                    == SourcePackageReleaseFile.sourcepackagereleaseID,
+                    SourcePackageReleaseFile.libraryfile
+                    == LibraryFileAlias.id,
+                ]
+            )
         elif IBinaryPackagePublishingHistory.implementedBy(publication_class):
-            clauses.extend([
-                BinaryPackagePublishingHistory.archive == self.archive,
-                BinaryPackagePublishingHistory.dateremoved == None,
-                BinaryPackagePublishingHistory.binarypackagerelease ==
-                    BinaryPackageFile.binarypackagereleaseID,
-                BinaryPackageFile.libraryfile == LibraryFileAlias.id,
-                ])
+            clauses.extend(
+                [
+                    BinaryPackagePublishingHistory.archive == self.archive,
+                    BinaryPackagePublishingHistory.dateremoved == None,
+                    BinaryPackagePublishingHistory.binarypackagerelease
+                    == BinaryPackageFile.binarypackagereleaseID,
+                    BinaryPackageFile.libraryfile == LibraryFileAlias.id,
+                ]
+            )
         else:
             raise AssertionError("%r is not supported." % publication_class)
 
-        clauses.extend([
-            LibraryFileAlias.content == LibraryFileContent.id,
-            LibraryFileAlias.filename == filename,
-            LibraryFileContent.md5 == file_md5,
-            ])
+        clauses.extend(
+            [
+                LibraryFileAlias.content == LibraryFileContent.id,
+                LibraryFileAlias.filename == filename,
+                LibraryFileContent.md5 == file_md5,
+            ]
+        )
 
         all_publications = IStore(publication_class).find(
-            publication_class, *clauses)
+            publication_class, *clauses
+        )
 
-        right_now = datetime.datetime.now(pytz.timezone('UTC'))
+        right_now = datetime.datetime.now(pytz.timezone("UTC"))
         for pub in all_publications:
             # Deny removal if any reference is still active.
             if pub.status not in inactive_publishing_status:
@@ -201,8 +222,9 @@ class DeathRow:
 
         return True
 
-    def _tryRemovingFromDisk(self, condemned_source_files,
-                             condemned_binary_files):
+    def _tryRemovingFromDisk(
+        self, condemned_source_files, condemned_binary_files
+    ):
         """Take the list of publishing records provided and unpublish them.
 
         You should only pass in entries you want to be unpublished because
@@ -235,7 +257,7 @@ class DeathRow:
                     pub_record.pool_name,
                     pub_record.pool_version,
                     pub_file,
-                    )
+                )
                 file_path = str(self.diskpool.pathFor(*pub_file_details))
 
                 # Check if the LibraryFileAlias in question was already
@@ -266,14 +288,17 @@ class DeathRow:
             checkPubRecord(pub_record, BinaryPackagePublishingHistory)
 
         self.logger.info(
-            "Removing %s files marked for reaping" % len(condemned_files))
+            "Removing %s files marked for reaping" % len(condemned_files)
+        )
 
         for condemned_file in sorted(condemned_files, reverse=True):
-            component_name, pool_name, pool_version, pub_file = (
-                details[condemned_file])
+            component_name, pool_name, pool_version, pub_file = details[
+                condemned_file
+            ]
             try:
                 bytes += self._removeFile(
-                    component_name, pool_name, pool_version, pub_file)
+                    component_name, pool_name, pool_version, pub_file
+                )
             except NotInPool as info:
                 # It's safe for us to let this slide because it means that
                 # the file is already gone.
@@ -292,7 +317,9 @@ class DeathRow:
     def _markPublicationRemoved(self, condemned_records):
         # Now that the os.remove() calls have been made, simply let every
         # now out-of-date record be marked as removed.
-        self.logger.debug("Marking %s condemned packages as removed." %
-                          len(condemned_records))
+        self.logger.debug(
+            "Marking %s condemned packages as removed."
+            % len(condemned_records)
+        )
         for record in condemned_records:
             record.dateremoved = UTC_NOW
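
A toy illustration of the dry-run hook swap seen in DeathRow.reap() above: when dry_run is set, the removal callable is replaced with one that only logs and measures what would have been reclaimed. The paths, logger name and helper below are hypothetical; this is a sketch of the pattern, not the production code path.

    import logging
    import os

    log = logging.getLogger("toy-deathrow")

    def remove_file(path: str) -> int:
        # Real removal: return the number of bytes freed.
        size = os.lstat(path).st_size
        os.remove(path)
        return size

    def reap(paths, dry_run: bool = False) -> int:
        remover = remove_file
        if dry_run:
            # Swap in a mock that reports instead of deleting.
            def remover(path: str) -> int:
                log.debug("(Not really!) removing %s", path)
                return os.lstat(path).st_size
        return sum(remover(p) for p in paths)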
diff --git a/lib/lp/archivepublisher/debian_installer.py b/lib/lp/archivepublisher/debian_installer.py
index 73d84e1..49ea92f 100644
--- a/lib/lp/archivepublisher/debian_installer.py
+++ b/lib/lp/archivepublisher/debian_installer.py
@@ -7,8 +7,8 @@
 # Daniel Silverstone who should be the first point of contact for it.
 
 __all__ = [
-    'DebianInstallerUpload',
-    ]
+    "DebianInstallerUpload",
+]
 
 import os
 import shutil
@@ -18,7 +18,7 @@ from lp.archivepublisher.customupload import CustomUpload
 
 
 class DebianInstallerUpload(CustomUpload):
-    """ Debian Installer custom upload.
+    """Debian Installer custom upload.
 
     The debian-installer filename must be of the form:
 
@@ -36,6 +36,7 @@ class DebianInstallerUpload(CustomUpload):
 
     A 'current' symbolic link points to the most recent version.
     """
+
     custom_type = "installer"
 
     @staticmethod
@@ -53,8 +54,12 @@ class DebianInstallerUpload(CustomUpload):
         self.setComponents(tarfile_path)
         pubconf = getPubConfig(archive)
         self.targetdir = os.path.join(
-            pubconf.archiveroot, 'dists', suite, 'main',
-            'installer-%s' % self.arch)
+            pubconf.archiveroot,
+            "dists",
+            suite,
+            "main",
+            "installer-%s" % self.arch,
+        )
 
     @classmethod
     def getSeriesKey(cls, tarfile_path):
@@ -67,14 +72,16 @@ class DebianInstallerUpload(CustomUpload):
         CustomUpload.extract(self)
         # We now have a valid unpacked installer directory, but it's one level
         # deeper than it should be. Move it up and remove the debris.
-        unpack_dir = 'installer-%s' % self.arch
-        os.rename(os.path.join(self.tmpdir, unpack_dir, self.version),
-                  os.path.join(self.tmpdir, self.version))
+        unpack_dir = "installer-%s" % self.arch
+        os.rename(
+            os.path.join(self.tmpdir, unpack_dir, self.version),
+            os.path.join(self.tmpdir, self.version),
+        )
         shutil.rmtree(os.path.join(self.tmpdir, unpack_dir))
 
     def shouldInstall(self, filename):
-        return filename.startswith('%s/' % self.version)
+        return filename.startswith("%s/" % self.version)
 
     def shouldSign(self, filename):
         """Sign checksums files."""
-        return filename.endswith('SUMS')
+        return filename.endswith("SUMS")
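
A minimal sketch of the post-extract shuffle performed in DebianInstallerUpload.extract() above: the tarball unpacks to installer-<arch>/<version>/, and the code moves <version>/ up one level and removes the now-empty wrapper directory. The tmpdir, arch and version values are placeholders.

    import os
    import shutil

    def flatten_installer(tmpdir: str, arch: str, version: str) -> None:
        unpack_dir = "installer-%s" % arch
        # Move <tmpdir>/installer-<arch>/<version> to <tmpdir>/<version>.
        os.rename(
            os.path.join(tmpdir, unpack_dir, version),
            os.path.join(tmpdir, version),
        )
        # Remove the emptied installer-<arch> wrapper.
        shutil.rmtree(os.path.join(tmpdir, unpack_dir))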
diff --git a/lib/lp/archivepublisher/debversion.py b/lib/lp/archivepublisher/debversion.py
index 94fe8c4..7fd1ac5 100644
--- a/lib/lp/archivepublisher/debversion.py
+++ b/lib/lp/archivepublisher/debversion.py
@@ -14,11 +14,10 @@ import re
 
 from debian import changelog
 
-
 # Regular expressions make validating things easy
-valid_epoch = re.compile(r'^[0-9]+$')
-valid_upstream = re.compile(r'^[0-9][A-Za-z0-9+:.~-]*$')
-valid_revision = re.compile(r'^[A-Za-z0-9+.~]+$')
+valid_epoch = re.compile(r"^[0-9]+$")
+valid_upstream = re.compile(r"^[0-9][A-Za-z0-9+:.~-]*$")
+valid_revision = re.compile(r"^[A-Za-z0-9+.~]+$")
 
 VersionError = changelog.VersionError
 
@@ -83,4 +82,5 @@ class Version(changelog.Version):
             raise BadUpstreamError("Upstream version cannot be empty")
         if not valid_upstream.search(self.upstream_version):
             raise BadUpstreamError(
-                "Bad upstream version format %s" % self.upstream_version)
+                "Bad upstream version format %s" % self.upstream_version
+            )
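
The validation regexes above can be exercised on their own; a quick sketch of how an upstream version string is checked (the example strings are hypothetical).

    import re

    valid_upstream = re.compile(r"^[0-9][A-Za-z0-9+:.~-]*$")

    for candidate in ("1.2.3", "2.0~beta1", "not-a-version"):
        # Upstream versions must start with a digit and use the allowed set.
        ok = bool(valid_upstream.search(candidate))
        print(candidate, "valid" if ok else "invalid")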
diff --git a/lib/lp/archivepublisher/diskpool.py b/lib/lp/archivepublisher/diskpool.py
index 84b7874..dfa3e3e 100644
--- a/lib/lp/archivepublisher/diskpool.py
+++ b/lib/lp/archivepublisher/diskpool.py
@@ -2,28 +2,21 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DiskPool',
-    'DiskPoolEntry',
-    'FileAddActionEnum',
-    'poolify',
-    'unpoolify',
-    ]
+    "DiskPool",
+    "DiskPoolEntry",
+    "FileAddActionEnum",
+    "poolify",
+    "unpoolify",
+]
 
 import logging
 import os
-from pathlib import Path
 import tempfile
-from typing import (
-    Optional,
-    Tuple,
-    Union,
-    )
+from pathlib import Path
+from typing import Optional, Tuple, Union
 
 from lp.archivepublisher import HARDCODED_COMPONENT_ORDER
-from lp.services.librarian.utils import (
-    copy_and_close,
-    sha1_from_path,
-    )
+from lp.services.librarian.utils import copy_and_close, sha1_from_path
 from lp.services.propertycache import cachedproperty
 from lp.soyuz.interfaces.archive import IArchive
 from lp.soyuz.interfaces.files import IPackageReleaseFile
@@ -31,7 +24,7 @@ from lp.soyuz.interfaces.publishing import (
     MissingSymlinkInPool,
     NotInPool,
     PoolFileOverwriteError,
-    )
+)
 
 
 def poolify(source: str, component: Optional[str] = None) -> Path:
@@ -64,8 +57,9 @@ def relative_symlink(src_path: Path, dst_path: Path) -> None:
     dst_path = Path(os.path.normpath(str(dst_path)))
     common_prefix = Path(os.path.commonpath([str(src_path), str(dst_path)]))
     backward_elems = [os.path.pardir] * (
-        len(dst_path.parts) - len(common_prefix.parts) - 1)
-    forward_elems = src_path.parts[len(common_prefix.parts):]
+        len(dst_path.parts) - len(common_prefix.parts) - 1
+    )
+    forward_elems = src_path.parts[len(common_prefix.parts) :]
     src_path = Path(*backward_elems, *forward_elems)
     dst_path.symlink_to(src_path)
 
@@ -77,6 +71,7 @@ class FileAddActionEnum:
     SYMLINK_ADDED: we created a symlink to another copy of the same file
     NONE: no action was necessary or taken.
     """
+
     FILE_ADDED = "file_added"
     SYMLINK_ADDED = "symlink_added"
     NONE = "none"
@@ -98,8 +93,12 @@ class _diskpool_atomicfile:
     the filename is present in the pool, it is definitely complete.
     """
 
-    def __init__(self, targetfilename: Path, mode: str,
-                 rootpath: Union[str, Path] = "/tmp") -> None:
+    def __init__(
+        self,
+        targetfilename: Path,
+        mode: str,
+        rootpath: Union[str, Path] = "/tmp",
+    ) -> None:
         # atomicfile implements the file object interface, but it is only
         # really used (or useful) for writing binary files, which is why we
         # keep the mode constructor argument but assert it's sane below.
@@ -138,10 +137,17 @@ class DiskPoolEntry:
     Remaining files in the 'temppath' indicated installation failures and
     require manual removal after further investigation.
     """
-    def __init__(self, archive: IArchive, rootpath: Path, temppath: Path,
-                 source_name: str, source_version: str,
-                 pub_file: IPackageReleaseFile,
-                 logger: logging.Logger) -> None:
+
+    def __init__(
+        self,
+        archive: IArchive,
+        rootpath: Path,
+        temppath: Path,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+        logger: logging.Logger,
+    ) -> None:
         self.archive = archive
         self.rootpath = rootpath
         self.temppath = temppath
@@ -169,11 +175,14 @@ class DiskPoolEntry:
     def pathFor(self, component: str) -> Path:
         """Return the path for this file in the given component."""
         return (
-            self.rootpath / poolify(self.source_name, component) /
-            self.pub_file.libraryfile.filename)
-
-    def preferredComponent(self, add: Optional[str] = None,
-                           remove: Optional[str] = None) -> Optional[str]:
+            self.rootpath
+            / poolify(self.source_name, component)
+            / self.pub_file.libraryfile.filename
+        )
+
+    def preferredComponent(
+        self, add: Optional[str] = None, remove: Optional[str] = None
+    ) -> Optional[str]:
         """Return the appropriate component for the real file.
 
         If add is passed, add it to the list before calculating.
@@ -212,19 +221,21 @@ class DiskPoolEntry:
             # There's something on disk. Check hash.
             sha1 = lfa.content.sha1
             if sha1 != self.file_hash:
-                raise PoolFileOverwriteError('%s != %s for %s' %
-                    (sha1, self.file_hash,
-                     self.pathFor(self.file_component)))
-
-            if (component == self.file_component
-                or component in self.symlink_components):
+                raise PoolFileOverwriteError(
+                    "%s != %s for %s"
+                    % (sha1, self.file_hash, self.pathFor(self.file_component))
+                )
+
+            if (
+                component == self.file_component
+                or component in self.symlink_components
+            ):
                 # The file is already here
                 return FileAddActionEnum.NONE
             else:
                 # The file is present in a different component,
                 # make a symlink.
-                relative_symlink(
-                    self.pathFor(self.file_component), targetpath)
+                relative_symlink(self.pathFor(self.file_component), targetpath)
                 self.symlink_components.add(component)
                 # Then fix to ensure the right component is linked.
                 self._sanitiseLinks()
@@ -234,11 +245,14 @@ class DiskPoolEntry:
         # If we get to here, we want to write the file.
         assert not targetpath.exists()
 
-        self.debug("Making new file in %s for %s/%s" %
-                   (component, self.source_name, lfa.filename))
+        self.debug(
+            "Making new file in %s for %s/%s"
+            % (component, self.source_name, lfa.filename)
+        )
 
         file_to_write = _diskpool_atomicfile(
-            targetpath, "wb", rootpath=self.temppath)
+            targetpath, "wb", rootpath=self.temppath
+        )
         lfa.open()
         copy_and_close(lfa, file_to_write)
         self.file_component = component
@@ -258,14 +272,17 @@ class DiskPoolEntry:
         filename = self.pub_file.libraryfile.filename
         if not self.file_component:
             raise NotInPool(
-                "File for removing %s %s/%s is not in pool, skipping." %
-                (component, self.source_name, filename))
+                "File for removing %s %s/%s is not in pool, skipping."
+                % (component, self.source_name, filename)
+            )
 
         # Okay, it's there, if it's a symlink then we need to remove
         # it simply.
         if component in self.symlink_components:
-            self.debug("Removing %s %s/%s as it is a symlink"
-                       % (component, self.source_name, filename))
+            self.debug(
+                "Removing %s %s/%s as it is a symlink"
+                % (component, self.source_name, filename)
+            )
             # ensure we are removing a symbolic link and
             # it is published in one or more components
             link_path = self.pathFor(component)
@@ -274,14 +291,17 @@ class DiskPoolEntry:
 
         if component != self.file_component:
             raise MissingSymlinkInPool(
-                "Symlink for %s/%s in %s is missing, skipping." %
-                (self.source_name, filename, component))
+                "Symlink for %s/%s in %s is missing, skipping."
+                % (self.source_name, filename, component)
+            )
 
         # It's not a symlink, this means we need to check whether we
         # have symlinks or not.
         if len(self.symlink_components) == 0:
-            self.debug("Removing %s/%s from %s" %
-                       (self.source_name, filename, component))
+            self.debug(
+                "Removing %s/%s from %s"
+                % (self.source_name, filename, component)
+            )
         else:
             # The target for removal is the real file, and there are symlinks
             # pointing to it. In order to avoid breakage, we need to first
@@ -323,11 +343,14 @@ class DiskPoolEntry:
         filename = self.pub_file.libraryfile.filename
         if targetcomponent not in self.symlink_components:
             raise ValueError(
-                "Target component '%s' is not a symlink for %s" %
-                             (targetcomponent, filename))
+                "Target component '%s' is not a symlink for %s"
+                % (targetcomponent, filename)
+            )
 
-        self.debug("Shuffling symlinks so primary for %s is in %s" %
-                   (filename, targetcomponent))
+        self.debug(
+            "Shuffling symlinks so primary for %s is in %s"
+            % (filename, targetcomponent)
+        )
 
         # Okay, so first up, we unlink the targetcomponent symlink.
         targetpath = self.pathFor(targetcomponent)
@@ -395,26 +418,43 @@ class DiskPool:
     'rootpath' and 'temppath' must be in the same filesystem, see
     DiskPoolEntry for further information.
     """
+
     results = FileAddActionEnum
 
-    def __init__(self, archive: IArchive, rootpath, temppath,
-                 logger: logging.Logger) -> None:
+    def __init__(
+        self, archive: IArchive, rootpath, temppath, logger: logging.Logger
+    ) -> None:
         self.archive = archive
         self.rootpath = Path(rootpath)
         self.temppath = Path(temppath) if temppath is not None else None
         self.entries = {}
         self.logger = logger
 
-    def _getEntry(self, source_name: str, source_version: str,
-                  pub_file: IPackageReleaseFile) -> DiskPoolEntry:
+    def _getEntry(
+        self,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ) -> DiskPoolEntry:
         """Return a new DiskPoolEntry for the given source and file."""
         return DiskPoolEntry(
-            self.archive, self.rootpath, self.temppath, source_name,
-            source_version, pub_file, self.logger)
-
-    def pathFor(self, comp: str, source_name: str, source_version: str,
-                pub_file: Optional[IPackageReleaseFile] = None,
-                file: Optional[str] = None) -> Path:
+            self.archive,
+            self.rootpath,
+            self.temppath,
+            source_name,
+            source_version,
+            pub_file,
+            self.logger,
+        )
+
+    def pathFor(
+        self,
+        comp: str,
+        source_name: str,
+        source_version: str,
+        pub_file: Optional[IPackageReleaseFile] = None,
+        file: Optional[str] = None,
+    ) -> Path:
         """Return the path for the given pool file."""
         if file is None:
             file = pub_file.libraryfile.filename
@@ -422,8 +462,13 @@ class DiskPool:
             raise AssertionError("Must pass either pub_file or file")
         return self.rootpath / poolify(source_name, comp) / file
 
-    def addFile(self, component: str, source_name: str, source_version: str,
-                pub_file: IPackageReleaseFile):
+    def addFile(
+        self,
+        component: str,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ):
         """Add a file with the given contents to the pool.
 
         `component`, `source_name`, `source_version`, and `pub_file` are
@@ -456,8 +501,13 @@ class DiskPool:
         entry = self._getEntry(source_name, source_version, pub_file)
         return entry.addFile(component)
 
-    def removeFile(self, component: str, source_name: str, source_version: str,
-                   pub_file: IPackageReleaseFile) -> int:
+    def removeFile(
+        self,
+        component: str,
+        source_name: str,
+        source_version: str,
+        pub_file: IPackageReleaseFile,
+    ) -> int:
         """Remove the specified file from the pool.
 
         There are three possible outcomes:
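
To close out the diskpool.py portion: a usage sketch for the relative_symlink() helper reformatted earlier in this file. It computes a symlink target relative to the link's directory rather than an absolute path; the pool paths below are hypothetical.

    import os
    from pathlib import Path

    src = Path("/srv/pool/main/h/hello/hello_1.0.deb")
    dst = Path("/srv/pool/universe/h/hello/hello_1.0.deb")
    common = Path(os.path.commonpath([str(src), str(dst)]))
    # Steps up from the link's directory to the common prefix...
    up = [os.path.pardir] * (len(dst.parts) - len(common.parts) - 1)
    # ...then back down to the real file.
    print(Path(*up, *src.parts[len(common.parts):]))
    # -> ../../../main/h/hello/hello_1.0.deb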
diff --git a/lib/lp/archivepublisher/dist_upgrader.py b/lib/lp/archivepublisher/dist_upgrader.py
index 7564565..959fc9c 100644
--- a/lib/lp/archivepublisher/dist_upgrader.py
+++ b/lib/lp/archivepublisher/dist_upgrader.py
@@ -4,17 +4,15 @@
 """The processing of dist-upgrader tarballs."""
 
 __all__ = [
-    'DistUpgraderUpload',
-    ]
+    "DistUpgraderUpload",
+]
 
 import os
 
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.customupload import CustomUpload
-from lp.archivepublisher.debversion import (
-    BadUpstreamError,
-    Version as make_version,
-    )
+from lp.archivepublisher.debversion import BadUpstreamError
+from lp.archivepublisher.debversion import Version as make_version
 from lp.soyuz.interfaces.queue import CustomUploadError
 
 
@@ -51,6 +49,7 @@ class DistUpgraderUpload(CustomUpload):
 
     A 'current' symbolic link points to the most recent version.
     """
+
     custom_type = "dist-upgrader"
 
     @staticmethod
@@ -68,8 +67,12 @@ class DistUpgraderUpload(CustomUpload):
         self.setComponents(tarfile_path)
         pubconf = getPubConfig(archive)
         self.targetdir = os.path.join(
-            pubconf.archiveroot, 'dists', suite, 'main',
-            'dist-upgrader-%s' % self.arch)
+            pubconf.archiveroot,
+            "dists",
+            suite,
+            "main",
+            "dist-upgrader-%s" % self.arch,
+        )
 
     @classmethod
     def getSeriesKey(cls, tarfile_path):
@@ -97,8 +100,8 @@ class DistUpgraderUpload(CustomUpload):
             version = make_version(directory_name)
         except BadUpstreamError as exc:
             raise DistUpgraderBadVersion(self.tarfile_path, exc)
-        return version and not filename.startswith('current')
+        return version and not filename.startswith("current")
 
     def shouldSign(self, filename):
         """Sign *.tar.gz files."""
-        return filename.endswith('.tar.gz')
+        return filename.endswith(".tar.gz")
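
A rough sketch of the install filter above: files under a directory that parses as a Debian version are installed, while anything under the separately maintained "current" symlink is skipped. The digit check below is a crude stand-in for make_version(), and the filenames are hypothetical.

    def should_install(filename: str) -> bool:
        directory_name = filename.split("/")[0]
        if directory_name == "current":
            return False
        # Stand-in for make_version(): real code parses a Debian version
        # and raises DistUpgraderBadVersion on failure.
        return directory_name[:1].isdigit()

    print(should_install("20220501.2/ReleaseAnnouncement"))  # True
    print(should_install("current/ReleaseAnnouncement"))     # False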
diff --git a/lib/lp/archivepublisher/domination.py b/lib/lp/archivepublisher/domination.py
index 65eb290..7ee9b5e 100644
--- a/lib/lp/archivepublisher/domination.py
+++ b/lib/lp/archivepublisher/domination.py
@@ -48,25 +48,16 @@ it is performed for each suite using:
 
 """
 
-__all__ = ['Dominator']
+__all__ = ["Dominator"]
 
 from collections import defaultdict
 from datetime import timedelta
 from functools import cmp_to_key
 from itertools import filterfalse
-from operator import (
-    attrgetter,
-    itemgetter,
-    )
+from operator import attrgetter, itemgetter
 
 import apt_pkg
-from storm.expr import (
-    And,
-    Count,
-    Desc,
-    Not,
-    Select,
-    )
+from storm.expr import And, Count, Desc, Not, Select
 from zope.component import getUtility
 
 from lp.registry.model.sourcepackagename import SourcePackageName
@@ -77,18 +68,15 @@ from lp.services.database.interfaces import IStore
 from lp.services.database.sqlbase import (
     block_implicit_flushes,
     flush_database_updates,
-    )
+)
 from lp.services.database.stormexpr import IsDistinctFrom
 from lp.services.orderingcheck import OrderingCheck
 from lp.soyuz.adapters.packagelocation import PackageLocation
-from lp.soyuz.enums import (
-    BinaryPackageFormat,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import BinaryPackageFormat, PackagePublishingStatus
 from lp.soyuz.interfaces.publishing import (
-    inactive_publishing_status,
     IPublishingSet,
-    )
+    inactive_publishing_status,
+)
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.binarypackagename import BinaryPackageName
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
@@ -96,10 +84,9 @@ from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
-
 # Days before a package will be removed from disk.
 STAY_OF_EXECUTION = 1
 
@@ -117,8 +104,7 @@ def join_spph_spn():
 
 
 def join_spph_spr():
-    """Join condition: SourcePackageRelease/SourcePackagePublishingHistory.
-    """
+    """Join condition: SourcePackageRelease/SourcePackagePublishingHistory."""
     SPPH = SourcePackagePublishingHistory
     SPR = SourcePackageRelease
 
@@ -131,8 +117,9 @@ class SourcePublicationTraits:
     Used by `GeneralizedPublication` to hide the differences from
     `BinaryPackagePublishingHistory`.
     """
+
     release_class = SourcePackageRelease
-    release_reference_name = 'sourcepackagereleaseID'
+    release_reference_name = "sourcepackagereleaseID"
 
     @staticmethod
     def getPackageName(spph):
@@ -151,8 +138,9 @@ class BinaryPublicationTraits:
     Used by `GeneralizedPublication` to hide the differences from
     `SourcePackagePublishingHistory`.
     """
+
     release_class = BinaryPackageRelease
-    release_reference_name = 'binarypackagereleaseID'
+    release_reference_name = "binarypackagereleaseID"
 
     @staticmethod
     def getPackageName(bpph):
@@ -173,6 +161,7 @@ class GeneralizedPublication:
     without caring which.  Differences are abstracted away in a traits
     class.
     """
+
     def __init__(self, is_source=True):
         self.is_source = is_source
         if is_source:
@@ -195,13 +184,14 @@ class GeneralizedPublication:
         break the tie.
         """
         version_comparison = apt_pkg.version_compare(
-            self.getPackageVersion(pub1), self.getPackageVersion(pub2))
+            self.getPackageVersion(pub1), self.getPackageVersion(pub2)
+        )
 
         if version_comparison == 0:
             # Use dates as tie breaker (idiom equivalent to Python 2's cmp).
-            return (
-                (pub1.datecreated > pub2.datecreated) -
-                (pub1.datecreated < pub2.datecreated))
+            return (pub1.datecreated > pub2.datecreated) - (
+                pub1.datecreated < pub2.datecreated
+            )
         else:
             return version_comparison
 
@@ -218,7 +208,7 @@ def make_package_location(pub):
         distroseries=pub.distroseries,
         pocket=pub.pocket,
         channel=pub.channel,
-        )
+    )
 
 
 def find_live_source_versions(sorted_pubs):
@@ -260,8 +250,9 @@ def find_live_binary_versions_pass_1(sorted_pubs):
     sorted_pubs = list(sorted_pubs)
     latest = sorted_pubs.pop(0)
     return get_binary_versions(
-        [latest] + [
-            pub for pub in sorted_pubs if not pub.architecture_specific])
+        [latest]
+        + [pub for pub in sorted_pubs if not pub.architecture_specific]
+    )
 
 
 class ArchSpecificPublicationsCache:
@@ -278,6 +269,7 @@ class ArchSpecificPublicationsCache:
     (source package release, archive, distroseries, pocket).  Hence this
     cache.
     """
+
     def __init__(self):
         self.cache = {}
 
@@ -289,15 +281,16 @@ class ArchSpecificPublicationsCache:
             bpph.archive,
             bpph.distroseries,
             bpph.pocket,
-            )
+        )
 
     def hasArchSpecificPublications(self, bpph):
         """Does bpph have active, arch-specific publications?
 
         If so, the dominator will want to reprieve `bpph`.
         """
-        assert not bpph.architecture_specific, (
-            "Wrongly dominating arch-specific binary pub in pass 2.")
+        assert (
+            not bpph.architecture_specific
+        ), "Wrongly dominating arch-specific binary pub in pass 2."
 
         key = self.getKey(bpph)
         if key not in self.cache:
@@ -308,7 +301,8 @@ class ArchSpecificPublicationsCache:
     def _lookUp(spr, archive, distroseries, pocket):
         """Look up an answer in the database."""
         query = getUtility(IPublishingSet).getActiveArchSpecificPublications(
-            spr, archive, distroseries, pocket)
+            spr, archive, distroseries, pocket
+        )
         return not query.is_empty()
 
 
@@ -344,14 +338,16 @@ def find_live_binary_versions_pass_2(sorted_pubs, cache):
     """
     sorted_pubs = list(sorted_pubs)
     latest = sorted_pubs.pop(0)
-    is_arch_specific = attrgetter('architecture_specific')
+    is_arch_specific = attrgetter("architecture_specific")
     arch_specific_pubs = list(filter(is_arch_specific, sorted_pubs))
     arch_indep_pubs = list(filterfalse(is_arch_specific, sorted_pubs))
 
     bpbs = load_related(
         BinaryPackageBuild,
-        [pub.binarypackagerelease for pub in arch_indep_pubs], ['buildID'])
-    load_related(SourcePackageRelease, bpbs, ['source_package_release_id'])
+        [pub.binarypackagerelease for pub in arch_indep_pubs],
+        ["buildID"],
+    )
+    load_related(SourcePackageRelease, bpbs, ["source_package_release_id"])
 
     # XXX cjwatson 2022-05-01: Skip the architecture-specific check for
     # publications from CI builds for now, until we figure out how to
@@ -364,8 +360,9 @@ def find_live_binary_versions_pass_2(sorted_pubs, cache):
     reprieved_pubs = [
         pub
         for pub in arch_indep_pubs
-            if pub.binarypackagerelease.ci_build_id is None and
-               cache.hasArchSpecificPublications(pub)]
+        if pub.binarypackagerelease.ci_build_id is None
+        and cache.hasArchSpecificPublications(pub)
+    ]
 
     return get_binary_versions([latest] + arch_specific_pubs + reprieved_pubs)
 
@@ -391,8 +388,9 @@ class Dominator:
         self.logger = logger
         self.archive = archive
 
-    def planPackageDomination(self, sorted_pubs, live_versions,
-                              generalization):
+    def planPackageDomination(
+        self, sorted_pubs, live_versions, generalization
+    ):
         """Plan domination of publications for a single package.
 
         The latest publication for any version in `live_versions` stays
@@ -436,11 +434,14 @@ class Dominator:
 
         self.logger.debug(
             "Package has %d live publication(s).  Live versions: %s",
-            len(sorted_pubs), live_versions)
+            len(sorted_pubs),
+            live_versions,
+        )
 
         # Verify that the publications are really sorted properly.
         check_order = OrderingCheck(
-            key=cmp_to_key(generalization.compare), reverse=True)
+            key=cmp_to_key(generalization.compare), reverse=True
+        )
 
         current_dominant = None
         dominant_version = None
@@ -460,7 +461,8 @@ class Dominator:
                 # Supersede it.
                 supersede.append((pub, current_dominant))
                 self.logger.debug2(
-                    "Superseding older publication for version %s.", version)
+                    "Superseding older publication for version %s.", version
+                )
             elif version in live_versions:
                 # This publication stays active; if any publications
                 # that follow right after this are to be superseded,
@@ -521,8 +523,9 @@ class Dominator:
         # the items so that we can be sure that we're not altering the
         # iteration order while iteration is underway.
         for (name, location), pubs in list(pubs_by_name_and_location.items()):
-            pubs_by_name_and_location[(name, location)] = (
-                generalization.sortPublications(pubs))
+            pubs_by_name_and_location[
+                (name, location)
+            ] = generalization.sortPublications(pubs)
 
         return pubs_by_name_and_location
 
@@ -535,8 +538,9 @@ class Dominator:
         if pub_record.status == PackagePublishingStatus.DELETED:
             pub_record.scheduleddeletiondate = UTC_NOW
         else:
-            pub_record.scheduleddeletiondate = (
-                UTC_NOW + timedelta(days=STAY_OF_EXECUTION))
+            pub_record.scheduleddeletiondate = UTC_NOW + timedelta(
+                days=STAY_OF_EXECUTION
+            )
 
     def _judgeSuperseded(self, source_records, binary_records):
         """Determine whether the superseded packages supplied should
@@ -560,8 +564,10 @@ class Dominator:
             binpkg_release = pub_record.binarypackagerelease
             self.logger.debug(
                 "%s/%s (%s) has been judged eligible for removal",
-                binpkg_release.binarypackagename.name, binpkg_release.version,
-                pub_record.distroarchseries.architecturetag)
+                binpkg_release.binarypackagename.name,
+                binpkg_release.version,
+                pub_record.distroarchseries.architecturetag,
+            )
             self._setScheduledDeletionDate(pub_record)
             # XXX cprov 20070820: 'datemadepending' is useless, since it's
             # always equals to "scheduleddeletiondate - quarantine".
@@ -574,20 +580,24 @@ class Dominator:
             # SourcePackageRelease which are/have been in this
             # distroseries...
             considered_binaries = IStore(BinaryPackagePublishingHistory).find(
-                BinaryPackagePublishingHistory.distroarchseries ==
-                    DistroArchSeries.id,
+                BinaryPackagePublishingHistory.distroarchseries
+                == DistroArchSeries.id,
                 BinaryPackagePublishingHistory.scheduleddeletiondate == None,
                 BinaryPackagePublishingHistory.dateremoved == None,
                 BinaryPackagePublishingHistory.archive == self.archive,
                 BinaryPackageBuild.source_package_release == srcpkg_release,
                 DistroArchSeries.distroseries == pub_record.distroseries,
-                BinaryPackagePublishingHistory.binarypackagerelease ==
-                    BinaryPackageRelease.id,
+                BinaryPackagePublishingHistory.binarypackagerelease
+                == BinaryPackageRelease.id,
                 BinaryPackageRelease.build == BinaryPackageBuild.id,
                 BinaryPackagePublishingHistory.pocket == pub_record.pocket,
-                Not(IsDistinctFrom(
-                    BinaryPackagePublishingHistory._channel,
-                    pub_record._channel)))
+                Not(
+                    IsDistinctFrom(
+                        BinaryPackagePublishingHistory._channel,
+                        pub_record._channel,
+                    )
+                ),
+            )
 
             # There is at least one non-removed binary to consider
             if not considered_binaries.is_empty():
@@ -601,7 +611,8 @@ class Dominator:
                     channel=pub_record.channel,
                     status=PackagePublishingStatus.PUBLISHED,
                     archive=self.archive,
-                    sourcepackagerelease=srcpkg_release)
+                    sourcepackagerelease=srcpkg_release,
+                )
                 # Zero PUBLISHED for this spr, so nothing to take over
                 # for us, so leave it for consideration next time.
                 if published.is_empty():
@@ -610,8 +621,10 @@ class Dominator:
             # Okay, so there's no unremoved binaries, let's go for it...
             self.logger.debug(
                 "%s/%s (%s) source has been judged eligible for removal",
-                srcpkg_release.sourcepackagename.name, srcpkg_release.version,
-                pub_record.id)
+                srcpkg_release.sourcepackagename.name,
+                srcpkg_release.version,
+                pub_record.id,
+            )
             self._setScheduledDeletionDate(pub_record)
             # XXX cprov 20070820: 'datemadepending' is pointless, since it's
             # always equals to "scheduleddeletiondate - quarantine".
@@ -636,16 +649,18 @@ class Dominator:
             BPPH.distroarchseries == distroarchseries,
             BPPH.archive == self.archive,
             BPPH.pocket == pocket,
-            ]
+        ]
         candidate_binary_names = Select(
-            BPPH.binarypackagenameID, And(*bpph_location_clauses),
+            BPPH.binarypackagenameID,
+            And(*bpph_location_clauses),
             group_by=(BPPH.binarypackagenameID, BPPH._channel),
-            having=(Count() > 1))
+            having=(Count() > 1),
+        )
         main_clauses = bpph_location_clauses + [
             BPR.id == BPPH.binarypackagereleaseID,
             BPR.binarypackagenameID.is_in(candidate_binary_names),
             BPR.binpackageformat != BinaryPackageFormat.DDEB,
-            ]
+        ]
 
         # We're going to access the BPRs as well.  Since we make the
         # database look them up anyway, and since there won't be many
@@ -654,7 +669,7 @@ class Dominator:
         # the join would complicate the query.
         query = IStore(BPPH).find((BPPH, BPR), *main_clauses)
         bpphs = list(DecoratedResultSet(query, itemgetter(0)))
-        load_related(BinaryPackageName, bpphs, ['binarypackagenameID'])
+        load_related(BinaryPackageName, bpphs, ["binarypackagenameID"])
         return bpphs
 
     def dominateBinaries(self, distroseries, pocket):
@@ -682,7 +697,8 @@ class Dominator:
 
         def plan(pubs, live_versions):
             cur_supersede, cur_keep, cur_delete = self.planPackageDomination(
-                pubs, live_versions, generalization)
+                pubs, live_versions, generalization
+            )
             supersede.extend(cur_supersede)
             keep.update(cur_keep)
             delete.extend(cur_delete)
@@ -712,8 +728,10 @@ class Dominator:
         for distroarchseries in distroseries.architectures:
             self.logger.info(
                 "Performing domination across %s/%s (%s)",
-                distroarchseries.distroseries.name, pocket.title,
-                distroarchseries.architecturetag)
+                distroarchseries.distroseries.name,
+                pocket.title,
+                distroarchseries.architecturetag,
+            )
 
             self.logger.info("Finding binaries...")
             bins = self.findBinariesForDomination(distroarchseries, pocket)
@@ -721,7 +739,8 @@ class Dominator:
             self.logger.info("Planning domination of binaries...")
             for (name, location), pubs in sorted_packages.items():
                 self.logger.debug(
-                    "Planning domination of %s in %s" % (name, location))
+                    "Planning domination of %s in %s" % (name, location)
+                )
                 assert len(pubs) > 0, "Dominating zero binaries!"
                 live_versions = find_live_binary_versions_pass_1(pubs)
                 plan(pubs, live_versions)
@@ -749,13 +768,16 @@ class Dominator:
             sorted_packages = self._sortPackages(bins, generalization)
             self.logger.info("Planning domination of binaries...(2nd pass)")
             for name, location in packages_w_arch_indep.intersection(
-                    sorted_packages):
+                sorted_packages
+            ):
                 pubs = sorted_packages[(name, location)]
                 self.logger.debug(
-                    "Planning domination of %s in %s" % (name, location))
+                    "Planning domination of %s in %s" % (name, location)
+                )
                 assert len(pubs) > 0, "Dominating zero binaries in 2nd pass!"
                 live_versions = find_live_binary_versions_pass_2(
-                    pubs, reprieve_cache)
+                    pubs, reprieve_cache
+                )
                 plan(pubs, live_versions)
 
         execute_plan()
@@ -769,7 +791,7 @@ class Dominator:
             SPPH.distroseries == distroseries,
             SPPH.archive == self.archive,
             SPPH.pocket == pocket,
-            )
+        )
 
     def findSourcesForDomination(self, distroseries, pocket):
         """Find binary publications that need dominating.
@@ -786,12 +808,14 @@ class Dominator:
         SPR = SourcePackageRelease
 
         spph_location_clauses = self._composeActiveSourcePubsCondition(
-            distroseries, pocket)
+            distroseries, pocket
+        )
         candidate_source_names = Select(
             SPPH.sourcepackagenameID,
             And(join_spph_spr(), spph_location_clauses),
             group_by=(SPPH.sourcepackagenameID, SPPH._channel),
-            having=(Count() > 1))
+            having=(Count() > 1),
+        )
 
         # We'll also access the SourcePackageReleases associated with
         # the publications we find.  Since they're in the join anyway,
@@ -803,9 +827,10 @@ class Dominator:
             (SPPH, SPR),
             join_spph_spr(),
             SPPH.sourcepackagenameID.is_in(candidate_source_names),
-            spph_location_clauses)
+            spph_location_clauses,
+        )
         spphs = DecoratedResultSet(query, itemgetter(0))
-        load_related(SourcePackageName, spphs, ['sourcepackagenameID'])
+        load_related(SourcePackageName, spphs, ["sourcepackagenameID"])
         return spphs
 
     def dominateSources(self, distroseries, pocket):
@@ -816,7 +841,9 @@ class Dominator:
         """
         self.logger.debug(
             "Performing domination across %s/%s (Source)",
-            distroseries.name, pocket.title)
+            distroseries.name,
+            pocket.title,
+        )
 
         generalization = GeneralizedPublication(is_source=True)
 
@@ -832,7 +859,8 @@ class Dominator:
             assert len(pubs) > 0, "Dominating zero sources!"
             live_versions = find_live_source_versions(pubs)
             cur_supersede, _, cur_delete = self.planPackageDomination(
-                pubs, live_versions, generalization)
+                pubs, live_versions, generalization
+            )
             supersede.extend(cur_supersede)
             delete.extend(cur_delete)
 
@@ -852,12 +880,13 @@ class Dominator:
         looking_for = (
             SourcePackageName.name,
             Count(SourcePackagePublishingHistory.id),
-            )
+        )
         result = IStore(SourcePackageName).find(
             looking_for,
             join_spph_spr(),
             join_spph_spn(),
-            self._composeActiveSourcePubsCondition(distroseries, pocket))
+            self._composeActiveSourcePubsCondition(distroseries, pocket),
+        )
         return result.group_by(SourcePackageName.name)
 
     def findPublishedSPPHs(self, distroseries, pocket, package_name):
@@ -870,14 +899,21 @@ class Dominator:
             join_spph_spr(),
             join_spph_spn(),
             SourcePackageName.name == package_name,
-            self._composeActiveSourcePubsCondition(distroseries, pocket))
+            self._composeActiveSourcePubsCondition(distroseries, pocket),
+        )
         # Sort by descending version (SPR.version has type debversion in
         # the database, so this should be a real proper comparison) so
         # that _sortPackage will have slightly less work to do later.
         return query.order_by(Desc(SPR.version), Desc(SPPH.datecreated))
 
-    def dominateSourceVersions(self, distroseries, pocket, package_name,
-                               live_versions, immutable_check=True):
+    def dominateSourceVersions(
+        self,
+        distroseries,
+        pocket,
+        package_name,
+        live_versions,
+        immutable_check=True,
+    ):
         """Dominate source publications based on a set of "live" versions.
 
         Active publications for the "live" versions will remain active.  All
@@ -897,7 +933,8 @@ class Dominator:
         pubs = self.findPublishedSPPHs(distroseries, pocket, package_name)
         pubs = generalization.sortPublications(pubs)
         supersede, _, delete = self.planPackageDomination(
-            pubs, live_versions, generalization)
+            pubs, live_versions, generalization
+        )
         for pub, dominant in supersede:
             pub.supersede(dominant, logger=self.logger)
             IStore(pub).flush()
@@ -913,21 +950,25 @@ class Dominator:
             SourcePackagePublishingHistory.archive == self.archive,
             SourcePackagePublishingHistory.pocket == pocket,
             SourcePackagePublishingHistory.status.is_in(
-                inactive_publishing_status),
+                inactive_publishing_status
+            ),
             SourcePackagePublishingHistory.scheduleddeletiondate == None,
-            SourcePackagePublishingHistory.dateremoved == None)
+            SourcePackagePublishingHistory.dateremoved == None,
+        )
 
         binaries = IStore(BinaryPackagePublishingHistory).find(
             BinaryPackagePublishingHistory,
-            BinaryPackagePublishingHistory.distroarchseries ==
-                DistroArchSeries.id,
+            BinaryPackagePublishingHistory.distroarchseries
+            == DistroArchSeries.id,
             DistroArchSeries.distroseries == distroseries,
             BinaryPackagePublishingHistory.archive == self.archive,
             BinaryPackagePublishingHistory.pocket == pocket,
             BinaryPackagePublishingHistory.status.is_in(
-                inactive_publishing_status),
+                inactive_publishing_status
+            ),
             BinaryPackagePublishingHistory.scheduleddeletiondate == None,
-            BinaryPackagePublishingHistory.dateremoved == None)
+            BinaryPackagePublishingHistory.dateremoved == None,
+        )
 
         self._judgeSuperseded(sources, binaries)
 
@@ -953,4 +994,5 @@ class Dominator:
         self.judge(distroseries, pocket)
 
         self.logger.debug(
-            "Domination for %s/%s finished", distroseries.name, pocket.title)
+            "Domination for %s/%s finished", distroseries.name, pocket.title
+        )
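
A sketch of the ordering used by GeneralizedPublication.compare above: apt_pkg.version_compare() decides, and creation dates break ties, with the comparator fed to sort() via cmp_to_key. The record shape and data here are hypothetical; apt_pkg.init() must be called before version_compare() can be used.

    from functools import cmp_to_key

    import apt_pkg

    apt_pkg.init()

    def compare(pub1, pub2):
        result = apt_pkg.version_compare(pub1["version"], pub2["version"])
        if result == 0:
            # Python 2 cmp() idiom for the date tie-break.
            return (pub1["created"] > pub2["created"]) - (
                pub1["created"] < pub2["created"]
            )
        return result

    pubs = [
        {"version": "1.0-1", "created": 1},
        {"version": "1.0-2", "created": 2},
        {"version": "1.0-1", "created": 3},
    ]
    # Newest-first ordering, as planPackageDomination() expects.
    pubs.sort(key=cmp_to_key(compare), reverse=True)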
diff --git a/lib/lp/archivepublisher/indices.py b/lib/lp/archivepublisher/indices.py
index a88f445..42b7364 100644
--- a/lib/lp/archivepublisher/indices.py
+++ b/lib/lp/archivepublisher/indices.py
@@ -2,16 +2,16 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'IndexStanzaFields',
-    'build_binary_stanza_fields',
-    'build_source_stanza_fields',
-    'build_translations_stanza_fields',
-    ]
+    "IndexStanzaFields",
+    "build_binary_stanza_fields",
+    "build_source_stanza_fields",
+    "build_translations_stanza_fields",
+]
 
-from collections import OrderedDict
 import hashlib
 import os.path
 import re
+from collections import OrderedDict
 
 from lp.soyuz.model.publishing import makePoolPath
 
@@ -34,8 +34,7 @@ class IndexStanzaFields:
         self.fields.append((name, value))
 
     def extend(self, entries):
-        """Extend the internal list with the key-value pairs in entries.
-        """
+        """Extend the internal list with the key-value pairs in entries."""
         for name, value in entries:
             self.append(name, value)
 
@@ -51,8 +50,8 @@ class IndexStanzaFields:
                 continue
 
             # do not add separation space for the special file list fields.
-            if name not in ('Files', 'Checksums-Sha1', 'Checksums-Sha256'):
-                value = ' %s' % value
+            if name not in ("Files", "Checksums-Sha1", "Checksums-Sha256"):
+                value = " %s" % value
 
             # XXX Michael Nelson 20090930 bug=436182. We have an issue
             # in the upload parser that has
@@ -72,13 +71,13 @@ class IndexStanzaFields:
             # followed by a white-space character has a space inserted.
             value = re.sub(r"\n(\S)", r"\n \1", value)
 
-            output_lines.append('%s:%s' % (name, value))
+            output_lines.append("%s:%s" % (name, value))
 
-        return '\n'.join(output_lines)
+        return "\n".join(output_lines)
 
 
 def format_file_list(lst):
-    return ''.join('\n %s %s %s' % ((h,) + f) for (h, f) in lst)
+    return "".join("\n %s %s %s" % ((h,) + f) for (h, f) in lst)
 
 
 def format_description(summary, description):
@@ -91,7 +90,7 @@ def format_description(summary, description):
     #  ...
     #  <DESCRIPTION LN>
     descr_lines = [line.lstrip() for line in description.splitlines()]
-    bin_description = '%s\n %s' % (summary, '\n '.join(descr_lines))
+    bin_description = "%s\n %s" % (summary, "\n ".join(descr_lines))
     return bin_description
 
 
@@ -103,49 +102,55 @@ def build_source_stanza_fields(spr, component, section):
     sha1_list = []
     sha256_list = []
     for spf in spr.files:
-        common = (
-            spf.libraryfile.content.filesize, spf.libraryfile.filename)
+        common = (spf.libraryfile.content.filesize, spf.libraryfile.filename)
         files_list.append((spf.libraryfile.content.md5, common))
         sha1_list.append((spf.libraryfile.content.sha1, common))
         sha256_list.append((spf.libraryfile.content.sha256, common))
-    user_defined_fields = OrderedDict([
-        (key.lower(), (key, value))
-        for key, value in spr.user_defined_fields])
+    user_defined_fields = OrderedDict(
+        [(key.lower(), (key, value)) for key, value in spr.user_defined_fields]
+    )
     # Filling stanza options.
     fields = IndexStanzaFields()
-    fields.append('Package', spr.name)
-    fields.append('Binary', spr.dsc_binaries)
-    fields.append('Version', spr.version)
-    fields.append('Section', section.name)
-    fields.append('Maintainer', spr.dsc_maintainer_rfc822)
-    fields.append('Build-Depends', spr.builddepends)
-    fields.append('Build-Depends-Indep', spr.builddependsindep)
-    if 'build-depends-arch' in user_defined_fields:
+    fields.append("Package", spr.name)
+    fields.append("Binary", spr.dsc_binaries)
+    fields.append("Version", spr.version)
+    fields.append("Section", section.name)
+    fields.append("Maintainer", spr.dsc_maintainer_rfc822)
+    fields.append("Build-Depends", spr.builddepends)
+    fields.append("Build-Depends-Indep", spr.builddependsindep)
+    if "build-depends-arch" in user_defined_fields:
         fields.append(
-            'Build-Depends-Arch',
-            user_defined_fields.pop('build-depends-arch')[1])
-    fields.append('Build-Conflicts', spr.build_conflicts)
-    fields.append('Build-Conflicts-Indep', spr.build_conflicts_indep)
-    if 'build-conflicts-arch' in user_defined_fields:
+            "Build-Depends-Arch",
+            user_defined_fields.pop("build-depends-arch")[1],
+        )
+    fields.append("Build-Conflicts", spr.build_conflicts)
+    fields.append("Build-Conflicts-Indep", spr.build_conflicts_indep)
+    if "build-conflicts-arch" in user_defined_fields:
         fields.append(
-            'Build-Conflicts-Arch',
-            user_defined_fields.pop('build-conflicts-arch')[1])
-    fields.append('Architecture', spr.architecturehintlist)
-    fields.append('Standards-Version', spr.dsc_standards_version)
-    fields.append('Format', spr.dsc_format)
-    fields.append('Directory', pool_path)
-    fields.append('Files', format_file_list(files_list))
-    fields.append('Checksums-Sha1', format_file_list(sha1_list))
-    fields.append('Checksums-Sha256', format_file_list(sha256_list))
-    fields.append('Homepage', spr.homepage)
+            "Build-Conflicts-Arch",
+            user_defined_fields.pop("build-conflicts-arch")[1],
+        )
+    fields.append("Architecture", spr.architecturehintlist)
+    fields.append("Standards-Version", spr.dsc_standards_version)
+    fields.append("Format", spr.dsc_format)
+    fields.append("Directory", pool_path)
+    fields.append("Files", format_file_list(files_list))
+    fields.append("Checksums-Sha1", format_file_list(sha1_list))
+    fields.append("Checksums-Sha256", format_file_list(sha256_list))
+    fields.append("Homepage", spr.homepage)
     fields.extend(user_defined_fields.values())
 
     return fields
 
 
-def build_binary_stanza_fields(bpr, component, section, priority,
-                               phased_update_percentage,
-                               separate_long_descriptions=False):
+def build_binary_stanza_fields(
+    bpr,
+    component,
+    section,
+    priority,
+    phased_update_percentage,
+    separate_long_descriptions=False,
+):
     """Build a map of fields to be included in a Packages file.
 
     :param separate_long_descriptions: if True, the long description will
@@ -161,12 +166,14 @@ def build_binary_stanza_fields(bpr, component, section, priority,
     bin_sha1 = bin_file.libraryfile.content.sha1
     bin_sha256 = bin_file.libraryfile.content.sha256
     bin_filepath = os.path.join(
-        makePoolPath(spr.name, component.name), bin_filename)
+        makePoolPath(spr.name, component.name), bin_filename
+    )
     description = format_description(bpr.summary, bpr.description)
     # Our formatted description isn't \n-terminated, but apt
     # considers the trailing \n to be part of the data to hash.
     bin_description_md5 = hashlib.md5(
-        description.encode('utf-8') + b'\n').hexdigest()
+        description.encode("utf-8") + b"\n"
+    ).hexdigest()
     if separate_long_descriptions:
         # If distroseries.include_long_descriptions is False, the
         # description should be the summary
@@ -180,46 +187,46 @@ def build_binary_stanza_fields(bpr, component, section, priority,
     if bpr.architecturespecific:
         architecture = bpr.build.distro_arch_series.architecturetag
     else:
-        architecture = 'all'
+        architecture = "all"
 
     essential = None
     if bpr.essential:
-        essential = 'yes'
+        essential = "yes"
 
     source = None
     if bpr.version != spr.version:
-        source = '%s (%s)' % (spr.name, spr.version)
+        source = "%s (%s)" % (spr.name, spr.version)
     elif bpr.name != spr.name:
         source = spr.name
 
     fields = IndexStanzaFields()
-    fields.append('Package', bpr.name)
-    fields.append('Source', source)
-    fields.append('Priority', priority.title.lower())
-    fields.append('Section', section.name)
-    fields.append('Installed-Size', bpr.installedsize)
-    fields.append('Maintainer', spr.dsc_maintainer_rfc822)
-    fields.append('Architecture', architecture)
-    fields.append('Version', bpr.version)
-    fields.append('Recommends', bpr.recommends)
-    fields.append('Replaces', bpr.replaces)
-    fields.append('Suggests', bpr.suggests)
-    fields.append('Provides', bpr.provides)
-    fields.append('Depends', bpr.depends)
-    fields.append('Conflicts', bpr.conflicts)
-    fields.append('Pre-Depends', bpr.pre_depends)
-    fields.append('Enhances', bpr.enhances)
-    fields.append('Breaks', bpr.breaks)
-    fields.append('Essential', essential)
-    fields.append('Filename', bin_filepath)
-    fields.append('Size', bin_size)
-    fields.append('MD5sum', bin_md5)
-    fields.append('SHA1', bin_sha1)
-    fields.append('SHA256', bin_sha256)
-    fields.append('Phased-Update-Percentage', phased_update_percentage)
-    fields.append('Description', bin_description)
+    fields.append("Package", bpr.name)
+    fields.append("Source", source)
+    fields.append("Priority", priority.title.lower())
+    fields.append("Section", section.name)
+    fields.append("Installed-Size", bpr.installedsize)
+    fields.append("Maintainer", spr.dsc_maintainer_rfc822)
+    fields.append("Architecture", architecture)
+    fields.append("Version", bpr.version)
+    fields.append("Recommends", bpr.recommends)
+    fields.append("Replaces", bpr.replaces)
+    fields.append("Suggests", bpr.suggests)
+    fields.append("Provides", bpr.provides)
+    fields.append("Depends", bpr.depends)
+    fields.append("Conflicts", bpr.conflicts)
+    fields.append("Pre-Depends", bpr.pre_depends)
+    fields.append("Enhances", bpr.enhances)
+    fields.append("Breaks", bpr.breaks)
+    fields.append("Essential", essential)
+    fields.append("Filename", bin_filepath)
+    fields.append("Size", bin_size)
+    fields.append("MD5sum", bin_md5)
+    fields.append("SHA1", bin_sha1)
+    fields.append("SHA256", bin_sha256)
+    fields.append("Phased-Update-Percentage", phased_update_percentage)
+    fields.append("Description", bin_description)
     if separate_long_descriptions:
-        fields.append('Description-md5', bin_description_md5)
+        fields.append("Description-md5", bin_description_md5)
     if bpr.user_defined_fields:
         fields.extend(bpr.user_defined_fields)
 
@@ -242,12 +249,13 @@ def build_translations_stanza_fields(bpr, packages):
     # Our formatted description isn't \n-terminated, but apt
     # considers the trailing \n to be part of the data to hash.
     bin_description_md5 = hashlib.md5(
-        bin_description.encode('utf-8') + b'\n').hexdigest()
+        bin_description.encode("utf-8") + b"\n"
+    ).hexdigest()
     if (bpr.name, bin_description_md5) not in packages:
         fields = IndexStanzaFields()
-        fields.append('Package', bpr.name)
-        fields.append('Description-md5', bin_description_md5)
-        fields.append('Description-en', bin_description)
+        fields.append("Package", bpr.name)
+        fields.append("Description-md5", bin_description_md5)
+        fields.append("Description-en", bin_description)
         packages.add((bpr.name, bin_description_md5))
 
         return fields
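
As a reading aid for the indices.py hunks above: the stanza machinery appends (name, value) pairs and serializes them Deb822-style, with the Files/Checksums-* values already carrying their own leading newlines from format_file_list(). A minimal standalone sketch of that behaviour (the helper name make_stanza and the sample hash, size and filename below are invented for illustration and are not part of the module, which also skips None values and fixes up continuation lines):

    # Simplified sketch of the serialization shown above: single-line fields
    # get a space after the colon; Files/Checksums-* values already start
    # with "\n <hash> <size> <name>" continuation lines.
    def format_file_list(lst):
        return "".join("\n %s %s %s" % ((h,) + f) for (h, f) in lst)

    def make_stanza(fields):
        output_lines = []
        for name, value in fields:
            if name not in ("Files", "Checksums-Sha1", "Checksums-Sha256"):
                value = " %s" % value
            output_lines.append("%s:%s" % (name, value))
        return "\n".join(output_lines)

    files = [("d41d8cd98f00b204e9800998ecf8427e", (419, "hello_1.0.dsc"))]
    print(make_stanza([
        ("Package", "hello"),
        ("Version", "1.0"),
        ("Directory", "pool/main/h/hello"),
        ("Files", format_file_list(files)),
    ]))

This prints a Sources-style stanza with each file listed on its own space-indented line under the Files: header.
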
diff --git a/lib/lp/archivepublisher/interfaces/archivegpgsigningkey.py b/lib/lp/archivepublisher/interfaces/archivegpgsigningkey.py
index f857e90..d0b03e0 100644
--- a/lib/lp/archivepublisher/interfaces/archivegpgsigningkey.py
+++ b/lib/lp/archivepublisher/interfaces/archivegpgsigningkey.py
@@ -4,24 +4,21 @@
 """ArchiveGPGSigningKey interface."""
 
 __all__ = [
-    'CannotSignArchive',
-    'IArchiveGPGSigningKey',
-    'ISignableArchive',
-    'PUBLISHER_GPG_USES_SIGNING_SERVICE',
-    ]
-
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
+    "CannotSignArchive",
+    "IArchiveGPGSigningKey",
+    "ISignableArchive",
+    "PUBLISHER_GPG_USES_SIGNING_SERVICE",
+]
+
+from zope.interface import Attribute, Interface
 from zope.schema import Object
 
 from lp import _
 from lp.soyuz.interfaces.archive import IArchive
 
-
 PUBLISHER_GPG_USES_SIGNING_SERVICE = (
-    'archivepublisher.gpg.signing_service.enabled')
+    "archivepublisher.gpg.signing_service.enabled"
+)
 
 
 class CannotSignArchive(Exception):
@@ -35,11 +32,12 @@ class ISignableArchive(Interface):
     """
 
     archive = Object(
-        title=_('Corresponding IArchive'), required=True, schema=IArchive)
+        title=_("Corresponding IArchive"), required=True, schema=IArchive
+    )
 
     can_sign = Attribute("True if this archive is set up for signing.")
 
-    def signRepository(suite, pubconf=None, suffix='', log=None):
+    def signRepository(suite, pubconf=None, suffix="", log=None):
         """Sign the corresponding repository.
 
         :param suite: suite name to be signed.
diff --git a/lib/lp/archivepublisher/interfaces/publisherconfig.py b/lib/lp/archivepublisher/interfaces/publisherconfig.py
index 65e80c9..2bf04fa 100644
--- a/lib/lp/archivepublisher/interfaces/publisherconfig.py
+++ b/lib/lp/archivepublisher/interfaces/publisherconfig.py
@@ -4,16 +4,13 @@
 """PublisherConfig interface."""
 
 __all__ = [
-    'IPublisherConfig',
-    'IPublisherConfigSet',
-    ]
+    "IPublisherConfig",
+    "IPublisherConfigSet",
+]
 
 from lazr.restful.fields import Reference
 from zope.interface import Interface
-from zope.schema import (
-    Int,
-    TextLine,
-    )
+from zope.schema import Int, TextLine
 
 from lp import _
 from lp.registry.interfaces.distribution import IDistribution
@@ -22,23 +19,32 @@ from lp.registry.interfaces.distribution import IDistribution
 class IPublisherConfig(Interface):
     """`PublisherConfig` interface."""
 
-    id = Int(title=_('ID'), required=True, readonly=True)
+    id = Int(title=_("ID"), required=True, readonly=True)
 
     distribution = Reference(
-        IDistribution, title=_("Distribution"), required=True,
-        description=_("The Distribution for this configuration."))
+        IDistribution,
+        title=_("Distribution"),
+        required=True,
+        description=_("The Distribution for this configuration."),
+    )
 
     root_dir = TextLine(
-        title=_("Root Directory"), required=True,
-        description=_("The root directory for published archives."))
+        title=_("Root Directory"),
+        required=True,
+        description=_("The root directory for published archives."),
+    )
 
     base_url = TextLine(
-        title=_("Base URL"), required=True,
-        description=_("The base URL for published archives"))
+        title=_("Base URL"),
+        required=True,
+        description=_("The base URL for published archives"),
+    )
 
     copy_base_url = TextLine(
-        title=_("Copy Base URL"), required=True,
-        description=_("The base URL for published copy archives"))
+        title=_("Copy Base URL"),
+        required=True,
+        description=_("The base URL for published copy archives"),
+    )
 
 
 class IPublisherConfigSet(Interface):
diff --git a/lib/lp/archivepublisher/meta_data.py b/lib/lp/archivepublisher/meta_data.py
index b0de662..2c18051 100644
--- a/lib/lp/archivepublisher/meta_data.py
+++ b/lib/lp/archivepublisher/meta_data.py
@@ -5,7 +5,7 @@
 
 __all__ = [
     "MetaDataUpload",
-    ]
+]
 
 import os
 
@@ -22,6 +22,7 @@ class MetaDataUpload(CustomUpload):
     seen even when the archive is private, and allows commercial customers
     to browse contents for potential later purchase.
     """
+
     custom_type = "meta-data"
 
     @classmethod
@@ -35,7 +36,8 @@ class MetaDataUpload(CustomUpload):
         if pubconf.metaroot is None:
             if self.logger is not None:
                 self.logger.debug(
-                    "Skipping meta-data for archive without metaroot.")
+                    "Skipping meta-data for archive without metaroot."
+                )
             return
 
         dest_file = os.path.join(pubconf.metaroot, libraryfilealias.filename)
diff --git a/lib/lp/archivepublisher/model/ftparchive.py b/lib/lp/archivepublisher/model/ftparchive.py
index 5ab2ef5..15bf6e6 100644
--- a/lib/lp/archivepublisher/model/ftparchive.py
+++ b/lib/lp/archivepublisher/model/ftparchive.py
@@ -1,18 +1,15 @@
 # Copyright 2009-2021 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from collections import defaultdict
 import io
 import os
 import re
 import time
+from collections import defaultdict
 
-from storm.expr import (
-    Desc,
-    Join,
-    )
-from storm.store import EmptyResultSet
 import transaction
+from storm.expr import Desc, Join
+from storm.store import EmptyResultSet
 
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.sourcepackagename import SourcePackageName
@@ -21,7 +18,7 @@ from lp.services.command_spawner import (
     CommandSpawner,
     OutputLineHandler,
     ReturnCodeReceiver,
-    )
+)
 from lp.services.database.interfaces import IStore
 from lp.services.database.stormexpr import Concatenate
 from lp.services.librarian.model import LibraryFileAlias
@@ -30,20 +27,17 @@ from lp.soyuz.enums import (
     BinaryPackageFormat,
     IndexCompressionType,
     PackagePublishingStatus,
-    )
+)
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.binarypackagename import BinaryPackageName
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
 from lp.soyuz.model.component import Component
 from lp.soyuz.model.distroarchseries import DistroArchSeries
-from lp.soyuz.model.files import (
-    BinaryPackageFile,
-    SourcePackageReleaseFile,
-    )
+from lp.soyuz.model.files import BinaryPackageFile, SourcePackageReleaseFile
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.model.section import Section
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
@@ -61,8 +55,9 @@ def make_clean_dir(path, clean_pattern=".*"):
     """
     if os.path.isdir(path):
         for entry in list(os.scandir(path)):
-            if (entry.name == "by-hash" or
-                    not re.match(clean_pattern, entry.name)):
+            if entry.name == "by-hash" or not re.match(
+                clean_pattern, entry.name
+            ):
                 # Ignore existing by-hash directories; they will be cleaned
                 # up to match the rest of the directory tree later.
                 continue
@@ -123,23 +118,23 @@ tree "%(DISTS)s/%(DISTRORELEASEONDISK)s"
 """
 
 EXT_TO_SUBCOMPONENT = {
-    'udeb': 'debian-installer',
-    'ddeb': 'debug',
-    }
+    "udeb": "debian-installer",
+    "ddeb": "debug",
+}
 
 SUBCOMPONENT_TO_EXT = {
-    'debian-installer': 'udeb',
-    'debug': 'ddeb',
-    }
+    "debian-installer": "udeb",
+    "debug": "ddeb",
+}
 
 CLEANUP_FREQUENCY = 60 * 60 * 24
 
 COMPRESSOR_TO_CONFIG = {
-    IndexCompressionType.UNCOMPRESSED: '.',
-    IndexCompressionType.GZIP: 'gzip',
-    IndexCompressionType.BZIP2: 'bzip2',
-    IndexCompressionType.XZ: 'xz',
-    }
+    IndexCompressionType.UNCOMPRESSED: ".",
+    IndexCompressionType.GZIP: "gzip",
+    IndexCompressionType.BZIP2: "bzip2",
+    IndexCompressionType.XZ: "xz",
+}
 
 
 class AptFTPArchiveFailure(Exception):
@@ -189,11 +184,13 @@ class FTPArchiveHandler:
 
         returncodes = {}
         completion_handler = ReturnCodeReceiver()
-        returncodes['all'] = completion_handler
+        returncodes["all"] = completion_handler
         spawner.start(
-            base_command, stdout_handler=stdout_handler,
+            base_command,
+            stdout_handler=stdout_handler,
             stderr_handler=stderr_handler,
-            completion_handler=completion_handler)
+            completion_handler=completion_handler,
+        )
 
         spawner.complete()
         stdout_handler.finalize()
@@ -201,11 +198,13 @@ class FTPArchiveHandler:
         failures = sorted(
             (tag, receiver.returncode)
             for tag, receiver in returncodes.items()
-                if receiver.returncode != 0)
+            if receiver.returncode != 0
+        )
         if len(failures) > 0:
             by_arch = ["%s (returned %d)" % failure for failure in failures]
             raise AptFTPArchiveFailure(
-                "Failure(s) from apt-ftparchive: %s" % ", ".join(by_arch))
+                "Failure(s) from apt-ftparchive: %s" % ", ".join(by_arch)
+            )
 
     def runApt(self, apt_config_filename):
         self.runAptWithArgs(apt_config_filename, "--no-contents", "generate")
@@ -221,8 +220,11 @@ class FTPArchiveHandler:
         """
         for distroseries in self.distro.series:
             components = [
-                comp.name for comp in
-                self.publisher.archive.getComponentsForSeries(distroseries)]
+                comp.name
+                for comp in self.publisher.archive.getComponentsForSeries(
+                    distroseries
+                )
+            ]
             for pocket in PackagePublishingPocket.items:
                 if not fullpublish:
                     if not self.publisher.isDirty(distroseries, pocket):
@@ -232,7 +234,8 @@ class FTPArchiveHandler:
                         continue
 
                 self.publisher.release_files_needed.add(
-                    distroseries.getSuite(pocket))
+                    distroseries.getSuite(pocket)
+                )
 
                 for comp in components:
                     self.createEmptyPocketRequest(distroseries, pocket, comp)
@@ -246,28 +249,30 @@ class FTPArchiveHandler:
             (comp,),
             ("extra", comp),
             (comp, "src"),
-            ]
+        ]
         for sub_comp in self.publisher.subcomponents:
             needed_paths.append((comp, sub_comp))
 
         for path in needed_paths:
             full_path = os.path.join(
-                self._config.overrideroot,
-                ".".join(("override", suite) + path))
+                self._config.overrideroot, ".".join(("override", suite) + path)
+            )
             if not os.path.exists(full_path):
                 write_file(full_path, b"")
 
         # Create empty file lists if they don't already exist.
         def touch_list(*parts):
             full_path = os.path.join(
-                self._config.overrideroot,
-                "_".join((suite, ) + parts))
+                self._config.overrideroot, "_".join((suite,) + parts)
+            )
             if not os.path.exists(full_path):
                 write_file(full_path, b"")
+
         touch_list(comp, "source")
 
         arch_tags = [
-            a.architecturetag for a in distroseries.enabled_architectures]
+            a.architecturetag for a in distroseries.enabled_architectures
+        ]
         for arch in arch_tags:
             # Touch more file lists for the archs.
             touch_list(comp, "binary-" + arch)
@@ -290,26 +295,39 @@ class FTPArchiveHandler:
         """
         origins = (
             SourcePackagePublishingHistory,
-            Join(Component,
-                 Component.id == SourcePackagePublishingHistory.componentID),
-            Join(Section,
-                 Section.id == SourcePackagePublishingHistory.sectionID),
-            Join(SourcePackageRelease,
-                 SourcePackageRelease.id ==
-                     SourcePackagePublishingHistory.sourcepackagereleaseID),
-            Join(SourcePackageName,
-                 SourcePackageName.id ==
-                     SourcePackageRelease.sourcepackagenameID),
+            Join(
+                Component,
+                Component.id == SourcePackagePublishingHistory.componentID,
+            ),
+            Join(
+                Section, Section.id == SourcePackagePublishingHistory.sectionID
+            ),
+            Join(
+                SourcePackageRelease,
+                SourcePackageRelease.id
+                == SourcePackagePublishingHistory.sourcepackagereleaseID,
+            ),
+            Join(
+                SourcePackageName,
+                SourcePackageName.id
+                == SourcePackageRelease.sourcepackagenameID,
+            ),
+        )
+
+        return (
+            IStore(SourcePackageName)
+            .using(*origins)
+            .find(
+                (SourcePackageName.name, Component.name, Section.name),
+                SourcePackagePublishingHistory.archive
+                == self.publisher.archive,
+                SourcePackagePublishingHistory.distroseries == distroseries,
+                SourcePackagePublishingHistory.pocket == pocket,
+                SourcePackagePublishingHistory.status
+                == PackagePublishingStatus.PUBLISHED,
             )
-
-        return IStore(SourcePackageName).using(*origins).find(
-            (SourcePackageName.name, Component.name, Section.name),
-            SourcePackagePublishingHistory.archive == self.publisher.archive,
-            SourcePackagePublishingHistory.distroseries == distroseries,
-            SourcePackagePublishingHistory.pocket == pocket,
-            SourcePackagePublishingHistory.status ==
-                PackagePublishingStatus.PUBLISHED).order_by(
-                    Desc(SourcePackagePublishingHistory.id))
+            .order_by(Desc(SourcePackagePublishingHistory.id))
+        )
 
     def getBinariesForOverrides(self, distroseries, pocket):
         """Fetch override information about all published binaries.
@@ -326,20 +344,29 @@ class FTPArchiveHandler:
         """
         origins = (
             BinaryPackagePublishingHistory,
-            Join(Component,
-                 Component.id == BinaryPackagePublishingHistory.componentID),
-            Join(Section,
-                 Section.id == BinaryPackagePublishingHistory.sectionID),
-            Join(BinaryPackageRelease,
-                 BinaryPackageRelease.id ==
-                     BinaryPackagePublishingHistory.binarypackagereleaseID),
-            Join(BinaryPackageName,
-                 BinaryPackageName.id ==
-                     BinaryPackageRelease.binarypackagenameID),
-            Join(DistroArchSeries,
-                 DistroArchSeries.id ==
-                     BinaryPackagePublishingHistory.distroarchseriesID),
-            )
+            Join(
+                Component,
+                Component.id == BinaryPackagePublishingHistory.componentID,
+            ),
+            Join(
+                Section, Section.id == BinaryPackagePublishingHistory.sectionID
+            ),
+            Join(
+                BinaryPackageRelease,
+                BinaryPackageRelease.id
+                == BinaryPackagePublishingHistory.binarypackagereleaseID,
+            ),
+            Join(
+                BinaryPackageName,
+                BinaryPackageName.id
+                == BinaryPackageRelease.binarypackagenameID,
+            ),
+            Join(
+                DistroArchSeries,
+                DistroArchSeries.id
+                == BinaryPackagePublishingHistory.distroarchseriesID,
+            ),
+        )
 
         architectures_ids = [arch.id for arch in distroseries.architectures]
         if len(architectures_ids) == 0:
@@ -348,23 +375,34 @@ class FTPArchiveHandler:
         conditions = [
             BinaryPackagePublishingHistory.archive == self.publisher.archive,
             BinaryPackagePublishingHistory.distroarchseriesID.is_in(
-                architectures_ids),
+                architectures_ids
+            ),
             BinaryPackagePublishingHistory.pocket == pocket,
-            BinaryPackagePublishingHistory.status ==
-                PackagePublishingStatus.PUBLISHED,
-            ]
+            BinaryPackagePublishingHistory.status
+            == PackagePublishingStatus.PUBLISHED,
+        ]
         if not self.publisher.archive.publish_debug_symbols:
             conditions.append(
                 BinaryPackageRelease.binpackageformat
-                    != BinaryPackageFormat.DDEB)
+                != BinaryPackageFormat.DDEB
+            )
 
-        result_set = IStore(BinaryPackageName).using(*origins).find(
-            (BinaryPackageName.name, Component.name, Section.name,
-             DistroArchSeries.architecturetag,
-             BinaryPackagePublishingHistory.priority,
-             BinaryPackageRelease.binpackageformat,
-             BinaryPackagePublishingHistory.phased_update_percentage),
-            *conditions)
+        result_set = (
+            IStore(BinaryPackageName)
+            .using(*origins)
+            .find(
+                (
+                    BinaryPackageName.name,
+                    Component.name,
+                    Section.name,
+                    DistroArchSeries.architecturetag,
+                    BinaryPackagePublishingHistory.priority,
+                    BinaryPackageRelease.binpackageformat,
+                    BinaryPackagePublishingHistory.phased_update_percentage,
+                ),
+                *conditions,
+            )
+        )
 
         return result_set.order_by(Desc(BinaryPackagePublishingHistory.id))
 
@@ -383,8 +421,9 @@ class FTPArchiveHandler:
                 bpphs = self.getBinariesForOverrides(distroseries, pocket)
                 self.publishOverrides(distroseries, pocket, spphs, bpphs)
 
-    def publishOverrides(self, distroseries, pocket,
-                         source_publications, binary_publications):
+    def publishOverrides(
+        self, distroseries, pocket, source_publications, binary_publications
+    ):
         """Output a set of override files for use in apt-ftparchive.
 
         Given the provided sourceoverrides and binaryoverrides, do the
@@ -413,12 +452,19 @@ class FTPArchiveHandler:
         # Ensure that we generate overrides for all the expected components,
         # even if they're currently empty.
         for component in self.publisher.archive.getComponentsForSeries(
-                distroseries):
+            distroseries
+        ):
             overrides[component.name]
 
-        def updateOverride(packagename, component, section, archtag=None,
-                           priority=None, binpackageformat=None,
-                           phased_update_percentage=None):
+        def updateOverride(
+            packagename,
+            component,
+            section,
+            archtag=None,
+            priority=None,
+            binpackageformat=None,
+            phased_update_percentage=None,
+        ):
             """Generates and packs tuples of data required for overriding.
 
             If archtag is provided, it's a binary tuple; otherwise, it's a
@@ -442,10 +488,15 @@ class FTPArchiveHandler:
                 subcomp = FORMAT_TO_SUBCOMPONENT.get(binpackageformat)
                 if subcomp is None:
                     package_arch = "%s/%s" % (packagename, archtag)
-                    override['bin'].add((
-                        package_arch, priority, section,
-                        0 if phased_update_percentage is None else 1,
-                        phased_update_percentage))
+                    override["bin"].add(
+                        (
+                            package_arch,
+                            priority,
+                            section,
+                            0 if phased_update_percentage is None else 1,
+                            phased_update_percentage,
+                        )
+                    )
                 elif subcomp in self.publisher.subcomponents:
                     # We pick up subcomponent packages here, although they
                     # do not need phased updates (and adding the
@@ -453,7 +504,7 @@ class FTPArchiveHandler:
                     # generateOverrideForComponent).
                     override[subcomp].add((packagename, priority, section))
             else:
-                override['src'].add((packagename, section))
+                override["src"].add((packagename, section))
 
         # Process huge iterations (more than 200k records) in batches.
         # See `PublishingTunableLoop`.
@@ -469,41 +520,54 @@ class FTPArchiveHandler:
 
         # Now generate the files on disk...
         for component in overrides:
-            self.log.debug("Generating overrides for %s/%s..." % (
-                suite, component))
+            self.log.debug(
+                "Generating overrides for %s/%s..." % (suite, component)
+            )
             self.generateOverrideForComponent(overrides, suite, component)
 
     def generateOverrideForComponent(self, overrides, suite, component):
         """Generates overrides for a specific component."""
-        src_overrides = sorted(overrides[component]['src'])
-        bin_overrides = sorted(overrides[component]['bin'])
+        src_overrides = sorted(overrides[component]["src"])
+        bin_overrides = sorted(overrides[component]["bin"])
 
         # Set up filepaths for the overrides we read
-        extra_extra_overrides = os.path.join(self._config.miscroot,
-            "more-extra.override.%s.main" % suite)
+        extra_extra_overrides = os.path.join(
+            self._config.miscroot, "more-extra.override.%s.main" % suite
+        )
         if not os.path.exists(extra_extra_overrides):
-            unpocketed_series = "-".join(suite.split('-')[:-1])
-            extra_extra_overrides = os.path.join(self._config.miscroot,
-                "more-extra.override.%s.main" % unpocketed_series)
+            unpocketed_series = "-".join(suite.split("-")[:-1])
+            extra_extra_overrides = os.path.join(
+                self._config.miscroot,
+                "more-extra.override.%s.main" % unpocketed_series,
+            )
         # And for the overrides we write out
-        main_override = os.path.join(self._config.overrideroot,
-                                     "override.%s.%s" % (suite, component))
-        ef_override = os.path.join(self._config.overrideroot,
-                                   "override.%s.extra.%s" % (suite, component))
+        main_override = os.path.join(
+            self._config.overrideroot, "override.%s.%s" % (suite, component)
+        )
+        ef_override = os.path.join(
+            self._config.overrideroot,
+            "override.%s.extra.%s" % (suite, component),
+        )
         ef_override_new = "{}.new".format(ef_override)
         # Create the files as .new and then move into place to prevent
         # race conditions with other processes handling these files
         main_override_new = "{}.new".format(main_override)
-        source_override = os.path.join(self._config.overrideroot,
-                                       "override.%s.%s.src" %
-                                       (suite, component))
+        source_override = os.path.join(
+            self._config.overrideroot,
+            "override.%s.%s.src" % (suite, component),
+        )
 
         # Start to write the files out
         ef = open(ef_override_new, "w")
         f = open(main_override_new, "w")
         basic_override_seen = set()
-        for (package_arch, priority, section, _,
-             phased_update_percentage) in bin_overrides:
+        for (
+            package_arch,
+            priority,
+            section,
+            _,
+            phased_update_percentage,
+        ) in bin_overrides:
             package = package_arch.split("/")[0]
             if package not in basic_override_seen:
                 basic_override_seen.add(package)
@@ -516,14 +580,26 @@ class FTPArchiveHandler:
                 # appropriate and look for bugs addresses etc in Launchpad.)
                 ef.write("\t".join([package, "Origin", "Ubuntu"]))
                 ef.write("\n")
-                ef.write("\t".join([
-                    package, "Bugs",
-                    "https://bugs.launchpad.net/ubuntu/+filebug"]))
+                ef.write(
+                    "\t".join(
+                        [
+                            package,
+                            "Bugs",
+                            "https://bugs.launchpad.net/ubuntu/+filebug",
+                        ]
+                    )
+                )
                 ef.write("\n")
             if phased_update_percentage is not None:
-                ef.write("\t".join([
-                    package_arch, "Phased-Update-Percentage",
-                    str(phased_update_percentage)]))
+                ef.write(
+                    "\t".join(
+                        [
+                            package_arch,
+                            "Phased-Update-Percentage",
+                            str(phased_update_percentage),
+                        ]
+                    )
+                )
                 ef.write("\n")
         f.close()
         # Move into place
@@ -569,7 +645,8 @@ class FTPArchiveHandler:
             if sub_overrides:
                 sub_path = os.path.join(
                     self._config.overrideroot,
-                    "override.%s.%s.%s" % (suite, component, subcomp))
+                    "override.%s.%s.%s" % (suite, component, subcomp),
+                )
                 _outputSimpleOverrides(sub_path, sub_overrides)
 
     #
@@ -590,27 +667,29 @@ class FTPArchiveHandler:
             SourcePackageName.name,
             LibraryFileAlias.filename,
             Component.name,
-            )
+        )
         join_conditions = [
-            SourcePackageReleaseFile.sourcepackagereleaseID ==
-                SourcePackagePublishingHistory.sourcepackagereleaseID,
-            SourcePackageName.id ==
-                SourcePackagePublishingHistory.sourcepackagenameID,
+            SourcePackageReleaseFile.sourcepackagereleaseID
+            == SourcePackagePublishingHistory.sourcepackagereleaseID,
+            SourcePackageName.id
+            == SourcePackagePublishingHistory.sourcepackagenameID,
             LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID,
             Component.id == SourcePackagePublishingHistory.componentID,
-            ]
+        ]
         select_conditions = [
             SourcePackagePublishingHistory.archive == self.publisher.archive,
             SourcePackagePublishingHistory.distroseriesID == distroseries.id,
             SourcePackagePublishingHistory.pocket == pocket,
-            SourcePackagePublishingHistory.status ==
-                PackagePublishingStatus.PUBLISHED,
-            ]
+            SourcePackagePublishingHistory.status
+            == PackagePublishingStatus.PUBLISHED,
+        ]
 
         result_set = IStore(SourcePackageRelease).find(
-            columns, *(join_conditions + select_conditions))
+            columns, *(join_conditions + select_conditions)
+        )
         return result_set.order_by(
-            LibraryFileAlias.filename, SourcePackageReleaseFile.id)
+            LibraryFileAlias.filename, SourcePackageReleaseFile.id
+        )
 
     def getBinaryFiles(self, distroseries, pocket):
         """Fetch publishing information about all published binary files.
@@ -628,36 +707,39 @@ class FTPArchiveHandler:
             LibraryFileAlias.filename,
             Component.name,
             Concatenate("binary-", DistroArchSeries.architecturetag),
-            )
+        )
         join_conditions = [
-            BinaryPackageRelease.id ==
-                BinaryPackagePublishingHistory.binarypackagereleaseID,
-            BinaryPackageFile.binarypackagereleaseID ==
-                BinaryPackagePublishingHistory.binarypackagereleaseID,
+            BinaryPackageRelease.id
+            == BinaryPackagePublishingHistory.binarypackagereleaseID,
+            BinaryPackageFile.binarypackagereleaseID
+            == BinaryPackagePublishingHistory.binarypackagereleaseID,
             BinaryPackageBuild.id == BinaryPackageRelease.buildID,
             SourcePackageName.id == BinaryPackageBuild.source_package_name_id,
             LibraryFileAlias.id == BinaryPackageFile.libraryfileID,
-            DistroArchSeries.id ==
-                BinaryPackagePublishingHistory.distroarchseriesID,
+            DistroArchSeries.id
+            == BinaryPackagePublishingHistory.distroarchseriesID,
             Component.id == BinaryPackagePublishingHistory.componentID,
-            ]
+        ]
         select_conditions = [
             DistroArchSeries.distroseriesID == distroseries.id,
             BinaryPackagePublishingHistory.archive == self.publisher.archive,
             BinaryPackagePublishingHistory.pocket == pocket,
-            BinaryPackagePublishingHistory.status ==
-                PackagePublishingStatus.PUBLISHED,
-            ]
+            BinaryPackagePublishingHistory.status
+            == PackagePublishingStatus.PUBLISHED,
+        ]
 
         if not self.publisher.archive.publish_debug_symbols:
             select_conditions.append(
                 BinaryPackageRelease.binpackageformat
-                    != BinaryPackageFormat.DDEB)
+                != BinaryPackageFormat.DDEB
+            )
 
         result_set = IStore(BinaryPackageRelease).find(
-            columns, *(join_conditions + select_conditions))
+            columns, *(join_conditions + select_conditions)
+        )
         return result_set.order_by(
-            LibraryFileAlias.filename, BinaryPackageFile.id)
+            LibraryFileAlias.filename, BinaryPackageFile.id
+        )
 
     def generateFileLists(self, fullpublish=False):
         """Collect currently published FilePublishings and write filelists."""
@@ -686,13 +768,15 @@ class FTPArchiveHandler:
         # Ensure that we generate file lists for all the expected components
         # and architectures, even if they're currently empty.
         for component in self.publisher.archive.getComponentsForSeries(
-                distroseries):
+            distroseries
+        ):
             filelist[component.name]["source"]
             for das in distroseries.enabled_architectures:
                 filelist[component.name]["binary-%s" % das.architecturetag]
 
-        def updateFileList(sourcepackagename, filename, component,
-                           architecturetag=None):
+        def updateFileList(
+            sourcepackagename, filename, component, architecturetag=None
+        ):
             # DiskPool.pathFor takes a source package version parameter.  We
             # could fetch that in getSourceFiles/getBinaryFiles and pass it
             # down here.  However, it adds another column to a query with an
@@ -701,7 +785,9 @@ class FTPArchiveHandler:
             # involved here; so we just pass None as the version.
             ondiskname = str(
                 self._diskpool.pathFor(
-                    component, sourcepackagename, None, file=filename))
+                    component, sourcepackagename, None, file=filename
+                )
+            )
             if architecturetag is None:
                 architecturetag = "source"
             filelist[component][architecturetag].append(ondiskname)
@@ -732,7 +818,8 @@ class FTPArchiveHandler:
                     enabled = das.enabled
                 if enabled:
                     self.writeFileList(
-                        architecture, file_names, suite, component)
+                        architecture, file_names, suite, component
+                    )
 
     def writeFileList(self, arch, file_names, dr_pocketed, component):
         """Output file lists for a series and architecture.
@@ -741,23 +828,30 @@ class FTPArchiveHandler:
         """
         files = defaultdict(list)
         for name in file_names:
-            files[EXT_TO_SUBCOMPONENT.get(name.rsplit('.', 1)[1])].append(name)
-
-        lists = (
-            [(None, 'regular', '%s_%s_%s' % (dr_pocketed, component, arch))]
-            + [(subcomp, subcomp,
-                '%s_%s_%s_%s' % (dr_pocketed, component, subcomp, arch))
-               for subcomp in self.publisher.subcomponents])
+            files[EXT_TO_SUBCOMPONENT.get(name.rsplit(".", 1)[1])].append(name)
+
+        lists = [
+            (None, "regular", "%s_%s_%s" % (dr_pocketed, component, arch))
+        ] + [
+            (
+                subcomp,
+                subcomp,
+                "%s_%s_%s_%s" % (dr_pocketed, component, subcomp, arch),
+            )
+            for subcomp in self.publisher.subcomponents
+        ]
         for subcomp, desc, filename in lists:
             self.log.debug(
-                "Writing %s file list for %s/%s/%s" % (
-                    desc, dr_pocketed, component, arch))
+                "Writing %s file list for %s/%s/%s"
+                % (desc, dr_pocketed, component, arch)
+            )
             # Prevent race conditions with other processes handling these
             # files, create as .new and then move into place
             new_path = os.path.join(
-                self._config.overrideroot, "{}.new".format(filename))
+                self._config.overrideroot, "{}.new".format(filename)
+            )
             final_path = os.path.join(self._config.overrideroot, filename)
-            with open(new_path, 'w') as f:
+            with open(new_path, "w") as f:
                 files[subcomp].sort(key=package_name)
                 f.write("\n".join(files[subcomp]))
                 if files[subcomp]:
@@ -779,10 +873,15 @@ class FTPArchiveHandler:
         explicitly marked as dirty.
         """
         apt_config = io.StringIO()
-        apt_config.write(CONFIG_HEADER % (self._config.archiveroot,
-                                          self._config.overrideroot,
-                                          self._config.cacheroot,
-                                          self._config.miscroot))
+        apt_config.write(
+            CONFIG_HEADER
+            % (
+                self._config.archiveroot,
+                self._config.overrideroot,
+                self._config.cacheroot,
+                self._config.miscroot,
+            )
+        )
 
         # confixtext now contains a basic header. Add a dists entry for
         # each of the distroseries we've touched
@@ -791,11 +890,14 @@ class FTPArchiveHandler:
 
                 if not fullpublish:
                     if not self.publisher.isDirty(distroseries, pocket):
-                        self.log.debug("Skipping a-f stanza for %s/%s" %
-                                           (distroseries.name, pocket.name))
+                        self.log.debug(
+                            "Skipping a-f stanza for %s/%s"
+                            % (distroseries.name, pocket.name)
+                        )
                         continue
                     self.publisher.checkDirtySuiteBeforePublishing(
-                        distroseries, pocket)
+                        distroseries, pocket
+                    )
                 else:
                     if not self.publisher.isAllowed(distroseries, pocket):
                         continue
@@ -812,24 +914,29 @@ class FTPArchiveHandler:
         """Generates the config stanza for an individual pocket."""
         suite = distroseries.getSuite(pocket)
 
-        archs = [
-            a.architecturetag for a in distroseries.enabled_architectures]
+        archs = [a.architecturetag for a in distroseries.enabled_architectures]
         comps = [
-            comp.name for comp in
-            self.publisher.archive.getComponentsForSeries(distroseries)]
+            comp.name
+            for comp in self.publisher.archive.getComponentsForSeries(
+                distroseries
+            )
+        ]
 
         self.writeAptConfig(
-            apt_config, suite, comps, archs,
+            apt_config,
+            suite,
+            comps,
+            archs,
             distroseries.include_long_descriptions,
-            distroseries.index_compressors)
+            distroseries.index_compressors,
+        )
 
         # Make sure all the relevant directories exist and are empty.  Each
         # of these only contains files generated by apt-ftparchive, and may
         # contain files left over from previous configurations (e.g.
         # different compressor types).
         for comp in comps:
-            component_path = os.path.join(
-                self._config.distsroot, suite, comp)
+            component_path = os.path.join(self._config.distsroot, suite, comp)
             make_clean_dir(os.path.join(component_path, "source"))
             if not distroseries.include_long_descriptions:
                 # apt-ftparchive only generates the English
@@ -837,52 +944,71 @@ class FTPArchiveHandler:
                 # files here that we want to keep.
                 make_clean_dir(
                     os.path.join(component_path, "i18n"),
-                    r'Translation-en(\..*)?$')
+                    r"Translation-en(\..*)?$",
+                )
             for arch in archs:
                 make_clean_dir(os.path.join(component_path, "binary-" + arch))
                 for subcomp in self.publisher.subcomponents:
-                    make_clean_dir(os.path.join(
-                        component_path, subcomp, "binary-" + arch))
-
-    def writeAptConfig(self, apt_config, suite, comps, archs,
-                       include_long_descriptions, index_compressors):
+                    make_clean_dir(
+                        os.path.join(component_path, subcomp, "binary-" + arch)
+                    )
+
+    def writeAptConfig(
+        self,
+        apt_config,
+        suite,
+        comps,
+        archs,
+        include_long_descriptions,
+        index_compressors,
+    ):
         self.log.debug("Generating apt config for %s" % suite)
         compressors = " ".join(
-            COMPRESSOR_TO_CONFIG[c] for c in index_compressors)
-        apt_config.write(STANZA_TEMPLATE % {
-                         "LISTPATH": self._config.overrideroot,
-                         "DISTRORELEASE": suite,
-                         "DISTRORELEASEBYFILE": suite,
-                         "DISTRORELEASEONDISK": suite,
-                         "ARCHITECTURES": " ".join(archs + ["source"]),
-                         "SECTIONS": " ".join(comps),
-                         "EXTENSIONS": ".deb",
-                         "COMPRESSORS": compressors,
-                         "CACHEINSERT": "",
-                         "DISTS": os.path.basename(self._config.distsroot),
-                         "HIDEEXTRA": "",
-                         # Must match DdtpTarballUpload.shouldInstall.
-                         "LONGDESCRIPTION":
-                             "true" if include_long_descriptions else "false",
-                         })
+            COMPRESSOR_TO_CONFIG[c] for c in index_compressors
+        )
+        apt_config.write(
+            STANZA_TEMPLATE
+            % {
+                "LISTPATH": self._config.overrideroot,
+                "DISTRORELEASE": suite,
+                "DISTRORELEASEBYFILE": suite,
+                "DISTRORELEASEONDISK": suite,
+                "ARCHITECTURES": " ".join(archs + ["source"]),
+                "SECTIONS": " ".join(comps),
+                "EXTENSIONS": ".deb",
+                "COMPRESSORS": compressors,
+                "CACHEINSERT": "",
+                "DISTS": os.path.basename(self._config.distsroot),
+                "HIDEEXTRA": "",
+                # Must match DdtpTarballUpload.shouldInstall.
+                "LONGDESCRIPTION": "true"
+                if include_long_descriptions
+                else "false",
+            }
+        )
 
         if archs:
             for component in comps:
                 for subcomp in self.publisher.subcomponents:
-                    apt_config.write(STANZA_TEMPLATE % {
-                        "LISTPATH": self._config.overrideroot,
-                        "DISTRORELEASEONDISK": "%s/%s" % (suite, component),
-                        "DISTRORELEASEBYFILE": "%s_%s" % (suite, component),
-                        "DISTRORELEASE": "%s.%s" % (suite, component),
-                        "ARCHITECTURES": " ".join(archs),
-                        "SECTIONS": subcomp,
-                        "EXTENSIONS": '.%s' % SUBCOMPONENT_TO_EXT[subcomp],
-                        "COMPRESSORS": compressors,
-                        "CACHEINSERT": "-%s" % subcomp,
-                        "DISTS": os.path.basename(self._config.distsroot),
-                        "HIDEEXTRA": "// ",
-                        "LONGDESCRIPTION": "true",
-                        })
+                    apt_config.write(
+                        STANZA_TEMPLATE
+                        % {
+                            "LISTPATH": self._config.overrideroot,
+                            "DISTRORELEASEONDISK": "%s/%s"
+                            % (suite, component),
+                            "DISTRORELEASEBYFILE": "%s_%s"
+                            % (suite, component),
+                            "DISTRORELEASE": "%s.%s" % (suite, component),
+                            "ARCHITECTURES": " ".join(archs),
+                            "SECTIONS": subcomp,
+                            "EXTENSIONS": ".%s" % SUBCOMPONENT_TO_EXT[subcomp],
+                            "COMPRESSORS": compressors,
+                            "CACHEINSERT": "-%s" % subcomp,
+                            "DISTS": os.path.basename(self._config.distsroot),
+                            "HIDEEXTRA": "// ",
+                            "LONGDESCRIPTION": "true",
+                        }
+                    )
 
     def cleanCaches(self):
         """Clean apt-ftparchive caches.
@@ -892,7 +1018,8 @@ class FTPArchiveHandler:
         get too large and slow down normal runs of apt-ftparchive.
         """
         apt_config_filename = os.path.join(
-            self._config.miscroot, "apt-cleanup.conf")
+            self._config.miscroot, "apt-cleanup.conf"
+        )
         try:
             last_cleanup = os.stat(apt_config_filename).st_mtime
             if last_cleanup > time.time() - CLEANUP_FREQUENCY:
@@ -901,10 +1028,15 @@ class FTPArchiveHandler:
             pass
 
         apt_config = io.StringIO()
-        apt_config.write(CONFIG_HEADER % (self._config.archiveroot,
-                                          self._config.overrideroot,
-                                          self._config.cacheroot,
-                                          self._config.miscroot))
+        apt_config.write(
+            CONFIG_HEADER
+            % (
+                self._config.archiveroot,
+                self._config.overrideroot,
+                self._config.cacheroot,
+                self._config.miscroot,
+            )
+        )
 
         # "apt-ftparchive clean" doesn't care what suite it's given, but it
         # needs to know the union of all architectures and components for
@@ -915,11 +1047,17 @@ class FTPArchiveHandler:
             for a in distroseries.enabled_architectures:
                 archs.add(a.architecturetag)
             for comp in self.publisher.archive.getComponentsForSeries(
-                distroseries):
+                distroseries
+            ):
                 comps.add(comp.name)
         self.writeAptConfig(
-            apt_config, "nonexistent-suite", sorted(comps), sorted(archs),
-            True, [IndexCompressionType.UNCOMPRESSED])
+            apt_config,
+            "nonexistent-suite",
+            sorted(comps),
+            sorted(archs),
+            True,
+            [IndexCompressionType.UNCOMPRESSED],
+        )
 
         with open(apt_config_filename, "w") as fp:
             fp.write(apt_config.getvalue())
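
A recurring idiom in the override and file-list writers reformatted above is to create each output as "<name>.new" and then move it into place, so concurrent readers of the overrideroot never see a half-written file. A minimal standalone sketch of that pattern (the directory, file name and override line are invented examples; the real code writes into self._config.overrideroot):

    # Write-to-.new-then-rename, as described by the comments in
    # generateOverrideForComponent() and writeFileList() above.
    import os
    import tempfile

    def write_atomically(final_path, text):
        new_path = "{}.new".format(final_path)
        with open(new_path, "w") as f:
            f.write(text)
        # rename() is atomic on POSIX when source and target share a filesystem.
        os.rename(new_path, final_path)

    with tempfile.TemporaryDirectory() as overrideroot:
        target = os.path.join(overrideroot, "override.focal.main")
        write_atomically(target, "hello\toptional\tdevel\n")
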
diff --git a/lib/lp/archivepublisher/model/publisherconfig.py b/lib/lp/archivepublisher/model/publisherconfig.py
index 7cc8465..3f05833 100644
--- a/lib/lp/archivepublisher/model/publisherconfig.py
+++ b/lib/lp/archivepublisher/model/publisherconfig.py
@@ -4,48 +4,42 @@
 """Database class for table PublisherConfig."""
 
 __all__ = [
-    'PublisherConfig',
-    'PublisherConfigSet',
-    ]
-
-from storm.locals import (
-    Int,
-    Reference,
-    Storm,
-    Unicode,
-    )
+    "PublisherConfig",
+    "PublisherConfigSet",
+]
+
+from storm.locals import Int, Reference, Storm, Unicode
 from zope.interface import implementer
 
 from lp.archivepublisher.interfaces.publisherconfig import (
     IPublisherConfig,
     IPublisherConfigSet,
-    )
-from lp.services.database.interfaces import (
-    IMasterStore,
-    IStore,
-    )
+)
+from lp.services.database.interfaces import IMasterStore, IStore
 
 
 @implementer(IPublisherConfig)
 class PublisherConfig(Storm):
     """See `IArchiveAuthToken`."""
-    __storm_table__ = 'PublisherConfig'
+
+    __storm_table__ = "PublisherConfig"
 
     id = Int(primary=True)
 
-    distribution_id = Int(name='distribution', allow_none=False)
-    distribution = Reference(distribution_id, 'Distribution.id')
+    distribution_id = Int(name="distribution", allow_none=False)
+    distribution = Reference(distribution_id, "Distribution.id")
 
-    root_dir = Unicode(name='root_dir', allow_none=False)
+    root_dir = Unicode(name="root_dir", allow_none=False)
 
-    base_url = Unicode(name='base_url', allow_none=False)
+    base_url = Unicode(name="base_url", allow_none=False)
 
-    copy_base_url = Unicode(name='copy_base_url', allow_none=False)
+    copy_base_url = Unicode(name="copy_base_url", allow_none=False)
 
 
 @implementer(IPublisherConfigSet)
 class PublisherConfigSet:
     """See `IPublisherConfigSet`."""
+
     title = "Soyuz Publisher Configurations"
 
     def new(self, distribution, root_dir, base_url, copy_base_url):
@@ -61,6 +55,11 @@ class PublisherConfigSet:
 
     def getByDistribution(self, distribution):
         """See `IArchiveAuthTokenSet`."""
-        return IStore(PublisherConfig).find(
-            PublisherConfig,
-            PublisherConfig.distribution_id == distribution.id).one()
+        return (
+            IStore(PublisherConfig)
+            .find(
+                PublisherConfig,
+                PublisherConfig.distribution_id == distribution.id,
+            )
+            .one()
+        )
diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
index 18f2f48..d1c2b28 100644
--- a/lib/lp/archivepublisher/publishing.py
+++ b/lib/lp/archivepublisher/publishing.py
@@ -2,45 +2,32 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'cannot_modify_suite',
-    'DirectoryHash',
-    'FORMAT_TO_SUBCOMPONENT',
-    'GLOBAL_PUBLISHER_LOCK',
-    'Publisher',
-    'getPublisher',
-    ]
+    "cannot_modify_suite",
+    "DirectoryHash",
+    "FORMAT_TO_SUBCOMPONENT",
+    "GLOBAL_PUBLISHER_LOCK",
+    "Publisher",
+    "getPublisher",
+]
 
 import bz2
-from collections import defaultdict
-from datetime import (
-    datetime,
-    timedelta,
-    )
 import errno
-from functools import partial
 import gzip
 import hashlib
-from itertools import (
-    chain,
-    groupby,
-    )
 import lzma
-from operator import attrgetter
 import os
 import re
 import shutil
+from collections import defaultdict
+from datetime import datetime, timedelta
+from functools import partial
+from itertools import chain, groupby
+from operator import attrgetter
 
-from debian.deb822 import (
-    _multivalued,
-    Release,
-    )
+from debian.deb822 import Release, _multivalued
 from storm.expr import Desc
 from zope.component import getUtility
-from zope.interface import (
-    Attribute,
-    implementer,
-    Interface,
-    )
+from zope.interface import Attribute, Interface, implementer
 
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.archivepublisher import HARDCODED_COMPONENT_ORDER
@@ -50,19 +37,13 @@ from lp.archivepublisher.indices import (
     build_binary_stanza_fields,
     build_source_stanza_fields,
     build_translations_stanza_fields,
-    )
+)
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
     ISignableArchive,
-    )
+)
 from lp.archivepublisher.model.ftparchive import FTPArchiveHandler
-from lp.archivepublisher.utils import (
-    get_ppa_reference,
-    RepositoryIndexFile,
-    )
-from lp.registry.interfaces.pocket import (
-    PackagePublishingPocket,
-    pocketsuffix,
-    )
+from lp.archivepublisher.utils import RepositoryIndexFile, get_ppa_reference
+from lp.registry.interfaces.pocket import PackagePublishingPocket, pocketsuffix
 from lp.registry.interfaces.series import SeriesStatus
 from lp.registry.model.distroseries import DistroSeries
 from lp.services.database.bulk import load
@@ -71,10 +52,7 @@ from lp.services.database.interfaces import IStore
 from lp.services.features import getFeatureFlag
 from lp.services.helpers import filenameToContentType
 from lp.services.librarian.client import LibrarianClient
-from lp.services.osutils import (
-    ensure_directory_exists,
-    open_for_writing,
-    )
+from lp.services.osutils import ensure_directory_exists, open_for_writing
 from lp.services.utils import file_exists
 from lp.soyuz.enums import (
     ArchivePublishingMethod,
@@ -82,32 +60,31 @@ from lp.soyuz.enums import (
     ArchiveStatus,
     BinaryPackageFormat,
     PackagePublishingStatus,
-    )
+)
 from lp.soyuz.interfaces.archive import NoSuchPPA
 from lp.soyuz.interfaces.archivefile import IArchiveFileSet
 from lp.soyuz.interfaces.publishing import (
-    active_publishing_status,
     IPublishingSet,
-    )
+    active_publishing_status,
+)
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
 from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
-
 # Use this as the lock file name for all scripts that may manipulate
 # archives in the filesystem.  In a Launchpad(Cron)Script, set
 # lockfilename to this value to make it use the shared lock.
-GLOBAL_PUBLISHER_LOCK = 'launchpad-publisher.lock'
+GLOBAL_PUBLISHER_LOCK = "launchpad-publisher.lock"
 
 
 FORMAT_TO_SUBCOMPONENT = {
-    BinaryPackageFormat.UDEB: 'debian-installer',
-    BinaryPackageFormat.DDEB: 'debug',
-    }
+    BinaryPackageFormat.UDEB: "debian-installer",
+    BinaryPackageFormat.DDEB: "debug",
+}
 
 
 # Number of days before unreferenced files are removed from by-hash.
@@ -133,19 +110,19 @@ def reorder_components(components):
 
 def remove_suffix(path):
     """Return `path` but with any compression suffix removed."""
-    if path.endswith('.gz'):
-        return path[:-len('.gz')]
-    elif path.endswith('.bz2'):
-        return path[:-len('.bz2')]
-    elif path.endswith('.xz'):
-        return path[:-len('.xz')]
+    if path.endswith(".gz"):
+        return path[: -len(".gz")]
+    elif path.endswith(".bz2"):
+        return path[: -len(".bz2")]
+    elif path.endswith(".xz"):
+        return path[: -len(".xz")]
     else:
         return path
 
 
 def get_suffixed_indices(path):
     """Return a set of paths to compressed copies of the given index."""
-    return {path + suffix for suffix in ('', '.gz', '.bz2', '.xz')}
+    return {path + suffix for suffix in ("", ".gz", ".bz2", ".xz")}
 
 
 def getPublisher(archive, allowed_suites, log, distsroot=None):
@@ -155,11 +132,12 @@ def getPublisher(archive, allowed_suites, log, distsroot=None):
     be stored via 'distroot' argument.
     """
     if archive.purpose != ArchivePurpose.PPA:
-        log.debug("Finding configuration for %s %s."
-                  % (archive.distribution.name, archive.displayname))
+        log.debug(
+            "Finding configuration for %s %s."
+            % (archive.distribution.name, archive.displayname)
+        )
     else:
-        log.debug("Finding configuration for '%s' PPA."
-                  % archive.owner.name)
+        log.debug("Finding configuration for '%s' PPA." % archive.owner.name)
     pubconf = getPubConfig(archive)
 
     disk_pool = pubconf.getDiskPool(log)
@@ -176,7 +154,8 @@ def getPublisher(archive, allowed_suites, log, distsroot=None):
 def get_sources_path(config, suite_name, component):
     """Return path to Sources file for the given arguments."""
     return os.path.join(
-        config.distsroot, suite_name, component.name, "source", "Sources")
+        config.distsroot, suite_name, component.name, "source", "Sources"
+    )
 
 
 def get_packages_path(config, suite_name, component, arch, subcomp=None):
@@ -191,13 +170,16 @@ def get_packages_path(config, suite_name, component, arch, subcomp=None):
 
 def cannot_modify_suite(archive, distroseries, pocket):
     """Return True for Release pockets of stable series in primary archives."""
-    return (not distroseries.isUnstable() and
-            not archive.allowUpdatesToReleasePocket() and
-            pocket == PackagePublishingPocket.RELEASE)
+    return (
+        not distroseries.isUnstable()
+        and not archive.allowUpdatesToReleasePocket()
+        and pocket == PackagePublishingPocket.RELEASE
+    )
 
 
 class I18nIndex(_multivalued):
     """Represents an i18n/Index file."""
+
     _multivalued_fields = {
         "sha1": ["sha1", "size", "name"],
     }
@@ -211,7 +193,7 @@ class I18nIndex(_multivalued):
         return fixed_field_lengths
 
     def _get_size_field_length(self, key):
-        return max(len(str(item['size'])) for item in self[key])
+        return max(len(str(item["size"])) for item in self[key])
 
 
 class IArchiveHash(Interface):
@@ -219,18 +201,24 @@ class IArchiveHash(Interface):
 
     hash_factory = Attribute("A hashlib class suitable for this algorithm.")
     deb822_name = Attribute(
-        "Algorithm name expected by debian.deb822.Release.")
+        "Algorithm name expected by debian.deb822.Release."
+    )
     apt_name = Attribute(
         "Algorithm name used by apt in Release files and by-hash "
-        "subdirectories.")
+        "subdirectories."
+    )
     lfc_name = Attribute(
-        "LibraryFileContent attribute name corresponding to this algorithm.")
+        "LibraryFileContent attribute name corresponding to this algorithm."
+    )
     dh_name = Attribute(
-        "Filename for use when checksumming directories with this algorithm.")
+        "Filename for use when checksumming directories with this algorithm."
+    )
     write_by_hash = Attribute(
-        "Whether to write by-hash subdirectories for this algorithm.")
+        "Whether to write by-hash subdirectories for this algorithm."
+    )
     write_directory_hash = Attribute(
-        "Whether to write *SUM files for this algorithm for directories.")
+        "Whether to write *SUM files for this algorithm for directories."
+    )
 
 
 @implementer(IArchiveHash)
@@ -270,7 +258,7 @@ archive_hashes = [
     MD5ArchiveHash(),
     SHA1ArchiveHash(),
     SHA256ArchiveHash(),
-    ]
+]
 
 
 class ByHash:
@@ -305,20 +293,25 @@ class ByHash:
         for archive_hash in reversed(self._usable_archive_hashes):
             digest = getattr(lfa.content, archive_hash.lfc_name)
             digest_path = os.path.join(
-                self.path, archive_hash.apt_name, digest)
+                self.path, archive_hash.apt_name, digest
+            )
             self.known_digests[archive_hash.apt_name][digest].add(name)
             if not os.path.lexists(digest_path):
                 self.log.debug(
-                    "by-hash: Creating %s for %s" % (digest_path, name))
+                    "by-hash: Creating %s for %s" % (digest_path, name)
+                )
                 ensure_directory_exists(os.path.dirname(digest_path))
                 if archive_hash != best_hash:
                     os.symlink(
                         os.path.join(
-                            os.pardir, best_hash.apt_name, best_digest),
-                        digest_path)
+                            os.pardir, best_hash.apt_name, best_digest
+                        ),
+                        digest_path,
+                    )
                 elif copy_from_path is not None:
                     os.link(
-                        os.path.join(self.root, copy_from_path), digest_path)
+                        os.path.join(self.root, copy_from_path), digest_path
+                    )
                 else:
                     with open(digest_path, "wb") as outfile:
                         lfa.open()
@@ -343,10 +336,13 @@ class ByHash:
             if os.path.exists(hash_path):
                 prune_hash_directory = True
                 for entry in list(os.scandir(hash_path)):
-                    if entry.name not in self.known_digests[
-                            archive_hash.apt_name]:
+                    if (
+                        entry.name
+                        not in self.known_digests[archive_hash.apt_name]
+                    ):
                         self.log.debug(
-                            "by-hash: Deleting unreferenced %s" % entry.path)
+                            "by-hash: Deleting unreferenced %s" % entry.path
+                        )
                         os.unlink(entry.path)
                     else:
                         prune_hash_directory = False
@@ -379,7 +375,8 @@ class ByHashes:
     def add(self, path, lfa, copy_from_path=None):
         dirpath, name = os.path.split(path)
         self.registerChild(dirpath).add(
-            name, lfa, copy_from_path=copy_from_path)
+            name, lfa, copy_from_path=copy_from_path
+        )
 
     def known(self, path, hashname, digest):
         dirpath, name = os.path.split(path)
@@ -397,8 +394,9 @@ class Publisher:
     the processing of each DistroSeries and DistroArchSeries in question
     """
 
-    def __init__(self, log, config, diskpool, archive, allowed_suites=None,
-                 library=None):
+    def __init__(
+        self, log, config, diskpool, archive, allowed_suites=None, library=None
+    ):
         """Initialize a publisher.
 
         Publishers need the pool root dir and a DiskPool object.
@@ -412,7 +410,8 @@ class Publisher:
         self.distro = archive.distribution
         self.archive = archive
         self.allowed_suites = (
-            None if allowed_suites is None else set(allowed_suites))
+            None if allowed_suites is None else set(allowed_suites)
+        )
 
         self._diskpool = diskpool
 
@@ -452,16 +451,18 @@ class Publisher:
 
         Otherwise, return False.
         """
-        return (not self.allowed_suites or
-                distroseries.getSuite(pocket) in self.allowed_suites)
+        return (
+            not self.allowed_suites
+            or distroseries.getSuite(pocket) in self.allowed_suites
+        )
 
     @property
     def subcomponents(self):
         subcomps = []
         if self.archive.purpose != ArchivePurpose.PARTNER:
-            subcomps.append('debian-installer')
+            subcomps.append("debian-installer")
         if self.archive.publish_debug_symbols:
-            subcomps.append('debug')
+            subcomps.append("debug")
         return subcomps
 
     @property
@@ -469,7 +470,7 @@ class Publisher:
         if self.archive.purpose in (
             ArchivePurpose.PRIMARY,
             ArchivePurpose.PARTNER,
-            ):
+        ):
             # For PRIMARY and PARTNER archives, skip OBSOLETE and FUTURE
             # series.  We will never want to publish anything in them, so it
             # isn't worth thinking about whether they have pending
@@ -477,10 +478,12 @@ class Publisher:
             return [
                 series
                 for series in self.distro.series
-                if series.status not in (
+                if series.status
+                not in (
                     SeriesStatus.OBSOLETE,
                     SeriesStatus.FUTURE,
-                    )]
+                )
+            ]
         else:
             # Other archives may have reasons to continue building at least
             # for OBSOLETE series.  For example, a PPA may be continuing to
@@ -505,24 +508,29 @@ class Publisher:
         clauses = [
             SourcePackagePublishingHistory.archive == self.archive,
             SourcePackagePublishingHistory.status.is_in(
-                active_publishing_status),
-            ]
+                active_publishing_status
+            ),
+        ]
         if not is_careful:
             clauses.append(
-                SourcePackagePublishingHistory.datepublished == None)
+                SourcePackagePublishingHistory.datepublished == None
+            )
 
         publications = IStore(SourcePackagePublishingHistory).find(
-            SourcePackagePublishingHistory, *clauses)
+            SourcePackagePublishingHistory, *clauses
+        )
         return publications.order_by(
             SourcePackagePublishingHistory.distroseriesID,
             SourcePackagePublishingHistory.pocket,
-            Desc(SourcePackagePublishingHistory.id))
+            Desc(SourcePackagePublishingHistory.id),
+        )
 
     def publishSources(self, distroseries, pocket, spphs):
         """Publish sources for a given distroseries and pocket."""
         self.log.debug(
-            "* Publishing pending sources for %s" %
-            distroseries.getSuite(pocket))
+            "* Publishing pending sources for %s"
+            % distroseries.getSuite(pocket)
+        )
         for spph in spphs:
             spph.publish(self._diskpool, self.log)
 
@@ -537,16 +545,21 @@ class Publisher:
         dirty_suites = set()
         all_spphs = self.getPendingSourcePublications(is_careful)
         for (distroseries, pocket), spphs in groupby(
-                all_spphs, attrgetter("distroseries", "pocket")):
+            all_spphs, attrgetter("distroseries", "pocket")
+        ):
             if not self.isAllowed(distroseries, pocket):
                 self.log.debug("* Skipping %s", distroseries.getSuite(pocket))
             elif not self.checkLegalPocket(distroseries, pocket, is_careful):
                 for spph in spphs:
                     self.log.error(
-                        "Tried to publish %s (%s) into %s (%s), skipping" % (
-                            spph.displayname, spph.id,
+                        "Tried to publish %s (%s) into %s (%s), skipping"
+                        % (
+                            spph.displayname,
+                            spph.id,
                             distroseries.getSuite(pocket),
-                            distroseries.status.name))
+                            distroseries.status.name,
+                        )
+                    )
             else:
                 self.publishSources(distroseries, pocket, spphs)
                 dirty_suites.add(distroseries.getSuite(pocket))
@@ -556,29 +569,36 @@ class Publisher:
         """Return the specific group of binary records to be published."""
         clauses = [
             BinaryPackagePublishingHistory.archive == self.archive,
-            BinaryPackagePublishingHistory.distroarchseriesID ==
-                DistroArchSeries.id,
+            BinaryPackagePublishingHistory.distroarchseriesID
+            == DistroArchSeries.id,
             BinaryPackagePublishingHistory.status.is_in(
-                active_publishing_status),
-            ]
+                active_publishing_status
+            ),
+        ]
         if not is_careful:
             clauses.append(
-                BinaryPackagePublishingHistory.datepublished == None)
+                BinaryPackagePublishingHistory.datepublished == None
+            )
 
         publications = IStore(BinaryPackagePublishingHistory).find(
-            BinaryPackagePublishingHistory, *clauses)
+            BinaryPackagePublishingHistory, *clauses
+        )
         return publications.order_by(
             DistroArchSeries.distroseriesID,
             BinaryPackagePublishingHistory.pocket,
             DistroArchSeries.architecturetag,
-            Desc(BinaryPackagePublishingHistory.id))
+            Desc(BinaryPackagePublishingHistory.id),
+        )
 
     def publishBinaries(self, distroarchseries, pocket, bpphs):
         """Publish binaries for a given distroarchseries and pocket."""
         self.log.debug(
-            "* Publishing pending binaries for %s/%s" % (
+            "* Publishing pending binaries for %s/%s"
+            % (
                 distroarchseries.distroseries.getSuite(pocket),
-                distroarchseries.architecturetag))
+                distroarchseries.architecturetag,
+            )
+        )
         for bpph in bpphs:
             bpph.publish(self._diskpool, self.log)
 
@@ -593,17 +613,22 @@ class Publisher:
         dirty_suites = set()
         all_bpphs = self.getPendingBinaryPublications(is_careful)
         for (distroarchseries, pocket), bpphs in groupby(
-                all_bpphs, attrgetter("distroarchseries", "pocket")):
+            all_bpphs, attrgetter("distroarchseries", "pocket")
+        ):
             distroseries = distroarchseries.distroseries
             if not self.isAllowed(distroseries, pocket):
                 pass  # Already logged by publishSources.
             elif not self.checkLegalPocket(distroseries, pocket, is_careful):
                 for bpph in bpphs:
                     self.log.error(
-                        "Tried to publish %s (%s) into %s (%s), skipping" % (
-                            bpph.displayname, bpph.id,
+                        "Tried to publish %s (%s) into %s (%s), skipping"
+                        % (
+                            bpph.displayname,
+                            bpph.id,
                             distroseries.getSuite(pocket),
-                            distroseries.status.name))
+                            distroseries.status.name,
+                        )
+                    )
             else:
                 self.publishBinaries(distroarchseries, pocket, bpphs)
                 dirty_suites.add(distroseries.getSuite(pocket))
@@ -620,9 +645,11 @@ class Publisher:
         self.log.debug("* Step A: Publishing packages")
 
         self.dirty_suites.update(
-            self.findAndPublishSources(is_careful=force_publishing))
+            self.findAndPublishSources(is_careful=force_publishing)
+        )
         self.dirty_suites.update(
-            self.findAndPublishBinaries(is_careful=force_publishing))
+            self.findAndPublishBinaries(is_careful=force_publishing)
+        )
 
     def A2_markPocketsWithDeletionsDirty(self):
         """An intermediate step in publishing to detect deleted packages.
@@ -640,7 +667,7 @@ class Publisher:
                 table.status == PackagePublishingStatus.DELETED,
                 table.scheduleddeletiondate == None,
                 table.dateremoved == None,
-                ]
+            ]
 
         # We need to get a set of suite names that have publications that
         # are waiting to be deleted.  Each suite name is added to the
@@ -649,29 +676,43 @@ class Publisher:
         # Make the source publications query.
         conditions = base_conditions(SourcePackagePublishingHistory)
         conditions.append(
-            SourcePackagePublishingHistory.distroseriesID == DistroSeries.id)
-        source_suites = IStore(SourcePackagePublishingHistory).find(
-            (DistroSeries, SourcePackagePublishingHistory.pocket),
-            *conditions).config(distinct=True).order_by(
-                DistroSeries.id, SourcePackagePublishingHistory.pocket)
+            SourcePackagePublishingHistory.distroseriesID == DistroSeries.id
+        )
+        source_suites = (
+            IStore(SourcePackagePublishingHistory)
+            .find(
+                (DistroSeries, SourcePackagePublishingHistory.pocket),
+                *conditions,
+            )
+            .config(distinct=True)
+            .order_by(DistroSeries.id, SourcePackagePublishingHistory.pocket)
+        )
 
         # Make the binary publications query.
         conditions = base_conditions(BinaryPackagePublishingHistory)
-        conditions.extend([
-            BinaryPackagePublishingHistory.distroarchseriesID ==
-                DistroArchSeries.id,
-            DistroArchSeries.distroseriesID == DistroSeries.id,
-            ])
-        binary_suites = IStore(BinaryPackagePublishingHistory).find(
-            (DistroSeries, BinaryPackagePublishingHistory.pocket),
-            *conditions).config(distinct=True).order_by(
-                DistroSeries.id, BinaryPackagePublishingHistory.pocket)
+        conditions.extend(
+            [
+                BinaryPackagePublishingHistory.distroarchseriesID
+                == DistroArchSeries.id,
+                DistroArchSeries.distroseriesID == DistroSeries.id,
+            ]
+        )
+        binary_suites = (
+            IStore(BinaryPackagePublishingHistory)
+            .find(
+                (DistroSeries, BinaryPackagePublishingHistory.pocket),
+                *conditions,
+            )
+            .config(distinct=True)
+            .order_by(DistroSeries.id, BinaryPackagePublishingHistory.pocket)
+        )
 
         for distroseries, pocket in chain(source_suites, binary_suites):
             if self.isDirty(distroseries, pocket):
                 continue
-            if (cannot_modify_suite(self.archive, distroseries, pocket)
-                or not self.isAllowed(distroseries, pocket)):
+            if cannot_modify_suite(
+                self.archive, distroseries, pocket
+            ) or not self.isAllowed(distroseries, pocket):
                 # We don't want to mark release pockets dirty in a
                 # stable distroseries, no matter what other bugs
                 # that precede here have dirtied it.
@@ -691,8 +732,10 @@ class Publisher:
                     continue
                 if not force_domination:
                     if not self.isDirty(distroseries, pocket):
-                        self.log.debug("Skipping domination for %s/%s" %
-                                   (distroseries.name, pocket.name))
+                        self.log.debug(
+                            "Skipping domination for %s/%s"
+                            % (distroseries.name, pocket.name)
+                        )
                         continue
                     self.checkDirtySuiteBeforePublishing(distroseries, pocket)
                 judgejudy.judgeAndDominate(distroseries, pocket)
@@ -700,9 +743,9 @@ class Publisher:
     def C_doFTPArchive(self, is_careful):
         """Does the ftp-archive step: generates Sources and Packages."""
         self.log.debug("* Step C: Set apt-ftparchive up and run it")
-        apt_handler = FTPArchiveHandler(self.log, self._config,
-                                        self._diskpool, self.distro,
-                                        self)
+        apt_handler = FTPArchiveHandler(
+            self.log, self._config, self._diskpool, self.distro, self
+        )
         apt_handler.run(is_careful)
 
     def C_writeIndexes(self, is_careful):
@@ -715,8 +758,10 @@ class Publisher:
             for pocket in self.archive.getPockets():
                 if not is_careful:
                     if not self.isDirty(distroseries, pocket):
-                        self.log.debug("Skipping index generation for %s/%s" %
-                                       (distroseries.name, pocket.name))
+                        self.log.debug(
+                            "Skipping index generation for %s/%s"
+                            % (distroseries.name, pocket.name)
+                        )
                         continue
                     self.checkDirtySuiteBeforePublishing(distroseries, pocket)
 
@@ -725,7 +770,8 @@ class Publisher:
                 components = self.archive.getComponentsForSeries(distroseries)
                 for component in components:
                     self._writeComponentIndexes(
-                        distroseries, pocket, component)
+                        distroseries, pocket, component
+                    )
 
     def C_updateArtifactoryProperties(self, is_careful):
         """Update Artifactory properties to match our database."""
@@ -744,19 +790,22 @@ class Publisher:
         spphs_by_spr = defaultdict(list)
         bpphs_by_bpr = defaultdict(list)
         for spph in publishing_set.getSourcesForPublishing(
-                archive=self.archive):
+            archive=self.archive
+        ):
             spphs_by_spr[spph.sourcepackagereleaseID].append(spph)
             release_id = "source:%d" % spph.sourcepackagereleaseID
             releases_by_id.setdefault(release_id, spph.sourcepackagerelease)
             pubs_by_id[release_id].append(spph)
         for bpph in publishing_set.getBinariesForPublishing(
-                archive=self.archive):
+            archive=self.archive
+        ):
             bpphs_by_bpr[bpph.binarypackagereleaseID].append(bpph)
             release_id = "binary:%d" % bpph.binarypackagereleaseID
             releases_by_id.setdefault(release_id, bpph.binarypackagerelease)
             pubs_by_id[release_id].append(bpph)
         artifacts = self._diskpool.getAllArtifacts(
-            self.archive.name, self.archive.repository_format)
+            self.archive.name, self.archive.repository_format
+        )
 
         plan = []
         for path, properties in sorted(artifacts.items()):
@@ -767,7 +816,8 @@ class Publisher:
                 # Skip any files that Launchpad didn't put in Artifactory.
                 continue
             plan.append(
-                (source_name[0], source_version[0], release_id[0], properties))
+                (source_name[0], source_version[0], release_id[0], properties)
+            )
 
         # Releases that have been removed may still have corresponding
         # artifacts but no corresponding publishing history rows.  Bulk-load
@@ -792,9 +842,12 @@ class Publisher:
 
         for source_name, source_version, release_id, properties in plan:
             self._diskpool.updateProperties(
-                source_name, source_version,
+                source_name,
+                source_version,
                 releases_by_id[release_id].files[0],
-                pubs_by_id.get(release_id), old_properties=properties)
+                pubs_by_id.get(release_id),
+                old_properties=properties,
+            )
 
     def D_writeReleaseFiles(self, is_careful):
         """Write out the Release files for the provided distribution.
@@ -807,9 +860,11 @@ class Publisher:
 
         archive_file_suites = set()
         for container in getUtility(IArchiveFileSet).getContainersToReap(
-                self.archive, container_prefix="release:"):
+            self.archive, container_prefix="release:"
+        ):
             distroseries, pocket = self.distro.getDistroSeriesAndPocket(
-                container[len("release:"):])
+                container[len("release:") :]
+            )
             archive_file_suites.add(distroseries.getSuite(pocket))
 
         for distroseries in self.distro:
@@ -832,24 +887,30 @@ class Publisher:
                 write_release = suite in self.release_files_needed
                 if not is_careful:
                     if not self.isDirty(distroseries, pocket):
-                        self.log.debug("Skipping release files for %s/%s" %
-                                       (distroseries.name, pocket.name))
+                        self.log.debug(
+                            "Skipping release files for %s/%s"
+                            % (distroseries.name, pocket.name)
+                        )
                         write_release = False
                     else:
                         self.checkDirtySuiteBeforePublishing(
-                            distroseries, pocket)
+                            distroseries, pocket
+                        )
 
                 if write_release:
                     self._writeSuite(distroseries, pocket)
-                elif (suite in archive_file_suites and
-                      distroseries.publish_by_hash):
+                elif (
+                    suite in archive_file_suites
+                    and distroseries.publish_by_hash
+                ):
                     # We aren't publishing a new Release file for this
                     # suite, probably because it's immutable, but we still
                     # need to prune by-hash files from it.
                     extra_by_hash_files = {
                         filename: filename
                         for filename in ("Release", "Release.gpg", "InRelease")
-                        if file_exists(os.path.join(suite_path, filename))}
+                        if file_exists(os.path.join(suite_path, filename))
+                    }
                     self._updateByHash(suite, "Release", extra_by_hash_files)
 
     def _allIndexFiles(self, distroseries):
@@ -863,16 +924,18 @@ class Publisher:
             suite_name = distroseries.getSuite(pocket)
             for component in components:
                 yield gzip.open, get_sources_path(
-                    self._config, suite_name, component) + ".gz"
+                    self._config, suite_name, component
+                ) + ".gz"
                 for arch in distroseries.architectures:
                     if not arch.enabled:
                         continue
                     yield gzip.open, get_packages_path(
-                        self._config, suite_name, component, arch) + ".gz"
+                        self._config, suite_name, component, arch
+                    ) + ".gz"
                     for subcomp in self.subcomponents:
                         yield gzip.open, get_packages_path(
-                            self._config, suite_name, component, arch,
-                            subcomp) + ".gz"
+                            self._config, suite_name, component, arch, subcomp
+                        ) + ".gz"
 
     def _latestNonEmptySeries(self):
         """Find the latest non-empty series in an archive.
@@ -918,18 +981,21 @@ class Publisher:
                 alias_suite = "%s%s" % (alias, pocketsuffix[pocket])
                 current_suite = current.getSuite(pocket)
                 current_suite_path = os.path.join(
-                    self._config.distsroot, current_suite)
+                    self._config.distsroot, current_suite
+                )
                 if not os.path.isdir(current_suite_path):
                     continue
                 alias_suite_path = os.path.join(
-                    self._config.distsroot, alias_suite)
+                    self._config.distsroot, alias_suite
+                )
                 if os.path.islink(alias_suite_path):
                     if os.readlink(alias_suite_path) == current_suite:
                         continue
                 elif os.path.isdir(alias_suite_path):
                     # Perhaps somebody did something misguided ...
                     self.log.warning(
-                        "Alias suite path %s is a directory!" % alias_suite)
+                        "Alias suite path %s is a directory!" % alias_suite
+                    )
                     continue
                 try:
                     os.unlink(alias_suite_path)
@@ -946,15 +1012,17 @@ class Publisher:
         and Sources.lp .
         """
         suite_name = distroseries.getSuite(pocket)
-        self.log.debug("Generate Indexes for %s/%s"
-                       % (suite_name, component.name))
+        self.log.debug(
+            "Generate Indexes for %s/%s" % (suite_name, component.name)
+        )
 
         self.log.debug("Generating Sources")
 
         separate_long_descriptions = False
         # Must match DdtpTarballUpload.shouldInstall.
-        if (not distroseries.include_long_descriptions and
-                getFeatureFlag("soyuz.ppa.separate_long_descriptions")):
+        if not distroseries.include_long_descriptions and getFeatureFlag(
+            "soyuz.ppa.separate_long_descriptions"
+        ):
             # If include_long_descriptions is False and the feature flag is
             # enabled, create a Translation-en file.
             # build_binary_stanza_fields will also omit long descriptions
@@ -962,20 +1030,33 @@ class Publisher:
             separate_long_descriptions = True
             packages = set()
             translation_en = RepositoryIndexFile(
-                os.path.join(self._config.distsroot, suite_name,
-                             component.name, "i18n", "Translation-en"),
-                self._config.temproot, distroseries.index_compressors)
+                os.path.join(
+                    self._config.distsroot,
+                    suite_name,
+                    component.name,
+                    "i18n",
+                    "Translation-en",
+                ),
+                self._config.temproot,
+                distroseries.index_compressors,
+            )
 
         source_index = RepositoryIndexFile(
             get_sources_path(self._config, suite_name, component),
-            self._config.temproot, distroseries.index_compressors)
+            self._config.temproot,
+            distroseries.index_compressors,
+        )
 
         for spp in getUtility(IPublishingSet).getSourcesForPublishing(
-                archive=self.archive, distroseries=distroseries, pocket=pocket,
-                component=component):
+            archive=self.archive,
+            distroseries=distroseries,
+            pocket=pocket,
+            component=component,
+        ):
             stanza = build_source_stanza_fields(
-                spp.sourcepackagerelease, spp.component, spp.section)
-            source_index.write(stanza.makeOutput().encode('utf-8') + b'\n\n')
+                spp.sourcepackagerelease, spp.component, spp.section
+            )
+            source_index.write(stanza.makeOutput().encode("utf-8") + b"\n\n")
 
         source_index.close()
 
@@ -983,37 +1064,51 @@ class Publisher:
             if not arch.enabled:
                 continue
 
-            arch_path = 'binary-%s' % arch.architecturetag
+            arch_path = "binary-%s" % arch.architecturetag
 
             self.log.debug("Generating Packages for %s" % arch_path)
 
             indices = {}
             indices[None] = RepositoryIndexFile(
                 get_packages_path(self._config, suite_name, component, arch),
-                self._config.temproot, distroseries.index_compressors)
+                self._config.temproot,
+                distroseries.index_compressors,
+            )
 
             for subcomp in self.subcomponents:
                 indices[subcomp] = RepositoryIndexFile(
                     get_packages_path(
-                        self._config, suite_name, component, arch, subcomp),
-                    self._config.temproot, distroseries.index_compressors)
+                        self._config, suite_name, component, arch, subcomp
+                    ),
+                    self._config.temproot,
+                    distroseries.index_compressors,
+                )
 
             for bpp in getUtility(IPublishingSet).getBinariesForPublishing(
-                    archive=self.archive, distroarchseries=arch, pocket=pocket,
-                    component=component):
+                archive=self.archive,
+                distroarchseries=arch,
+                pocket=pocket,
+                component=component,
+            ):
                 subcomp = FORMAT_TO_SUBCOMPONENT.get(
-                    bpp.binarypackagerelease.binpackageformat)
+                    bpp.binarypackagerelease.binpackageformat
+                )
                 if subcomp not in indices:
                     # Skip anything that we're not generating indices
                     # for, eg. ddebs where publish_debug_symbols is
                     # disabled.
                     continue
                 stanza = build_binary_stanza_fields(
-                    bpp.binarypackagerelease, bpp.component, bpp.section,
-                    bpp.priority, bpp.phased_update_percentage,
-                    separate_long_descriptions)
+                    bpp.binarypackagerelease,
+                    bpp.component,
+                    bpp.section,
+                    bpp.priority,
+                    bpp.phased_update_percentage,
+                    separate_long_descriptions,
+                )
                 indices[subcomp].write(
-                    stanza.makeOutput().encode('utf-8') + b'\n\n')
+                    stanza.makeOutput().encode("utf-8") + b"\n\n"
+                )
                 if separate_long_descriptions:
                     # If the (Package, Description-md5) pair already exists
                     # in the set, build_translations_stanza_fields will
@@ -1021,11 +1116,13 @@ class Publisher:
                     # the set and return a stanza to be written to
                     # Translation-en.
                     translation_stanza = build_translations_stanza_fields(
-                        bpp.binarypackagerelease, packages)
+                        bpp.binarypackagerelease, packages
+                    )
                     if translation_stanza is not None:
                         translation_en.write(
-                            translation_stanza.makeOutput().encode('utf-8')
-                            + b'\n\n')
+                            translation_stanza.makeOutput().encode("utf-8")
+                            + b"\n\n"
+                        )
 
             for index in indices.values():
                 index.close()
@@ -1042,7 +1139,8 @@ class Publisher:
         """
         if cannot_modify_suite(self.archive, distroseries, pocket):
             raise AssertionError(
-                "Oops, tainting RELEASE pocket of %s." % distroseries)
+                "Oops, tainting RELEASE pocket of %s." % distroseries
+            )
 
     def _getLabel(self):
         """Return the contents of the Release file Label field.
@@ -1087,42 +1185,52 @@ class Publisher:
         extra_data = {}
         for filename, real_filename in extra_files.items():
             hashes = self._readIndexFileHashes(
-                suite, filename, real_file_name=real_filename)
+                suite, filename, real_file_name=real_filename
+            )
             if hashes is None:
                 continue
             for archive_hash in archive_hashes:
                 extra_data.setdefault(archive_hash.apt_name, []).append(
-                    hashes[archive_hash.deb822_name])
+                    hashes[archive_hash.deb822_name]
+                )
 
         release_path = os.path.join(
-            self._config.distsroot, suite, release_file_name)
+            self._config.distsroot, suite, release_file_name
+        )
         with open(release_path) as release_file:
             release_data = Release(release_file)
         archive_file_set = getUtility(IArchiveFileSet)
         by_hashes = ByHashes(self._config.distsroot, self.log)
         suite_dir = os.path.relpath(
-            os.path.join(self._config.distsroot, suite),
-            self._config.distsroot)
+            os.path.join(self._config.distsroot, suite), self._config.distsroot
+        )
         container = "release:%s" % suite
 
         def strip_dists(path):
             assert path.startswith("dists/")
-            return path[len("dists/"):]
+            return path[len("dists/") :]
 
         # Gather information on entries in the current Release file, and
         # make sure nothing there is condemned.
         current_files = {}
-        for current_entry in (
-                release_data["SHA256"] + extra_data.get("SHA256", [])):
+        for current_entry in release_data["SHA256"] + extra_data.get(
+            "SHA256", []
+        ):
             path = os.path.join(suite_dir, current_entry["name"])
             real_name = current_entry.get("real_name", current_entry["name"])
             real_path = os.path.join(suite_dir, real_name)
             current_files[path] = (
-                int(current_entry["size"]), current_entry["sha256"], real_path)
+                int(current_entry["size"]),
+                current_entry["sha256"],
+                real_path,
+            )
         uncondemned_files = set()
         for db_file in archive_file_set.getByArchive(
-                self.archive, container=container, only_condemned=True,
-                eager_load=True):
+            self.archive,
+            container=container,
+            only_condemned=True,
+            eager_load=True,
+        ):
             stripped_path = strip_dists(db_file.path)
             if stripped_path in current_files:
                 current_sha256 = current_files[stripped_path][1]
@@ -1130,26 +1238,32 @@ class Publisher:
                     uncondemned_files.add(db_file)
         if uncondemned_files:
             for container, path, sha256 in archive_file_set.unscheduleDeletion(
-                    uncondemned_files):
+                uncondemned_files
+            ):
                 self.log.debug(
-                    "by-hash: Unscheduled %s for %s in %s for deletion" % (
-                        sha256, path, container))
+                    "by-hash: Unscheduled %s for %s in %s for deletion"
+                    % (sha256, path, container)
+                )
 
         # Remove any condemned files from the database whose stay of
         # execution has elapsed.  We ensure that we know about all the
         # relevant by-hash directory trees before doing any removals so that
         # we can prune them properly later.
         for db_file in archive_file_set.getByArchive(
-                self.archive, container=container):
+            self.archive, container=container
+        ):
             by_hashes.registerChild(os.path.dirname(strip_dists(db_file.path)))
         for container, path, sha256 in archive_file_set.reap(
-                self.archive, container=container):
+            self.archive, container=container
+        ):
             self.log.debug(
-                "by-hash: Deleted %s for %s in %s" % (sha256, path, container))
+                "by-hash: Deleted %s for %s in %s" % (sha256, path, container)
+            )
 
         # Ensure that all files recorded in the database are in by-hash.
         db_files = archive_file_set.getByArchive(
-            self.archive, container=container, eager_load=True)
+            self.archive, container=container, eager_load=True
+        )
         for db_file in db_files:
             by_hashes.add(strip_dists(db_file.path), db_file.library_file)
 
@@ -1167,11 +1281,12 @@ class Publisher:
                     condemned_files.add(db_file)
         if condemned_files:
             for container, path, sha256 in archive_file_set.scheduleDeletion(
-                    condemned_files,
-                    timedelta(days=BY_HASH_STAY_OF_EXECUTION)):
+                condemned_files, timedelta(days=BY_HASH_STAY_OF_EXECUTION)
+            ):
                 self.log.debug(
-                    "by-hash: Scheduled %s for %s in %s for deletion" % (
-                        sha256, path, container))
+                    "by-hash: Scheduled %s for %s in %s for deletion"
+                    % (sha256, path, container)
+                )
 
         # Ensure that all the current index files are in by-hash and have
         # corresponding database entries.
@@ -1180,14 +1295,21 @@ class Publisher:
         # librarian client has no bulk upload methods.
         for path, (size, sha256, real_path) in current_files.items():
             full_path = os.path.join(self._config.distsroot, real_path)
-            if (os.path.exists(full_path) and
-                    not by_hashes.known(path, "SHA256", sha256)):
+            if os.path.exists(full_path) and not by_hashes.known(
+                path, "SHA256", sha256
+            ):
                 with open(full_path, "rb") as fileobj:
                     db_file = archive_file_set.newFromFile(
-                        self.archive, container, os.path.join("dists", path),
-                        fileobj, size, filenameToContentType(path))
+                        self.archive,
+                        container,
+                        os.path.join("dists", path),
+                        fileobj,
+                        size,
+                        filenameToContentType(path),
+                    )
                 by_hashes.add(
-                    path, db_file.library_file, copy_from_path=real_path)
+                    path, db_file.library_file, copy_from_path=real_path
+                )
 
         # Finally, remove any files from disk that aren't recorded in the
         # database and aren't active.
@@ -1202,7 +1324,8 @@ class Publisher:
             to the filesystem.
         """
         release_path = os.path.join(
-            self._config.distsroot, suite, "Release.new")
+            self._config.distsroot, suite, "Release.new"
+        )
         with open_for_writing(release_path, "wb") as release_file:
             release_data.dump(release_file, "utf-8")
 
@@ -1221,10 +1344,12 @@ class Publisher:
         suite = distroseries.getSuite(pocket)
         suite_dir = os.path.join(self._config.distsroot, suite)
         all_components = [
-            comp.name for comp in
-            self.archive.getComponentsForSeries(distroseries)]
+            comp.name
+            for comp in self.archive.getComponentsForSeries(distroseries)
+        ]
         all_architectures = [
-            a.architecturetag for a in distroseries.enabled_architectures]
+            a.architecturetag for a in distroseries.enabled_architectures
+        ]
         # Core files are those that are normally updated when a suite
         # changes, and which therefore receive special treatment with
         # caching headers on mirrors.
@@ -1239,20 +1364,21 @@ class Publisher:
         # still want to include them in by-hash directories.
         extra_by_hash_files = {}
         for component in all_components:
-            self._writeSuiteSource(
-                distroseries, pocket, component, core_files)
+            self._writeSuiteSource(distroseries, pocket, component, core_files)
             for architecture in all_architectures:
                 self._writeSuiteArch(
-                    distroseries, pocket, component, architecture, core_files)
-            self._writeSuiteI18n(
-                distroseries, pocket, component, core_files)
+                    distroseries, pocket, component, architecture, core_files
+                )
+            self._writeSuiteI18n(distroseries, pocket, component, core_files)
             dep11_dir = os.path.join(suite_dir, component, "dep11")
             try:
                 for entry in os.scandir(dep11_dir):
-                    if (entry.name.startswith("Components-") or
-                            entry.name.startswith("icons-")):
+                    if entry.name.startswith(
+                        "Components-"
+                    ) or entry.name.startswith("icons-"):
                         dep11_path = os.path.join(
-                            component, "dep11", entry.name)
+                            component, "dep11", entry.name
+                        )
                         extra_files.add(remove_suffix(dep11_path))
                         extra_files.add(dep11_path)
             except OSError as e:
@@ -1262,8 +1388,7 @@ class Publisher:
             try:
                 for cnf_file in os.listdir(cnf_dir):
                     if cnf_file.startswith("Commands-"):
-                        cnf_path = os.path.join(
-                            component, "cnf", cnf_file)
+                        cnf_path = os.path.join(component, "cnf", cnf_file)
                         extra_files.add(remove_suffix(cnf_path))
                         extra_files.add(cnf_path)
             except OSError as e:
@@ -1271,14 +1396,17 @@ class Publisher:
                     raise
         for architecture in all_architectures:
             for contents_path in get_suffixed_indices(
-                    'Contents-' + architecture):
+                "Contents-" + architecture
+            ):
                 if os.path.exists(os.path.join(suite_dir, contents_path)):
                     extra_files.add(remove_suffix(contents_path))
                     extra_files.add(contents_path)
         all_files = core_files | extra_files
 
-        drsummary = "%s %s " % (self.distro.displayname,
-                                distroseries.displayname)
+        drsummary = "%s %s " % (
+            self.distro.displayname,
+            distroseries.displayname,
+        )
         if pocket == PackagePublishingPocket.RELEASE:
             drsummary += distroseries.version
         else:
@@ -1292,15 +1420,20 @@ class Publisher:
         release_file["Version"] = distroseries.version
         release_file["Codename"] = distroseries.name
         release_file["Date"] = datetime.utcnow().strftime(
-            "%a, %d %b %Y %k:%M:%S UTC")
+            "%a, %d %b %Y %k:%M:%S UTC"
+        )
         release_file["Architectures"] = " ".join(sorted(all_architectures))
         release_file["Components"] = " ".join(
-            reorder_components(all_components))
+            reorder_components(all_components)
+        )
         release_file["Description"] = drsummary
-        if ((pocket == PackagePublishingPocket.BACKPORTS and
-             distroseries.backports_not_automatic) or
-            (pocket == PackagePublishingPocket.PROPOSED and
-             distroseries.proposed_not_automatic)):
+        if (
+            pocket == PackagePublishingPocket.BACKPORTS
+            and distroseries.backports_not_automatic
+        ) or (
+            pocket == PackagePublishingPocket.PROPOSED
+            and distroseries.proposed_not_automatic
+        ):
             release_file["NotAutomatic"] = "yes"
             release_file["ButAutomaticUpgrades"] = "yes"
 
@@ -1310,7 +1443,8 @@ class Publisher:
                 continue
             for archive_hash in archive_hashes:
                 release_file.setdefault(archive_hash.apt_name, []).append(
-                    hashes[archive_hash.deb822_name])
+                    hashes[archive_hash.deb822_name]
+                )
 
         if distroseries.publish_by_hash and distroseries.advertise_by_hash:
             release_file["Acquire-By-Hash"] = "yes"
@@ -1324,7 +1458,8 @@ class Publisher:
             # Sign the repository.
             self.log.debug("Signing Release file for %s" % suite)
             for signed_name in signable_archive.signRepository(
-                    suite, pubconf=self._config, suffix=".new", log=self.log):
+                suite, pubconf=self._config, suffix=".new", log=self.log
+            ):
                 core_files.add(signed_name)
                 extra_by_hash_files[signed_name] = signed_name + ".new"
         else:
@@ -1338,22 +1473,31 @@ class Publisher:
             if name in core_files:
                 os.rename(
                     os.path.join(suite_dir, "%s.new" % name),
-                    os.path.join(suite_dir, name))
+                    os.path.join(suite_dir, name),
+                )
 
         # Make sure all the timestamps match, to make it easier to insert
         # caching headers on mirrors.
         self._syncTimestamps(suite, core_files)
 
-    def _writeSuiteArchOrSource(self, distroseries, pocket, component,
-                                file_stub, arch_name, arch_path,
-                                all_series_files):
+    def _writeSuiteArchOrSource(
+        self,
+        distroseries,
+        pocket,
+        component,
+        file_stub,
+        arch_name,
+        arch_path,
+        all_series_files,
+    ):
         """Write out a Release file for an architecture or source."""
         # XXX kiko 2006-08-24: Untested method.
 
         suite = distroseries.getSuite(pocket)
         suite_dir = os.path.join(self._config.distsroot, suite)
-        self.log.debug("Writing Release file for %s/%s/%s" % (
-            suite, component, arch_path))
+        self.log.debug(
+            "Writing Release file for %s/%s/%s" % (suite, component, arch_path)
+        )
 
         # Now, grab the actual (non-di) files inside each of
         # the suite's architectures
@@ -1377,21 +1521,29 @@ class Publisher:
         with open_for_writing(release_path, "wb") as f:
             release_file.dump(f, "utf-8")
 
-    def _writeSuiteSource(self, distroseries, pocket, component,
-                          all_series_files):
+    def _writeSuiteSource(
+        self, distroseries, pocket, component, all_series_files
+    ):
         """Write out a Release file for a suite's sources."""
         self._writeSuiteArchOrSource(
-            distroseries, pocket, component, 'Sources', 'source', 'source',
-            all_series_files)
-
-    def _writeSuiteArch(self, distroseries, pocket, component,
-                        arch_name, all_series_files):
+            distroseries,
+            pocket,
+            component,
+            "Sources",
+            "source",
+            "source",
+            all_series_files,
+        )
+
+    def _writeSuiteArch(
+        self, distroseries, pocket, component, arch_name, all_series_files
+    ):
         """Write out a Release file for an architecture in a suite."""
         suite = distroseries.getSuite(pocket)
         suite_dir = os.path.join(self._config.distsroot, suite)
 
-        file_stub = 'Packages'
-        arch_path = 'binary-' + arch_name
+        file_stub = "Packages"
+        arch_path = "binary-" + arch_name
 
         for subcomp in self.subcomponents:
             # Set up the subcomponent paths.
@@ -1402,22 +1554,30 @@ class Publisher:
                     all_series_files.add(remove_suffix(path))
                     all_series_files.add(path)
         self._writeSuiteArchOrSource(
-            distroseries, pocket, component, 'Packages', arch_name, arch_path,
-            all_series_files)
-
-    def _writeSuiteI18n(self, distroseries, pocket, component,
-                        all_series_files):
+            distroseries,
+            pocket,
+            component,
+            "Packages",
+            arch_name,
+            arch_path,
+            all_series_files,
+        )
+
+    def _writeSuiteI18n(
+        self, distroseries, pocket, component, all_series_files
+    ):
         """Write out an Index file for translation files in a suite."""
         suite = distroseries.getSuite(pocket)
-        self.log.debug("Writing Index file for %s/%s/i18n" % (
-            suite, component))
+        self.log.debug(
+            "Writing Index file for %s/%s/i18n" % (suite, component)
+        )
 
         i18n_subpath = os.path.join(component, "i18n")
         i18n_dir = os.path.join(self._config.distsroot, suite, i18n_subpath)
         i18n_files = set()
         try:
             for entry in os.scandir(i18n_dir):
-                if not entry.name.startswith('Translation-'):
+                if not entry.name.startswith("Translation-"):
                     continue
                 i18n_files.add(remove_suffix(entry.name))
                 i18n_files.add(entry.name)
@@ -1432,7 +1592,8 @@ class Publisher:
         i18n_index = I18nIndex()
         for i18n_file in sorted(i18n_files):
             hashes = self._readIndexFileHashes(
-                suite, i18n_file, subpath=i18n_subpath)
+                suite, i18n_file, subpath=i18n_subpath
+            )
             if hashes is None:
                 continue
             i18n_index.setdefault("SHA1", []).append(hashes["sha1"])
@@ -1445,8 +1606,9 @@ class Publisher:
         # Schedule this for inclusion in the Release file.
         all_series_files.add(os.path.join(component, "i18n", "Index"))
 
-    def _readIndexFileHashes(self, suite, file_name, subpath=None,
-                             real_file_name=None):
+    def _readIndexFileHashes(
+        self, suite, file_name, subpath=None, real_file_name=None
+    ):
         """Read an index file and return its hashes.
 
         :param suite: Suite name.
@@ -1464,20 +1626,23 @@ class Publisher:
             {"md5sum": {"md5sum": ..., "size": ..., "name": ...}}), or None
             if the file could not be found.
         """
-        open_func = partial(open, mode='rb')
+        open_func = partial(open, mode="rb")
         full_name = os.path.join(
-            self._config.distsroot, suite, subpath or '.',
-            real_file_name or file_name)
+            self._config.distsroot,
+            suite,
+            subpath or ".",
+            real_file_name or file_name,
+        )
         if not os.path.exists(full_name):
-            if os.path.exists(full_name + '.gz'):
+            if os.path.exists(full_name + ".gz"):
                 open_func = gzip.open
-                full_name = full_name + '.gz'
-            elif os.path.exists(full_name + '.bz2'):
+                full_name = full_name + ".gz"
+            elif os.path.exists(full_name + ".bz2"):
                 open_func = bz2.BZ2File
-                full_name = full_name + '.bz2'
-            elif os.path.exists(full_name + '.xz'):
+                full_name = full_name + ".bz2"
+            elif os.path.exists(full_name + ".xz"):
                 open_func = partial(lzma.LZMAFile, format=lzma.FORMAT_XZ)
-                full_name = full_name + '.xz'
+                full_name = full_name + ".xz"
             else:
                 # The file we were asked to write out doesn't exist.
                 # Most likely we have an incomplete archive (e.g. no sources
@@ -1487,7 +1652,8 @@ class Publisher:
 
         hashes = {
             archive_hash.deb822_name: archive_hash.hash_factory()
-            for archive_hash in archive_hashes}
+            for archive_hash in archive_hashes
+        }
         size = 0
         with open_func(full_name) as in_file:
             for chunk in iter(lambda: in_file.read(256 * 1024), b""):
@@ -1514,27 +1680,38 @@ class Publisher:
         assert self.archive.is_ppa
         if self.archive.publishing_method != ArchivePublishingMethod.LOCAL:
             raise NotImplementedError(
-                "Don't know how to delete archives published using %s" %
-                self.archive.publishing_method.title)
+                "Don't know how to delete archives published using %s"
+                % self.archive.publishing_method.title
+            )
         self.log.info(
-            "Attempting to delete archive '%s/%s' at '%s'." % (
-                self.archive.owner.name, self.archive.name,
-                self._config.archiveroot))
+            "Attempting to delete archive '%s/%s' at '%s'."
+            % (
+                self.archive.owner.name,
+                self.archive.name,
+                self._config.archiveroot,
+            )
+        )
 
         # Set all the publications to DELETED.
         sources = self.archive.getPublishedSources(
-            status=active_publishing_status)
+            status=active_publishing_status
+        )
         getUtility(IPublishingSet).requestDeletion(
-            sources, removed_by=getUtility(ILaunchpadCelebrities).janitor,
-            removal_comment="Removed when deleting archive")
+            sources,
+            removed_by=getUtility(ILaunchpadCelebrities).janitor,
+            removal_comment="Removed when deleting archive",
+        )
 
         # Deleting the sources will have killed the corresponding
         # binaries too, but there may be orphaned leftovers (eg. NBS).
         binaries = self.archive.getAllPublishedBinaries(
-            status=active_publishing_status)
+            status=active_publishing_status
+        )
         getUtility(IPublishingSet).requestDeletion(
-            binaries, removed_by=getUtility(ILaunchpadCelebrities).janitor,
-            removal_comment="Removed when deleting archive")
+            binaries,
+            removed_by=getUtility(ILaunchpadCelebrities).janitor,
+            removal_comment="Removed when deleting archive",
+        )
 
         # Now set dateremoved on any publication that doesn't already
         # have it set, so things can expire from the librarian.
@@ -1551,24 +1728,30 @@ class Publisher:
             except (shutil.Error, OSError) as e:
                 self.log.warning(
                     "Failed to delete directory '%s' for archive "
-                    "'%s/%s'\n%s" % (
-                    directory, self.archive.owner.name,
-                    self.archive.name, e))
+                    "'%s/%s'\n%s"
+                    % (
+                        directory,
+                        self.archive.owner.name,
+                        self.archive.name,
+                        e,
+                    )
+                )
 
         self.archive.status = ArchiveStatus.DELETED
         self.archive.publish = False
 
         # Now that it's gone from disk we can rename the archive to free
         # up the namespace.
-        new_name = base_name = '%s-deletedppa' % self.archive.name
+        new_name = base_name = "%s-deletedppa" % self.archive.name
         count = 1
         while True:
             try:
                 self.archive.owner.getPPAByName(
-                    self.archive.distribution, new_name)
+                    self.archive.distribution, new_name
+                )
             except NoSuchPPA:
                 break
-            new_name = '%s%d' % (base_name, count)
+            new_name = "%s%d" % (base_name, count)
             count += 1
         self.archive.name = new_name
         self.log.info("Renamed deleted archive '%s'.", self.archive.reference)
@@ -1584,8 +1767,13 @@ class DirectoryHash:
 
         for usable in self._usable_archive_hashes:
             csum_path = os.path.join(self.root, usable.dh_name)
-            self.checksum_hash.append((csum_path,
-                RepositoryIndexFile(csum_path, self.tmpdir), usable))
+            self.checksum_hash.append(
+                (
+                    csum_path,
+                    RepositoryIndexFile(csum_path, self.tmpdir),
+                    usable,
+                )
+            )
 
     def __enter__(self):
         return self
@@ -1608,15 +1796,18 @@ class DirectoryHash:
         """Add a path to be checksummed."""
         hashes = [
             (checksum_file, archive_hash.hash_factory())
-            for (_, checksum_file, archive_hash) in self.checksum_hash]
-        with open(path, 'rb') as in_file:
+            for (_, checksum_file, archive_hash) in self.checksum_hash
+        ]
+        with open(path, "rb") as in_file:
             for chunk in iter(lambda: in_file.read(256 * 1024), b""):
                 for (checksum_file, hashobj) in hashes:
                     hashobj.update(chunk)
 
         for (checksum_file, hashobj) in hashes:
             checksum_line = "%s *%s\n" % (
-                hashobj.hexdigest(), path[len(self.root) + 1:])
+                hashobj.hexdigest(),
+                path[len(self.root) + 1 :],
+            )
             checksum_file.write(checksum_line.encode("UTF-8"))
 
     def add_dir(self, path):
diff --git a/lib/lp/archivepublisher/rosetta_translations.py b/lib/lp/archivepublisher/rosetta_translations.py
index 3673e05..db3f094 100644
--- a/lib/lp/archivepublisher/rosetta_translations.py
+++ b/lib/lp/archivepublisher/rosetta_translations.py
@@ -8,8 +8,8 @@ infrastructure to enable developers to publish translations.
 """
 
 __all__ = [
-    'RosettaTranslationsUpload',
-    ]
+    "RosettaTranslationsUpload",
+]
 
 from zope.component import getUtility
 
@@ -21,15 +21,15 @@ from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES
 from lp.soyuz.interfaces.packagetranslationsuploadjob import (
     IPackageTranslationsUploadJobSource,
-    )
-
+)
 
 # Translations uploaded to certain specialised PPAs are redirected to
 # specialised distroseries instead.
 REDIRECTED_PPAS = {
-    "~ci-train-ppa-service/ubuntu/stable-phone-overlay":
-        {"vivid": ("ubuntu-rtm", "15.04")},
-    }
+    "~ci-train-ppa-service/ubuntu/stable-phone-overlay": {
+        "vivid": ("ubuntu-rtm", "15.04")
+    },
+}
 
 
 class RosettaTranslationsUpload(CustomUpload):
@@ -41,6 +41,7 @@ class RosettaTranslationsUpload(CustomUpload):
     For this reason, all methods from CustomUpload that deal with files are
     bypassed.
     """
+
     custom_type = "rosetta-translations"
 
     package_name = None
@@ -66,7 +67,8 @@ class RosettaTranslationsUpload(CustomUpload):
             redirect = REDIRECTED_PPAS[packageupload.archive.reference]
             if packageupload.distroseries.name in redirect:
                 distro_name, distroseries_name = redirect[
-                    packageupload.distroseries.name]
+                    packageupload.distroseries.name
+                ]
                 distro = getUtility(IDistributionSet).getByName(distro_name)
                 distroseries = distro[distroseries_name]
 
@@ -75,24 +77,28 @@ class RosettaTranslationsUpload(CustomUpload):
                 self.logger.debug(
                     "Skipping translations since its purpose is not "
                     "in MAIN_ARCHIVE_PURPOSES and the archive is not "
-                    "whitelisted.")
+                    "whitelisted."
+                )
             return
 
         # If the distroseries is 11.10 (oneiric) or later, the valid names
         # check is not required.  (See bug 788685.)
-        do_names_check = Version(distroseries.version) < Version('11.10')
+        do_names_check = Version(distroseries.version) < Version("11.10")
 
         latest_publication = self._findSourcePublication(packageupload)
         component_name = latest_publication.component.name
         spr = latest_publication.sourcepackagerelease
 
         valid_pockets = (
-            PackagePublishingPocket.RELEASE, PackagePublishingPocket.SECURITY,
-            PackagePublishingPocket.UPDATES, PackagePublishingPocket.PROPOSED)
-        valid_components = ('main', 'restricted')
-        if (packageupload.pocket not in valid_pockets or
-            (do_names_check and
-                component_name not in valid_components)):
+            PackagePublishingPocket.RELEASE,
+            PackagePublishingPocket.SECURITY,
+            PackagePublishingPocket.UPDATES,
+            PackagePublishingPocket.PROPOSED,
+        )
+        valid_components = ("main", "restricted")
+        if packageupload.pocket not in valid_pockets or (
+            do_names_check and component_name not in valid_components
+        ):
             # XXX: CarlosPerelloMarin 2006-02-16 bug=31665:
             # This should be implemented using a more general rule to accept
             # different policies depending on the distribution.
@@ -108,20 +114,25 @@ class RosettaTranslationsUpload(CustomUpload):
             sourcepackage = distroseries.getSourcePackage(spr.name)
             if sourcepackage is not None and sourcepackage.packaging is None:
                 original_sourcepackage = (
-                    packageupload.distroseries.getSourcePackage(spr.name))
+                    packageupload.distroseries.getSourcePackage(spr.name)
+                )
                 if original_sourcepackage is not None:
                     original_packaging = original_sourcepackage.packaging
                     if original_packaging is not None:
                         sourcepackage.setPackaging(
                             original_packaging.productseries,
-                            original_packaging.owner)
+                            original_packaging.owner,
+                        )
 
-        blamee = (packageupload.findPersonToNotify() or
-                  latest_publication.creator or
-                  getUtility(ILaunchpadCelebrities).rosetta_experts)
+        blamee = (
+            packageupload.findPersonToNotify()
+            or latest_publication.creator
+            or getUtility(ILaunchpadCelebrities).rosetta_experts
+        )
 
         getUtility(IPackageTranslationsUploadJobSource).create(
-            distroseries, libraryfilealias, spr.sourcepackagename, blamee)
+            distroseries, libraryfilealias, spr.sourcepackagename, blamee
+        )
 
     @staticmethod
     def parsePath(tarfile_name):
@@ -129,8 +140,9 @@ class RosettaTranslationsUpload(CustomUpload):
         bits = tarfile_name.split("_")
         if len(bits) != 4:
             raise ValueError(
-                "%s is not NAME_VERSION_ARCH_translations.tar.gz" %
-                tarfile_name)
+                "%s is not NAME_VERSION_ARCH_translations.tar.gz"
+                % tarfile_name
+            )
         return tuple(bits)
 
     def setComponents(self, tarfile_name):
@@ -154,6 +166,8 @@ class RosettaTranslationsUpload(CustomUpload):
             # publication for any package. We don't want that.
             raise AssertionError("package_name should not be None.")
         return packageupload.archive.getPublishedSources(
-            name=self.package_name, exact_match=True,
+            name=self.package_name,
+            exact_match=True,
             distroseries=packageupload.distroseries,
-            pocket=packageupload.pocket).first()
+            pocket=packageupload.pocket,
+        ).first()
diff --git a/lib/lp/archivepublisher/run_parts.py b/lib/lp/archivepublisher/run_parts.py
index 6b5bdb3..7391216 100644
--- a/lib/lp/archivepublisher/run_parts.py
+++ b/lib/lp/archivepublisher/run_parts.py
@@ -4,14 +4,14 @@
 """Publisher support for running programs from a plug-in directory."""
 
 __all__ = [
-    'execute_subprocess',
-    'find_run_parts_dir',
-    'run_parts',
-    ]
+    "execute_subprocess",
+    "find_run_parts_dir",
+    "run_parts",
+]
 
 import os
-from shlex import quote as shell_quote
 import subprocess
+from shlex import quote as shell_quote
 
 from lp.services.config import config
 from lp.services.scripts.base import LaunchpadScriptFailure
@@ -70,7 +70,8 @@ def run_parts(distribution_name, parts, log=None, env=None):
         return
     cmd = ["run-parts", "--", parts_dir]
     failure = LaunchpadScriptFailure(
-        "Failure while executing run-parts %s." % parts_dir)
+        "Failure while executing run-parts %s." % parts_dir
+    )
     full_env = dict(os.environ)
     if env is not None:
         full_env.update(env)
diff --git a/lib/lp/archivepublisher/scripts/base.py b/lib/lp/archivepublisher/scripts/base.py
index 7725447..f590791 100644
--- a/lib/lp/archivepublisher/scripts/base.py
+++ b/lib/lp/archivepublisher/scripts/base.py
@@ -5,8 +5,8 @@
 """Publisher script class."""
 
 __all__ = [
-    'PublisherScript',
-    ]
+    "PublisherScript",
+]
 
 from optparse import OptionValueError
 
@@ -17,15 +17,24 @@ from lp.services.scripts.base import LaunchpadCronScript
 
 
 class PublisherScript(LaunchpadCronScript):
-
     def addDistroOptions(self):
         self.parser.add_option(
-            "-d", "--distribution", dest="distribution", metavar="DISTRO",
-            default=None, help="The distribution to publish.")
+            "-d",
+            "--distribution",
+            dest="distribution",
+            metavar="DISTRO",
+            default=None,
+            help="The distribution to publish.",
+        )
 
         self.parser.add_option(
-            "-a", "--all-derived", action="store_true", dest="all_derived",
-            default=False, help="Publish all Ubuntu-derived distributions.")
+            "-a",
+            "--all-derived",
+            action="store_true",
+            dest="all_derived",
+            default=False,
+            help="Publish all Ubuntu-derived distributions.",
+        )
 
     def findSelectedDistro(self):
         """Find the `Distribution` named by the --distribution option.
diff --git a/lib/lp/archivepublisher/scripts/copy_signingkeys.py b/lib/lp/archivepublisher/scripts/copy_signingkeys.py
index 5ce7103..018b7aa 100644
--- a/lib/lp/archivepublisher/scripts/copy_signingkeys.py
+++ b/lib/lp/archivepublisher/scripts/copy_signingkeys.py
@@ -4,8 +4,8 @@
 """Script to copy signing keys between archives."""
 
 __all__ = [
-    'CopySigningKeysScript',
-    ]
+    "CopySigningKeysScript",
+]
 
 import sys
 
@@ -13,10 +13,7 @@ import transaction
 from zope.component import getUtility
 
 from lp.app.errors import NotFoundError
-from lp.services.scripts.base import (
-    LaunchpadScript,
-    LaunchpadScriptFailure,
-    )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
 from lp.services.signing.enums import SigningKeyType
 from lp.services.signing.interfaces.signingkey import IArchiveSigningKeySet
 from lp.soyuz.interfaces.archive import IArchiveSet
@@ -29,24 +26,34 @@ class CopySigningKeysScript(LaunchpadScript):
 
     def add_my_options(self):
         self.parser.add_option(
-            "-t", "--key-type",
-            help="The type of keys to copy (default: all types).")
+            "-t",
+            "--key-type",
+            help="The type of keys to copy (default: all types).",
+        )
 
         self.parser.add_option("-s", "--series", help="Series name.")
 
         self.parser.add_option(
-            "-n", "--dry-run", action="store_true", default=False,
-            help="Report what would be done, but don't actually copy keys.")
+            "-n",
+            "--dry-run",
+            action="store_true",
+            default=False,
+            help="Report what would be done, but don't actually copy keys.",
+        )
 
         self.parser.add_option(
-            "--overwrite", action="store_true", default=False,
-            help="Overwrite existing keys when executing the copy.")
+            "--overwrite",
+            action="store_true",
+            default=False,
+            help="Overwrite existing keys when executing the copy.",
+        )
 
     def getArchive(self, reference):
         archive = getUtility(IArchiveSet).getByReference(reference)
         if archive is None:
             raise LaunchpadScriptFailure(
-                "Could not find archive '%s'." % reference)
+                "Could not find archive '%s'." % reference
+            )
         return archive
 
     def getKeyTypes(self, name):
@@ -55,7 +62,8 @@ class CopySigningKeysScript(LaunchpadScript):
                 return [SigningKeyType.getTermByToken(name).value]
             except LookupError:
                 raise LaunchpadScriptFailure(
-                    "There is no signing key type named '%s'." % name)
+                    "There is no signing key type named '%s'." % name
+                )
         else:
             return list(SigningKeyType.items)
 
@@ -66,8 +74,9 @@ class CopySigningKeysScript(LaunchpadScript):
             return self.from_archive.distribution[series_name]
         except NotFoundError:
             raise LaunchpadScriptFailure(
-                "Could not find series '%s' in %s." %
-                (series_name, self.from_archive.distribution.display_name))
+                "Could not find series '%s' in %s."
+                % (series_name, self.from_archive.distribution.display_name)
+            )
 
     def processOptions(self):
         if len(self.args) != 2:
@@ -78,44 +87,65 @@ class CopySigningKeysScript(LaunchpadScript):
         self.key_types = self.getKeyTypes(self.options.key_type)
         self.series = self.getSeries(self.options.series)
 
-    def copy(self, from_archive, to_archive, key_type, series=None,
-             overwrite=False):
+    def copy(
+        self, from_archive, to_archive, key_type, series=None, overwrite=False
+    ):
         series_name = series.name if series else None
         from_archive_signing_key = getUtility(IArchiveSigningKeySet).get(
-            key_type, from_archive, series, exact_match=True)
+            key_type, from_archive, series, exact_match=True
+        )
         if from_archive_signing_key is None:
             self.logger.info(
                 "No %s signing key for %s / %s",
-                key_type, from_archive.reference, series_name)
+                key_type,
+                from_archive.reference,
+                series_name,
+            )
             return
         to_archive_signing_key = getUtility(IArchiveSigningKeySet).get(
-            key_type, to_archive, series, exact_match=True)
+            key_type, to_archive, series, exact_match=True
+        )
         if to_archive_signing_key is not None:
             if not overwrite:
                 # If it already exists and we do not force overwrite,
                 # abort this signing key copy.
                 self.logger.warning(
                     "%s signing key for %s / %s already exists",
-                    key_type, to_archive.reference, series_name)
+                    key_type,
+                    to_archive.reference,
+                    series_name,
+                )
                 return
             self.logger.warning(
                 "%s signing key for %s / %s being overwritten",
-                key_type, to_archive.reference, series_name)
+                key_type,
+                to_archive.reference,
+                series_name,
+            )
             to_archive_signing_key.destroySelf()
         self.logger.info(
             "Copying %s signing key %s from %s / %s to %s / %s",
-            key_type, from_archive_signing_key.signing_key.fingerprint,
-            from_archive.reference, series_name,
-            to_archive.reference, series_name)
+            key_type,
+            from_archive_signing_key.signing_key.fingerprint,
+            from_archive.reference,
+            series_name,
+            to_archive.reference,
+            series_name,
+        )
         getUtility(IArchiveSigningKeySet).create(
-            to_archive, series, from_archive_signing_key.signing_key)
+            to_archive, series, from_archive_signing_key.signing_key
+        )
 
     def main(self):
         self.processOptions()
         for key_type in self.key_types:
             self.copy(
-                self.from_archive, self.to_archive, key_type,
-                series=self.series, overwrite=self.options.overwrite)
+                self.from_archive,
+                self.to_archive,
+                key_type,
+                series=self.series,
+                overwrite=self.options.overwrite,
+            )
         if self.options.dry_run:
             self.logger.info("Dry run requested.  Not committing changes.")
             transaction.abort()
diff --git a/lib/lp/archivepublisher/scripts/generate_contents_files.py b/lib/lp/archivepublisher/scripts/generate_contents_files.py
index 14e6416..e4c25e5 100644
--- a/lib/lp/archivepublisher/scripts/generate_contents_files.py
+++ b/lib/lp/archivepublisher/scripts/generate_contents_files.py
@@ -4,11 +4,11 @@
 """Archive Contents files generator."""
 
 __all__ = [
-    'GenerateContentsFiles',
-    ]
+    "GenerateContentsFiles",
+]
 
-from optparse import OptionValueError
 import os
+from optparse import OptionValueError
 
 from zope.component import getUtility
 
@@ -20,26 +20,25 @@ from lp.services.command_spawner import (
     CommandSpawner,
     OutputLineHandler,
     ReturnCodeReceiver,
-    )
+)
 from lp.services.config import config
 from lp.services.database.policy import (
     DatabaseBlockedPolicy,
     StandbyOnlyDatabasePolicy,
-    )
+)
 from lp.services.osutils import ensure_directory_exists
 from lp.services.scripts.base import (
     LaunchpadCronScript,
     LaunchpadScriptFailure,
-    )
+)
 from lp.services.utils import file_exists
 
-
 COMPONENTS = [
-    'main',
-    'restricted',
-    'universe',
-    'multiverse',
-    ]
+    "main",
+    "restricted",
+    "universe",
+    "multiverse",
+]
 
 
 def differ_in_content(one_file, other_file):
@@ -49,7 +48,7 @@ def differ_in_content(one_file, other_file):
     if any([one_exists, other_exists]):
         if one_exists != other_exists:
             return True
-        with open(one_file, 'rb') as one_f, open(other_file, 'rb') as other_f:
+        with open(one_file, "rb") as one_f, open(other_file, "rb") as other_f:
             return one_f.read() != other_f.read()
     else:
         return False
@@ -58,8 +57,8 @@ def differ_in_content(one_file, other_file):
 def get_template(template_name):
     """Return path of given template in this script's templates directory."""
     return os.path.join(
-        config.root, "cronscripts", "publishing", "gen-contents",
-        template_name)
+        config.root, "cronscripts", "publishing", "gen-contents", template_name
+    )
 
 
 def execute(logger, command, args=None):
@@ -73,7 +72,7 @@ def execute(logger, command, args=None):
     command_line = [command]
     if args is not None:
         command_line += args
-    description = ' '.join(command_line)
+    description = " ".join(command_line)
 
     logger.debug("Execute: %s", description)
     # Some of these commands can take a long time.  Use CommandSpawner
@@ -85,14 +84,18 @@ def execute(logger, command, args=None):
     receiver = ReturnCodeReceiver()
     spawner = CommandSpawner()
     spawner.start(
-        command_line, completion_handler=receiver,
-        stderr_handler=stderr_logger, stdout_handler=stdout_logger)
+        command_line,
+        completion_handler=receiver,
+        stderr_handler=stderr_logger,
+        stdout_handler=stdout_logger,
+    )
     spawner.complete()
     stdout_logger.finalize()
     stderr_logger.finalize()
     if receiver.returncode != 0:
         raise LaunchpadScriptFailure(
-            "Failure while running command: %s" % description)
+            "Failure while running command: %s" % description
+        )
 
 
 class GenerateContentsFiles(LaunchpadCronScript):
@@ -102,8 +105,12 @@ class GenerateContentsFiles(LaunchpadCronScript):
     def add_my_options(self):
         """See `LaunchpadScript`."""
         self.parser.add_option(
-            "-d", "--distribution", dest="distribution", default=None,
-            help="Distribution to generate Contents files for.")
+            "-d",
+            "--distribution",
+            dest="distribution",
+            default=None,
+            help="Distribution to generate Contents files for.",
+        )
 
     @property
     def name(self):
@@ -119,16 +126,18 @@ class GenerateContentsFiles(LaunchpadCronScript):
             raise OptionValueError("Specify a distribution.")
 
         self.distribution = getUtility(IDistributionSet).getByName(
-            self.options.distribution)
+            self.options.distribution
+        )
         if self.distribution is None:
             raise OptionValueError(
-                "Distribution '%s' not found." % self.options.distribution)
+                "Distribution '%s' not found." % self.options.distribution
+            )
 
     def setUpContentArchive(self):
         """Make sure the `content_archive` directories exist."""
         self.logger.debug("Ensuring that we have a private tree in place.")
-        for suffix in ['cache', 'misc']:
-            dirname = '-'.join([self.distribution.name, suffix])
+        for suffix in ["cache", "misc"]:
+            dirname = "-".join([self.distribution.name, suffix])
             path = os.path.join(self.content_archive, dirname)
             if not file_exists(path):
                 os.makedirs(path)
@@ -153,31 +162,34 @@ class GenerateContentsFiles(LaunchpadCronScript):
 
     def getDirs(self, archs):
         """Subdirectories needed for each component."""
-        return ['source', 'debian-installer'] + [
-            'binary-%s' % arch for arch in archs]
+        return ["source", "debian-installer"] + [
+            "binary-%s" % arch for arch in archs
+        ]
 
     def writeAptContentsConf(self, suites):
         """Write apt-contents.conf file."""
-        output_dirname = '%s-misc' % self.distribution.name
+        output_dirname = "%s-misc" % self.distribution.name
         output_path = os.path.join(
-            self.content_archive, output_dirname, "apt-contents.conf")
+            self.content_archive, output_dirname, "apt-contents.conf"
+        )
 
         parameters = {
-            'content_archive': self.content_archive,
-            'distribution': self.distribution.name,
+            "content_archive": self.content_archive,
+            "distribution": self.distribution.name,
         }
 
-        with open(output_path, 'w') as output_file:
-            header = get_template('apt_conf_header.template')
+        with open(output_path, "w") as output_file:
+            header = get_template("apt_conf_header.template")
             with open(header) as header_file:
                 output_file.write(header_file.read() % parameters)
 
-            with open(get_template(
-                    'apt_conf_dist.template')) as dist_template_file:
+            with open(
+                get_template("apt_conf_dist.template")
+            ) as dist_template_file:
                 dist_template = dist_template_file.read()
             for suite in suites:
-                parameters['suite'] = suite
-                parameters['architectures'] = ' '.join(self.getArchs(suite))
+                parameters["suite"] = suite
+                parameters["architectures"] = " ".join(self.getArchs(suite))
                 output_file.write(dist_template % parameters)
 
     def createComponentDirs(self, suites):
@@ -186,8 +198,13 @@ class GenerateContentsFiles(LaunchpadCronScript):
             for component in COMPONENTS:
                 for directory in self.getDirs(self.getArchs(suite)):
                     path = os.path.join(
-                        self.content_archive, self.distribution.name, 'dists',
-                        suite, component, directory)
+                        self.content_archive,
+                        self.distribution.name,
+                        "dists",
+                        suite,
+                        component,
+                        directory,
+                    )
                     if not file_exists(path):
                         self.logger.debug("Creating %s.", path)
                         os.makedirs(path)
@@ -198,11 +215,15 @@ class GenerateContentsFiles(LaunchpadCronScript):
         This method won't access the database.
         """
         if file_exists(override_root):
-            execute(self.logger, "cp", [
-                "-a",
-                override_root,
-                "%s/" % self.content_archive,
-                ])
+            execute(
+                self.logger,
+                "cp",
+                [
+                    "-a",
+                    override_root,
+                    "%s/" % self.content_archive,
+                ],
+            )
         else:
             self.logger.debug("Did not find overrides; not copying.")
 
@@ -212,12 +233,18 @@ class GenerateContentsFiles(LaunchpadCronScript):
         This method may take a long time to run.
         This method won't access the database.
         """
-        execute(self.logger, "apt-ftparchive", [
-            "generate",
-            os.path.join(
-                self.content_archive, "%s-misc" % distro_name,
-                "apt-contents.conf"),
-            ])
+        execute(
+            self.logger,
+            "apt-ftparchive",
+            [
+                "generate",
+                os.path.join(
+                    self.content_archive,
+                    "%s-misc" % distro_name,
+                    "apt-contents.conf",
+                ),
+            ],
+        )
 
     def generateContentsFiles(self, override_root, distro_name):
         """Generate Contents files.
@@ -231,14 +258,16 @@ class GenerateContentsFiles(LaunchpadCronScript):
             evaluated without accessing the database.
         """
         self.logger.debug(
-            "Running apt in private tree to generate new contents.")
+            "Running apt in private tree to generate new contents."
+        )
         self.copyOverrides(override_root)
         self.runAptFTPArchive(distro_name)
 
     def updateContentsFile(self, suite, arch):
         """Update Contents file, if it has changed."""
         contents_dir = os.path.join(
-            self.content_archive, self.distribution.name, 'dists', suite)
+            self.content_archive, self.distribution.name, "dists", suite
+        )
         staging_dir = os.path.join(self.config.stagingroot, suite)
         contents_filename = "Contents-%s" % arch
         last_contents = os.path.join(contents_dir, ".%s" % contents_filename)
@@ -248,12 +277,15 @@ class GenerateContentsFiles(LaunchpadCronScript):
         # re-fetch them unnecessarily.
         if differ_in_content(current_contents, last_contents):
             self.logger.debug(
-                "Staging new Contents file for %s/%s.", suite, arch)
+                "Staging new Contents file for %s/%s.", suite, arch
+            )
 
             new_contents = os.path.join(
-                contents_dir, "%s.gz" % contents_filename)
+                contents_dir, "%s.gz" % contents_filename
+            )
             contents_dest = os.path.join(
-                staging_dir, "%s.gz" % contents_filename)
+                staging_dir, "%s.gz" % contents_filename
+            )
 
             ensure_directory_exists(os.path.dirname(contents_dest))
             os.rename(current_contents, last_contents)
@@ -261,7 +293,8 @@ class GenerateContentsFiles(LaunchpadCronScript):
             os.chmod(contents_dest, 0o664)
         else:
             self.logger.debug(
-                "Skipping unmodified Contents file for %s/%s.", suite, arch)
+                "Skipping unmodified Contents file for %s/%s.", suite, arch
+            )
 
     def updateContentsFiles(self, suites):
         """Update all Contents files that have changed."""
@@ -280,7 +313,8 @@ class GenerateContentsFiles(LaunchpadCronScript):
         self.processOptions()
         self.config = getPubConfig(self.distribution.main_archive)
         self.content_archive = os.path.join(
-            self.config.distroroot, "contents-generation")
+            self.config.distroroot, "contents-generation"
+        )
         self.setUpContentArchive()
 
     def process(self):
diff --git a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py
index 104b92a..7e8dc23 100644
--- a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py
+++ b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py
@@ -18,14 +18,22 @@ class HtaccessTokenGenerator(LaunchpadCronScript):
     def add_my_options(self):
         """Add script command line options."""
         self.parser.add_option(
-            "-n", "--dry-run", action="store_true",
-            dest="dryrun", default=False,
+            "-n",
+            "--dry-run",
+            action="store_true",
+            dest="dryrun",
+            default=False,
             help="If set, no files are changed and no tokens are "
-                 "deactivated.")
+            "deactivated.",
+        )
         self.parser.add_option(
-            "-d", "--no-deactivation", action="store_true",
-            dest="no_deactivation", default=False,
-            help="If set, tokens are not deactivated.")
+            "-d",
+            "--no-deactivation",
+            action="store_true",
+            dest="no_deactivation",
+            default=False,
+            help="If set, tokens are not deactivated.",
+        )
 
     def main(self):
         """Script entry point."""
diff --git a/lib/lp/archivepublisher/scripts/processaccepted.py b/lib/lp/archivepublisher/scripts/processaccepted.py
index 242fff0..10f90f7 100644
--- a/lib/lp/archivepublisher/scripts/processaccepted.py
+++ b/lib/lp/archivepublisher/scripts/processaccepted.py
@@ -4,11 +4,11 @@
 """Helper functions for the process-accepted.py script."""
 
 __all__ = [
-    'ProcessAccepted',
-    ]
+    "ProcessAccepted",
+]
 
-from optparse import OptionValueError
 import sys
+from optparse import OptionValueError
 
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
@@ -19,15 +19,9 @@ from lp.services.limitedlist import LimitedList
 from lp.services.webapp.adapter import (
     clear_request_started,
     set_request_started,
-    )
-from lp.services.webapp.errorlog import (
-    ErrorReportingUtility,
-    ScriptRequest,
-    )
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackageUploadStatus,
-    )
+)
+from lp.services.webapp.errorlog import ErrorReportingUtility, ScriptRequest
+from lp.soyuz.enums import ArchivePurpose, PackageUploadStatus
 from lp.soyuz.interfaces.archive import IArchiveSet
 from lp.soyuz.model.processacceptedbugsjob import close_bugs_for_queue_item
 from lp.soyuz.model.queue import PackageUpload
@@ -53,21 +47,31 @@ class ProcessAccepted(PublisherScript):
         self.addDistroOptions()
 
         self.parser.add_option(
-            "--ppa", action="store_true", dest="ppa", default=False,
-            help="Run only over PPA archives.")
+            "--ppa",
+            action="store_true",
+            dest="ppa",
+            default=False,
+            help="Run only over PPA archives.",
+        )
 
         self.parser.add_option(
-            "--copy-archives", action="store_true", dest="copy_archives",
-            default=False, help="Run only over COPY archives.")
+            "--copy-archives",
+            action="store_true",
+            dest="copy_archives",
+            default=False,
+            help="Run only over COPY archives.",
+        )
 
     def validateArguments(self):
         """Validate command-line arguments."""
         if self.options.ppa and self.options.copy_archives:
             raise OptionValueError(
-                "Specify only one of copy archives or ppa archives.")
+                "Specify only one of copy archives or ppa archives."
+            )
         if self.options.all_derived and self.options.distribution:
             raise OptionValueError(
-                "Can't combine --derived with a distribution name.")
+                "Can't combine --derived with a distribution name."
+            )
 
     def getTargetArchives(self, distribution):
         """Find archives to target based on given options."""
@@ -75,7 +79,8 @@ class ProcessAccepted(PublisherScript):
             return distribution.getPendingAcceptancePPAs()
         elif self.options.copy_archives:
             return getUtility(IArchiveSet).getArchivesForDistribution(
-                distribution, purposes=[ArchivePurpose.COPY])
+                distribution, purposes=[ArchivePurpose.COPY]
+            )
         else:
             return distribution.all_distro_archives
 
@@ -93,14 +98,15 @@ class ProcessAccepted(PublisherScript):
             queue_item.realiseUpload(self.logger)
         except Exception:
             message = "Failure processing queue_item %d" % queue_item.id
-            properties = [('error-explanation', message)]
+            properties = [("error-explanation", message)]
             request = ScriptRequest(properties)
             ErrorReportingUtility().raising(sys.exc_info(), request)
-            self.logger.error('%s (%s)', message, request.oopsid)
+            self.logger.error("%s (%s)", message, request.oopsid)
             return False
         else:
             self.logger.debug(
-                "Successfully processed queue item %d", queue_item.id)
+                "Successfully processed queue item %d", queue_item.id
+            )
             return True
 
     def processForDistro(self, distribution):
@@ -117,16 +123,20 @@ class ProcessAccepted(PublisherScript):
                 continue
             set_request_started(
                 request_statements=LimitedList(10000),
-                txn=self.txn, enable_timeout=False)
+                txn=self.txn,
+                enable_timeout=False,
+            )
             try:
                 for distroseries in distribution.series:
 
-                    self.logger.debug("Processing queue for %s %s" % (
-                        archive.reference, distroseries.name))
+                    self.logger.debug(
+                        "Processing queue for %s %s"
+                        % (archive.reference, distroseries.name)
+                    )
 
                     queue_items = distroseries.getPackageUploads(
-                        status=PackageUploadStatus.ACCEPTED,
-                        archive=archive).order_by(PackageUpload.id)
+                        status=PackageUploadStatus.ACCEPTED, archive=archive
+                    ).order_by(PackageUpload.id)
                     start = 0
 
                     # DistroSeries.getPackageUploads returns a
@@ -139,8 +149,11 @@ class ProcessAccepted(PublisherScript):
                     # explicitly order by ID and keep track of how far we've
                     # got.
                     while True:
-                        batch = list(removeSecurityProxy(queue_items).find(
-                            PackageUpload.id > start)[:self.batch_size])
+                        batch = list(
+                            removeSecurityProxy(queue_items).find(
+                                PackageUpload.id > start
+                            )[: self.batch_size]
+                        )
                         for queue_item in batch:
                             start = queue_item.id
                             if self.processQueueItem(queue_item):
diff --git a/lib/lp/archivepublisher/scripts/processdeathrow.py b/lib/lp/archivepublisher/scripts/processdeathrow.py
index 8173f74..50e7276 100644
--- a/lib/lp/archivepublisher/scripts/processdeathrow.py
+++ b/lib/lp/archivepublisher/scripts/processdeathrow.py
@@ -7,8 +7,8 @@ This script removes obsolete files from the selected archive(s) pool.
 """
 
 __all__ = [
-    'DeathRowProcessor',
-    ]
+    "DeathRowProcessor",
+]
 
 from zope.component import getUtility
 
@@ -18,34 +18,46 @@ from lp.services.limitedlist import LimitedList
 from lp.services.webapp.adapter import (
     clear_request_started,
     set_request_started,
-    )
+)
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.archive import IArchiveSet
 
 
 class DeathRowProcessor(PublisherScript):
-
     def add_my_options(self):
         self.parser.add_option(
-            "-n", "--dry-run", action="store_true", default=False,
-            help="Dry run: goes through the motions but commits to nothing.")
+            "-n",
+            "--dry-run",
+            action="store_true",
+            default=False,
+            help="Dry run: goes through the motions but commits to nothing.",
+        )
 
         self.addDistroOptions()
 
         self.parser.add_option(
-            "-p", "--pool-root", metavar="PATH",
-            help="Override the path to the pool folder")
+            "-p",
+            "--pool-root",
+            metavar="PATH",
+            help="Override the path to the pool folder",
+        )
 
         self.parser.add_option(
-            "--ppa", action="store_true", default=False,
-            help="Run only over PPA archives.")
+            "--ppa",
+            action="store_true",
+            default=False,
+            help="Run only over PPA archives.",
+        )
 
     def getTargetArchives(self, distribution):
         """Find archives to target based on given options."""
         if self.options.ppa:
             return getUtility(IArchiveSet).getArchivesForDistribution(
-                distribution, purposes=[ArchivePurpose.PPA],
-                check_permissions=False, exclude_pristine=True)
+                distribution,
+                purposes=[ArchivePurpose.PPA],
+                check_permissions=False,
+                exclude_pristine=True,
+            )
         else:
             return distribution.all_distro_archives
 
@@ -62,18 +74,21 @@ class DeathRowProcessor(PublisherScript):
         the operation just executed, i.e, commits successful runs and aborts
         runs with errors. It also respects 'dry-run' command-line option.
         """
-        death_row = getDeathRow(
-            archive, self.logger, self.options.pool_root)
+        death_row = getDeathRow(archive, self.logger, self.options.pool_root)
         self.logger.debug(
-            "Unpublishing death row for %s." % archive.displayname)
+            "Unpublishing death row for %s." % archive.displayname
+        )
         set_request_started(
             request_statements=LimitedList(10000),
-            txn=self.txn, enable_timeout=False)
+            txn=self.txn,
+            enable_timeout=False,
+        )
         try:
             death_row.reap(self.options.dry_run)
         except Exception:
             self.logger.exception(
-                "Unexpected exception while doing death-row unpublish")
+                "Unexpected exception while doing death-row unpublish"
+            )
             self.txn.abort()
         else:
             if self.options.dry_run:
diff --git a/lib/lp/archivepublisher/scripts/publish_ftpmaster.py b/lib/lp/archivepublisher/scripts/publish_ftpmaster.py
index b2a1060..ab301a6 100644
--- a/lib/lp/archivepublisher/scripts/publish_ftpmaster.py
+++ b/lib/lp/archivepublisher/scripts/publish_ftpmaster.py
@@ -4,13 +4,13 @@
 """Master distro publishing script."""
 
 __all__ = [
-    'PublishFTPMaster',
-    ]
+    "PublishFTPMaster",
+]
 
-from datetime import datetime
 import math
 import os
 import shutil
+from datetime import datetime
 
 from pytz import utc
 from zope.component import getUtility
@@ -18,27 +18,21 @@ from zope.component import getUtility
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.archivepublisher.publishing import (
-    cannot_modify_suite,
     GLOBAL_PUBLISHER_LOCK,
-    )
-from lp.archivepublisher.run_parts import (
-    execute_subprocess,
-    run_parts,
-    )
+    cannot_modify_suite,
+)
+from lp.archivepublisher.run_parts import execute_subprocess, run_parts
 from lp.archivepublisher.scripts.processaccepted import ProcessAccepted
 from lp.archivepublisher.scripts.publishdistro import PublishDistro
 from lp.registry.interfaces.distribution import IDistributionSet
-from lp.registry.interfaces.pocket import (
-    PackagePublishingPocket,
-    pocketsuffix,
-    )
+from lp.registry.interfaces.pocket import PackagePublishingPocket, pocketsuffix
 from lp.registry.interfaces.series import SeriesStatus
 from lp.services.database.bulk import load_related
 from lp.services.osutils import ensure_directory_exists
 from lp.services.scripts.base import (
     LaunchpadCronScript,
     LaunchpadScriptFailure,
-    )
+)
 from lp.services.utils import file_exists
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.model.distroarchseries import DistroArchSeries
@@ -53,11 +47,12 @@ def get_publishable_archives(distribution):
     ARCHIVES_TO_PUBLISH = [
         ArchivePurpose.PRIMARY,
         ArchivePurpose.PARTNER,
-        ]
+    ]
     return [
         archive
         for archive in distribution.all_distro_archives
-            if archive.purpose in ARCHIVES_TO_PUBLISH]
+        if archive.purpose in ARCHIVES_TO_PUBLISH
+    ]
 
 
 def get_backup_dists(archive_config):
@@ -93,10 +88,11 @@ def map_distro_pubconfigs(distro):
     """
     candidates = [
         (archive.purpose, getPubConfig(archive))
-        for archive in get_publishable_archives(distro)]
+        for archive in get_publishable_archives(distro)
+    ]
     return {
-        purpose: config
-        for purpose, config in candidates if config is not None}
+        purpose: config for purpose, config in candidates if config is not None
+    }
 
 
 def newer_mtime(one_file, other_file):
@@ -139,18 +135,36 @@ class PublishFTPMaster(LaunchpadCronScript):
     def add_my_options(self):
         """See `LaunchpadScript`."""
         self.parser.add_option(
-            '-a', '--all-derived', dest='all_derived', action='store_true',
-            default=False, help="Process all derived distributions.")
+            "-a",
+            "--all-derived",
+            dest="all_derived",
+            action="store_true",
+            default=False,
+            help="Process all derived distributions.",
+        )
         self.parser.add_option(
-            '-d', '--distribution', dest='distribution', default=None,
-            help="Distribution to publish.")
+            "-d",
+            "--distribution",
+            dest="distribution",
+            default=None,
+            help="Distribution to publish.",
+        )
         self.parser.add_option(
-            '-p', '--post-rsync', dest='post_rsync', action='store_true',
+            "-p",
+            "--post-rsync",
+            dest="post_rsync",
+            action="store_true",
             default=False,
-            help="When done, rsync backup dists to speed up the next run.")
+            help="When done, rsync backup dists to speed up the next run.",
+        )
         self.parser.add_option(
-            '-s', '--security-only', dest='security_only',
-            action='store_true', default=False, help="Security upload only.")
+            "-s",
+            "--security-only",
+            dest="security_only",
+            action="store_true",
+            default=False,
+            help="Security upload only.",
+        )
 
     def processOptions(self):
         """Handle command-line options.
@@ -159,20 +173,24 @@ class PublishFTPMaster(LaunchpadCronScript):
         """
         if self.options.distribution is None and not self.options.all_derived:
             raise LaunchpadScriptFailure(
-                "Specify a distribution, or --all-derived.")
+                "Specify a distribution, or --all-derived."
+            )
         if self.options.distribution is not None and self.options.all_derived:
             raise LaunchpadScriptFailure(
-                "Can't combine the --distribution and --all-derived options.")
+                "Can't combine the --distribution and --all-derived options."
+            )
 
         if self.options.all_derived:
             distro_set = getUtility(IDistributionSet)
             self.distributions = distro_set.getDerivedDistributions()
         else:
             distro = getUtility(IDistributionSet).getByName(
-                self.options.distribution)
+                self.options.distribution
+            )
             if distro is None:
                 raise LaunchpadScriptFailure(
-                    "Distribution %s not found." % self.options.distribution)
+                    "Distribution %s not found." % self.options.distribution
+                )
             self.distributions = [distro]
 
     def getConfigs(self):
@@ -186,7 +204,8 @@ class PublishFTPMaster(LaunchpadCronScript):
         """
         return {
             distro: map_distro_pubconfigs(distro)
-            for distro in self.distributions}
+            for distro in self.distributions
+        }
 
     def locateIndexesMarker(self, distribution, suite):
         """Give path for marker file whose presence marks index creation.
@@ -197,7 +216,8 @@ class PublishFTPMaster(LaunchpadCronScript):
         """
         config = self.configs[distribution][ArchivePurpose.PRIMARY]
         return os.path.join(
-            config.archiveroot, ".created-indexes-for-%s" % suite)
+            config.archiveroot, ".created-indexes-for-%s" % suite
+        )
 
     def listSuitesNeedingIndexes(self, distroseries):
         """Find suites in `distroseries` that need indexes created.
@@ -221,8 +241,10 @@ class PublishFTPMaster(LaunchpadCronScript):
         # May need indexes for this series.
         suites = [distroseries.getSuite(pocket) for pocket in pocketsuffix]
         return [
-            suite for suite in suites
-                if not file_exists(self.locateIndexesMarker(distro, suite))]
+            suite
+            for suite in suites
+            if not file_exists(self.locateIndexesMarker(distro, suite))
+        ]
 
     def markIndexCreationComplete(self, distribution, suite):
         """Note that archive indexes for `suite` have been created.
@@ -234,23 +256,26 @@ class PublishFTPMaster(LaunchpadCronScript):
         with open(marker_name, "w") as marker:
             marker.write(
                 "Indexes for %s were created on %s.\n"
-                % (suite, datetime.now(utc)))
+                % (suite, datetime.now(utc))
+            )
 
     def createIndexes(self, distribution, suites):
         """Create archive indexes for `suites` of `distroseries`."""
-        self.logger.info(
-            "Creating archive indexes for %s.", ', '.join(suites))
-        self.runPublishDistro(distribution, args=['-A'], suites=suites)
+        self.logger.info("Creating archive indexes for %s.", ", ".join(suites))
+        self.runPublishDistro(distribution, args=["-A"], suites=suites)
         for suite in suites:
             self.markIndexCreationComplete(distribution, suite)
 
     def processAccepted(self, distribution):
         """Run the process-accepted script."""
         self.logger.debug(
-            "Processing the accepted queue into the publishing records...")
+            "Processing the accepted queue into the publishing records..."
+        )
         script = ProcessAccepted(
-            test_args=["-d", distribution.name], logger=self.logger,
-            ignore_cron_control=True)
+            test_args=["-d", distribution.name],
+            logger=self.logger,
+            ignore_cron_control=True,
+        )
         script.txn = self.txn
         script.main()
 
@@ -259,20 +284,24 @@ class PublishFTPMaster(LaunchpadCronScript):
         self.logger.debug("Querying which suites are pending publication...")
 
         archive = distribution.main_archive
-        pending_sources = list(archive.getPublishedSources(
-            only_unpublished=True))
-        pending_binaries = list(archive.getAllPublishedBinaries(
-            only_unpublished=True))
+        pending_sources = list(
+            archive.getPublishedSources(only_unpublished=True)
+        )
+        pending_binaries = list(
+            archive.getAllPublishedBinaries(only_unpublished=True)
+        )
         load_related(
-            DistroArchSeries, pending_binaries, ['distroarchseriesID'])
+            DistroArchSeries, pending_binaries, ["distroarchseriesID"]
+        )
         return {
             pub.distroseries.name + pocketsuffix[pub.pocket]
-            for pub in pending_sources + pending_binaries}
+            for pub in pending_sources + pending_binaries
+        }
 
     def getDirtySecuritySuites(self, distribution):
         """List security suites with pending publications."""
         suites = self.getDirtySuites(distribution)
-        return [suite for suite in suites if suite.endswith('-security')]
+        return [suite for suite in suites if suite.endswith("-security")]
 
     def rsyncBackupDists(self, distribution):
         """Populate the backup dists with a copy of distsroot.
@@ -282,15 +311,16 @@ class PublishFTPMaster(LaunchpadCronScript):
 
         :param archive_purpose: The (purpose of the) archive to copy.
         """
-        for purpose, archive_config in (
-                self.configs[distribution].items()):
+        for purpose, archive_config in self.configs[distribution].items():
             dists = get_dists(archive_config)
             backup_dists = get_backup_dists(archive_config)
             execute_subprocess(
                 ["rsync", "-aH", "--delete", "%s/" % dists, backup_dists],
                 log=self.logger,
                 failure=LaunchpadScriptFailure(
-                    "Failed to rsync new dists for %s." % purpose.title))
+                    "Failed to rsync new dists for %s." % purpose.title
+                ),
+            )
 
     def recoverArchiveWorkingDir(self, archive_config):
         """Recover working dists dir for `archive_config`.
@@ -302,7 +332,8 @@ class PublishFTPMaster(LaunchpadCronScript):
         if file_exists(working_location):
             self.logger.info(
                 "Recovering working directory %s from failed run.",
-                working_location)
+                working_location,
+            )
             os.rename(working_location, get_backup_dists(archive_config))
 
     def recoverWorkingDists(self):
@@ -323,8 +354,7 @@ class PublishFTPMaster(LaunchpadCronScript):
             for archive_purpose, archive_config in distro_configs.items():
                 archiveroot = archive_config.archiveroot
                 if not file_exists(archiveroot):
-                    self.logger.debug(
-                        "Creating archive root %s.", archiveroot)
+                    self.logger.debug("Creating archive root %s.", archiveroot)
                     os.makedirs(archiveroot)
                 dists = get_dists(archive_config)
                 if not file_exists(dists):
@@ -333,7 +363,8 @@ class PublishFTPMaster(LaunchpadCronScript):
                 distscopy = get_backup_dists(archive_config)
                 if not file_exists(distscopy):
                     self.logger.debug(
-                        "Creating backup dists directory %s", distscopy)
+                        "Creating backup dists directory %s", distscopy
+                    )
                     os.makedirs(distscopy)
 
     def runPublishDistro(self, distribution, args=[], suites=None):
@@ -341,18 +372,21 @@ class PublishFTPMaster(LaunchpadCronScript):
         if suites is None:
             suites = []
         arguments = (
-            ['-d', distribution.name] +
-            args +
-            sum((['-s', suite] for suite in suites), []))
+            ["-d", distribution.name]
+            + args
+            + sum((["-s", suite] for suite in suites), [])
+        )
 
         publish_distro = PublishDistro(
-            test_args=arguments, logger=self.logger, ignore_cron_control=True)
+            test_args=arguments, logger=self.logger, ignore_cron_control=True
+        )
         publish_distro.logger = self.logger
         publish_distro.txn = self.txn
         publish_distro.main(reset_store_between_archives=False)
 
-    def publishDistroArchive(self, distribution, archive,
-                             security_suites=None):
+    def publishDistroArchive(
+        self, distribution, archive, security_suites=None
+    ):
         """Publish the results for an archive.
 
         :param archive: Archive to publish.
@@ -362,21 +396,23 @@ class PublishFTPMaster(LaunchpadCronScript):
         purpose = archive.purpose
         archive_config = self.configs[distribution][purpose]
         self.logger.debug(
-            "Publishing the %s %s...", distribution.name, purpose.title)
+            "Publishing the %s %s...", distribution.name, purpose.title
+        )
 
         # For reasons unknown, publishdistro only seems to work with a
         # directory that's inside the archive root.  So we move it there
         # for the duration.
         temporary_dists = get_working_dists(archive_config)
 
-        arguments = ['-R', temporary_dists]
+        arguments = ["-R", temporary_dists]
         if archive.purpose == ArchivePurpose.PARTNER:
-            arguments.append('--partner')
+            arguments.append("--partner")
 
         os.rename(get_backup_dists(archive_config), temporary_dists)
         try:
             self.runPublishDistro(
-                distribution, args=arguments, suites=security_suites)
+                distribution, args=arguments, suites=security_suites
+            )
         finally:
             os.rename(temporary_dists, get_backup_dists(archive_config))
 
@@ -386,13 +422,14 @@ class PublishFTPMaster(LaunchpadCronScript):
         """Execute the publish-distro hooks."""
         archive_config = self.configs[distribution][archive.purpose]
         env = {
-            'ARCHIVEROOT': archive_config.archiveroot,
-            'DISTSROOT': get_backup_dists(archive_config),
-            }
+            "ARCHIVEROOT": archive_config.archiveroot,
+            "DISTSROOT": get_backup_dists(archive_config),
+        }
         if archive_config.overrideroot is not None:
             env["OVERRIDEROOT"] = archive_config.overrideroot
         run_parts(
-            distribution.name, 'publish-distro.d', log=self.logger, env=env)
+            distribution.name, "publish-distro.d", log=self.logger, env=env
+        )
 
     def installDists(self, distribution):
         """Put the new dists into place, as near-atomically as possible.
@@ -418,21 +455,31 @@ class PublishFTPMaster(LaunchpadCronScript):
         """Clear out any redundant empty directories."""
         for archive_config in self.configs[distribution].values():
             execute_subprocess(
-                ["find", archive_config.archiveroot, "-type", "d", "-empty",
-                 "-delete"],
-                log=self.logger)
+                [
+                    "find",
+                    archive_config.archiveroot,
+                    "-type",
+                    "d",
+                    "-empty",
+                    "-delete",
+                ],
+                log=self.logger,
+            )
 
     def runFinalizeParts(self, distribution, security_only=False):
         """Run the finalize.d parts to finalize publication."""
-        archive_roots = ' '.join([
-            archive_config.archiveroot
-            for archive_config in self.configs[distribution].values()])
+        archive_roots = " ".join(
+            [
+                archive_config.archiveroot
+                for archive_config in self.configs[distribution].values()
+            ]
+        )
 
         env = {
-            'SECURITY_UPLOAD_ONLY': 'yes' if security_only else 'no',
-            'ARCHIVEROOTS': archive_roots,
+            "SECURITY_UPLOAD_ONLY": "yes" if security_only else "no",
+            "ARCHIVEROOTS": archive_roots,
         }
-        run_parts(distribution.name, 'finalize.d', log=self.logger, env=env)
+        run_parts(distribution.name, "finalize.d", log=self.logger, env=env)
 
     def publishSecurityUploads(self, distribution):
         """Quickly process just the pending security uploads.
@@ -446,8 +493,10 @@ class PublishFTPMaster(LaunchpadCronScript):
             return False
 
         self.publishDistroArchive(
-            distribution, distribution.main_archive,
-            security_suites=security_suites)
+            distribution,
+            distribution.main_archive,
+            security_suites=security_suites,
+        )
         return True
 
     def publishDistroUploads(self, distribution):
@@ -475,7 +524,8 @@ class PublishFTPMaster(LaunchpadCronScript):
                 current_path = os.path.join(backup_dir, filename)
                 if newer_mtime(new_path, current_path):
                     self.logger.debug(
-                        "Updating %s from %s." % (current_path, new_path))
+                        "Updating %s from %s." % (current_path, new_path)
+                    )
                     ensure_directory_exists(os.path.dirname(current_path))
                     # Due to http://bugs.python.org/issue12904, shutil.copy2
                     # doesn't copy timestamps precisely, and unfortunately
@@ -490,14 +540,16 @@ class PublishFTPMaster(LaunchpadCronScript):
                         st = os.stat(new_path)
                         os.utime(
                             current_path,
-                            (math.ceil(st.st_atime), math.ceil(st.st_mtime)))
+                            (math.ceil(st.st_atime), math.ceil(st.st_mtime)),
+                        )
                         os.unlink(new_path)
                     # Make sure that the file is world-readable, since
                     # occasionally files synced from other services have
                     # been known to end up mode 0o600 or similar and that
                     # breaks mirroring.
                     os.chmod(
-                        current_path, os.stat(current_path).st_mode | 0o444)
+                        current_path, os.stat(current_path).st_mode | 0o444
+                    )
                     updated = True
         return updated
 
@@ -564,10 +616,11 @@ class PublishFTPMaster(LaunchpadCronScript):
                 have_fresh_series = True
                 if series.previous_series is not None:
                     copier = CustomUploadsCopier(
-                        series, PackagePublishingPocket.RELEASE)
+                        series, PackagePublishingPocket.RELEASE
+                    )
                     copier.copy(
-                        series.previous_series,
-                        PackagePublishingPocket.RELEASE)
+                        series.previous_series, PackagePublishingPocket.RELEASE
+                    )
                 self.createIndexes(distribution, suites_needing_indexes)
 
         return have_fresh_series
diff --git a/lib/lp/archivepublisher/scripts/publishdistro.py b/lib/lp/archivepublisher/scripts/publishdistro.py
index 0f4501b..85511be 100644
--- a/lib/lp/archivepublisher/scripts/publishdistro.py
+++ b/lib/lp/archivepublisher/scripts/publishdistro.py
@@ -4,8 +4,8 @@
 """Publisher script class."""
 
 __all__ = [
-    'PublishDistro',
-    ]
+    "PublishDistro",
+]
 
 from optparse import OptionValueError
 
@@ -14,26 +14,23 @@ from zope.component import getUtility
 
 from lp.app.errors import NotFoundError
 from lp.archivepublisher.publishing import (
+    GLOBAL_PUBLISHER_LOCK,
     cannot_modify_suite,
     getPublisher,
-    GLOBAL_PUBLISHER_LOCK,
-    )
+)
 from lp.archivepublisher.scripts.base import PublisherScript
 from lp.services.limitedlist import LimitedList
 from lp.services.scripts.base import LaunchpadScriptFailure
 from lp.services.webapp.adapter import (
     clear_request_started,
     set_request_started,
-    )
+)
 from lp.soyuz.enums import (
     ArchivePublishingMethod,
     ArchivePurpose,
     ArchiveStatus,
-    )
-from lp.soyuz.interfaces.archive import (
-    IArchiveSet,
-    MAIN_ARCHIVE_PURPOSES,
-    )
+)
+from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES, IArchiveSet
 
 
 def is_ppa_private(ppa):
@@ -55,87 +52,155 @@ class PublishDistro(PublisherScript):
         self.addDistroOptions()
 
         self.parser.add_option(
-            "-C", "--careful", action="store_true", dest="careful",
-            default=False, help="Turns on all the below careful options.")
+            "-C",
+            "--careful",
+            action="store_true",
+            dest="careful",
+            default=False,
+            help="Turns on all the below careful options.",
+        )
 
         self.parser.add_option(
-            "-P", "--careful-publishing", action="store_true",
-            dest="careful_publishing", default=False,
-            help="Make the package publishing process careful.")
+            "-P",
+            "--careful-publishing",
+            action="store_true",
+            dest="careful_publishing",
+            default=False,
+            help="Make the package publishing process careful.",
+        )
 
         self.parser.add_option(
-            "-D", "--careful-domination", action="store_true",
-            dest="careful_domination", default=False,
-            help="Make the domination process careful.")
+            "-D",
+            "--careful-domination",
+            action="store_true",
+            dest="careful_domination",
+            default=False,
+            help="Make the domination process careful.",
+        )
 
         self.parser.add_option(
-            "-A", "--careful-apt", action="store_true", dest="careful_apt",
+            "-A",
+            "--careful-apt",
+            action="store_true",
+            dest="careful_apt",
             default=False,
-            help="Make index generation (e.g. apt-ftparchive) careful.")
+            help="Make index generation (e.g. apt-ftparchive) careful.",
+        )
 
         self.parser.add_option(
-            "--careful-release", action="store_true", dest="careful_release",
+            "--careful-release",
+            action="store_true",
+            dest="careful_release",
             default=False,
-            help="Make the Release file generation process careful.")
+            help="Make the Release file generation process careful.",
+        )
 
         self.parser.add_option(
-            "--disable-publishing", action="store_false",
-            dest="enable_publishing", default=True,
-            help="Disable the package publishing process.")
+            "--disable-publishing",
+            action="store_false",
+            dest="enable_publishing",
+            default=True,
+            help="Disable the package publishing process.",
+        )
 
         self.parser.add_option(
-            "--disable-domination", action="store_false",
-            dest="enable_domination", default=True,
-            help="Disable the domination process.")
+            "--disable-domination",
+            action="store_false",
+            dest="enable_domination",
+            default=True,
+            help="Disable the domination process.",
+        )
 
         self.parser.add_option(
-            "--disable-apt", action="store_false",
-            dest="enable_apt", default=True,
-            help="Disable index generation (e.g. apt-ftparchive).")
+            "--disable-apt",
+            action="store_false",
+            dest="enable_apt",
+            default=True,
+            help="Disable index generation (e.g. apt-ftparchive).",
+        )
 
         self.parser.add_option(
-            "--disable-release", action="store_false",
-            dest="enable_release", default=True,
-            help="Disable the Release file generation process.")
+            "--disable-release",
+            action="store_false",
+            dest="enable_release",
+            default=True,
+            help="Disable the Release file generation process.",
+        )
 
         self.parser.add_option(
-            "--include-non-pending", action="store_true",
-            dest="include_non_pending", default=False,
+            "--include-non-pending",
+            action="store_true",
+            dest="include_non_pending",
+            default=False,
             help=(
                 "When publishing PPAs, also include those that do not have "
-                "pending publications."))
+                "pending publications."
+            ),
+        )
 
         self.parser.add_option(
-            '-s', '--suite', metavar='SUITE', dest='suite', action='append',
-            type='string', default=[], help='The suite to publish')
+            "-s",
+            "--suite",
+            metavar="SUITE",
+            dest="suite",
+            action="append",
+            type="string",
+            default=[],
+            help="The suite to publish",
+        )
 
         self.parser.add_option(
-            "--dirty-suite", metavar="SUITE", dest="dirty_suites",
-            action="append", default=[],
-            help="Consider this suite dirty regardless of publications.")
+            "--dirty-suite",
+            metavar="SUITE",
+            dest="dirty_suites",
+            action="append",
+            default=[],
+            help="Consider this suite dirty regardless of publications.",
+        )
 
         self.parser.add_option(
-            "-R", "--distsroot", dest="distsroot", metavar="SUFFIX",
+            "-R",
+            "--distsroot",
+            dest="distsroot",
+            metavar="SUFFIX",
             default=None,
             help=(
                 "Override the dists path for generation of the PRIMARY and "
-                "PARTNER archives only."))
+                "PARTNER archives only."
+            ),
+        )
 
         self.parser.add_option(
-            "--ppa", action="store_true", dest="ppa", default=False,
-            help="Only run over PPA archives.")
+            "--ppa",
+            action="store_true",
+            dest="ppa",
+            default=False,
+            help="Only run over PPA archives.",
+        )
 
         self.parser.add_option(
-            "--private-ppa", action="store_true", dest="private_ppa",
-            default=False, help="Only run over private PPA archives.")
+            "--private-ppa",
+            action="store_true",
+            dest="private_ppa",
+            default=False,
+            help="Only run over private PPA archives.",
+        )
 
         self.parser.add_option(
-            "--partner", action="store_true", dest="partner", default=False,
-            help="Only run over the partner archive.")
+            "--partner",
+            action="store_true",
+            dest="partner",
+            default=False,
+            help="Only run over the partner archive.",
+        )
 
         self.parser.add_option(
-            "--copy-archive", action="store_true", dest="copy_archive",
-            default=False, help="Only run over the copy archives.")
+            "--copy-archive",
+            action="store_true",
+            dest="copy_archive",
+            default=False,
+            help="Only run over the copy archives.",
+        )
 
     def isCareful(self, option):
         """Is the given "carefulness" option enabled?
@@ -178,7 +243,7 @@ class PublishDistro(PublisherScript):
             self.options.ppa,
             self.options.private_ppa,
             self.options.copy_archive,
-            ]
+        ]
         return len(list(filter(None, exclusive_options)))
 
     def logOptions(self):
@@ -187,13 +252,13 @@ class PublishDistro(PublisherScript):
             indexing_engine = "Apt-FTPArchive"
         else:
             indexing_engine = "Indexing"
-        self.logOption('Distribution', self.options.distribution)
+        self.logOption("Distribution", self.options.distribution)
         log_items = [
-            ('Publishing', self.options.careful_publishing),
-            ('Domination', self.options.careful_domination),
+            ("Publishing", self.options.careful_publishing),
+            ("Domination", self.options.careful_domination),
             (indexing_engine, self.options.careful_apt),
-            ('Release', self.options.careful_release),
-            ]
+            ("Release", self.options.careful_release),
+        ]
         for name, option in log_items:
             self.logOption(name, self.describeCare(option))
 
@@ -201,20 +266,24 @@ class PublishDistro(PublisherScript):
         """Check given options for user interface violations."""
         if len(self.args) > 0:
             raise OptionValueError(
-                "publish-distro takes no arguments, only options.")
+                "publish-distro takes no arguments, only options."
+            )
         if self.countExclusiveOptions() > 1:
             raise OptionValueError(
                 "Can only specify one of partner, ppa, private-ppa, "
-                "copy-archive.")
+                "copy-archive."
+            )
 
         if self.options.all_derived and self.options.distribution is not None:
-                raise OptionValueError(
-                    "Specify --distribution or --all-derived, but not both.")
+            raise OptionValueError(
+                "Specify --distribution or --all-derived, but not both."
+            )
 
-        for_ppa = (self.options.ppa or self.options.private_ppa)
+        for_ppa = self.options.ppa or self.options.private_ppa
         if for_ppa and self.options.distsroot:
             raise OptionValueError(
-                "We should not define 'distsroot' in PPA mode!", )
+                "We should not define 'distsroot' in PPA mode!",
+            )
 
     def findSuite(self, distribution, suite):
         """Find the named `suite` in the selected `Distribution`.
@@ -246,22 +315,27 @@ class PublishDistro(PublisherScript):
                     yield archive.distribution.getDistroSeriesAndPocket(suite)
                 except NotFoundError:
                     self.logger.exception(
-                        "Failed to parse dirty suite '%s' for archive '%s'" %
-                        (suite, archive.reference))
+                        "Failed to parse dirty suite '%s' for archive '%s'"
+                        % (suite, archive.reference)
+                    )
 
     def getCopyArchives(self, distribution):
         """Find copy archives for the selected distribution."""
         copy_archives = list(
             getUtility(IArchiveSet).getArchivesForDistribution(
-                distribution, purposes=[ArchivePurpose.COPY]))
+                distribution, purposes=[ArchivePurpose.COPY]
+            )
+        )
         if copy_archives == []:
             raise LaunchpadScriptFailure("Could not find any COPY archives")
         return copy_archives
 
     def getPPAs(self, distribution):
         """Find private package archives for the selected distribution."""
-        if (self.isCareful(self.options.careful_publishing) or
-                self.options.include_non_pending):
+        if (
+            self.isCareful(self.options.careful_publishing)
+            or self.options.include_non_pending
+        ):
             return distribution.getAllPPAs()
         else:
             return distribution.getPendingPublicationPPAs()
@@ -269,7 +343,7 @@ class PublishDistro(PublisherScript):
     def getTargetArchives(self, distribution):
         """Find the archive(s) selected by the script's options."""
         if self.options.partner:
-            return [distribution.getArchiveByComponent('partner')]
+            return [distribution.getArchiveByComponent("partner")]
         elif self.options.ppa:
             return filter(is_ppa_public, self.getPPAs(distribution))
         elif self.options.private_ppa:
@@ -301,7 +375,9 @@ class PublishDistro(PublisherScript):
             # Other types of archives do not currently support deletion.
             self.logger.warning(
                 "Deletion of %s skipped: operation not supported on %s",
-                archive.displayname, archive.purpose.title)
+                archive.displayname,
+                archive.purpose.title,
+            )
             return False
 
     def publishArchive(self, archive, publisher):
@@ -324,14 +400,16 @@ class PublishDistro(PublisherScript):
         publisher.setupArchiveDirs()
         if self.options.enable_publishing:
             publisher.A_publish(
-                self.isCareful(self.options.careful_publishing))
+                self.isCareful(self.options.careful_publishing)
+            )
             self.txn.commit()
 
         if self.options.enable_domination:
             # Flag dirty pockets for any outstanding deletions.
             publisher.A2_markPocketsWithDeletionsDirty()
             publisher.B_dominate(
-                self.isCareful(self.options.careful_domination))
+                self.isCareful(self.options.careful_domination)
+            )
             self.txn.commit()
 
         if self.options.enable_apt:
@@ -341,7 +419,9 @@ class PublishDistro(PublisherScript):
                 # generate the indexes, everything else uses the newer
                 # internal LP code.
                 if archive.purpose in (
-                        ArchivePurpose.PRIMARY, ArchivePurpose.COPY):
+                    ArchivePurpose.PRIMARY,
+                    ArchivePurpose.COPY,
+                ):
                     publisher.C_doFTPArchive(careful_indexing)
                 else:
                     publisher.C_writeIndexes(careful_indexing)
@@ -349,34 +429,46 @@ class PublishDistro(PublisherScript):
                 publisher.C_updateArtifactoryProperties(careful_indexing)
             else:
                 raise AssertionError(
-                    "Unhandled publishing method: %r" % publishing_method)
+                    "Unhandled publishing method: %r" % publishing_method
+                )
             self.txn.commit()
 
-        if (self.options.enable_release and
-                publishing_method == ArchivePublishingMethod.LOCAL):
-            publisher.D_writeReleaseFiles(self.isCareful(
-                self.options.careful_apt or self.options.careful_release))
+        if (
+            self.options.enable_release
+            and publishing_method == ArchivePublishingMethod.LOCAL
+        ):
+            publisher.D_writeReleaseFiles(
+                self.isCareful(
+                    self.options.careful_apt or self.options.careful_release
+                )
+            )
             # The caller will commit this last step.
 
-        if (self.options.enable_apt and
-                publishing_method == ArchivePublishingMethod.LOCAL):
+        if (
+            self.options.enable_apt
+            and publishing_method == ArchivePublishingMethod.LOCAL
+        ):
             publisher.createSeriesAliases()
 
     def processArchive(self, archive_id, reset_store=True):
         set_request_started(
             request_statements=LimitedList(10000),
-            txn=self.txn, enable_timeout=False)
+            txn=self.txn,
+            enable_timeout=False,
+        )
         try:
             archive = getUtility(IArchiveSet).get(archive_id)
             distribution = archive.distribution
             allowed_suites = self.findAllowedSuites(distribution)
             if archive.status == ArchiveStatus.DELETING:
                 publisher = self.getPublisher(
-                    distribution, archive, allowed_suites)
+                    distribution, archive, allowed_suites
+                )
                 work_done = self.deleteArchive(archive, publisher)
             elif archive.can_be_published:
                 publisher = self.getPublisher(
-                    distribution, archive, allowed_suites)
+                    distribution, archive, allowed_suites
+                )
                 self.publishArchive(archive, publisher)
                 work_done = True
             else:
@@ -403,12 +495,14 @@ class PublishDistro(PublisherScript):
             for archive in self.getTargetArchives(distribution):
                 if archive.distribution != distribution:
                     raise AssertionError(
-                        "Archive %s does not match distribution %r" %
-                        (archive.reference, distribution))
+                        "Archive %s does not match distribution %r"
+                        % (archive.reference, distribution)
+                    )
                 archive_ids.append(archive.id)
 
         for archive_id in archive_ids:
             self.processArchive(
-                archive_id, reset_store=reset_store_between_archives)
+                archive_id, reset_store=reset_store_between_archives
+            )
 
         self.logger.debug("Ciao")
diff --git a/lib/lp/archivepublisher/scripts/sync_signingkeys.py b/lib/lp/archivepublisher/scripts/sync_signingkeys.py
index fb5996e..7c59734 100644
--- a/lib/lp/archivepublisher/scripts/sync_signingkeys.py
+++ b/lib/lp/archivepublisher/scripts/sync_signingkeys.py
@@ -4,33 +4,30 @@
 """Script to inject archive keys into signing service."""
 
 __all__ = [
-    'SyncSigningKeysScript',
-    ]
+    "SyncSigningKeysScript",
+]
 
-from datetime import datetime
 import os
+from datetime import datetime
 
+import transaction
 from pytz import utc
 from storm.locals import Store
-import transaction
 from zope.component import getUtility
 
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
     ISignableArchive,
-    )
+)
 from lp.archivepublisher.model.publisherconfig import PublisherConfig
 from lp.services.database.interfaces import IStore
 from lp.services.gpg.interfaces import IGPGHandler
-from lp.services.scripts.base import (
-    LaunchpadScript,
-    LaunchpadScriptFailure,
-    )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
 from lp.services.signing.enums import SigningKeyType
 from lp.services.signing.interfaces.signingkey import (
     IArchiveSigningKeySet,
     ISigningKeySet,
-    )
+)
 from lp.soyuz.interfaces.archive import IArchiveSet
 from lp.soyuz.model.archive import Archive
 
@@ -38,49 +35,74 @@ from lp.soyuz.model.archive import Archive
 class SyncSigningKeysScript(LaunchpadScript):
     description = (
         "Injects into signing services all key files currently in this "
-        "machine.")
+        "machine."
+    )
 
     def add_my_options(self):
         self.parser.add_option(
-            "-A", "--archive",
+            "-A",
+            "--archive",
             help=(
                 "The reference of the archive to process "
-                "(default: all archives)."))
+                "(default: all archives)."
+            ),
+        )
         self.parser.add_option(
-            "-t", "--type",
-            help="The type of keys to process (default: all types).")
+            "-t",
+            "--type",
+            help="The type of keys to process (default: all types).",
+        )
         self.parser.add_option(
             "--local-keys",
-            help="Override directory where local keys are found.")
+            help="Override directory where local keys are found.",
+        )
 
         self.parser.add_option(
-            "-l", "--limit", dest="limit", type=int,
-            help="How many archives to fetch.")
+            "-l",
+            "--limit",
+            dest="limit",
+            type=int,
+            help="How many archives to fetch.",
+        )
         self.parser.add_option(
-            "-o", "--offset", dest="offset", type=int,
-            help="Offset on archives list.")
+            "-o",
+            "--offset",
+            dest="offset",
+            type=int,
+            help="Offset on archives list.",
+        )
 
         self.parser.add_option(
-            "--overwrite", action="store_true", default=False,
-            help="Overwrite keys that already exist on the signing service.")
+            "--overwrite",
+            action="store_true",
+            default=False,
+            help="Overwrite keys that already exist on the signing service.",
+        )
         self.parser.add_option(
-            "-n", "--dry-run", action="store_true", default=False,
-            help="Report what would be done, but don't actually inject keys.")
+            "-n",
+            "--dry-run",
+            action="store_true",
+            default=False,
+            help="Report what would be done, but don't actually inject keys.",
+        )
 
     def getArchives(self):
         """Gets the list of archives that should be processed."""
         if self.options.archive is not None:
             archive = getUtility(IArchiveSet).getByReference(
-                self.options.archive)
+                self.options.archive
+            )
             if archive is None:
                 raise LaunchpadScriptFailure(
-                    "No archive named '%s' could be found." %
-                    self.options.archive)
+                    "No archive named '%s' could be found."
+                    % self.options.archive
+                )
             archives = [archive]
         else:
             archives = IStore(Archive).find(
                 Archive,
-                PublisherConfig.distribution_id == Archive.distributionID)
+                PublisherConfig.distribution_id == Archive.distributionID,
+            )
             archives = archives.order_by(Archive.id)
         start = self.options.offset if self.options.offset else 0
         end = start + self.options.limit if self.options.limit else None
@@ -91,11 +113,13 @@ class SyncSigningKeysScript(LaunchpadScript):
         if self.options.type is not None:
             try:
                 key_type = SigningKeyType.getTermByToken(
-                    self.options.type).value
+                    self.options.type
+                ).value
             except LookupError:
                 raise LaunchpadScriptFailure(
-                    "There is no signing key type named '%s'." %
-                    self.options.type)
+                    "There is no signing key type named '%s'."
+                    % self.options.type
+                )
             key_types = [key_type]
         else:
             key_types = [
@@ -105,7 +129,7 @@ class SyncSigningKeysScript(LaunchpadScript):
                 SigningKeyType.SIPL,
                 SigningKeyType.FIT,
                 SigningKeyType.OPENPGP,
-                ]
+            ]
         return key_types
 
     def getKeysPerType(self, dir):
@@ -121,7 +145,8 @@ class SyncSigningKeysScript(LaunchpadScript):
             SigningKeyType.SIPL: ("sipl.pem", "sipl.x509"),
             SigningKeyType.FIT: (
                 os.path.join("fit", "fit.key"),
-                os.path.join("fit", "fit.crt")),
+                os.path.join("fit", "fit.crt"),
+            ),
         }
         found_keys_per_type = {}
         for key_type in self.getKeyTypes():
@@ -129,7 +154,7 @@ class SyncSigningKeysScript(LaunchpadScript):
                 # OpenPGP keys are handled separately.
                 continue
             files = [os.path.join(dir, f) for f in keys_per_type[key_type]]
-            self.logger.debug("Checking files %s...", ', '.join(files))
+            self.logger.debug("Checking files %s...", ", ".join(files))
             if all(os.path.exists(f) for f in files):
                 found_keys_per_type[key_type] = tuple(files)
         return found_keys_per_type
@@ -148,8 +173,9 @@ class SyncSigningKeysScript(LaunchpadScript):
             pubconf = getPubConfig(archive)
             if pubconf is None or pubconf.signingroot is None:
                 self.logger.debug(
-                    "Skipping %s: no pubconfig or no signing root." %
-                    archive.reference)
+                    "Skipping %s: no pubconfig or no signing root."
+                    % archive.reference
+                )
                 return {}
             local_keys = pubconf.signingroot
         for series in archive.distribution.series:
@@ -165,38 +191,52 @@ class SyncSigningKeysScript(LaunchpadScript):
     def inject(self, archive, key_type, series, priv_key_path, pub_key_path):
         arch_signing_key_set = getUtility(IArchiveSigningKeySet)
         existing_archive_signing_key = arch_signing_key_set.get(
-            key_type, archive, series, exact_match=True)
+            key_type, archive, series, exact_match=True
+        )
         if existing_archive_signing_key is not None:
             if self.options.overwrite:
                 self.logger.info(
                     "Overwriting existing signing key for %s / %s / %s",
-                    key_type, archive.reference,
-                    series.name if series else None)
+                    key_type,
+                    archive.reference,
+                    series.name if series else None,
+                )
                 Store.of(existing_archive_signing_key).remove(
-                    existing_archive_signing_key)
+                    existing_archive_signing_key
+                )
             else:
                 self.logger.info(
                     "Signing key for %s / %s / %s already exists",
-                    key_type, archive.reference,
-                    series.name if series else None)
+                    key_type,
+                    archive.reference,
+                    series.name if series else None,
+                )
                 return existing_archive_signing_key
 
         if self.options.dry_run:
             self.logger.info(
                 "Would inject signing key for %s / %s / %s",
-                key_type, archive.reference, series.name if series else None)
+                key_type,
+                archive.reference,
+                series.name if series else None,
+            )
         else:
-            with open(priv_key_path, 'rb') as fd:
+            with open(priv_key_path, "rb") as fd:
                 private_key = fd.read()
-            with open(pub_key_path, 'rb') as fd:
+            with open(pub_key_path, "rb") as fd:
                 public_key = fd.read()
 
             now = datetime.now().replace(tzinfo=utc)
             description = "%s key for %s" % (key_type.name, archive.reference)
             return arch_signing_key_set.inject(
-                key_type, private_key, public_key,
-                description, now, archive,
-                earliest_distro_series=series)
+                key_type,
+                private_key,
+                public_key,
+                description,
+                now,
+                archive,
+                earliest_distro_series=series,
+            )
 
     def injectGPG(self, archive, secret_key_path):
         with open(secret_key_path, "rb") as key_file:
@@ -205,7 +245,8 @@ class SyncSigningKeysScript(LaunchpadScript):
         secret_key = gpg_handler.importSecretKey(secret_key_export)
         signing_key_set = getUtility(ISigningKeySet)
         existing_signing_key = signing_key_set.get(
-            SigningKeyType.OPENPGP, secret_key.fingerprint)
+            SigningKeyType.OPENPGP, secret_key.fingerprint
+        )
         if existing_signing_key is not None:
             # There's no point in honouring self.options.overwrite here,
             # because we know we'll just end up with the same fingerprint
@@ -213,19 +254,27 @@ class SyncSigningKeysScript(LaunchpadScript):
             # existing key with new key material.
             self.logger.info(
                 "Signing key for %s / %s already exists",
-                SigningKeyType.OPENPGP, archive.reference)
+                SigningKeyType.OPENPGP,
+                archive.reference,
+            )
             return existing_signing_key
 
         if self.options.dry_run:
             self.logger.info(
                 "Would inject signing key for %s / %s",
-                SigningKeyType.OPENPGP, archive.reference)
+                SigningKeyType.OPENPGP,
+                archive.reference,
+            )
         else:
             public_key = gpg_handler.retrieveKey(secret_key.fingerprint)
             now = datetime.now().replace(tzinfo=utc)
             return signing_key_set.inject(
-                SigningKeyType.OPENPGP, secret_key.export(),
-                public_key.export(), secret_key.uids[0].name, now)
+                SigningKeyType.OPENPGP,
+                secret_key.export(),
+                public_key.export(),
+                secret_key.uids[0].name,
+                now,
+            )
 
     def processArchive(self, archive):
         for series, path in self.getSeriesPaths(archive).items():
@@ -233,17 +282,25 @@ class SyncSigningKeysScript(LaunchpadScript):
             for key_type, (priv_key, pub_key) in keys_per_type.items():
                 self.logger.info(
                     "Found key files %s / %s (type=%s, series=%s).",
-                    priv_key, pub_key, key_type,
-                    series.name if series else None)
+                    priv_key,
+                    pub_key,
+                    key_type,
+                    series.name if series else None,
+                )
                 self.inject(archive, key_type, series, priv_key, pub_key)
-        if (SigningKeyType.OPENPGP in self.getKeyTypes() and
-                archive.signing_key is not None):
+        if (
+            SigningKeyType.OPENPGP in self.getKeyTypes()
+            and archive.signing_key is not None
+        ):
             secret_key_path = ISignableArchive(archive).getPathForSecretKey(
-                archive.signing_key)
+                archive.signing_key
+            )
             if os.path.exists(secret_key_path):
                 self.logger.info(
                     "Found key file %s (type=%s).",
-                    secret_key_path, SigningKeyType.OPENPGP)
+                    secret_key_path,
+                    SigningKeyType.OPENPGP,
+                )
                 self.injectGPG(archive, secret_key_path)
 
     def _maybeCommit(self, count):
@@ -252,7 +309,9 @@ class SyncSigningKeysScript(LaunchpadScript):
         else:
             self.logger.info(
                 "%d %s processed; committing.",
-                count, "archive" if count == 1 else "archives")
+                count,
+                "archive" if count == 1 else "archives",
+            )
             transaction.commit()
 
     def main(self):
@@ -261,7 +320,8 @@ class SyncSigningKeysScript(LaunchpadScript):
             if i != 0 and i % 100 == 0:
                 self._maybeCommit(i)
             self.logger.debug(
-                "#%s - Processing keys for archive %s.", i, archive.reference)
+                "#%s - Processing keys for archive %s.", i, archive.reference
+            )
             self.processArchive(archive)
             total = i + 1
         self._maybeCommit(total)
diff --git a/lib/lp/archivepublisher/signing.py b/lib/lp/archivepublisher/signing.py
index 5350d07..d874938 100644
--- a/lib/lp/archivepublisher/signing.py
+++ b/lib/lp/archivepublisher/signing.py
@@ -12,10 +12,8 @@ secure to hold signing keys, so we sign them as a custom upload instead.
 __all__ = [
     "SigningUpload",
     "UefiUpload",
-    ]
+]
 
-from datetime import datetime
-from functools import partial
 import os
 import shutil
 import stat
@@ -23,6 +21,8 @@ import subprocess
 import tarfile
 import tempfile
 import textwrap
+from datetime import datetime
+from functools import partial
 
 from pytz import utc
 from zope.component import getUtility
@@ -36,17 +36,15 @@ from lp.services.signing.enums import SigningKeyType
 from lp.services.signing.interfaces.signingkey import IArchiveSigningKeySet
 from lp.soyuz.interfaces.queue import CustomUploadError
 
-
-PUBLISHER_USES_SIGNING_SERVICE = (
-    'archivepublisher.signing_service.enabled')
+PUBLISHER_USES_SIGNING_SERVICE = "archivepublisher.signing_service.enabled"
 PUBLISHER_SIGNING_SERVICE_INJECTS_KEYS = (
-    'archivepublisher.signing_service.injection.enabled')
+    "archivepublisher.signing_service.injection.enabled"
+)
 
 
 class SigningUploadPackError(CustomUploadError):
     def __init__(self, tarfile_path, exc):
-        message = "Problem building tarball '%s': %s" % (
-            tarfile_path, exc)
+        message = "Problem building tarball '%s': %s" % (tarfile_path, exc)
         CustomUploadError.__init__(self, message)
 
 
@@ -97,6 +95,7 @@ class SigningUpload(CustomUpload):
         keys: by copying from local file system (old way) or saving the
         public key stored at signing service (new way).
     """
+
     custom_type = "signing"
 
     dists_directory = "signed"
@@ -110,8 +109,7 @@ class SigningUpload(CustomUpload):
         return bits[0], bits[1], bits[2].split(".")[0]
 
     def setComponents(self, tarfile_path):
-        self.package, self.version, self.arch = self.parsePath(
-            tarfile_path)
+        self.package, self.version, self.arch = self.parsePath(tarfile_path)
 
     def getSeriesPath(self, pubconf, key_name, archive, signing_for):
         """Find the key path for a given series.
@@ -124,11 +122,7 @@ class SigningUpload(CustomUpload):
             if series.name == signing_for:
                 found = True
             if found:
-                path = os.path.join(
-                    pubconf.signingroot,
-                    series.name,
-                    key_name
-                    )
+                path = os.path.join(pubconf.signingroot, series.name, key_name)
                 if os.path.exists(path):
                     return path
         # If we have exhausted all available series, return the root
@@ -139,7 +133,8 @@ class SigningUpload(CustomUpload):
 
         if suite:
             self.distro_series, _ = getUtility(IDistroSeriesSet).fromSuite(
-                self.archive.distribution, suite)
+                self.archive.distribution, suite
+            )
         else:
             self.distro_series = None
 
@@ -147,7 +142,8 @@ class SigningUpload(CustomUpload):
         if pubconf.signingroot is None:
             if self.logger is not None:
                 self.logger.warning(
-                    "No signing root configured for this archive")
+                    "No signing root configured for this archive"
+                )
             self.uefi_key = None
             self.uefi_cert = None
             self.kmod_pem = None
@@ -160,46 +156,50 @@ class SigningUpload(CustomUpload):
             self.fit_cert = None
             self.autokey = False
         else:
-            signing_for = self.distro_series.name if self.distro_series else ''
+            signing_for = self.distro_series.name if self.distro_series else ""
             self.uefi_key = self.getSeriesPath(
-                pubconf, "uefi.key", archive, signing_for)
+                pubconf, "uefi.key", archive, signing_for
+            )
             self.uefi_cert = self.getSeriesPath(
-                pubconf, "uefi.crt", archive, signing_for)
+                pubconf, "uefi.crt", archive, signing_for
+            )
             self.kmod_pem = self.getSeriesPath(
-                pubconf, "kmod.pem", archive, signing_for)
+                pubconf, "kmod.pem", archive, signing_for
+            )
             self.kmod_x509 = self.getSeriesPath(
-                pubconf, "kmod.x509", archive, signing_for)
+                pubconf, "kmod.x509", archive, signing_for
+            )
             self.opal_pem = self.getSeriesPath(
-                pubconf, "opal.pem", archive, signing_for)
+                pubconf, "opal.pem", archive, signing_for
+            )
             self.opal_x509 = self.getSeriesPath(
-                pubconf, "opal.x509", archive, signing_for)
+                pubconf, "opal.x509", archive, signing_for
+            )
             self.sipl_pem = self.getSeriesPath(
-                pubconf, "sipl.pem", archive, signing_for)
+                pubconf, "sipl.pem", archive, signing_for
+            )
             self.sipl_x509 = self.getSeriesPath(
-                pubconf, "sipl.x509", archive, signing_for)
+                pubconf, "sipl.x509", archive, signing_for
+            )
             # Note: the signature tool allows a collection of keys and takes
             #       a directory name with all valid keys.  Avoid mixing the
             #       other signing types' keys with the fit keys.
             self.fit_key = self.getSeriesPath(
-                pubconf,
-                os.path.join("fit", "fit.key"),
-                archive,
-                signing_for
-                )
+                pubconf, os.path.join("fit", "fit.key"), archive, signing_for
+            )
             self.fit_cert = self.getSeriesPath(
-                pubconf,
-                os.path.join("fit", "fit.crt"),
-                archive,
-                signing_for
-                )
+                pubconf, os.path.join("fit", "fit.crt"), archive, signing_for
+            )
             self.autokey = pubconf.signingautokey
 
         self.setComponents(tarfile_path)
 
-        dists_signed = os.path.join(pubconf.archiveroot, "dists",
-            suite, "main", self.dists_directory)
+        dists_signed = os.path.join(
+            pubconf.archiveroot, "dists", suite, "main", self.dists_directory
+        )
         self.targetdir = os.path.join(
-            dists_signed, "%s-%s" % (self.package, self.arch))
+            dists_signed, "%s-%s" % (self.package, self.arch)
+        )
         self.archiveroot = pubconf.archiveroot
         self.temproot = pubconf.temproot
 
@@ -222,7 +222,8 @@ class SigningUpload(CustomUpload):
             else:
                 if self.logger is not None:
                     self.logger.warning(
-                        "%s: public key not world readable" % key)
+                        "%s: public key not world readable" % key
+                    )
 
     def copyPublishedPublicKeys(self):
         """Copy out published keys into the custom upload."""
@@ -231,7 +232,7 @@ class SigningUpload(CustomUpload):
             os.makedirs(keydir)
         for filename, content in self.public_keys.items():
             file_path = os.path.join(keydir, os.path.basename(filename))
-            with open(file_path, 'wb') as fd:
+            with open(file_path, "wb") as fd:
                 fd.write(content)
 
     def setSigningOptions(self):
@@ -239,8 +240,9 @@ class SigningUpload(CustomUpload):
         self.signing_options = {}
 
         # Look for an options file in the top level control directory.
-        options_file = os.path.join(self.tmpdir, self.version,
-            "control", "options")
+        options_file = os.path.join(
+            self.tmpdir, self.version, "control", "options"
+        )
         if not os.path.exists(options_file):
             return
 
@@ -262,14 +264,16 @@ class SigningUpload(CustomUpload):
             # Just log this rather than failing, since custom upload errors
             # tend to make the publisher rather upset.
             if self.logger is not None:
-                self.logger.warning("%s Failed (cmd='%s')" %
-                                    (description, " ".join(cmdl)))
+                self.logger.warning(
+                    "%s Failed (cmd='%s')" % (description, " ".join(cmdl))
+                )
         return status
 
     def findSigningHandlers(self):
         """Find all the signable files in an extracted tarball."""
         use_signing_service = bool(
-            getFeatureFlag(PUBLISHER_USES_SIGNING_SERVICE))
+            getFeatureFlag(PUBLISHER_USES_SIGNING_SERVICE)
+        )
 
         fallback_handlers = {
             SigningKeyType.UEFI: self.signUefi,
@@ -277,7 +281,7 @@ class SigningUpload(CustomUpload):
             SigningKeyType.OPAL: self.signOpal,
             SigningKeyType.SIPL: self.signSipl,
             SigningKeyType.FIT: self.signFit,
-            }
+        }
 
         for dirpath, dirnames, filenames in os.walk(self.tmpdir):
             for filename in filenames:
@@ -301,13 +305,17 @@ class SigningUpload(CustomUpload):
 
                 if use_signing_service:
                     key = getUtility(IArchiveSigningKeySet).getSigningKey(
-                        key_type, self.archive, self.distro_series)
+                        key_type, self.archive, self.distro_series
+                    )
                     handler = partial(
-                        self.signUsingSigningService, key_type, key)
+                        self.signUsingSigningService, key_type, key
+                    )
                     if key_type in fallback_handlers:
                         fallback_handler = partial(
-                            self.signUsingLocalKey, key_type,
-                            fallback_handlers.get(key_type))
+                            self.signUsingLocalKey,
+                            key_type,
+                            fallback_handlers.get(key_type),
+                        )
                     else:
                         fallback_handler = None
                     yield file_path, handler, fallback_handler
@@ -331,7 +339,8 @@ class SigningUpload(CustomUpload):
         if not self.keyFilesExist(key_type):
             raise OSError(
                 "Could not fallback to local signing keys: the key files "
-                "were not found.")
+                "were not found."
+            )
         return handler(filename)
 
     def keyFilesExist(self, key_type):
@@ -344,7 +353,7 @@ class SigningUpload(CustomUpload):
             SigningKeyType.OPAL: [self.opal_pem, self.opal_x509],
             SigningKeyType.SIPL: [self.sipl_pem, self.sipl_x509],
             SigningKeyType.FIT: [self.fit_cert, self.fit_key],
-            }
+        }
         # If we are missing local key files, do not proceed.
         key_files = [i for i in fallback_keys.get(key_type, []) if i]
         return all(os.path.exists(key_file) for key_file in key_files)
@@ -368,32 +377,42 @@ class SigningUpload(CustomUpload):
         if signing_key is None:
             if not self.autokey:
                 raise NoSigningKeyError("No signing key for %s" % filename)
-            description = (
-                "%s key for %s" % (key_type.name, self.archive.reference))
+            description = "%s key for %s" % (
+                key_type.name,
+                self.archive.reference,
+            )
             try:
-                signing_key = getUtility(IArchiveSigningKeySet).generate(
-                    key_type, description, self.archive).signing_key
+                signing_key = (
+                    getUtility(IArchiveSigningKeySet)
+                    .generate(key_type, description, self.archive)
+                    .signing_key
+                )
             except Exception as e:
                 if self.logger:
                     self.logger.exception(
-                        "Error generating signing key for %s: %s %s" %
-                        (self.archive.reference, e.__class__.__name__, e))
+                        "Error generating signing key for %s: %s %s"
+                        % (self.archive.reference, e.__class__.__name__, e)
+                    )
                 raise SigningServiceError(
-                    "Could not generate key %s: %s" % (key_type, e))
+                    "Could not generate key %s: %s" % (key_type, e)
+                )
 
         with open(filename, "rb") as fd:
             content = fd.read()
 
         try:
             signed_content = signing_key.sign(
-                content, message_name=os.path.basename(filename))
+                content, message_name=os.path.basename(filename)
+            )
         except Exception as e:
             if self.logger:
                 self.logger.exception(
-                    "Error signing %s on signing service: %s %s" %
-                    (filename, e.__class__.__name__, e))
+                    "Error signing %s on signing service: %s %s"
+                    % (filename, e.__class__.__name__, e)
+                )
             raise SigningServiceError(
-                "Could not sign message with key %s: %s" % (signing_key, e))
+                "Could not sign message with key %s: %s" % (signing_key, e)
+            )
 
         if key_type in (SigningKeyType.UEFI, SigningKeyType.FIT):
             file_suffix = ".signed"
@@ -407,9 +426,10 @@ class SigningUpload(CustomUpload):
 
         signed_filename = filename + file_suffix
         public_key_filename = (
-            key_type.name.lower().replace("_", "-") + public_key_suffix)
+            key_type.name.lower().replace("_", "-") + public_key_suffix
+        )
 
-        with open(signed_filename, 'wb') as fd:
+        with open(signed_filename, "wb") as fd:
             fd.write(signed_content)
 
         self.publishPublicKey(public_key_filename, signing_key.public_key)
@@ -428,7 +448,8 @@ class SigningUpload(CustomUpload):
             if keyfile and not os.access(keyfile, os.R_OK):
                 if self.logger is not None:
                     self.logger.warning(
-                        "%s key %s not readable" % (which, keyfile))
+                        "%s key %s not readable" % (which, keyfile)
+                    )
                 valid = False
 
         if not valid:
@@ -436,7 +457,8 @@ class SigningUpload(CustomUpload):
         return keynames
 
     def injectIntoSigningService(
-            self, key_type, private_key_file, public_key_file):
+        self, key_type, private_key_file, public_key_file
+    ):
         """Injects the given key pair into signing service for current
         archive.
 
@@ -448,51 +470,66 @@ class SigningUpload(CustomUpload):
             raise ValueError("%s is not a valid key type to inject" % key_type)
 
         feature_flag = (
-            getFeatureFlag(PUBLISHER_SIGNING_SERVICE_INJECTS_KEYS) or '')
+            getFeatureFlag(PUBLISHER_SIGNING_SERVICE_INJECTS_KEYS) or ""
+        )
         key_types_to_inject = [i.strip() for i in feature_flag.split()]
 
         if key_type.name not in key_types_to_inject:
             if self.logger:
                 self.logger.info(
                     "Skipping injection for key type %s: not in %s",
-                    key_type, key_types_to_inject)
+                    key_type,
+                    key_types_to_inject,
+                )
             return
 
         key_set = getUtility(IArchiveSigningKeySet)
         current_key = key_set.get(
-            key_type, self.archive, None, exact_match=True)
+            key_type, self.archive, None, exact_match=True
+        )
         if current_key is not None:
-            self.logger.info("Skipping injection for key type %s: archive "
-                             "already has a key on lp-signing.", key_type)
+            self.logger.info(
+                "Skipping injection for key type %s: archive "
+                "already has a key on lp-signing.",
+                key_type,
+            )
             raise SigningKeyConflict(
                 "Archive %s already has a signing key type %s on lp-signing."
-                % (self.archive.reference, key_type))
+                % (self.archive.reference, key_type)
+            )
 
         if self.logger:
             self.logger.info(
                 "Injecting key_type %s for archive %s into signing service",
-                key_type, self.archive.name)
+                key_type,
+                self.archive.name,
+            )
 
-        with open(private_key_file, 'rb') as fd:
+        with open(private_key_file, "rb") as fd:
             private_key = fd.read()
-        with open(public_key_file, 'rb') as fd:
+        with open(public_key_file, "rb") as fd:
             public_key = fd.read()
 
         now = datetime.now().replace(tzinfo=utc)
-        description = (
-                "%s key for %s" % (key_type.name, self.archive.reference))
+        description = "%s key for %s" % (key_type.name, self.archive.reference)
         key_set.inject(
-            key_type, private_key, public_key,
-            description, now, self.archive, earliest_distro_series=None)
-
-    def generateKeyCommonName(self, owner, archive, suffix=''):
+            key_type,
+            private_key,
+            public_key,
+            description,
+            now,
+            self.archive,
+            earliest_distro_series=None,
+        )
+
+    def generateKeyCommonName(self, owner, archive, suffix=""):
         # PPA <owner> <archive> <suffix>
         # truncate <owner> <archive> to ensure the overall form is shorter
         # than 64 characters but the suffix is maintained
         if suffix:
             suffix = " " + suffix
         common_name = "PPA %s %s" % (owner, archive)
-        return common_name[0:64 - len(suffix)] + suffix
+        return common_name[0 : 64 - len(suffix)] + suffix
 
     def generateKeyCrtPair(self, key_type, key_filename, cert_filename):
         """Generate new Key/Crt key pairs."""
@@ -501,16 +538,30 @@ class SigningUpload(CustomUpload):
             os.makedirs(directory)
 
         common_name = self.generateKeyCommonName(
-            self.archive.owner.name, self.archive.name, key_type)
-        subject = '/CN=' + common_name + '/'
+            self.archive.owner.name, self.archive.name, key_type
+        )
+        subject = "/CN=" + common_name + "/"
 
         old_mask = os.umask(0o077)
         try:
             new_key_cmd = [
-                'openssl', 'req', '-new', '-x509', '-newkey', 'rsa:2048',
-                '-subj', subject, '-keyout', key_filename,
-                '-out', cert_filename, '-days', '3650', '-nodes', '-sha256',
-                ]
+                "openssl",
+                "req",
+                "-new",
+                "-x509",
+                "-newkey",
+                "rsa:2048",
+                "-subj",
+                subject,
+                "-keyout",
+                key_filename,
+                "-out",
+                cert_filename,
+                "-days",
+                "3650",
+                "-nodes",
+                "-sha256",
+            ]
             self.callLog(key_type + " keygen", new_key_cmd)
         finally:
             os.umask(old_mask)
@@ -521,7 +572,8 @@ class SigningUpload(CustomUpload):
             signing_key_type = getattr(SigningKeyType, key_type.upper())
             try:
                 self.injectIntoSigningService(
-                    signing_key_type, key_filename, cert_filename)
+                    signing_key_type, key_filename, cert_filename
+                )
             except SigningKeyConflict:
                 os.unlink(key_filename)
                 os.unlink(cert_filename)
@@ -534,15 +586,17 @@ class SigningUpload(CustomUpload):
     def signUefi(self, image):
         """Attempt to sign an image."""
         remove_if_exists("%s.signed" % image)
-        (key, cert) = self.getKeys('UEFI', self.generateUefiKeys,
-            self.uefi_key, self.uefi_cert)
+        (key, cert) = self.getKeys(
+            "UEFI", self.generateUefiKeys, self.uefi_key, self.uefi_cert
+        )
         if not key or not cert:
             return
         self.publishPublicKey(cert)
         cmdl = ["sbsign", "--key", key, "--cert", cert, image]
         return self.callLog("UEFI signing", cmdl) == 0
 
-    openssl_config_base = textwrap.dedent("""\
+    openssl_config_base = textwrap.dedent(
+        """\
         [ req ]
         default_bits = 4096
         distinguished_name = req_distinguished_name
@@ -558,29 +612,37 @@ class SigningUpload(CustomUpload):
         keyUsage=digitalSignature
         subjectKeyIdentifier=hash
         authorityKeyIdentifier=keyid
-        """)
+        """
+    )
 
     openssl_config_opal = "# OPAL OpenSSL config\n" + openssl_config_base
 
-    openssl_config_kmod = "# KMOD OpenSSL config\n" + openssl_config_base + \
-        textwrap.dedent("""
+    openssl_config_kmod = (
+        "# KMOD OpenSSL config\n"
+        + openssl_config_base
+        + textwrap.dedent(
+            """
         # codeSigning:  specifies that this key is used to sign code.
         # 1.3.6.1.4.1.2312.16.1.2:  defines this key as used for
         #   module signing only. See https://lkml.org/lkml/2015/8/26/741.
         extendedKeyUsage        = codeSigning,1.3.6.1.4.1.2312.16.1.2
-        """)
+        """
+        )
+    )
 
     openssl_config_sipl = "# SIPL OpenSSL config\n" + openssl_config_base
 
     def generateOpensslConfig(self, key_type, genkey_tmpl):
         # Truncate name to 64 character maximum.
         common_name = self.generateKeyCommonName(
-            self.archive.owner.name, self.archive.name, key_type)
+            self.archive.owner.name, self.archive.name, key_type
+        )
 
         return genkey_tmpl.format(common_name=common_name)
 
-    def generatePemX509Pair(self, key_type, genkey_text, pem_filename,
-            x509_filename):
+    def generatePemX509Pair(
+        self, key_type, genkey_text, pem_filename, x509_filename
+    ):
         """Generate new pem/x509 key pairs."""
         directory = os.path.dirname(pem_filename)
         if not os.path.exists(directory):
@@ -588,25 +650,47 @@ class SigningUpload(CustomUpload):
 
         old_mask = os.umask(0o077)
         try:
-            with tempfile.NamedTemporaryFile(suffix='.keygen') as tf:
-                tf.write(genkey_text.encode('UTF-8'))
+            with tempfile.NamedTemporaryFile(suffix=".keygen") as tf:
+                tf.write(genkey_text.encode("UTF-8"))
 
                 # Close out the underlying file so we know it is complete.
                 tf.file.close()
 
                 new_key_cmd = [
-                    'openssl', 'req', '-new', '-nodes', '-utf8', '-sha512',
-                    '-days', '3650', '-batch', '-x509', '-config', tf.name,
-                    '-outform', 'PEM', '-out', pem_filename,
-                    '-keyout', pem_filename
-                    ]
+                    "openssl",
+                    "req",
+                    "-new",
+                    "-nodes",
+                    "-utf8",
+                    "-sha512",
+                    "-days",
+                    "3650",
+                    "-batch",
+                    "-x509",
+                    "-config",
+                    tf.name,
+                    "-outform",
+                    "PEM",
+                    "-out",
+                    pem_filename,
+                    "-keyout",
+                    pem_filename,
+                ]
                 if self.callLog(key_type + " keygen key", new_key_cmd) == 0:
                     new_x509_cmd = [
-                        'openssl', 'x509', '-in', pem_filename,
-                        '-outform', 'DER', '-out', x509_filename
-                        ]
-                    if self.callLog(key_type + " keygen cert",
-                                    new_x509_cmd) != 0:
+                        "openssl",
+                        "x509",
+                        "-in",
+                        pem_filename,
+                        "-outform",
+                        "DER",
+                        "-out",
+                        x509_filename,
+                    ]
+                    if (
+                        self.callLog(key_type + " keygen cert", new_x509_cmd)
+                        != 0
+                    ):
                         os.unlink(pem_filename)
         finally:
             os.umask(old_mask)
@@ -617,7 +701,8 @@ class SigningUpload(CustomUpload):
             signing_key_type = getattr(SigningKeyType, key_type.upper())
             try:
                 self.injectIntoSigningService(
-                    signing_key_type, pem_filename, x509_filename)
+                    signing_key_type, pem_filename, x509_filename
+                )
             except SigningKeyConflict:
                 os.unlink(pem_filename)
                 os.unlink(x509_filename)
@@ -631,8 +716,12 @@ class SigningUpload(CustomUpload):
     def signKmod(self, image):
         """Attempt to sign a kernel module."""
         remove_if_exists("%s.sig" % image)
-        (pem, cert) = self.getKeys('Kernel Module', self.generateKmodKeys,
-            self.kmod_pem, self.kmod_x509)
+        (pem, cert) = self.getKeys(
+            "Kernel Module",
+            self.generateKmodKeys,
+            self.kmod_pem,
+            self.kmod_x509,
+        )
         if not pem or not cert:
             return
         self.publishPublicKey(cert)
@@ -647,8 +736,9 @@ class SigningUpload(CustomUpload):
     def signOpal(self, image):
         """Attempt to sign a kernel image for Opal."""
         remove_if_exists("%s.sig" % image)
-        (pem, cert) = self.getKeys('Opal Kernel', self.generateOpalKeys,
-            self.opal_pem, self.opal_x509)
+        (pem, cert) = self.getKeys(
+            "Opal Kernel", self.generateOpalKeys, self.opal_pem, self.opal_x509
+        )
         if not pem or not cert:
             return
         self.publishPublicKey(cert)
@@ -663,8 +753,9 @@ class SigningUpload(CustomUpload):
     def signSipl(self, image):
         """Attempt to sign a kernel image for Sipl."""
         remove_if_exists("%s.sig" % image)
-        (pem, cert) = self.getKeys('SIPL Kernel', self.generateSiplKeys,
-            self.sipl_pem, self.sipl_x509)
+        (pem, cert) = self.getKeys(
+            "SIPL Kernel", self.generateSiplKeys, self.sipl_pem, self.sipl_x509
+        )
         if not pem or not cert:
             return
         self.publishPublicKey(cert)
@@ -679,16 +770,23 @@ class SigningUpload(CustomUpload):
         """Attempt to sign an image."""
         image_signed = "%s.signed" % image
         remove_if_exists(image_signed)
-        (key, cert) = self.getKeys('FIT', self.generateFitKeys,
-            self.fit_key, self.fit_cert)
+        (key, cert) = self.getKeys(
+            "FIT", self.generateFitKeys, self.fit_key, self.fit_cert
+        )
         if not key or not cert:
             return
         self.publishPublicKey(cert)
         # Make a copy of the image as mkimage signs in place and in
         # signed-only mode we will remove the original file.
         shutil.copy(image, image_signed)
-        cmdl = ["mkimage", "-F", "-k", os.path.dirname(key), "-r",
-            image_signed]
+        cmdl = [
+            "mkimage",
+            "-F",
+            "-k",
+            os.path.dirname(key),
+            "-r",
+            image_signed,
+        ]
         return self.callLog("FIT signing", cmdl) == 0
 
     def convertToTarball(self):
@@ -724,18 +822,19 @@ class SigningUpload(CustomUpload):
                 if fallback_handler is not None and self.logger:
                     self.logger.warning(
                         "Signing service will try to fallback to local key. "
-                        "Reason: %s (%s)" % (e.__class__.__name__, e))
+                        "Reason: %s (%s)" % (e.__class__.__name__, e)
+                    )
                 was_signed = False
             if not was_signed and fallback_handler is not None:
                 was_signed = fallback_handler(filename)
-            if was_signed and 'signed-only' in self.signing_options:
+            if was_signed and "signed-only" in self.signing_options:
                 os.unlink(filename)
 
         # Copy out the public keys where they were used.
         self.copyPublishedPublicKeys()
 
         # If tarball output is requested, tar up the results.
-        if 'tarball' in self.signing_options:
+        if "tarball" in self.signing_options:
             self.convertToTarball()
 
     def installFiles(self, archive, suite):
@@ -771,5 +870,6 @@ class UefiUpload(SigningUpload):
     We expect to be able to remove this upload type once all existing
     packages are converted to the new form and location.
     """
+
     custom_type = "uefi"
     dists_directory = "uefi"
diff --git a/lib/lp/archivepublisher/static_translations.py b/lib/lp/archivepublisher/static_translations.py
index cda8845..cca631a 100644
--- a/lib/lp/archivepublisher/static_translations.py
+++ b/lib/lp/archivepublisher/static_translations.py
@@ -5,7 +5,7 @@
 
 __all__ = [
     "StaticTranslationsUpload",
-    ]
+]
 
 from lp.archivepublisher.customupload import CustomUpload
 
@@ -17,6 +17,7 @@ class StaticTranslationsUpload(CustomUpload):
     via webservice methods so that third parties can retrieve them from the
     librarian.
     """
+
     custom_type = "static-translations"
 
     @classmethod
diff --git a/lib/lp/archivepublisher/tests/__init__.py b/lib/lp/archivepublisher/tests/__init__.py
index 9d65b15..f7ff214 100644
--- a/lib/lp/archivepublisher/tests/__init__.py
+++ b/lib/lp/archivepublisher/tests/__init__.py
@@ -3,7 +3,6 @@
 
 import os
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 
@@ -11,4 +10,4 @@ def datadir(path):
     """Return fully-qualified path inside the test data directory."""
     if path.startswith("/"):
         raise ValueError("Path is not relative: %s" % path)
-    return os.path.join(here, 'data', path)
+    return os.path.join(here, "data", path)
diff --git a/lib/lp/archivepublisher/tests/artifactory_fixture.py b/lib/lp/archivepublisher/tests/artifactory_fixture.py
index 599836b..c642031 100644
--- a/lib/lp/archivepublisher/tests/artifactory_fixture.py
+++ b/lib/lp/archivepublisher/tests/artifactory_fixture.py
@@ -5,61 +5,67 @@
 
 __all__ = [
     "FakeArtifactoryFixture",
-    ]
+]
 
-from datetime import (
-    datetime,
-    timezone,
-    )
 import fnmatch
 import hashlib
 import json
-from pathlib import Path
 import re
-from urllib.parse import (
-    parse_qs,
-    unquote,
-    urlparse,
-    )
+from datetime import datetime, timezone
+from pathlib import Path
+from urllib.parse import parse_qs, unquote, urlparse
 
-from fixtures import Fixture
 import responses
+from fixtures import Fixture
 
 
 class FakeArtifactoryFixture(Fixture):
-
     def __init__(self, base_url, repository_name):
         self.base_url = base_url
         self.repository_name = repository_name
         self.repo_url = "%s/%s" % (base_url, self.repository_name)
         self.api_url = "%s/api/storage/%s" % (
-            self.base_url, self.repository_name)
+            self.base_url,
+            self.repository_name,
+        )
         self.search_url = "%s/api/search/aql" % self.base_url
         self._fs = {}
         self.add_dir("/")
 
     def _setUp(self):
         self.requests_mock = responses.RequestsMock(
-            assert_all_requests_are_fired=False)
+            assert_all_requests_are_fired=False
+        )
         self.requests_mock.start()
         self.addCleanup(self.requests_mock.stop)
         repo_url_regex = re.compile(r"^%s/.*" % re.escape(self.repo_url))
         api_url_regex = re.compile(r"^%s/.*" % re.escape(self.api_url))
-        self.requests_mock.add(responses.CallbackResponse(
-            method="GET", url=repo_url_regex, callback=self._handle_download,
-            stream=True))
+        self.requests_mock.add(
+            responses.CallbackResponse(
+                method="GET",
+                url=repo_url_regex,
+                callback=self._handle_download,
+                stream=True,
+            )
+        )
         self.requests_mock.add_callback(
-            "GET", api_url_regex, callback=self._handle_stat)
+            "GET", api_url_regex, callback=self._handle_stat
+        )
         self.requests_mock.add_callback(
-            "PUT", repo_url_regex, callback=self._handle_upload)
+            "PUT", repo_url_regex, callback=self._handle_upload
+        )
         self.requests_mock.add_callback(
-            "PUT", api_url_regex, callback=self._handle_set_properties)
+            "PUT", api_url_regex, callback=self._handle_set_properties
+        )
         self.requests_mock.add_callback(
-            "POST", self.search_url, callback=self._handle_aql)
+            "POST", self.search_url, callback=self._handle_aql
+        )
         self.requests_mock.add_callback(
-            "DELETE", repo_url_regex, callback=self._handle_delete)
+            "DELETE", repo_url_regex, callback=self._handle_delete
+        )
         self.requests_mock.add_callback(
-            "DELETE", api_url_regex, callback=self._handle_delete_properties)
+            "DELETE", api_url_regex, callback=self._handle_delete_properties
+        )
 
     def add_dir(self, path):
         now = datetime.now(timezone.utc).isoformat()
@@ -75,24 +81,26 @@ class FakeArtifactoryFixture(Fixture):
             "checksums": {"sha1": hashlib.sha1(body).hexdigest()},
             "body": body,
             "properties": properties,
-            }
+        }
 
     def remove_file(self, path):
         del self._fs[path]
 
     def _handle_download(self, request):
         """Handle a request to download an existing file."""
-        path = urlparse(request.url[len(self.repo_url):]).path
+        path = urlparse(request.url[len(self.repo_url) :]).path
         if path in self._fs and "size" in self._fs[path]:
             return (
-                200, {"Content-Type": "application/octet-stream"},
-                self._fs[path]["body"])
+                200,
+                {"Content-Type": "application/octet-stream"},
+                self._fs[path]["body"],
+            )
         else:
             return 404, {}, "Unable to find item"
 
     def _handle_stat(self, request):
         """Handle a request to stat an existing file."""
-        parsed_url = urlparse(request.url[len(self.api_url):])
+        parsed_url = urlparse(request.url[len(self.api_url) :])
         path = parsed_url.path
         if path in self._fs:
             stat = {"repo": self.repository_name, "path": path}
@@ -120,7 +128,7 @@ class FakeArtifactoryFixture(Fixture):
         # Artifactory and urlparse seem to disagree about how to parse URLs
         # where parameters contain the "/" character, so split the
         # parameters from the rest of the URL by hand.
-        url = request.url[len(self.repo_url):]
+        url = request.url[len(self.repo_url) :]
         if ";" in url:
             url, params = url.split(";", 1)
         else:
@@ -132,13 +140,16 @@ class FakeArtifactoryFixture(Fixture):
         elif path.rsplit("/", 1)[0] in self._fs:
             properties = self._decode_properties(params)
             self.add_file(
-                path, request.body,
-                int(request.headers["Content-Length"]), properties)
+                path,
+                request.body,
+                int(request.headers["Content-Length"]),
+                properties,
+            )
         return 201, {}, ""
 
     def _handle_set_properties(self, request):
         """Handle a request to set properties on an existing file."""
-        parsed_url = urlparse(request.url[len(self.api_url):])
+        parsed_url = urlparse(request.url[len(self.api_url) :])
         path = parsed_url.path
         if path in self._fs:
             query = parse_qs(parsed_url.query)
@@ -150,7 +161,7 @@ class FakeArtifactoryFixture(Fixture):
 
     def _handle_delete_properties(self, request):
         """Handle a request to delete properties from an existing file."""
-        parsed_url = urlparse(request.url[len(self.api_url):])
+        parsed_url = urlparse(request.url[len(self.api_url) :])
         path = parsed_url.path
         if path in self._fs:
             query = parse_qs(parsed_url.query)
@@ -169,10 +180,10 @@ class FakeArtifactoryFixture(Fixture):
             "name": path_obj.name,
             "properties": [
                 {"key": key, "value": v}
-                for key, value in sorted(
-                    self._fs[path]["properties"].items())
-                for v in value],
-            }
+                for key, value in sorted(self._fs[path]["properties"].items())
+                for v in value
+            ],
+        }
 
     def _matches_aql(self, item, criteria):
         """Return True if an item matches some AQL criteria.
@@ -200,7 +211,8 @@ class FakeArtifactoryFixture(Fixture):
                             return False
                     else:
                         raise ValueError(
-                            "Unhandled AQL comparison operator: %s" % key)
+                            "Unhandled AQL comparison operator: %s" % key
+                        )
                 elif isinstance(value, str):
                     if item[key] != value:
                         return False
@@ -216,20 +228,23 @@ class FakeArtifactoryFixture(Fixture):
         No, of course we don't implement a full AQL parser.
         """
         match = re.match(
-            r"^items\.find\((.*?)\)\.include\((.*?)\)$", request.body)
+            r"^items\.find\((.*?)\)\.include\((.*?)\)$", request.body
+        )
         if match is None:
             return 400, {}, ""
         # Treating this as JSON is cheating a bit, but it works.
         criteria = json.loads(match.group(1))
         items = [
             self._make_aql_item(path)
-            for path in sorted(self._fs) if "size" in self._fs[path]]
+            for path in sorted(self._fs)
+            if "size" in self._fs[path]
+        ]
         results = [item for item in items if self._matches_aql(item, criteria)]
         return 200, {}, json.dumps({"results": results})
 
     def _handle_delete(self, request):
         """Handle a request to delete an existing file."""
-        path = urlparse(request.url[len(self.repo_url):]).path
+        path = urlparse(request.url[len(self.repo_url) :]).path
         if not path.endswith("/") and path in self._fs:
             self.remove_file(path)
         return 200, {}, ""
diff --git a/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py b/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py
index 4c8463e..b1f0875 100644
--- a/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py
+++ b/lib/lp/archivepublisher/tests/test_archivegpgsigningkey.py
@@ -7,6 +7,7 @@ import os
 from textwrap import dedent
 from unittest import mock
 
+import treq
 from testtools.matchers import (
     Equals,
     FileContains,
@@ -14,24 +15,20 @@ from testtools.matchers import (
     MatchesStructure,
     Not,
     StartsWith,
-    )
+)
 from testtools.twistedsupport import (
     AsynchronousDeferredRunTest,
     AsynchronousDeferredRunTestForBrokenTwisted,
-    )
-import treq
-from twisted.internet import (
-    defer,
-    reactor,
-    )
+)
+from twisted.internet import defer, reactor
 from zope.component import getUtility
 
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
+    PUBLISHER_GPG_USES_SIGNING_SERVICE,
     IArchiveGPGSigningKey,
     ISignableArchive,
-    PUBLISHER_GPG_USES_SIGNING_SERVICE,
-    )
+)
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
 from lp.registry.interfaces.gpg import IGPGKeySet
@@ -40,20 +37,14 @@ from lp.services.gpg.interfaces import IGPGHandler
 from lp.services.gpg.tests.test_gpghandler import FakeGenerateKey
 from lp.services.log.logger import BufferLogger
 from lp.services.osutils import write_file
-from lp.services.signing.enums import (
-    SigningKeyType,
-    SigningMode,
-    )
+from lp.services.signing.enums import SigningKeyType, SigningMode
 from lp.services.signing.interfaces.signingkey import ISigningKeySet
 from lp.services.signing.tests.helpers import SigningServiceClientFixture
 from lp.services.twistedsupport.testing import TReqFixture
 from lp.services.twistedsupport.treq import check_status
 from lp.soyuz.enums import ArchivePurpose
 from lp.testing import TestCaseWithFactory
-from lp.testing.gpgkeys import (
-    gpgkeysdir,
-    test_pubkey_from_email,
-    )
+from lp.testing.gpgkeys import gpgkeysdir, test_pubkey_from_email
 from lp.testing.keyserver import InProcessKeyServerFixture
 from lp.testing.layers import ZopelessDatabaseLayer
 
@@ -69,17 +60,20 @@ class TestSignableArchiveWithSigningKey(TestCaseWithFactory):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
         self.archive_root = getPubConfig(self.archive).archiveroot
         self.suite = "distroseries"
 
         yield self.useFixture(InProcessKeyServerFixture()).start()
-        key_path = os.path.join(gpgkeysdir, 'ppa-sample@xxxxxxxxxxxxxxxxx')
+        key_path = os.path.join(gpgkeysdir, "ppa-sample@xxxxxxxxxxxxxxxxx")
         yield IArchiveGPGSigningKey(self.archive).setSigningKey(
-            key_path, async_keyserver=True)
+            key_path, async_keyserver=True
+        )
 
     def test_signFile_absolute_within_archive(self):
         filename = os.path.join(self.archive_root, "signme")
@@ -98,7 +92,8 @@ class TestSignableArchiveWithSigningKey(TestCaseWithFactory):
         signer = ISignableArchive(self.archive)
         self.assertTrue(signer.can_sign)
         self.assertRaises(
-            AssertionError, signer.signFile, self.suite, filename)
+            AssertionError, signer.signFile, self.suite, filename
+        )
 
     def test_signFile_relative_within_archive(self):
         filename_relative = "signme"
@@ -119,18 +114,22 @@ class TestSignableArchiveWithSigningKey(TestCaseWithFactory):
         signer = ISignableArchive(self.archive)
         self.assertTrue(signer.can_sign)
         self.assertRaises(
-            AssertionError, signer.signFile, self.suite, filename_relative)
+            AssertionError, signer.signFile, self.suite, filename_relative
+        )
 
     def test_signRepository_uses_signing_service(self):
         # If the appropriate feature rule is true, then we use the signing
         # service to sign files.
         self.useFixture(
-            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"}))
+            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"})
+        )
         signing_service_client = self.useFixture(
-            SigningServiceClientFixture(self.factory))
+            SigningServiceClientFixture(self.factory)
+        )
         self.factory.makeSigningKey(
             key_type=SigningKeyType.OPENPGP,
-            fingerprint=self.archive.signing_key_fingerprint)
+            fingerprint=self.archive.signing_key_fingerprint,
+        )
         logger = BufferLogger()
 
         suite_dir = os.path.join(self.archive_root, "dists", self.suite)
@@ -141,33 +140,49 @@ class TestSignableArchiveWithSigningKey(TestCaseWithFactory):
         self.assertTrue(signer.can_sign)
         self.assertContentEqual(
             ["Release.gpg", "InRelease"],
-            signer.signRepository(self.suite, log=logger))
+            signer.signRepository(self.suite, log=logger),
+        )
         self.assertEqual("", logger.getLogBuffer())
-        signing_service_client.sign.assert_has_calls([
-            mock.call(
-                SigningKeyType.OPENPGP, self.archive.signing_key_fingerprint,
-                "Release", b"Release contents", SigningMode.DETACHED),
-            mock.call(
-                SigningKeyType.OPENPGP, self.archive.signing_key_fingerprint,
-                "Release", b"Release contents", SigningMode.CLEAR),
-            ])
+        signing_service_client.sign.assert_has_calls(
+            [
+                mock.call(
+                    SigningKeyType.OPENPGP,
+                    self.archive.signing_key_fingerprint,
+                    "Release",
+                    b"Release contents",
+                    SigningMode.DETACHED,
+                ),
+                mock.call(
+                    SigningKeyType.OPENPGP,
+                    self.archive.signing_key_fingerprint,
+                    "Release",
+                    b"Release contents",
+                    SigningMode.CLEAR,
+                ),
+            ]
+        )
         self.assertThat(
             os.path.join(suite_dir, "Release.gpg"),
-            FileContains("signed with key_type=OPENPGP mode=DETACHED"))
+            FileContains("signed with key_type=OPENPGP mode=DETACHED"),
+        )
         self.assertThat(
             os.path.join(suite_dir, "InRelease"),
-            FileContains("signed with key_type=OPENPGP mode=CLEAR"))
+            FileContains("signed with key_type=OPENPGP mode=CLEAR"),
+        )
 
     def test_signRepository_falls_back_from_signing_service(self):
         # If the signing service fails to sign a file, we fall back to
         # making local signatures if possible.
         self.useFixture(
-            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"}))
+            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"})
+        )
         signing_service_client = self.useFixture(
-            SigningServiceClientFixture(self.factory))
+            SigningServiceClientFixture(self.factory)
+        )
         self.factory.makeSigningKey(
             key_type=SigningKeyType.OPENPGP,
-            fingerprint=self.archive.signing_key_fingerprint)
+            fingerprint=self.archive.signing_key_fingerprint,
+        )
         logger = BufferLogger()
 
         suite_dir = os.path.join(self.archive_root, "dists", self.suite)
@@ -179,21 +194,32 @@ class TestSignableArchiveWithSigningKey(TestCaseWithFactory):
         signing_service_client.sign.side_effect = Exception("boom")
         self.assertContentEqual(
             ["Release.gpg", "InRelease"],
-            signer.signRepository(self.suite, log=logger))
+            signer.signRepository(self.suite, log=logger),
+        )
         self.assertEqual(
             "ERROR Failed to sign archive using signing service; falling back "
-            "to local key\n", logger.getLogBuffer())
+            "to local key\n",
+            logger.getLogBuffer(),
+        )
         signing_service_client.sign.assert_called_once_with(
-            SigningKeyType.OPENPGP, self.archive.signing_key_fingerprint,
-            "Release", b"Release contents", SigningMode.DETACHED)
+            SigningKeyType.OPENPGP,
+            self.archive.signing_key_fingerprint,
+            "Release",
+            b"Release contents",
+            SigningMode.DETACHED,
+        )
         self.assertThat(
             os.path.join(suite_dir, "Release.gpg"),
             FileContains(
-                matcher=StartsWith("-----BEGIN PGP SIGNATURE-----\n")))
+                matcher=StartsWith("-----BEGIN PGP SIGNATURE-----\n")
+            ),
+        )
         self.assertThat(
             os.path.join(suite_dir, "InRelease"),
             FileContains(
-                matcher=StartsWith("-----BEGIN PGP SIGNED MESSAGE-----\n")))
+                matcher=StartsWith("-----BEGIN PGP SIGNED MESSAGE-----\n")
+            ),
+        )
 
 
 class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory):
@@ -205,22 +231,31 @@ class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
         self.archive_root = getPubConfig(self.archive).archiveroot
         self.suite = "distroseries"
         self.enableRunParts(distribution_name=self.distro.name)
-        with open(os.path.join(
-                self.parts_directory, self.distro.name, "sign.d",
-                "10-sign"), "w") as sign_script:
-            sign_script.write(dedent("""\
+        with open(
+            os.path.join(
+                self.parts_directory, self.distro.name, "sign.d", "10-sign"
+            ),
+            "w",
+        ) as sign_script:
+            sign_script.write(
+                dedent(
+                    """\
                 #! /bin/sh
                 echo "$MODE signature of $INPUT_PATH" \\
                      "($ARCHIVEROOT, $DISTRIBUTION/$SUITE)" \\
                     >"$OUTPUT_PATH"
-                """))
+                """
+                )
+            )
             os.fchmod(sign_script.fileno(), 0o755)
 
     def test_signRepository_runs_parts(self):
@@ -231,20 +266,33 @@ class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory):
         signer = ISignableArchive(self.archive)
         self.assertTrue(signer.can_sign)
         self.assertContentEqual(
-            ["Release.gpg", "InRelease"], signer.signRepository(self.suite))
+            ["Release.gpg", "InRelease"], signer.signRepository(self.suite)
+        )
 
         self.assertThat(
             os.path.join(suite_dir, "Release.gpg"),
             FileContains(
-                "detached signature of %s (%s, %s/%s)\n" %
-                (release_path, self.archive_root, self.distro.name,
-                 self.suite)))
+                "detached signature of %s (%s, %s/%s)\n"
+                % (
+                    release_path,
+                    self.archive_root,
+                    self.distro.name,
+                    self.suite,
+                )
+            ),
+        )
         self.assertThat(
             os.path.join(suite_dir, "InRelease"),
             FileContains(
-                "clear signature of %s (%s, %s/%s)\n" %
-                (release_path, self.archive_root, self.distro.name,
-                 self.suite)))
+                "clear signature of %s (%s, %s/%s)\n"
+                % (
+                    release_path,
+                    self.archive_root,
+                    self.distro.name,
+                    self.suite,
+                )
+            ),
+        )
 
     def test_signRepository_honours_pubconf(self):
         pubconf = getPubConfig(self.archive)
@@ -258,20 +306,33 @@ class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory):
         self.assertRaises(AssertionError, signer.signRepository, self.suite)
         self.assertContentEqual(
             ["Release.gpg", "InRelease"],
-            signer.signRepository(self.suite, pubconf=pubconf))
+            signer.signRepository(self.suite, pubconf=pubconf),
+        )
 
         self.assertThat(
             os.path.join(suite_dir, "Release.gpg"),
             FileContains(
-                "detached signature of %s (%s, %s/%s)\n" %
-                (release_path, self.archive_root, self.distro.name,
-                 self.suite)))
+                "detached signature of %s (%s, %s/%s)\n"
+                % (
+                    release_path,
+                    self.archive_root,
+                    self.distro.name,
+                    self.suite,
+                )
+            ),
+        )
         self.assertThat(
             os.path.join(suite_dir, "InRelease"),
             FileContains(
-                "clear signature of %s (%s, %s/%s)\n" %
-                (release_path, self.archive_root, self.distro.name,
-                 self.suite)))
+                "clear signature of %s (%s, %s/%s)\n"
+                % (
+                    release_path,
+                    self.archive_root,
+                    self.distro.name,
+                    self.suite,
+                )
+            ),
+        )
 
     def test_signFile_runs_parts(self):
         filename = os.path.join(self.archive_root, "signme")
@@ -284,8 +345,10 @@ class TestSignableArchiveWithRunParts(RunPartsMixin, TestCaseWithFactory):
         self.assertThat(
             "%s.gpg" % filename,
             FileContains(
-                "detached signature of %s (%s, %s/%s)\n" %
-                (filename, self.archive_root, self.distro.name, self.suite)))
+                "detached signature of %s (%s, %s/%s)\n"
+                % (filename, self.archive_root, self.distro.name, self.suite)
+            ),
+        )
 
 
 class TestArchiveGPGSigningKey(TestCaseWithFactory):
@@ -295,7 +358,8 @@ class TestArchiveGPGSigningKey(TestCaseWithFactory):
     # to the test, so we need to spin the reactor at the end to finish
     # things off.
     run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory(
-        timeout=10000)
+        timeout=10000
+    )
 
     @defer.inlineCallbacks
     def setUp(self):
@@ -313,25 +377,33 @@ class TestArchiveGPGSigningKey(TestCaseWithFactory):
         logger = BufferLogger()
         # Use a display name that matches the pregenerated sample key.
         owner = self.factory.makePerson(
-            displayname="Celso \xe1\xe9\xed\xf3\xfa Providelo")
+            displayname="Celso \xe1\xe9\xed\xf3\xfa Providelo"
+        )
         archive = self.factory.makeArchive(owner=owner)
         yield IArchiveGPGSigningKey(archive).generateSigningKey(
-            log=logger, async_keyserver=True)
+            log=logger, async_keyserver=True
+        )
         # The key is stored in the database.
         self.assertIsNotNone(archive.signing_key_owner)
         self.assertIsNotNone(archive.signing_key_fingerprint)
         # The key is stored as a GPGKey, not a SigningKey.
         self.assertIsNotNone(
             getUtility(IGPGKeySet).getByFingerprint(
-                archive.signing_key_fingerprint))
+                archive.signing_key_fingerprint
+            )
+        )
         self.assertIsNone(
             getUtility(ISigningKeySet).get(
-                SigningKeyType.OPENPGP, archive.signing_key_fingerprint))
+                SigningKeyType.OPENPGP, archive.signing_key_fingerprint
+            )
+        )
         # The key is uploaded to the keyserver.
         client = self.useFixture(TReqFixture(reactor)).client
         response = yield client.get(
             getUtility(IGPGHandler).getURLForKeyInServer(
-                archive.signing_key_fingerprint, "get"))
+                archive.signing_key_fingerprint, "get"
+            )
+        )
         yield check_status(response)
         content = yield treq.content(response)
         self.assertIn(b"-----BEGIN PGP PUBLIC KEY BLOCK-----\n", content)
@@ -345,59 +417,81 @@ class TestArchiveGPGSigningKey(TestCaseWithFactory):
         logger = BufferLogger()
         # Use a display name that matches the pregenerated sample key.
         owner = self.factory.makePerson(
-            displayname="Celso \xe1\xe9\xed\xf3\xfa Providelo")
+            displayname="Celso \xe1\xe9\xed\xf3\xfa Providelo"
+        )
         default_ppa = self.factory.makeArchive(owner=owner)
         another_ppa = self.factory.makeArchive(owner=owner)
         yield IArchiveGPGSigningKey(another_ppa).generateSigningKey(
-            log=logger, async_keyserver=True)
-        self.assertThat(default_ppa, MatchesStructure(
-            signing_key=Not(Is(None)),
-            signing_key_owner=Not(Is(None)),
-            signing_key_fingerprint=Not(Is(None))))
+            log=logger, async_keyserver=True
+        )
+        self.assertThat(
+            default_ppa,
+            MatchesStructure(
+                signing_key=Not(Is(None)),
+                signing_key_owner=Not(Is(None)),
+                signing_key_fingerprint=Not(Is(None)),
+            ),
+        )
         self.assertIsNotNone(
             getUtility(IGPGKeySet).getByFingerprint(
-                default_ppa.signing_key_fingerprint))
+                default_ppa.signing_key_fingerprint
+            )
+        )
         self.assertIsNone(
             getUtility(ISigningKeySet).get(
-                SigningKeyType.OPENPGP, default_ppa.signing_key_fingerprint))
-        self.assertThat(another_ppa, MatchesStructure.byEquality(
-            signing_key=default_ppa.signing_key,
-            signing_key_owner=default_ppa.signing_key_owner,
-            signing_key_fingerprint=default_ppa.signing_key_fingerprint))
+                SigningKeyType.OPENPGP, default_ppa.signing_key_fingerprint
+            )
+        )
+        self.assertThat(
+            another_ppa,
+            MatchesStructure.byEquality(
+                signing_key=default_ppa.signing_key,
+                signing_key_owner=default_ppa.signing_key_owner,
+                signing_key_fingerprint=default_ppa.signing_key_fingerprint,
+            ),
+        )
 
     @defer.inlineCallbacks
     def test_generateSigningKey_signing_service(self):
         # Generating a signing key on the signing service stores it in the
         # database and pushes it to the keyserver.
         self.useFixture(
-            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"}))
+            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"})
+        )
         signing_service_client = self.useFixture(
-            SigningServiceClientFixture(self.factory))
+            SigningServiceClientFixture(self.factory)
+        )
         signing_service_client.generate.side_effect = None
         test_key = test_pubkey_from_email("ftpmaster@xxxxxxxxxxxxx")
         signing_service_client.generate.return_value = {
             "fingerprint": "33C0A61893A5DC5EB325B29E415A12CAC2F30234",
             "public-key": test_key,
-            }
+        }
         logger = BufferLogger()
         archive = self.factory.makeArchive()
         yield IArchiveGPGSigningKey(archive).generateSigningKey(
-            log=logger, async_keyserver=True)
+            log=logger, async_keyserver=True
+        )
         # The key is stored in the database.
         self.assertIsNotNone(archive.signing_key_owner)
         self.assertIsNotNone(archive.signing_key_fingerprint)
         # The key is stored as a SigningKey, not a GPGKey.
         self.assertIsNone(
             getUtility(IGPGKeySet).getByFingerprint(
-                archive.signing_key_fingerprint))
+                archive.signing_key_fingerprint
+            )
+        )
         signing_key = getUtility(ISigningKeySet).get(
-            SigningKeyType.OPENPGP, archive.signing_key_fingerprint)
+            SigningKeyType.OPENPGP, archive.signing_key_fingerprint
+        )
         self.assertEqual(test_key, signing_key.public_key)
         # The key is uploaded to the keyserver.
         client = self.useFixture(TReqFixture(reactor)).client
         response = yield client.get(
             getUtility(IGPGHandler).getURLForKeyInServer(
-                archive.signing_key_fingerprint, "get"))
+                archive.signing_key_fingerprint, "get"
+            )
+        )
         yield check_status(response)
         content = yield treq.content(response)
         self.assertIn(test_key, content)
@@ -408,32 +502,47 @@ class TestArchiveGPGSigningKey(TestCaseWithFactory):
         # PPA generates one for the user's default PPA first and then
         # propagates it.
         self.useFixture(
-            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"}))
+            FeatureFixture({PUBLISHER_GPG_USES_SIGNING_SERVICE: "on"})
+        )
         signing_service_client = self.useFixture(
-            SigningServiceClientFixture(self.factory))
+            SigningServiceClientFixture(self.factory)
+        )
         signing_service_client.generate.side_effect = None
         test_key = test_pubkey_from_email("ftpmaster@xxxxxxxxxxxxx")
         signing_service_client.generate.return_value = {
             "fingerprint": "33C0A61893A5DC5EB325B29E415A12CAC2F30234",
             "public-key": test_key,
-            }
+        }
         logger = BufferLogger()
         default_ppa = self.factory.makeArchive()
         another_ppa = self.factory.makeArchive(owner=default_ppa.owner)
         yield IArchiveGPGSigningKey(another_ppa).generateSigningKey(
-            log=logger, async_keyserver=True)
-        self.assertThat(default_ppa, MatchesStructure(
-            signing_key=Is(None),
-            signing_key_owner=Not(Is(None)),
-            signing_key_fingerprint=Not(Is(None))))
+            log=logger, async_keyserver=True
+        )
+        self.assertThat(
+            default_ppa,
+            MatchesStructure(
+                signing_key=Is(None),
+                signing_key_owner=Not(Is(None)),
+                signing_key_fingerprint=Not(Is(None)),
+            ),
+        )
         self.assertIsNone(
             getUtility(IGPGKeySet).getByFingerprint(
-                default_ppa.signing_key_fingerprint))
+                default_ppa.signing_key_fingerprint
+            )
+        )
         signing_key = getUtility(ISigningKeySet).get(
-            SigningKeyType.OPENPGP, default_ppa.signing_key_fingerprint)
+            SigningKeyType.OPENPGP, default_ppa.signing_key_fingerprint
+        )
         self.assertEqual(test_key, signing_key.public_key)
-        self.assertThat(another_ppa, MatchesStructure(
-            signing_key=Is(None),
-            signing_key_owner=Equals(default_ppa.signing_key_owner),
-            signing_key_fingerprint=Equals(
-                default_ppa.signing_key_fingerprint)))
+        self.assertThat(
+            another_ppa,
+            MatchesStructure(
+                signing_key=Is(None),
+                signing_key_owner=Equals(default_ppa.signing_key_owner),
+                signing_key_fingerprint=Equals(
+                    default_ppa.signing_key_fingerprint
+                ),
+            ),
+        )
diff --git a/lib/lp/archivepublisher/tests/test_artifactory.py b/lib/lp/archivepublisher/tests/test_artifactory.py
index fea99fb..3edb76b 100644
--- a/lib/lp/archivepublisher/tests/test_artifactory.py
+++ b/lib/lp/archivepublisher/tests/test_artifactory.py
@@ -5,44 +5,38 @@
 
 from pathlib import PurePath
 
-from artifactory import ArtifactoryPath
 import transaction
+from artifactory import ArtifactoryPath
 from zope.component import getUtility
 
 from lp.archivepublisher.artifactory import ArtifactoryPool
 from lp.archivepublisher.tests.artifactory_fixture import (
     FakeArtifactoryFixture,
-    )
+)
 from lp.archivepublisher.tests.test_pool import (
     FakeArchive,
     FakePackageReleaseFile,
     FakeReleaseType,
     PoolTestingFile,
-    )
+)
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.sourcepackage import (
     SourcePackageFileType,
     SourcePackageType,
-    )
+)
 from lp.services.log.logger import BufferLogger
 from lp.soyuz.enums import (
     ArchivePurpose,
     ArchiveRepositoryFormat,
     BinaryPackageFileType,
     BinaryPackageFormat,
-    )
+)
 from lp.soyuz.interfaces.publishing import (
     IPublishingSet,
     PoolFileOverwriteError,
-    )
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    BaseLayer,
-    LaunchpadZopelessLayer,
-    )
+)
+from lp.testing import TestCase, TestCaseWithFactory
+from lp.testing.layers import BaseLayer, LaunchpadZopelessLayer
 
 
 class ArtifactoryPoolTestingFile(PoolTestingFile):
@@ -69,7 +63,8 @@ class ArtifactoryPoolTestingFile(PoolTestingFile):
 
     def getProperties(self):
         path = self.pool.pathFor(
-            None, self.source_name, self.source_version, self.pub_file)
+            None, self.source_name, self.source_version, self.pub_file
+        )
         return path.properties
 
 
@@ -82,7 +77,8 @@ class TestArtifactoryPool(TestCase):
         self.base_url = "https://foo.example.com/artifactory";
         self.repository_name = "repository"
         self.artifactory = self.useFixture(
-            FakeArtifactoryFixture(self.base_url, self.repository_name))
+            FakeArtifactoryFixture(self.base_url, self.repository_name)
+        )
 
     def makePool(self, repository_format=ArchiveRepositoryFormat.DEBIAN):
         # Matches behaviour of lp.archivepublisher.config.getPubConfig.
@@ -90,7 +86,8 @@ class TestArtifactoryPool(TestCase):
         if repository_format == ArchiveRepositoryFormat.DEBIAN:
             root_url += "/pool"
         return ArtifactoryPool(
-            FakeArchive(repository_format), root_url, BufferLogger())
+            FakeArchive(repository_format), root_url, BufferLogger()
+        )
 
     def test_pathFor_debian_with_file(self):
         pool = self.makePool()
@@ -98,8 +95,10 @@ class TestArtifactoryPool(TestCase):
         self.assertEqual(
             ArtifactoryPath(
                 "https://foo.example.com/artifactory/repository/pool/f/foo/";
-                "foo-1.0.deb"),
-            pool.pathFor(None, "foo", "1.0", pub_file))
+                "foo-1.0.deb"
+            ),
+            pool.pathFor(None, "foo", "1.0", pub_file),
+        )
 
     def test_pathFor_python_with_file(self):
         pool = self.makePool(ArchiveRepositoryFormat.PYTHON)
@@ -107,26 +106,36 @@ class TestArtifactoryPool(TestCase):
         self.assertEqual(
             ArtifactoryPath(
                 "https://foo.example.com/artifactory/repository/foo/1.0/";
-                "foo-1.0.whl"),
-            pool.pathFor(None, "foo", "1.0", pub_file))
+                "foo-1.0.whl"
+            ),
+            pool.pathFor(None, "foo", "1.0", pub_file),
+        )
 
     def test_pathFor_conda_with_file(self):
         pool = self.makePool(ArchiveRepositoryFormat.CONDA)
         pub_file = FakePackageReleaseFile(
-            b"foo", "foo-1.0.tar.bz2",
-            user_defined_fields=[("subdir", "linux-64")])
+            b"foo",
+            "foo-1.0.tar.bz2",
+            user_defined_fields=[("subdir", "linux-64")],
+        )
         self.assertEqual(
             ArtifactoryPath(
                 "https://foo.example.com/artifactory/repository/linux-64/";
-                "foo-1.0.tar.bz2"),
-            pool.pathFor(None, "foo", "1.0", pub_file))
+                "foo-1.0.tar.bz2"
+            ),
+            pool.pathFor(None, "foo", "1.0", pub_file),
+        )
 
     def test_addFile(self):
         pool = self.makePool()
         foo = ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.0",
-            filename="foo-1.0.deb", release_type=FakeReleaseType.BINARY,
-            release_id=1)
+            pool=pool,
+            source_name="foo",
+            source_version="1.0",
+            filename="foo-1.0.deb",
+            release_type=FakeReleaseType.BINARY,
+            release_id=1,
+        )
         self.assertFalse(foo.checkIsFile())
         result = foo.addToPool()
         self.assertEqual(pool.results.FILE_ADDED, result)
@@ -136,15 +145,20 @@ class TestArtifactoryPool(TestCase):
                 "launchpad.release-id": ["binary:1"],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            foo.getProperties())
+            },
+            foo.getProperties(),
+        )
 
     def test_addFile_exists_identical(self):
         pool = self.makePool()
         foo = ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.0",
-            filename="foo-1.0.deb", release_type=FakeReleaseType.BINARY,
-            release_id=1)
+            pool=pool,
+            source_name="foo",
+            source_version="1.0",
+            filename="foo-1.0.deb",
+            release_type=FakeReleaseType.BINARY,
+            release_id=1,
+        )
         foo.addToPool()
         self.assertTrue(foo.checkIsFile())
         result = foo.addToPool()
@@ -154,9 +168,13 @@ class TestArtifactoryPool(TestCase):
     def test_addFile_exists_overwrite(self):
         pool = self.makePool()
         foo = ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.0",
-            filename="foo-1.0.deb", release_type=FakeReleaseType.BINARY,
-            release_id=1)
+            pool=pool,
+            source_name="foo",
+            source_version="1.0",
+            filename="foo-1.0.deb",
+            release_type=FakeReleaseType.BINARY,
+            release_id=1,
+        )
         foo.addToPool()
         self.assertTrue(foo.checkIsFile())
         foo.pub_file.libraryfile.contents = b"different"
@@ -165,8 +183,11 @@ class TestArtifactoryPool(TestCase):
     def test_removeFile(self):
         pool = self.makePool()
         foo = ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.0",
-            filename="foo-1.0.deb")
+            pool=pool,
+            source_name="foo",
+            source_version="1.0",
+            filename="foo-1.0.deb",
+        )
         foo.addToPool()
         self.assertTrue(foo.checkIsFile())
         size = foo.removeFromPool()
@@ -183,14 +204,15 @@ class TestArtifactoryPool(TestCase):
                 "*.dsc",
                 "*.tar.*",
                 "*.udeb",
-                ],
-            pool.getArtifactPatterns(ArchiveRepositoryFormat.DEBIAN))
+            ],
+            pool.getArtifactPatterns(ArchiveRepositoryFormat.DEBIAN),
+        )
 
     def test_getArtifactPatterns_python(self):
         pool = self.makePool()
         self.assertEqual(
-            ["*.whl"],
-            pool.getArtifactPatterns(ArchiveRepositoryFormat.PYTHON))
+            ["*.whl"], pool.getArtifactPatterns(ArchiveRepositoryFormat.PYTHON)
+        )
 
     def test_getArtifactPatterns_conda(self):
         pool = self.makePool()
@@ -199,7 +221,7 @@ class TestArtifactoryPool(TestCase):
                 "*.tar.bz2",
                 "*.conda",
             ],
-            pool.getArtifactPatterns(ArchiveRepositoryFormat.CONDA)
+            pool.getArtifactPatterns(ArchiveRepositoryFormat.CONDA),
         )
 
     def test_getAllArtifacts(self):
@@ -210,46 +232,66 @@ class TestArtifactoryPool(TestCase):
         # correctly.
         pool = self.makePool()
         ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.0",
-            filename="foo-1.0.deb", release_type=FakeReleaseType.BINARY,
-            release_id=1).addToPool()
+            pool=pool,
+            source_name="foo",
+            source_version="1.0",
+            filename="foo-1.0.deb",
+            release_type=FakeReleaseType.BINARY,
+            release_id=1,
+        ).addToPool()
         ArtifactoryPoolTestingFile(
-            pool=pool, source_name="foo", source_version="1.1",
-            filename="foo-1.1.deb", release_type=FakeReleaseType.BINARY,
-            release_id=2).addToPool()
+            pool=pool,
+            source_name="foo",
+            source_version="1.1",
+            filename="foo-1.1.deb",
+            release_type=FakeReleaseType.BINARY,
+            release_id=2,
+        ).addToPool()
         ArtifactoryPoolTestingFile(
-            pool=pool, source_name="bar", source_version="1.0",
-            filename="bar-1.0.whl", release_type=FakeReleaseType.BINARY,
-            release_id=3).addToPool()
+            pool=pool,
+            source_name="bar",
+            source_version="1.0",
+            filename="bar-1.0.whl",
+            release_type=FakeReleaseType.BINARY,
+            release_id=3,
+        ).addToPool()
         ArtifactoryPoolTestingFile(
-            pool=pool, source_name="qux", source_version="1.0",
-            filename="qux-1.0.conda", release_type=FakeReleaseType.BINARY,
-            release_id=4).addToPool()
+            pool=pool,
+            source_name="qux",
+            source_version="1.0",
+            filename="qux-1.0.conda",
+            release_type=FakeReleaseType.BINARY,
+            release_id=4,
+        ).addToPool()
         self.assertEqual(
             {
                 PurePath("pool/f/foo/foo-1.0.deb"): {
                     "launchpad.release-id": ["binary:1"],
                     "launchpad.source-name": ["foo"],
                     "launchpad.source-version": ["1.0"],
-                    },
+                },
                 PurePath("pool/f/foo/foo-1.1.deb"): {
                     "launchpad.release-id": ["binary:2"],
                     "launchpad.source-name": ["foo"],
                     "launchpad.source-version": ["1.1"],
-                    },
                 },
+            },
             pool.getAllArtifacts(
-                self.repository_name, ArchiveRepositoryFormat.DEBIAN))
+                self.repository_name, ArchiveRepositoryFormat.DEBIAN
+            ),
+        )
         self.assertEqual(
             {
                 PurePath("pool/b/bar/bar-1.0.whl"): {
                     "launchpad.release-id": ["binary:3"],
                     "launchpad.source-name": ["bar"],
                     "launchpad.source-version": ["1.0"],
-                    },
                 },
+            },
             pool.getAllArtifacts(
-                self.repository_name, ArchiveRepositoryFormat.PYTHON))
+                self.repository_name, ArchiveRepositoryFormat.PYTHON
+            ),
+        )
         self.assertEqual(
             {
                 PurePath("pool/q/qux/qux-1.0.conda"): {
@@ -259,8 +301,9 @@ class TestArtifactoryPool(TestCase):
                 },
             },
             pool.getAllArtifacts(
-                self.repository_name, ArchiveRepositoryFormat.CONDA))
-
+                self.repository_name, ArchiveRepositoryFormat.CONDA
+            ),
+        )
 
 
 class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
@@ -272,7 +315,8 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
         self.base_url = "https://foo.example.com/artifactory"
         self.repository_name = "repository"
         self.artifactory = self.useFixture(
-            FakeArtifactoryFixture(self.base_url, self.repository_name))
+            FakeArtifactoryFixture(self.base_url, self.repository_name)
+        )
 
     def makePool(self, repository_format=ArchiveRepositoryFormat.DEBIAN):
         # Matches behaviour of lp.archivepublisher.config.getPubConfig.
@@ -280,28 +324,38 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
         if repository_format == ArchiveRepositoryFormat.DEBIAN:
             root_url += "/pool"
         archive = self.factory.makeArchive(
-            purpose=ArchivePurpose.PPA, repository_format=repository_format)
+            purpose=ArchivePurpose.PPA, repository_format=repository_format
+        )
         return ArtifactoryPool(archive, root_url, BufferLogger())
 
     def test_updateProperties_debian_source(self):
         pool = self.makePool()
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         spph = self.factory.makeSourcePackagePublishingHistory(
-            archive=pool.archive, distroseries=dses[0],
-            pocket=PackagePublishingPocket.RELEASE, component="main",
-            sourcepackagename="foo", version="1.0")
+            archive=pool.archive,
+            distroseries=dses[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            component="main",
+            sourcepackagename="foo",
+            version="1.0",
+        )
         spr = spph.sourcepackagerelease
         sprf = self.factory.makeSourcePackageReleaseFile(
             sourcepackagerelease=spr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo_1.0.dsc"),
-            filetype=SourcePackageFileType.DSC)
+                filename="foo_1.0.dsc"
+            ),
+            filetype=SourcePackageFileType.DSC,
+        )
         spphs = [spph]
-        spphs.append(spph.copyTo(
-            dses[1], PackagePublishingPocket.RELEASE, pool.archive))
+        spphs.append(
+            spph.copyTo(dses[1], PackagePublishingPocket.RELEASE, pool.archive)
+        )
         transaction.commit()
         pool.addFile(None, spr.name, spr.version, sprf)
         path = pool.rootpath / "f" / "foo" / "foo_1.0.dsc"
@@ -312,8 +366,9 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.release-id": ["source:%d" % spr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(spr.name, spr.version, sprf, spphs)
         self.assertEqual(
             {
@@ -322,41 +377,58 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-version": ["1.0"],
                 "deb.distribution": list(sorted(ds.name for ds in dses)),
                 "deb.component": ["main"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
 
     def test_updateProperties_debian_binary_multiple_series(self):
         pool = self.makePool()
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         processor = self.factory.makeProcessor()
         dases = [
             self.factory.makeDistroArchSeries(
-                distroseries=ds, architecturetag=processor.name)
-            for ds in dses]
+                distroseries=ds, architecturetag=processor.name
+            )
+            for ds in dses
+        ]
         spr = self.factory.makeSourcePackageRelease(
-            archive=pool.archive, sourcepackagename="foo", version="1.0")
+            archive=pool.archive, sourcepackagename="foo", version="1.0"
+        )
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            archive=pool.archive, distroarchseries=dases[0],
-            pocket=PackagePublishingPocket.RELEASE, component="main",
-            source_package_release=spr, binarypackagename="foo",
-            architecturespecific=True)
+            archive=pool.archive,
+            distroarchseries=dases[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            component="main",
+            source_package_release=spr,
+            binarypackagename="foo",
+            architecturespecific=True,
+        )
         bpr = bpph.binarypackagerelease
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo_1.0_%s.deb" % processor.name),
-            filetype=BinaryPackageFileType.DEB)
+                filename="foo_1.0_%s.deb" % processor.name
+            ),
+            filetype=BinaryPackageFileType.DEB,
+        )
         bpphs = [bpph]
-        bpphs.append(bpph.copyTo(
-            dses[1], PackagePublishingPocket.RELEASE, pool.archive)[0])
+        bpphs.append(
+            bpph.copyTo(
+                dses[1], PackagePublishingPocket.RELEASE, pool.archive
+            )[0]
+        )
         transaction.commit()
         pool.addFile(
-            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
+            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf
+        )
         path = (
-            pool.rootpath / "f" / "foo" / ("foo_1.0_%s.deb" % processor.name))
+            pool.rootpath / "f" / "foo" / ("foo_1.0_%s.deb" % processor.name)
+        )
         self.assertTrue(path.exists())
         self.assertFalse(path.is_symlink())
         self.assertEqual(
@@ -364,10 +436,12 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.release-id": ["binary:%d" % bpr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(
-            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
+            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs
+        )
         self.assertEqual(
             {
                 "launchpad.release-id": ["binary:%d" % bpr.id],
@@ -376,35 +450,51 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "deb.distribution": list(sorted(ds.name for ds in dses)),
                 "deb.component": ["main"],
                 "deb.architecture": [processor.name],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
 
     def test_updateProperties_debian_binary_multiple_architectures(self):
         pool = self.makePool()
         ds = self.factory.makeDistroSeries(
-            distribution=pool.archive.distribution)
+            distribution=pool.archive.distribution
+        )
         dases = [
             self.factory.makeDistroArchSeries(distroseries=ds)
-            for _ in range(2)]
+            for _ in range(2)
+        ]
         spr = self.factory.makeSourcePackageRelease(
-            archive=pool.archive, sourcepackagename="foo", version="1.0")
+            archive=pool.archive, sourcepackagename="foo", version="1.0"
+        )
         bpb = self.factory.makeBinaryPackageBuild(
-            archive=pool.archive, source_package_release=spr,
-            distroarchseries=dases[0], pocket=PackagePublishingPocket.RELEASE)
+            archive=pool.archive,
+            source_package_release=spr,
+            distroarchseries=dases[0],
+            pocket=PackagePublishingPocket.RELEASE,
+        )
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo", build=bpb, component="main",
-            architecturespecific=False)
+            binarypackagename="foo",
+            build=bpb,
+            component="main",
+            architecturespecific=False,
+        )
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo_1.0_all.deb"),
-            filetype=BinaryPackageFileType.DEB)
+                filename="foo_1.0_all.deb"
+            ),
+            filetype=BinaryPackageFileType.DEB,
+        )
         bpphs = getUtility(IPublishingSet).publishBinaries(
-            pool.archive, ds, PackagePublishingPocket.RELEASE,
-            {bpr: (bpr.component, bpr.section, bpr.priority, None)})
+            pool.archive,
+            ds,
+            PackagePublishingPocket.RELEASE,
+            {bpr: (bpr.component, bpr.section, bpr.priority, None)},
+        )
         transaction.commit()
         pool.addFile(
-            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
+            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf
+        )
         path = pool.rootpath / "f" / "foo" / "foo_1.0_all.deb"
         self.assertTrue(path.exists())
         self.assertFalse(path.is_symlink())
@@ -413,10 +503,12 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.release-id": ["binary:%d" % bpr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(
-            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
+            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs
+        )
         self.assertEqual(
             {
                 "launchpad.release-id": ["binary:%d" % bpr.id],
@@ -424,31 +516,43 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-version": ["1.0"],
                 "deb.distribution": [ds.name],
                 "deb.component": ["main"],
-                "deb.architecture": list(sorted(
-                    das.architecturetag for das in dases)),
-                },
-            path.properties)
+                "deb.architecture": list(
+                    sorted(das.architecturetag for das in dases)
+                ),
+            },
+            path.properties,
+        )
 
     def test_updateProperties_python_sdist(self):
         pool = self.makePool(ArchiveRepositoryFormat.PYTHON)
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         spph = self.factory.makeSourcePackagePublishingHistory(
-            archive=pool.archive, distroseries=dses[0],
-            pocket=PackagePublishingPocket.RELEASE, component="main",
-            sourcepackagename="foo", version="1.0", channel="edge",
-            format=SourcePackageType.SDIST)
+            archive=pool.archive,
+            distroseries=dses[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            component="main",
+            sourcepackagename="foo",
+            version="1.0",
+            channel="edge",
+            format=SourcePackageType.SDIST,
+        )
         spr = spph.sourcepackagerelease
         sprf = self.factory.makeSourcePackageReleaseFile(
             sourcepackagerelease=spr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo-1.0.tar.gz"),
-            filetype=SourcePackageFileType.SDIST)
+                filename="foo-1.0.tar.gz"
+            ),
+            filetype=SourcePackageFileType.SDIST,
+        )
         spphs = [spph]
-        spphs.append(spph.copyTo(
-            dses[1], PackagePublishingPocket.RELEASE, pool.archive))
+        spphs.append(
+            spph.copyTo(dses[1], PackagePublishingPocket.RELEASE, pool.archive)
+        )
         transaction.commit()
         pool.addFile(None, spr.name, spr.version, sprf)
         path = pool.rootpath / "foo" / "1.0" / "foo-1.0.tar.gz"
@@ -459,8 +563,9 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.release-id": ["source:%d" % spr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(spr.name, spr.version, sprf, spphs)
         self.assertEqual(
             {
@@ -468,44 +573,66 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "launchpad.channel": list(
-                    sorted("%s:edge" % ds.name for ds in dses)),
-                },
-            path.properties)
+                    sorted("%s:edge" % ds.name for ds in dses)
+                ),
+            },
+            path.properties,
+        )
 
     def test_updateProperties_python_wheel(self):
         pool = self.makePool(ArchiveRepositoryFormat.PYTHON)
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         processor = self.factory.makeProcessor()
         dases = [
             self.factory.makeDistroArchSeries(
-                distroseries=ds, architecturetag=processor.name)
-            for ds in dses]
+                distroseries=ds, architecturetag=processor.name
+            )
+            for ds in dses
+        ]
         spr = self.factory.makeSourcePackageRelease(
-            archive=pool.archive, sourcepackagename="foo", version="1.0",
-            format=SourcePackageType.SDIST)
+            archive=pool.archive,
+            sourcepackagename="foo",
+            version="1.0",
+            format=SourcePackageType.SDIST,
+        )
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            archive=pool.archive, distroarchseries=dases[0],
-            pocket=PackagePublishingPocket.RELEASE, component="main",
-            source_package_release=spr, binarypackagename="foo",
+            archive=pool.archive,
+            distroarchseries=dases[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            component="main",
+            source_package_release=spr,
+            binarypackagename="foo",
             binpackageformat=BinaryPackageFormat.WHL,
-            architecturespecific=False, channel="edge")
+            architecturespecific=False,
+            channel="edge",
+        )
         bpr = bpph.binarypackagerelease
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo-1.0-py3-none-any.whl"),
-            filetype=BinaryPackageFileType.WHL)
+                filename="foo-1.0-py3-none-any.whl"
+            ),
+            filetype=BinaryPackageFileType.WHL,
+        )
         bpphs = [bpph]
         bpphs.append(
             getUtility(IPublishingSet).copyBinaries(
-                pool.archive, dses[1], PackagePublishingPocket.RELEASE, [bpph],
-                channel="edge")[0])
+                pool.archive,
+                dses[1],
+                PackagePublishingPocket.RELEASE,
+                [bpph],
+                channel="edge",
+            )[0]
+        )
         transaction.commit()
         pool.addFile(
-            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
+            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf
+        )
         path = pool.rootpath / "foo" / "1.0" / "foo-1.0-py3-none-any.whl"
         self.assertTrue(path.exists())
         self.assertFalse(path.is_symlink())
@@ -514,51 +641,73 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.release-id": ["binary:%d" % bpr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(
-            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
+            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs
+        )
         self.assertEqual(
             {
                 "launchpad.release-id": ["binary:%d" % bpr.id],
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "launchpad.channel": list(
-                    sorted("%s:edge" % ds.name for ds in dses)),
-                },
-            path.properties)
+                    sorted("%s:edge" % ds.name for ds in dses)
+                ),
+            },
+            path.properties,
+        )
 
     def test_updateProperties_conda_v1(self):
         pool = self.makePool(ArchiveRepositoryFormat.CONDA)
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         processor = self.factory.makeProcessor()
         dases = [
             self.factory.makeDistroArchSeries(
-                distroseries=ds, architecturetag=processor.name)
-            for ds in dses]
+                distroseries=ds, architecturetag=processor.name
+            )
+            for ds in dses
+        ]
         ci_build = self.factory.makeCIBuild(distro_arch_series=dases[0])
         bpn = self.factory.makeBinaryPackageName(name="foo")
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename=bpn, version="1.0", ci_build=ci_build,
+            binarypackagename=bpn,
+            version="1.0",
+            ci_build=ci_build,
             binpackageformat=BinaryPackageFormat.CONDA_V1,
-            user_defined_fields=[("subdir", "linux-64")])
+            user_defined_fields=[("subdir", "linux-64")],
+        )
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo-1.0.tar.bz2"),
-            filetype=BinaryPackageFileType.CONDA_V1)
+                filename="foo-1.0.tar.bz2"
+            ),
+            filetype=BinaryPackageFileType.CONDA_V1,
+        )
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            binarypackagerelease=bpr, archive=pool.archive,
-            distroarchseries=dases[0], pocket=PackagePublishingPocket.RELEASE,
-            architecturespecific=False, channel="edge")
+            binarypackagerelease=bpr,
+            archive=pool.archive,
+            distroarchseries=dases[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            architecturespecific=False,
+            channel="edge",
+        )
         bpphs = [bpph]
         bpphs.append(
             getUtility(IPublishingSet).copyBinaries(
-                pool.archive, dses[1], PackagePublishingPocket.RELEASE, [bpph],
-                channel="edge")[0])
+                pool.archive,
+                dses[1],
+                PackagePublishingPocket.RELEASE,
+                [bpph],
+                channel="edge",
+            )[0]
+        )
         transaction.commit()
         pool.addFile(None, bpph.pool_name, bpph.pool_version, bpf)
         path = pool.rootpath / "linux-64" / "foo-1.0.tar.bz2"
@@ -570,10 +719,12 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "soss.source_url": [
-                    ci_build.git_repository.getCodebrowseUrl()],
+                    ci_build.git_repository.getCodebrowseUrl()
+                ],
                 "soss.commit_id": [ci_build.commit_sha1],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(bpph.pool_name, bpph.pool_version, bpf, bpphs)
         self.assertEqual(
             {
@@ -581,44 +732,65 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "launchpad.channel": list(
-                    sorted("%s:edge" % ds.name for ds in dses)),
+                    sorted("%s:edge" % ds.name for ds in dses)
+                ),
                 "soss.source_url": [
-                    ci_build.git_repository.getCodebrowseUrl()],
+                    ci_build.git_repository.getCodebrowseUrl()
+                ],
                 "soss.commit_id": [ci_build.commit_sha1],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
 
     def test_updateProperties_conda_v2(self):
         pool = self.makePool(ArchiveRepositoryFormat.CONDA)
         dses = [
             self.factory.makeDistroSeries(
-                distribution=pool.archive.distribution)
-            for _ in range(2)]
+                distribution=pool.archive.distribution
+            )
+            for _ in range(2)
+        ]
         processor = self.factory.makeProcessor()
         dases = [
             self.factory.makeDistroArchSeries(
-                distroseries=ds, architecturetag=processor.name)
-            for ds in dses]
+                distroseries=ds, architecturetag=processor.name
+            )
+            for ds in dses
+        ]
         ci_build = self.factory.makeCIBuild(distro_arch_series=dases[0])
         bpn = self.factory.makeBinaryPackageName(name="foo")
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename=bpn, version="1.0", ci_build=ci_build,
+            binarypackagename=bpn,
+            version="1.0",
+            ci_build=ci_build,
             binpackageformat=BinaryPackageFormat.CONDA_V2,
-            user_defined_fields=[("subdir", "noarch")])
+            user_defined_fields=[("subdir", "noarch")],
+        )
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo-1.0.conda"),
-            filetype=BinaryPackageFileType.CONDA_V2)
+                filename="foo-1.0.conda"
+            ),
+            filetype=BinaryPackageFileType.CONDA_V2,
+        )
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            binarypackagerelease=bpr, archive=pool.archive,
-            distroarchseries=dases[0], pocket=PackagePublishingPocket.RELEASE,
-            architecturespecific=True, channel="edge")
+            binarypackagerelease=bpr,
+            archive=pool.archive,
+            distroarchseries=dases[0],
+            pocket=PackagePublishingPocket.RELEASE,
+            architecturespecific=True,
+            channel="edge",
+        )
         bpphs = [bpph]
         bpphs.append(
             getUtility(IPublishingSet).copyBinaries(
-                pool.archive, dses[1], PackagePublishingPocket.RELEASE, [bpph],
-                channel="edge")[0])
+                pool.archive,
+                dses[1],
+                PackagePublishingPocket.RELEASE,
+                [bpph],
+                channel="edge",
+            )[0]
+        )
         transaction.commit()
         pool.addFile(None, bpph.pool_name, bpph.pool_version, bpf)
         path = pool.rootpath / "noarch" / "foo-1.0.conda"
@@ -630,10 +802,12 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "soss.source_url": [
-                    ci_build.git_repository.getCodebrowseUrl()],
+                    ci_build.git_repository.getCodebrowseUrl()
+                ],
                 "soss.commit_id": [ci_build.commit_sha1],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(bpph.pool_name, bpph.pool_version, bpf, bpphs)
         self.assertEqual(
             {
@@ -641,39 +815,56 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "launchpad.channel": list(
-                    sorted("%s:edge" % ds.name for ds in dses)),
+                    sorted("%s:edge" % ds.name for ds in dses)
+                ),
                 "soss.source_url": [
-                    ci_build.git_repository.getCodebrowseUrl()],
+                    ci_build.git_repository.getCodebrowseUrl()
+                ],
                 "soss.commit_id": [ci_build.commit_sha1],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
 
     def test_updateProperties_preserves_externally_set_properties(self):
         # Artifactory sets some properties by itself as part of scanning
         # packages.  We leave those untouched.
         pool = self.makePool()
         ds = self.factory.makeDistroSeries(
-            distribution=pool.archive.distribution)
+            distribution=pool.archive.distribution
+        )
         das = self.factory.makeDistroArchSeries(distroseries=ds)
         spr = self.factory.makeSourcePackageRelease(
-            archive=pool.archive, sourcepackagename="foo", version="1.0")
+            archive=pool.archive, sourcepackagename="foo", version="1.0"
+        )
         bpb = self.factory.makeBinaryPackageBuild(
-            archive=pool.archive, source_package_release=spr,
-            distroarchseries=das, pocket=PackagePublishingPocket.RELEASE)
+            archive=pool.archive,
+            source_package_release=spr,
+            distroarchseries=das,
+            pocket=PackagePublishingPocket.RELEASE,
+        )
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo", build=bpb, component="main",
-            architecturespecific=False)
+            binarypackagename="foo",
+            build=bpb,
+            component="main",
+            architecturespecific=False,
+        )
         bpf = self.factory.makeBinaryPackageFile(
             binarypackagerelease=bpr,
             library_file=self.factory.makeLibraryFileAlias(
-                filename="foo_1.0_all.deb"),
-            filetype=BinaryPackageFileType.DEB)
+                filename="foo_1.0_all.deb"
+            ),
+            filetype=BinaryPackageFileType.DEB,
+        )
         bpphs = getUtility(IPublishingSet).publishBinaries(
-            pool.archive, ds, PackagePublishingPocket.RELEASE,
-            {bpr: (bpr.component, bpr.section, bpr.priority, None)})
+            pool.archive,
+            ds,
+            PackagePublishingPocket.RELEASE,
+            {bpr: (bpr.component, bpr.section, bpr.priority, None)},
+        )
         transaction.commit()
         pool.addFile(
-            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf)
+            None, bpr.sourcepackagename, bpr.sourcepackageversion, bpf
+        )
         path = pool.rootpath / "f" / "foo" / "foo_1.0_all.deb"
         path.set_properties({"deb.version": ["1.0"]}, recursive=False)
         self.assertEqual(
@@ -682,10 +873,12 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "launchpad.source-name": ["foo"],
                 "launchpad.source-version": ["1.0"],
                 "deb.version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
         pool.updateProperties(
-            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs)
+            bpr.sourcepackagename, bpr.sourcepackageversion, bpf, bpphs
+        )
         self.assertEqual(
             {
                 "launchpad.release-id": ["binary:%d" % bpr.id],
@@ -695,5 +888,6 @@ class TestArtifactoryPoolFromLibrarian(TestCaseWithFactory):
                 "deb.component": ["main"],
                 "deb.architecture": [das.architecturetag],
                 "deb.version": ["1.0"],
-                },
-            path.properties)
+            },
+            path.properties,
+        )
diff --git a/lib/lp/archivepublisher/tests/test_config.py b/lib/lp/archivepublisher/tests/test_config.py
index 75b1431..dd85b75 100644
--- a/lib/lp/archivepublisher/tests/test_config.py
+++ b/lib/lp/archivepublisher/tests/test_config.py
@@ -20,7 +20,7 @@ from lp.soyuz.enums import (
     ArchivePublishingMethod,
     ArchivePurpose,
     ArchiveRepositoryFormat,
-    )
+)
 from lp.soyuz.interfaces.archive import IArchiveSet
 from lp.testing import TestCaseWithFactory
 from lp.testing.layers import ZopelessDatabaseLayer
@@ -32,7 +32,7 @@ class TestGetPubConfig(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.ubuntutest = getUtility(IDistributionSet)['ubuntutest']
+        self.ubuntutest = getUtility(IDistributionSet)["ubuntutest"]
         self.root = "/var/tmp/archive"
 
     def test_getPubConfig_returns_None_if_no_publisherconfig_found(self):
@@ -48,11 +48,13 @@ class TestGetPubConfig(TestCaseWithFactory):
         self.assertEqual(archiveroot + "/pool", primary_config.poolroot)
         self.assertEqual(archiveroot + "/dists", primary_config.distsroot)
         self.assertEqual(
-            archiveroot + "-overrides", primary_config.overrideroot)
+            archiveroot + "-overrides", primary_config.overrideroot
+        )
         self.assertEqual(archiveroot + "-cache", primary_config.cacheroot)
         self.assertEqual(archiveroot + "-misc", primary_config.miscroot)
         self.assertEqual(
-            self.root + "/ubuntutest-temp", primary_config.temproot)
+            self.root + "/ubuntutest-temp", primary_config.temproot
+        )
         self.assertEqual(archiveroot + "-signing", primary_config.signingroot)
         self.assertFalse(primary_config.signingautokey)
         self.assertIs(None, primary_config.metaroot)
@@ -72,7 +74,8 @@ class TestGetPubConfig(TestCaseWithFactory):
         # components.  This prevents non-partner being published in the
         # partner archive.
         partner_archive = getUtility(IArchiveSet).getByDistroAndName(
-            self.ubuntutest, "partner")
+            self.ubuntutest, "partner"
+        )
         partner_config = getPubConfig(partner_archive)
         self.root = "/var/tmp/archive"
         self.assertEqual(self.root, partner_config.distroroot)
@@ -84,7 +87,8 @@ class TestGetPubConfig(TestCaseWithFactory):
         self.assertIsNone(partner_config.cacheroot)
         self.assertIsNone(partner_config.miscroot)
         self.assertEqual(
-            self.root + "/ubuntutest-temp", partner_config.temproot)
+            self.root + "/ubuntutest-temp", partner_config.temproot
+        )
         self.assertEqual(archiveroot + "-signing", partner_config.signingroot)
         self.assertFalse(partner_config.signingautokey)
         self.assertIs(None, partner_config.metaroot)
@@ -95,16 +99,18 @@ class TestGetPubConfig(TestCaseWithFactory):
         # archiveroot is of the form
         # DISTROROOT/DISTRONAME-ARCHIVENAME/DISTRONAME.
         copy_archive = getUtility(IArchiveSet).new(
-            purpose=ArchivePurpose.COPY, owner=self.ubuntutest.owner,
-            distribution=self.ubuntutest, name="rebuildtest99")
+            purpose=ArchivePurpose.COPY,
+            owner=self.ubuntutest.owner,
+            distribution=self.ubuntutest,
+            name="rebuildtest99",
+        )
         copy_config = getPubConfig(copy_archive)
         self.assertEqual(self.root, copy_config.distroroot)
         archiveroot = self.root + "/ubuntutest-rebuildtest99/ubuntutest"
         self.assertEqual(archiveroot, copy_config.archiveroot)
         self.assertEqual(archiveroot + "/pool", copy_config.poolroot)
         self.assertEqual(archiveroot + "/dists", copy_config.distsroot)
-        self.assertEqual(
-            archiveroot + "-overrides", copy_config.overrideroot)
+        self.assertEqual(archiveroot + "-overrides", copy_config.overrideroot)
         self.assertEqual(archiveroot + "-cache", copy_config.cacheroot)
         self.assertEqual(archiveroot + "-misc", copy_config.miscroot)
         self.assertEqual(archiveroot + "-temp", copy_config.temproot)
@@ -117,19 +123,22 @@ class TestGetPubConfig(TestCaseWithFactory):
         primary_config = getPubConfig(self.ubuntutest.main_archive)
         disk_pool = primary_config.getDiskPool(BufferLogger())
         self.assertEqual(
-            Path(self.root + "/ubuntutest/pool/"), disk_pool.rootpath)
+            Path(self.root + "/ubuntutest/pool/"), disk_pool.rootpath
+        )
         self.assertEqual(
-            Path(self.root + "/ubuntutest-temp/"), disk_pool.temppath)
+            Path(self.root + "/ubuntutest-temp/"), disk_pool.temppath
+        )
         self.assertEqual(logging.INFO, disk_pool.logger.level)
 
     def test_getDiskPool_pool_root_override(self):
         primary_config = getPubConfig(self.ubuntutest.main_archive)
         disk_pool = primary_config.getDiskPool(
-            BufferLogger(), pool_root_override="/path/to/pool")
-        self.assertEqual(
-            Path("/path/to/pool/"), disk_pool.rootpath)
+            BufferLogger(), pool_root_override="/path/to/pool"
+        )
+        self.assertEqual(Path("/path/to/pool/"), disk_pool.rootpath)
         self.assertEqual(
-            Path(self.root + "/ubuntutest-temp/"), disk_pool.temppath)
+            Path(self.root + "/ubuntutest-temp/"), disk_pool.temppath
+        )
         self.assertEqual(logging.INFO, disk_pool.logger.level)
 
 
@@ -139,15 +148,17 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.ubuntutest = getUtility(IDistributionSet)['ubuntutest']
+        self.ubuntutest = getUtility(IDistributionSet)["ubuntutest"]
         self.ppa = self.factory.makeArchive(
-            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA)
+            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA
+        )
         self.ppa_config = getPubConfig(self.ppa)
 
     def test_ppa_root_matches_config(self):
         # The base publication location is set by Launchpad configuration.
         self.assertEqual(
-            config.personalpackagearchive.root, self.ppa_config.distroroot)
+            config.personalpackagearchive.root, self.ppa_config.distroroot
+        )
 
     def test_ppa_config(self):
         # PPA configuration matches the PPA repository topology:
@@ -156,7 +167,10 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
         # None in order that they won't get created.
         self.assertEqual("/var/tmp/ppa.test/", self.ppa_config.distroroot)
         archiveroot = "%s%s/%s/ubuntutest" % (
-            self.ppa_config.distroroot, self.ppa.owner.name, self.ppa.name)
+            self.ppa_config.distroroot,
+            self.ppa.owner.name,
+            self.ppa.name,
+        )
         self.assertEqual(archiveroot, self.ppa_config.archiveroot)
         self.assertEqual(archiveroot + "/pool", self.ppa_config.poolroot)
         self.assertEqual(archiveroot + "/dists", self.ppa_config.distsroot)
@@ -164,9 +178,12 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
         self.assertIsNone(self.ppa_config.cacheroot)
         self.assertIsNone(self.ppa_config.miscroot)
         self.assertEqual(
-            "/var/tmp/archive/ubuntutest-temp", self.ppa_config.temproot)
+            "/var/tmp/archive/ubuntutest-temp", self.ppa_config.temproot
+        )
         signingroot = "/var/tmp/ppa-signing-keys.test/signing/%s/%s" % (
-            self.ppa.owner.name, self.ppa.name)
+            self.ppa.owner.name,
+            self.ppa.name,
+        )
         self.assertEqual(signingroot, self.ppa_config.signingroot)
         self.assertTrue(self.ppa_config.signingautokey)
         self.assertIs(None, self.ppa_config.metaroot)
@@ -176,19 +193,27 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
         # Private PPAs are published to a different location.
         self.assertNotEqual(
             config.personalpackagearchive.private_root,
-            config.personalpackagearchive.root)
+            config.personalpackagearchive.root,
+        )
 
     def test_private_ppa_config(self):
         # Private PPA configuration uses the separate base location.
         p3a = self.factory.makeArchive(
-            owner=self.ppa.owner, name="myprivateppa",
-            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA)
+            owner=self.ppa.owner,
+            name="myprivateppa",
+            distribution=self.ubuntutest,
+            purpose=ArchivePurpose.PPA,
+        )
         p3a.private = True
         p3a_config = getPubConfig(p3a)
         self.assertEqual(
-            config.personalpackagearchive.private_root, p3a_config.distroroot)
+            config.personalpackagearchive.private_root, p3a_config.distroroot
+        )
         archiveroot = "%s/%s/%s/ubuntutest" % (
-            p3a_config.distroroot, p3a.owner.name, p3a.name)
+            p3a_config.distroroot,
+            p3a.owner.name,
+            p3a.name,
+        )
         self.assertEqual(archiveroot, p3a_config.archiveroot)
         self.assertEqual(archiveroot + "/pool", p3a_config.poolroot)
         self.assertEqual(archiveroot + "/dists", p3a_config.distsroot)
@@ -196,11 +221,14 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
         self.assertIsNone(p3a_config.cacheroot)
         self.assertIsNone(p3a_config.miscroot)
         self.assertEqual(
-            "/var/tmp/archive/ubuntutest-temp", p3a_config.temproot)
+            "/var/tmp/archive/ubuntutest-temp", p3a_config.temproot
+        )
         # It's OK for the signing keys to be in the same location as for
         # public PPAs, as the owner/name namespace is shared.
         signingroot = "/var/tmp/ppa-signing-keys.test/signing/%s/%s" % (
-            p3a.owner.name, p3a.name)
+            p3a.owner.name,
+            p3a.name,
+        )
         self.assertEqual(signingroot, p3a_config.signingroot)
         self.assertTrue(self.ppa_config.signingautokey)
         self.assertIs(None, p3a_config.metaroot)
@@ -212,12 +240,14 @@ class TestGetPubConfigPPA(TestCaseWithFactory):
         # Ubuntu PPAs have a metaroot.
         ubuntu_ppa = self.factory.makeArchive(purpose=ArchivePurpose.PPA)
         test_ppa = self.factory.makeArchive(
-            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA)
+            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA
+        )
 
         self.assertEqual(
-            "/var/tmp/ppa.test/%s/meta/%s" % (
-                ubuntu_ppa.owner.name, ubuntu_ppa.name),
-            getPubConfig(ubuntu_ppa).metaroot)
+            "/var/tmp/ppa.test/%s/meta/%s"
+            % (ubuntu_ppa.owner.name, ubuntu_ppa.name),
+            getPubConfig(ubuntu_ppa).metaroot,
+        )
         self.assertIs(None, getPubConfig(test_ppa).metaroot)
 
 
@@ -227,9 +257,10 @@ class TestGetPubConfigPPACompatUefi(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.ubuntutest = getUtility(IDistributionSet)['ubuntutest']
+        self.ubuntutest = getUtility(IDistributionSet)["ubuntutest"]
         self.ppa = self.factory.makeArchive(
-            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA)
+            distribution=self.ubuntutest, purpose=ArchivePurpose.PPA
+        )
         signingroot = "/var/tmp/ppa-signing-keys.test/uefi"
         self.addCleanup(os.rmdir, signingroot)
         os.makedirs(signingroot)
@@ -237,7 +268,9 @@ class TestGetPubConfigPPACompatUefi(TestCaseWithFactory):
 
     def test_ppa_uefi_config(self):
         signingroot = "/var/tmp/ppa-signing-keys.test/uefi/%s/%s" % (
-            self.ppa.owner.name, self.ppa.name)
+            self.ppa.owner.name,
+            self.ppa.name,
+        )
         self.assertEqual(signingroot, self.ppa_config.signingroot)
 
 
@@ -252,7 +285,8 @@ class TestGetPubConfigPPARepositoryFormatPython(TestCaseWithFactory):
         self.ppa = self.factory.makeArchive(
             purpose=ArchivePurpose.PPA,
             publishing_method=ArchivePublishingMethod.ARTIFACTORY,
-            repository_format=ArchiveRepositoryFormat.PYTHON)
+            repository_format=ArchiveRepositoryFormat.PYTHON,
+        )
         self.ppa_config = getPubConfig(self.ppa)
 
     def test_config(self):
@@ -269,5 +303,6 @@ class TestGetPubConfigPPARepositoryFormatPython(TestCaseWithFactory):
         self.assertIsNone(self.ppa_config.miscroot)
         self.assertEqual(
             "/var/tmp/archive/%s-temp" % self.ppa.distribution.name,
-            self.ppa_config.temproot)
+            self.ppa_config.temproot,
+        )
         self.assertIsNone(self.ppa_config.metaroot)
diff --git a/lib/lp/archivepublisher/tests/test_copy_signingkeys.py b/lib/lp/archivepublisher/tests/test_copy_signingkeys.py
index 3eb0bc6..a31e85e 100644
--- a/lib/lp/archivepublisher/tests/test_copy_signingkeys.py
+++ b/lib/lp/archivepublisher/tests/test_copy_signingkeys.py
@@ -3,12 +3,9 @@
 
 """Test cases for copying signing keys between archives."""
 
-from testtools.content import text_content
-from testtools.matchers import (
-    MatchesSetwise,
-    MatchesStructure,
-    )
 import transaction
+from testtools.content import text_content
+from testtools.matchers import MatchesSetwise, MatchesStructure
 
 from lp.archivepublisher.scripts.copy_signingkeys import CopySigningKeysScript
 from lp.services.config import config
@@ -35,8 +32,10 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
         try:
             with CapturedOutput() as captured:
                 script = CopySigningKeysScript(
-                    "copy-signingkeys", dbuser=config.archivepublisher.dbuser,
-                    test_args=test_args)
+                    "copy-signingkeys",
+                    dbuser=config.archivepublisher.dbuser,
+                    test_args=test_args,
+                )
                 script.processOptions()
         except SystemExit:
             exited = True
@@ -50,11 +49,12 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
             self.addDetail("stderr", text_content(stderr))
         if expect_exit:
             if not exited:
-                raise AssertionError('Script unexpectedly exited successfully')
+                raise AssertionError("Script unexpectedly exited successfully")
         else:
             if exited:
                 raise AssertionError(
-                    'Script unexpectedly exited unsuccessfully')
+                    "Script unexpectedly exited unsuccessfully"
+                )
             self.assertEqual("", stderr)
             script.logger = BufferLogger()
             return script
@@ -63,7 +63,9 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
         return IStore(ArchiveSigningKey).find(
             ArchiveSigningKey,
             ArchiveSigningKey.archive_id.is_in(
-                archive.id for archive in archives))
+                archive.id for archive in archives
+            ),
+        )
 
     def test_getArchive(self):
         archives = [self.factory.makeArchive() for _ in range(2)]
@@ -87,17 +89,21 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
         archives = [self.factory.makeArchive() for _ in range(2)]
         self.assertRaisesWithContent(
             LaunchpadScriptFailure,
-            "Could not find series 'nonexistent' in %s." % (
-                archives[0].distribution.display_name),
+            "Could not find series 'nonexistent' in %s."
+            % (archives[0].distribution.display_name),
             self.makeScript,
-            test_args=["-s", "nonexistent"], archives=archives)
+            test_args=["-s", "nonexistent"],
+            archives=archives,
+        )
 
     def test_getSeries(self):
         archives = [self.factory.makeArchive() for _ in range(2)]
         distro_series = self.factory.makeDistroSeries(
-            distribution=archives[0].distribution)
+            distribution=archives[0].distribution
+        )
         script = self.makeScript(
-            test_args=["-s", distro_series.name], archives=archives)
+            test_args=["-s", distro_series.name], archives=archives
+        )
         self.assertEqual(distro_series, script.series)
 
     def test_wrong_number_of_arguments(self):
@@ -110,239 +116,355 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
         signing_keys = [
             self.factory.makeSigningKey(key_type=key_type)
             for key_type in (
-                SigningKeyType.UEFI, SigningKeyType.KMOD, SigningKeyType.OPAL)]
+                SigningKeyType.UEFI,
+                SigningKeyType.KMOD,
+                SigningKeyType.OPAL,
+            )
+        ]
         for signing_key in signing_keys[:2]:
             self.factory.makeArchiveSigningKey(
-                archive=archives[0], signing_key=signing_key)
+                archive=archives[0], signing_key=signing_key
+            )
         distro_series = self.factory.makeDistroSeries(
-            distribution=archives[0].distribution)
+            distribution=archives[0].distribution
+        )
         self.factory.makeArchiveSigningKey(
-            archive=archives[0], distro_series=distro_series,
-            signing_key=signing_keys[1])
+            archive=archives[0],
+            distro_series=distro_series,
+            signing_key=signing_keys[1],
+        )
         self.factory.makeArchiveSigningKey(
-            archive=archives[2], signing_key=signing_keys[2])
+            archive=archives[2], signing_key=signing_keys[2]
+        )
         script = self.makeScript(archives=archives[:2])
         script.main()
         expected_log = [
-            "INFO Copying UEFI signing key %s from %s / None to %s / None" % (
+            "INFO Copying UEFI signing key %s from %s / None to %s / None"
+            % (
                 signing_keys[0].fingerprint,
-                archives[0].reference, archives[1].reference),
-            "INFO Copying Kmod signing key %s from %s / None to %s / None" % (
+                archives[0].reference,
+                archives[1].reference,
+            ),
+            "INFO Copying Kmod signing key %s from %s / None to %s / None"
+            % (
                 signing_keys[1].fingerprint,
-                archives[0].reference, archives[1].reference),
+                archives[0].reference,
+                archives[1].reference,
+            ),
             "INFO No OPAL signing key for %s / None" % archives[0].reference,
             "INFO No SIPL signing key for %s / None" % archives[0].reference,
             "INFO No FIT signing key for %s / None" % archives[0].reference,
-            "INFO No OpenPGP signing key for %s / None" %
-                archives[0].reference,
-            "INFO No CV2 Kernel signing key for %s / None" %
-                archives[0].reference,
-            "INFO No Android Kernel signing key for %s / None" %
-            archives[0].reference,
-            ]
+            "INFO No OpenPGP signing key for %s / None"
+            % archives[0].reference,
+            "INFO No CV2 Kernel signing key for %s / None"
+            % archives[0].reference,
+            "INFO No Android Kernel signing key for %s / None"
+            % archives[0].reference,
+        ]
         self.assertEqual(
-            expected_log, script.logger.content.as_text().splitlines())
+            expected_log, script.logger.content.as_text().splitlines()
+        )
         self.assertThat(
             self.findKeys(archives),
             MatchesSetwise(
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=distro_series,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    archive=archives[0],
+                    earliest_distro_series=distro_series,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[2], earliest_distro_series=None,
-                    key_type=SigningKeyType.OPAL, signing_key=signing_keys[2]),
-                ))
+                    archive=archives[2],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.OPAL,
+                    signing_key=signing_keys[2],
+                ),
+            ),
+        )
 
     def test_copy_by_key_type(self):
         archives = [self.factory.makeArchive() for _ in range(3)]
         signing_keys = [
             self.factory.makeSigningKey(key_type=key_type)
-            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)]
+            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)
+        ]
         for signing_key in signing_keys:
             self.factory.makeArchiveSigningKey(
-                archive=archives[0], signing_key=signing_key)
+                archive=archives[0], signing_key=signing_key
+            )
         distro_series = self.factory.makeDistroSeries(
-            distribution=archives[0].distribution)
+            distribution=archives[0].distribution
+        )
         self.factory.makeArchiveSigningKey(
-            archive=archives[0], distro_series=distro_series,
-            signing_key=signing_keys[0])
+            archive=archives[0],
+            distro_series=distro_series,
+            signing_key=signing_keys[0],
+        )
         script = self.makeScript(
-            test_args=["--key-type", "UEFI"], archives=archives[:2])
+            test_args=["--key-type", "UEFI"], archives=archives[:2]
+        )
         script.main()
         expected_log = [
-            "INFO Copying UEFI signing key %s from %s / None to %s / None" % (
+            "INFO Copying UEFI signing key %s from %s / None to %s / None"
+            % (
                 signing_keys[0].fingerprint,
-                archives[0].reference, archives[1].reference),
-            ]
+                archives[0].reference,
+                archives[1].reference,
+            ),
+        ]
         self.assertEqual(
-            expected_log, script.logger.content.as_text().splitlines())
+            expected_log, script.logger.content.as_text().splitlines()
+        )
         self.assertThat(
             self.findKeys(archives),
             MatchesSetwise(
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=distro_series,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=distro_series,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
-                ))
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
+            ),
+        )
 
     def test_copy_by_series(self):
         distribution = self.factory.makeDistribution()
         archives = [
             self.factory.makeArchive(distribution=distribution)
-            for _ in range(3)]
+            for _ in range(3)
+        ]
         signing_keys = [
             self.factory.makeSigningKey(key_type=key_type)
-            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)]
+            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)
+        ]
         distro_serieses = [
             self.factory.makeDistroSeries(distribution=distribution)
-            for _ in range(2)]
+            for _ in range(2)
+        ]
         for signing_key in signing_keys:
             self.factory.makeArchiveSigningKey(
-                archive=archives[0], distro_series=distro_serieses[0],
-                signing_key=signing_key)
+                archive=archives[0],
+                distro_series=distro_serieses[0],
+                signing_key=signing_key,
+            )
         self.factory.makeArchiveSigningKey(
-            archive=archives[0], signing_key=signing_keys[0])
+            archive=archives[0], signing_key=signing_keys[0]
+        )
         self.factory.makeArchiveSigningKey(
-            archive=archives[0], distro_series=distro_serieses[1],
-            signing_key=signing_keys[0])
+            archive=archives[0],
+            distro_series=distro_serieses[1],
+            signing_key=signing_keys[0],
+        )
         script = self.makeScript(
-            test_args=["-s", distro_serieses[0].name], archives=archives[:2])
+            test_args=["-s", distro_serieses[0].name], archives=archives[:2]
+        )
         script.main()
         expected_log = [
-            "INFO Copying UEFI signing key %s from %s / %s to %s / %s" % (
+            "INFO Copying UEFI signing key %s from %s / %s to %s / %s"
+            % (
                 signing_keys[0].fingerprint,
-                archives[0].reference, distro_serieses[0].name,
-                archives[1].reference, distro_serieses[0].name),
-            "INFO Copying Kmod signing key %s from %s / %s to %s / %s" % (
+                archives[0].reference,
+                distro_serieses[0].name,
+                archives[1].reference,
+                distro_serieses[0].name,
+            ),
+            "INFO Copying Kmod signing key %s from %s / %s to %s / %s"
+            % (
                 signing_keys[1].fingerprint,
-                archives[0].reference, distro_serieses[0].name,
-                archives[1].reference, distro_serieses[0].name),
-            "INFO No OPAL signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            "INFO No SIPL signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            "INFO No FIT signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            "INFO No OpenPGP signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            "INFO No CV2 Kernel signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            "INFO No Android Kernel signing key for %s / %s" % (
-                archives[0].reference, distro_serieses[0].name),
-            ]
+                archives[0].reference,
+                distro_serieses[0].name,
+                archives[1].reference,
+                distro_serieses[0].name,
+            ),
+            "INFO No OPAL signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+            "INFO No SIPL signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+            "INFO No FIT signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+            "INFO No OpenPGP signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+            "INFO No CV2 Kernel signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+            "INFO No Android Kernel signing key for %s / %s"
+            % (archives[0].reference, distro_serieses[0].name),
+        ]
         self.assertEqual(
-            expected_log, script.logger.content.as_text().splitlines())
+            expected_log, script.logger.content.as_text().splitlines()
+        )
         self.assertThat(
             self.findKeys(archives),
             MatchesSetwise(
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
                     archive=archives[0],
                     earliest_distro_series=distro_serieses[0],
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
                     archive=archives[0],
                     earliest_distro_series=distro_serieses[0],
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
                     archive=archives[0],
                     earliest_distro_series=distro_serieses[1],
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
                     archive=archives[1],
                     earliest_distro_series=distro_serieses[0],
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
                     archive=archives[1],
                     earliest_distro_series=distro_serieses[0],
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
-                ))
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
+            ),
+        )
 
     def test_copy_refuses_overwrite(self):
         archives = [self.factory.makeArchive() for _ in range(2)]
         signing_keys = [
             self.factory.makeSigningKey(key_type=SigningKeyType.UEFI)
-            for _ in range(2)]
+            for _ in range(2)
+        ]
         for archive, signing_key in zip(archives, signing_keys):
             self.factory.makeArchiveSigningKey(
-                archive=archive, signing_key=signing_key)
+                archive=archive, signing_key=signing_key
+            )
         script = self.makeScript(
-            test_args=["--key-type", "UEFI"], archives=archives[:2])
+            test_args=["--key-type", "UEFI"], archives=archives[:2]
+        )
         script.main()
         expected_log = [
-            "WARNING UEFI signing key for %s / None already exists" %
-                archives[1].reference,
-            ]
+            "WARNING UEFI signing key for %s / None already exists"
+            % archives[1].reference,
+        ]
         self.assertEqual(
-            expected_log, script.logger.content.as_text().splitlines())
+            expected_log, script.logger.content.as_text().splitlines()
+        )
         self.assertThat(
             self.findKeys(archives),
             MatchesSetwise(
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[1]),
-            ))
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[1],
+                ),
+            ),
+        )
 
     def test_copy_forced_overwrite(self):
         archives = [self.factory.makeArchive() for _ in range(2)]
         signing_keys = [
             self.factory.makeSigningKey(key_type=SigningKeyType.UEFI)
-            for _ in range(2)]
+            for _ in range(2)
+        ]
         for archive, signing_key in zip(archives, signing_keys):
             self.factory.makeArchiveSigningKey(
-                archive=archive, signing_key=signing_key)
+                archive=archive, signing_key=signing_key
+            )
         script = self.makeScript(
             test_args=["--key-type", "UEFI", "--overwrite"],
-            archives=archives[:2])
+            archives=archives[:2],
+        )
         script.main()
 
         expected_log = [
-            "WARNING UEFI signing key for %s / None being overwritten" % (
-                archives[1].reference),
-            "INFO Copying UEFI signing key %s from %s / %s to %s / %s" % (
+            "WARNING UEFI signing key for %s / None being overwritten"
+            % (archives[1].reference),
+            "INFO Copying UEFI signing key %s from %s / %s to %s / %s"
+            % (
                 signing_keys[0].fingerprint,
-                archives[0].reference, None,
-                archives[1].reference, None),
-            ]
+                archives[0].reference,
+                None,
+                archives[1].reference,
+                None,
+            ),
+        ]
         self.assertEqual(
-            expected_log, script.logger.content.as_text().splitlines())
+            expected_log, script.logger.content.as_text().splitlines()
+        )
         self.assertThat(
             self.findKeys(archives),
             MatchesSetwise(
                 # First archive keeps its signing keys.
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 # Second archive uses the same signing_key from first archive.
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
-                ))
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
+            ),
+        )
 
     def runScript(self, args=None):
         transaction.commit()
@@ -358,10 +480,12 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
         archives = [self.factory.makeArchive() for _ in range(2)]
         signing_keys = [
             self.factory.makeSigningKey(key_type=key_type)
-            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)]
+            for key_type in (SigningKeyType.UEFI, SigningKeyType.KMOD)
+        ]
         for signing_key in signing_keys[:2]:
             self.factory.makeArchiveSigningKey(
-                archive=archives[0], signing_key=signing_key)
+                archive=archives[0], signing_key=signing_key
+            )
 
         self.runScript(args=[archive.reference for archive in archives])
 
@@ -369,15 +493,28 @@ class TestCopySigningKeysScript(TestCaseWithFactory):
             self.findKeys(archives),
             MatchesSetwise(
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[0], earliest_distro_series=None,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
+                    archive=archives[0],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.UEFI, signing_key=signing_keys[0]),
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.UEFI,
+                    signing_key=signing_keys[0],
+                ),
                 MatchesStructure.byEquality(
-                    archive=archives[1], earliest_distro_series=None,
-                    key_type=SigningKeyType.KMOD, signing_key=signing_keys[1]),
-                ))
+                    archive=archives[1],
+                    earliest_distro_series=None,
+                    key_type=SigningKeyType.KMOD,
+                    signing_key=signing_keys[1],
+                ),
+            ),
+        )
diff --git a/lib/lp/archivepublisher/tests/test_customupload.py b/lib/lp/archivepublisher/tests/test_customupload.py
index f62feaa..f077874 100644
--- a/lib/lp/archivepublisher/tests/test_customupload.py
+++ b/lib/lp/archivepublisher/tests/test_customupload.py
@@ -12,12 +12,7 @@ import unittest
 
 from fixtures import MonkeyPatch
 from testtools.deferredruntest import AsynchronousDeferredRunTest
-from testtools.matchers import (
-    Equals,
-    MatchesDict,
-    Not,
-    PathExists,
-    )
+from testtools.matchers import Equals, MatchesDict, Not, PathExists
 from twisted.internet import defer
 from zope.component import getUtility
 
@@ -27,19 +22,16 @@ from lp.archivepublisher.customupload import (
     CustomUploadTarballBadFile,
     CustomUploadTarballBadSymLink,
     CustomUploadTarballInvalidFileType,
-    )
+)
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
     IArchiveGPGSigningKey,
-    )
+)
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
 from lp.services.gpg.interfaces import IGPGHandler
 from lp.services.osutils import write_file
 from lp.soyuz.enums import ArchivePurpose
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCase, TestCaseWithFactory
 from lp.testing.fakemethod import FakeMethod
 from lp.testing.gpgkeys import gpgkeysdir
 from lp.testing.keyserver import InProcessKeyServerFixture
@@ -47,16 +39,14 @@ from lp.testing.layers import LaunchpadZopelessLayer
 
 
 class TestCustomUpload(unittest.TestCase):
-
     def setUp(self):
-        self.test_dir = tempfile.mkdtemp(prefix='archive_root_')
+        self.test_dir = tempfile.mkdtemp(prefix="archive_root_")
 
     def tearDown(self):
         shutil.rmtree(self.test_dir)
 
     def assertEntries(self, entries):
-        self.assertEqual(
-            entries, sorted(os.listdir(self.test_dir)))
+        self.assertEqual(entries, sorted(os.listdir(self.test_dir)))
 
     def testFixCurrentSymlink(self):
         """Test `CustomUpload.fixCurrentSymlink` behaviour.
@@ -74,32 +64,33 @@ class TestCustomUpload(unittest.TestCase):
         custom_processor.targetdir = self.test_dir
 
         # Let's create 4 entries named as valid versions.
-        os.mkdir(os.path.join(self.test_dir, '1.0'))
-        os.mkdir(os.path.join(self.test_dir, '1.1'))
-        os.mkdir(os.path.join(self.test_dir, '1.2'))
-        os.mkdir(os.path.join(self.test_dir, '1.3'))
-        self.assertEntries(['1.0', '1.1', '1.2', '1.3'])
+        os.mkdir(os.path.join(self.test_dir, "1.0"))
+        os.mkdir(os.path.join(self.test_dir, "1.1"))
+        os.mkdir(os.path.join(self.test_dir, "1.2"))
+        os.mkdir(os.path.join(self.test_dir, "1.3"))
+        self.assertEntries(["1.0", "1.1", "1.2", "1.3"])
 
         # `fixCurrentSymlink` will keep only the latest 3 and create a
         # 'current' symbolic link to the highest one.
         custom_processor.fixCurrentSymlink()
-        self.assertEntries(['1.1', '1.2', '1.3', 'current'])
+        self.assertEntries(["1.1", "1.2", "1.3", "current"])
         self.assertEqual(
-            '1.3', os.readlink(os.path.join(self.test_dir, 'current')))
+            "1.3", os.readlink(os.path.join(self.test_dir, "current"))
+        )
 
         # When there is an invalid version present in the directory it is
         # ignored, since it was probably put there manually. The symbolic
         # link still points to the latest version.
-        os.mkdir(os.path.join(self.test_dir, '1.4'))
-        os.mkdir(os.path.join(self.test_dir, 'alpha-5'))
+        os.mkdir(os.path.join(self.test_dir, "1.4"))
+        os.mkdir(os.path.join(self.test_dir, "alpha-5"))
         custom_processor.fixCurrentSymlink()
-        self.assertEntries(['1.2', '1.3', '1.4', 'alpha-5', 'current'])
+        self.assertEntries(["1.2", "1.3", "1.4", "alpha-5", "current"])
         self.assertEqual(
-            '1.4', os.readlink(os.path.join(self.test_dir, 'current')))
+            "1.4", os.readlink(os.path.join(self.test_dir, "current"))
+        )
 
 
 class TestTarfileVerification(TestCase):
-
     def setUp(self):
         TestCase.setUp(self)
         self.tarfile_path = "/tmp/_verify_extract"
@@ -111,7 +102,7 @@ class TestTarfileVerification(TestCase):
     def createTarfile(self):
         self.tar_fileobj = io.BytesIO()
         tar_file = tarfile.open(name=None, mode="w", fileobj=self.tar_fileobj)
-        root_info = tarfile.TarInfo(name='./')
+        root_info = tarfile.TarInfo(name="./")
         root_info.type = tarfile.DIRTYPE
         tar_file.addfile(root_info)
         # Ordering matters here, addCleanup pushes onto a stack which is
@@ -141,9 +132,8 @@ class TestTarfileVerification(TestCase):
 
     def assertFails(self, exception, tar_file):
         self.assertRaises(
-            exception,
-            self.custom_processor.verifyBeforeExtracting,
-            tar_file)
+            exception, self.custom_processor.verifyBeforeExtracting, tar_file
+        )
 
     def assertPasses(self, tar_file):
         result = self.custom_processor.verifyBeforeExtracting(tar_file)
@@ -194,10 +184,10 @@ class TestTarfileVerification(TestCase):
         self.assertPasses(tar_file)
 
     def testRelativeSymlinkTargetInsideDirectoryDoesntRaise(self):
-        tar_file = self.createTarfileWithFile(
-            tarfile.DIRTYPE, name="testdir")
+        tar_file = self.createTarfileWithFile(tarfile.DIRTYPE, name="testdir")
         info = self.createSymlinkInfo(
-            name="testdir/symlink", target="../dummy")
+            name="testdir/symlink", target="../dummy"
+        )
         tar_file.addfile(info)
         self.assertPasses(tar_file)
 
@@ -219,7 +209,8 @@ class TestTarfileVerification(TestCase):
             tar_file.close()
             self.assertRaises(
                 CustomUploadTarballInvalidFileType,
-                self.custom_processor.extract)
+                self.custom_processor.extract,
+            )
         finally:
             shutil.rmtree(self.tarfile_path)
 
@@ -234,14 +225,15 @@ class TestSigning(TestCaseWithFactory, RunPartsMixin):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
 
     def test_sign_without_signing_key(self):
-        filename = os.path.join(
-            getPubConfig(self.archive).archiveroot, "file")
+        filename = os.path.join(getPubConfig(self.archive).archiveroot, "file")
         self.assertIsNone(self.archive.signing_key)
         custom_processor = CustomUpload()
         custom_processor.sign(self.archive, "suite", filename)
@@ -249,14 +241,14 @@ class TestSigning(TestCaseWithFactory, RunPartsMixin):
 
     @defer.inlineCallbacks
     def test_sign_with_signing_key(self):
-        filename = os.path.join(
-            getPubConfig(self.archive).archiveroot, "file")
+        filename = os.path.join(getPubConfig(self.archive).archiveroot, "file")
         write_file(filename, b"contents")
         self.assertIsNone(self.archive.signing_key)
         self.useFixture(InProcessKeyServerFixture()).start()
-        key_path = os.path.join(gpgkeysdir, 'ppa-sample@xxxxxxxxxxxxxxxxx')
+        key_path = os.path.join(gpgkeysdir, "ppa-sample@xxxxxxxxxxxxxxxxx")
         yield IArchiveGPGSigningKey(self.archive).setSigningKey(
-            key_path, async_keyserver=True)
+            key_path, async_keyserver=True
+        )
         self.assertIsNotNone(self.archive.signing_key)
         custom_processor = CustomUpload()
         custom_processor.sign(self.archive, "suite", filename)
@@ -264,9 +256,11 @@ class TestSigning(TestCaseWithFactory, RunPartsMixin):
             cleartext = cleartext_file.read()
             with open("%s.gpg" % filename, "rb") as signature_file:
                 signature = getUtility(IGPGHandler).getVerifiedSignature(
-                    cleartext, signature_file.read())
+                    cleartext, signature_file.read()
+                )
         self.assertEqual(
-            self.archive.signing_key.fingerprint, signature.fingerprint)
+            self.archive.signing_key.fingerprint, signature.fingerprint
+        )
 
     def test_sign_with_external_run_parts(self):
         self.enableRunParts(distribution_name=self.distro.name)
@@ -274,18 +268,26 @@ class TestSigning(TestCaseWithFactory, RunPartsMixin):
         filename = os.path.join(archiveroot, "file")
         write_file(filename, b"contents")
         self.assertIsNone(self.archive.signing_key)
-        run_parts_fixture = self.useFixture(MonkeyPatch(
-            "lp.archivepublisher.archivegpgsigningkey.run_parts",
-            FakeMethod()))
+        run_parts_fixture = self.useFixture(
+            MonkeyPatch(
+                "lp.archivepublisher.archivegpgsigningkey.run_parts",
+                FakeMethod(),
+            )
+        )
         custom_processor = CustomUpload()
         custom_processor.sign(self.archive, "suite", filename)
         args, kwargs = run_parts_fixture.new_value.calls[0]
         self.assertEqual((self.distro.name, "sign.d"), args)
-        self.assertThat(kwargs["env"], MatchesDict({
-            "ARCHIVEROOT": Equals(archiveroot),
-            "INPUT_PATH": Equals(filename),
-            "OUTPUT_PATH": Equals("%s.gpg" % filename),
-            "MODE": Equals("detached"),
-            "DISTRIBUTION": Equals(self.distro.name),
-            "SUITE": Equals("suite"),
-            }))
+        self.assertThat(
+            kwargs["env"],
+            MatchesDict(
+                {
+                    "ARCHIVEROOT": Equals(archiveroot),
+                    "INPUT_PATH": Equals(filename),
+                    "OUTPUT_PATH": Equals("%s.gpg" % filename),
+                    "MODE": Equals("detached"),
+                    "DISTRIBUTION": Equals(self.distro.name),
+                    "SUITE": Equals("suite"),
+                }
+            ),
+        )
diff --git a/lib/lp/archivepublisher/tests/test_ddtp_tarball.py b/lib/lp/archivepublisher/tests/test_ddtp_tarball.py
index 88e2d4f..5cdc793 100644
--- a/lib/lp/archivepublisher/tests/test_ddtp_tarball.py
+++ b/lib/lp/archivepublisher/tests/test_ddtp_tarball.py
@@ -30,12 +30,15 @@ class TestDdtpTarball(TestCaseWithFactory):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
         self.distroseries = self.factory.makeDistroSeries(
-            distribution=self.distro)
+            distribution=self.distro
+        )
         self.suite = self.distroseries.name
         # CustomUpload.installFiles requires a umask of 0o022.
         old_umask = os.umask(0o022)
@@ -43,7 +46,8 @@ class TestDdtpTarball(TestCaseWithFactory):
 
     def openArchive(self, version):
         self.path = os.path.join(
-            self.temp_dir, "translations_main_%s.tar.gz" % version)
+            self.temp_dir, "translations_main_%s.tar.gz" % version
+        )
         self.buffer = open(self.path, "wb")
         self.tarfile = LaunchpadWriteTarFile(self.buffer)
 
@@ -55,14 +59,18 @@ class TestDdtpTarball(TestCaseWithFactory):
     def getTranslationsPath(self, filename):
         pubconf = getPubConfig(self.archive)
         return os.path.join(
-            pubconf.archiveroot, "dists", self.suite, "main", "i18n", filename)
+            pubconf.archiveroot, "dists", self.suite, "main", "i18n", filename
+        )
 
     def test_basic(self):
         # Processing a simple correct tar file works.
         self.openArchive("20060728")
         names = (
-            "Translation-en", "Translation-en.xz",
-            "Translation-de", "Translation-de.xz")
+            "Translation-en",
+            "Translation-en.xz",
+            "Translation-de",
+            "Translation-de.xz",
+        )
         for name in names:
             self.tarfile.add_file(os.path.join("i18n", name), b"")
         self.process()
@@ -75,8 +83,9 @@ class TestDdtpTarball(TestCaseWithFactory):
         self.tarfile.add_file("i18n/Translation-de", b"")
         self.tarfile.add_directory("i18n/foo")
         self.process()
-        self.assertTrue(os.path.exists(
-            self.getTranslationsPath("Translation-de")))
+        self.assertTrue(
+            os.path.exists(self.getTranslationsPath("Translation-de"))
+        )
         self.assertFalse(os.path.exists(self.getTranslationsPath("foo")))
 
     def test_partial_update(self):
@@ -119,10 +128,15 @@ class TestDdtpTarball(TestCaseWithFactory):
         # file (for the PPA case), then colliding entries in DDTP
         # tarballs are not extracted.
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PPA)
-        self.useFixture(FeatureFixture({
-            "soyuz.ppa.separate_long_descriptions": "on",
-            }))
+            distribution=self.distro, purpose=ArchivePurpose.PPA
+        )
+        self.useFixture(
+            FeatureFixture(
+                {
+                    "soyuz.ppa.separate_long_descriptions": "on",
+                }
+            )
+        )
         self.distroseries.include_long_descriptions = False
         self.openArchive("20060728")
         en_names = ("Translation-en", "Translation-en.xz")
@@ -170,7 +184,9 @@ class TestDdtpTarball(TestCaseWithFactory):
             # uses the version for anything.
             ("translations", "main", "1.tar.gz"),
             DdtpTarballUpload.parsePath(
-                "/dir_with_underscores/translations_main_1.tar.gz"))
+                "/dir_with_underscores/translations_main_1.tar.gz"
+            ),
+        )
 
     def test_getSeriesKey_extracts_component(self):
         # getSeriesKey extracts the component from an upload's filename.
@@ -187,5 +203,6 @@ class TestDdtpTarball(TestCaseWithFactory):
     def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
         # getSeriesKey requires exactly three fields.
         self.assertIsNone(DdtpTarballUpload.getSeriesKey("package_1.0.tar.gz"))
-        self.assertIsNone(DdtpTarballUpload.getSeriesKey(
-            "one_two_three_four_5.tar.gz"))
+        self.assertIsNone(
+            DdtpTarballUpload.getSeriesKey("one_two_three_four_5.tar.gz")
+        )
diff --git a/lib/lp/archivepublisher/tests/test_deathrow.py b/lib/lp/archivepublisher/tests/test_deathrow.py
index e7c82f1..77bcf8b 100644
--- a/lib/lp/archivepublisher/tests/test_deathrow.py
+++ b/lib/lp/archivepublisher/tests/test_deathrow.py
@@ -3,9 +3,9 @@
 
 """Tests for deathrow class."""
 
-from pathlib import Path
 import shutil
 import tempfile
+from pathlib import Path
 
 from zope.component import getUtility
 
@@ -36,12 +36,13 @@ class TestDeathRow(TestCase):
         Create the temporary 'pool' and 'temp' directories and register
         a 'cleanup' to purge them after the test runs.
         """
-        pool_path = tempfile.mkdtemp('-pool')
-        temp_path = tempfile.mkdtemp('-pool-tmp')
+        pool_path = tempfile.mkdtemp("-pool")
+        temp_path = tempfile.mkdtemp("-pool-tmp")
 
         def clean_pool(pool_path, temp_path):
             shutil.rmtree(pool_path)
             shutil.rmtree(temp_path)
+
         self.addCleanup(clean_pool, pool_path, temp_path)
 
         logger = BufferLogger()
@@ -51,34 +52,26 @@ class TestDeathRow(TestCase):
     def getDiskPoolPath(self, pub, pub_file, diskpool):
         """Return the absolute path to a published file in the disk pool/."""
         return diskpool.pathFor(
-            pub.component.name,
-            pub.pool_name,
-            pub.pool_version,
-            pub_file)
+            pub.component.name, pub.pool_name, pub.pool_version, pub_file
+        )
 
     def assertIsFile(self, path: Path) -> None:
         """Assert the path exists and is a regular file."""
-        self.assertTrue(
-            path.exists(),
-            "File %s does not exist" % path.name)
+        self.assertTrue(path.exists(), "File %s does not exist" % path.name)
         self.assertFalse(
-            path.is_symlink(),
-            "File %s is a symbolic link" % path.name)
+            path.is_symlink(), "File %s is a symbolic link" % path.name
+        )
 
     def assertIsLink(self, path: Path) -> None:
         """Assert the path exists and is a symbolic link."""
+        self.assertTrue(path.exists(), "File %s does not exist" % path.name)
         self.assertTrue(
-            path.exists(),
-            "File %s does not exist" % path.name)
-        self.assertTrue(
-            path.is_symlink(),
-            "File %s is a not symbolic link" % path.name)
+            path.is_symlink(), "File %s is a not symbolic link" % path.name
+        )
 
     def assertDoesNotExist(self, path: Path) -> None:
         """Assert the path does not exit."""
-        self.assertFalse(
-            path.exists(),
-            "File %s exists" % path.name)
+        self.assertFalse(path.exists(), "File %s exists" % path.name)
 
     def test_MissingSymLinkInPool(self):
         # When a publication is promoted from 'universe' to 'main' and
@@ -89,16 +82,17 @@ class TestDeathRow(TestCase):
         # updated to match the disk status.
 
         # Set up a `SoyuzTestPublisher` and a `DeathRow` instance.
-        ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
-        hoary = ubuntu.getSeries('hoary')
+        ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
+        hoary = ubuntu.getSeries("hoary")
         stp = self.getTestPublisher(hoary)
         deathrow = self.getDeathRow(hoary.main_archive)
 
         # Create a source publication with a single file (DSC) in
         # 'universe' and promote it to 'main'.
-        source_universe = stp.getPubSource(component='universe')
+        source_universe = stp.getPubSource(component="universe")
         source_main = source_universe.changeOverride(
-            new_component=getUtility(IComponentSet)['main'])
+            new_component=getUtility(IComponentSet)["main"]
+        )
         test_publications = (source_universe, source_main)
 
         # Commit for exposing the just-created librarian files.
@@ -111,10 +105,12 @@ class TestDeathRow(TestCase):
             pub.publish(deathrow.diskpool, deathrow.logger)
         [main_dsc_path] = [
             self.getDiskPoolPath(source_main, pub_file, deathrow.diskpool)
-            for pub_file in source_main.files]
+            for pub_file in source_main.files
+        ]
         [universe_dsc_path] = [
             self.getDiskPoolPath(source_universe, pub_file, deathrow.diskpool)
-            for pub_file in source_universe.files]
+            for pub_file in source_universe.files
+        ]
         self.assertIsFile(main_dsc_path)
         self.assertIsLink(universe_dsc_path)
 
@@ -136,8 +132,9 @@ class TestDeathRow(TestCase):
         for pub in test_publications:
             self.assertTrue(
                 pub.dateremoved is not None,
-                '%s (%s) is not marked as removed.'
-                % (pub.displayname, pub.component.name))
+                "%s (%s) is not marked as removed."
+                % (pub.displayname, pub.component.name),
+            )
 
         self.assertDoesNotExist(main_dsc_path)
         self.assertDoesNotExist(universe_dsc_path)
diff --git a/lib/lp/archivepublisher/tests/test_debian_installer.py b/lib/lp/archivepublisher/tests/test_debian_installer.py
index 3bc26a5..a41785e 100644
--- a/lib/lp/archivepublisher/tests/test_debian_installer.py
+++ b/lib/lp/archivepublisher/tests/test_debian_installer.py
@@ -17,7 +17,7 @@ from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.customupload import (
     CustomUploadAlreadyExists,
     CustomUploadBadUmask,
-    )
+)
 from lp.archivepublisher.debian_installer import DebianInstallerUpload
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
@@ -36,10 +36,12 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
         self.suite = "distroseries"
         # CustomUpload.installFiles requires a umask of 0o022.
         old_umask = os.umask(0o022)
@@ -50,17 +52,20 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.arch = "i386"
         self.path = os.path.join(
             self.temp_dir,
-            "debian-installer-images_%s_%s.tar.gz" % (self.version, self.arch))
+            "debian-installer-images_%s_%s.tar.gz" % (self.version, self.arch),
+        )
         self.buffer = open(self.path, "wb")
         self.tarfile = LaunchpadWriteTarFile(self.buffer)
 
     def addFile(self, path, contents):
         self.tarfile.add_file(
-            "installer-%s/%s/%s" % (self.arch, self.version, path), contents)
+            "installer-%s/%s/%s" % (self.arch, self.version, path), contents
+        )
 
     def addSymlink(self, path, target):
         self.tarfile.add_symlink(
-            "installer-%s/%s/%s" % (self.arch, self.version, path), target)
+            "installer-%s/%s/%s" % (self.arch, self.version, path), target
+        )
 
     def process(self):
         self.tarfile.close()
@@ -70,11 +75,16 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
     def getInstallerPath(self, versioned_filename=None):
         pubconf = getPubConfig(self.archive)
         installer_path = os.path.join(
-            pubconf.archiveroot, "dists", self.suite, "main",
-            "installer-%s" % self.arch)
+            pubconf.archiveroot,
+            "dists",
+            self.suite,
+            "main",
+            "installer-%s" % self.arch,
+        )
         if versioned_filename is not None:
             installer_path = os.path.join(
-                installer_path, self.version, versioned_filename)
+                installer_path, self.version, versioned_filename
+            )
         return installer_path
 
     def test_basic(self):
@@ -103,18 +113,22 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.process()
         installer_path = self.getInstallerPath()
         self.assertContentEqual(
-            [self.version, "current"], os.listdir(installer_path))
+            [self.version, "current"], os.listdir(installer_path)
+        )
         self.assertEqual(
-            self.version, os.readlink(os.path.join(installer_path, "current")))
+            self.version, os.readlink(os.path.join(installer_path, "current"))
+        )
 
     def test_correct_file(self):
         # Files in the tarball are extracted correctly.
         self.openArchive()
-        directory = ("images/netboot/ubuntu-installer/i386/"
-                     "pxelinux.cfg.serial-9600")
+        directory = (
+            "images/netboot/ubuntu-installer/i386/" "pxelinux.cfg.serial-9600"
+        )
         filename = os.path.join(directory, "default")
         long_filename = os.path.join(
-            directory, "very_very_very_very_very_very_long_filename")
+            directory, "very_very_very_very_very_very_long_filename"
+        )
         self.addFile(filename, b"hey")
         self.addFile(long_filename, b"long")
         self.process()
@@ -134,11 +148,14 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.addSymlink(link_to_dir_path, link_to_dir_target)
         self.process()
         self.assertEqual(
-            foo_target, os.readlink(self.getInstallerPath(foo_path)))
+            foo_target, os.readlink(self.getInstallerPath(foo_path))
+        )
         self.assertEqual(
             link_to_dir_target,
-            os.path.normpath(os.readlink(
-                self.getInstallerPath(link_to_dir_path))))
+            os.path.normpath(
+                os.readlink(self.getInstallerPath(link_to_dir_path))
+            ),
+        )
 
     def test_top_level_permissions(self):
         # Top-level directories are set to mode 0o755 (see bug 107068).
@@ -149,30 +166,41 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.assertEqual(0o755, os.stat(installer_path).st_mode & 0o777)
         self.assertEqual(
             0o755,
-            os.stat(os.path.join(installer_path, os.pardir)).st_mode & 0o777)
+            os.stat(os.path.join(installer_path, os.pardir)).st_mode & 0o777,
+        )
 
     def test_extracted_permissions(self):
         # Extracted files and directories are set to 0o644/0o755.
         self.openArchive()
-        directory = ("images/netboot/ubuntu-installer/i386/"
-                     "pxelinux.cfg.serial-9600")
+        directory = (
+            "images/netboot/ubuntu-installer/i386/" "pxelinux.cfg.serial-9600"
+        )
         filename = os.path.join(directory, "default")
         self.addFile(filename, b"hey")
         self.process()
         self.assertEqual(
-            0o644, os.stat(self.getInstallerPath(filename)).st_mode & 0o777)
+            0o644, os.stat(self.getInstallerPath(filename)).st_mode & 0o777
+        )
         self.assertEqual(
-            0o755, os.stat(self.getInstallerPath(directory)).st_mode & 0o777)
+            0o755, os.stat(self.getInstallerPath(directory)).st_mode & 0o777
+        )
 
     def test_sign_with_external_run_parts(self):
         self.enableRunParts(distribution_name=self.distro.name)
-        with open(os.path.join(
-                self.parts_directory, self.distro.name, "sign.d",
-                "10-sign"), "w") as f:
-            f.write(dedent("""\
+        with open(
+            os.path.join(
+                self.parts_directory, self.distro.name, "sign.d", "10-sign"
+            ),
+            "w",
+        ) as f:
+            f.write(
+                dedent(
+                    """\
                 #! /bin/sh
                 touch "$OUTPUT_PATH"
-                """))
+                """
+                )
+            )
             os.fchmod(f.fileno(), 0o755)
         self.openArchive()
         self.addFile("images/list", b"a list")
@@ -180,13 +208,15 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
         self.process()
         self.assertThat(
             self.getInstallerPath("images"),
-            DirContains(["list", "SHA256SUMS", "SHA256SUMS.gpg"]))
+            DirContains(["list", "SHA256SUMS", "SHA256SUMS.gpg"]),
+        )
 
     def test_getSeriesKey_extracts_architecture(self):
         # getSeriesKey extracts the architecture from an upload's filename.
         self.openArchive()
         self.assertEqual(
-            self.arch, DebianInstallerUpload.getSeriesKey(self.path))
+            self.arch, DebianInstallerUpload.getSeriesKey(self.path)
+        )
 
     def test_getSeriesKey_returns_None_on_mismatch(self):
         # getSeriesKey returns None if the filename does not match the
@@ -195,7 +225,9 @@ class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):
 
     def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
         # getSeriesKey requires exactly three fields.
-        self.assertIsNone(DebianInstallerUpload.getSeriesKey(
-            "package_1.0.tar.gz"))
-        self.assertIsNone(DebianInstallerUpload.getSeriesKey(
-            "one_two_three_four_5.tar.gz"))
+        self.assertIsNone(
+            DebianInstallerUpload.getSeriesKey("package_1.0.tar.gz")
+        )
+        self.assertIsNone(
+            DebianInstallerUpload.getSeriesKey("one_two_three_four_5.tar.gz")
+        )
diff --git a/lib/lp/archivepublisher/tests/test_debversion.py b/lib/lp/archivepublisher/tests/test_debversion.py
index ee252de..3bc6746 100644
--- a/lib/lp/archivepublisher/tests/test_debversion.py
+++ b/lib/lp/archivepublisher/tests/test_debversion.py
@@ -12,7 +12,7 @@ from lp.archivepublisher.debversion import (
     BadUpstreamError,
     Version,
     VersionError,
-    )
+)
 
 
 class VersionTests(unittest.TestCase):
@@ -27,7 +27,7 @@ class VersionTests(unittest.TestCase):
         "1.5.4-1.woody.0",
         "1.6-0+1.5a-4",
         "1.3~rc1-4",
-        )
+    )
 
     # Known less-than comparisons
     COMPARISONS = (
@@ -52,7 +52,7 @@ class VersionTests(unittest.TestCase):
         ("1~", "1"),
         ("1~", "1~a"),
         ("1~a", "1~b"),
-        )
+    )
 
     def testAcceptsString(self):
         """Version should accept a string input."""
@@ -128,7 +128,6 @@ class VersionTests(unittest.TestCase):
         self.assertEqual(Version("1.0"), Version("0:1.0"))
 
     def notestNullRevisionIsZero(self):
-        """Version should treat an omitted revision as being equal to zero.
-        """
+        """Version should treat an omitted revision as being equal to zero."""
         self.assertEqual(Version("1.0"), Version("1.0-0"))
         self.assertTrue(Version("1.0") == Version("1.0-0"))
diff --git a/lib/lp/archivepublisher/tests/test_dist_upgrader.py b/lib/lp/archivepublisher/tests/test_dist_upgrader.py
index 69092c8..1584d60 100644
--- a/lib/lp/archivepublisher/tests/test_dist_upgrader.py
+++ b/lib/lp/archivepublisher/tests/test_dist_upgrader.py
@@ -17,11 +17,11 @@ from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.customupload import (
     CustomUploadAlreadyExists,
     CustomUploadBadUmask,
-    )
+)
 from lp.archivepublisher.dist_upgrader import (
     DistUpgraderBadVersion,
     DistUpgraderUpload,
-    )
+)
 from lp.archivepublisher.interfaces.publisherconfig import IPublisherConfigSet
 from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
 from lp.services.tarfile_helpers import LaunchpadWriteTarFile
@@ -39,10 +39,12 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
         self.temp_dir = self.makeTemporaryDirectory()
         self.distro = self.factory.makeDistribution()
         db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
-            self.distro)
+            self.distro
+        )
         db_pubconf.root_dir = self.temp_dir
         self.archive = self.factory.makeArchive(
-            distribution=self.distro, purpose=ArchivePurpose.PRIMARY)
+            distribution=self.distro, purpose=ArchivePurpose.PRIMARY
+        )
         self.suite = "distroseries"
         # CustomUpload.installFiles requires a umask of 0o022.
         old_umask = os.umask(0o022)
@@ -50,7 +52,8 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
 
     def openArchive(self, version):
         self.path = os.path.join(
-            self.temp_dir, "dist-upgrader_%s_all.tar.gz" % version)
+            self.temp_dir, "dist-upgrader_%s_all.tar.gz" % version
+        )
         self.buffer = open(self.path, "wb")
         self.tarfile = LaunchpadWriteTarFile(self.buffer)
 
@@ -62,8 +65,12 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
     def getUpgraderPath(self):
         pubconf = getPubConfig(self.archive)
         return os.path.join(
-            pubconf.archiveroot, "dists", self.suite, "main",
-            "dist-upgrader-all")
+            pubconf.archiveroot,
+            "dists",
+            self.suite,
+            "main",
+            "dist-upgrader-all",
+        )
 
     def test_basic(self):
         # Processing a simple correct tar file works.
@@ -92,13 +99,15 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
         self.process()
         upgrader_path = self.getUpgraderPath()
         self.assertContentEqual(
-            ["20060302.0120", "current"], os.listdir(upgrader_path))
+            ["20060302.0120", "current"], os.listdir(upgrader_path)
+        )
         self.assertEqual(
             "20060302.0120",
-            os.readlink(os.path.join(upgrader_path, "current")))
+            os.readlink(os.path.join(upgrader_path, "current")),
+        )
         self.assertContentEqual(
-            ["hello"],
-            os.listdir(os.path.join(upgrader_path, "20060302.0120")))
+            ["hello"], os.listdir(os.path.join(upgrader_path, "20060302.0120"))
+        )
 
     def test_bad_version(self):
         # Bad versions in the tarball are refused.
@@ -108,13 +117,20 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
 
     def test_sign_with_external_run_parts(self):
         self.enableRunParts(distribution_name=self.distro.name)
-        with open(os.path.join(
-                self.parts_directory, self.distro.name, "sign.d",
-                "10-sign"), "w") as f:
-            f.write(dedent("""\
+        with open(
+            os.path.join(
+                self.parts_directory, self.distro.name, "sign.d", "10-sign"
+            ),
+            "w",
+        ) as f:
+            f.write(
+                dedent(
+                    """\
                 #! /bin/sh
                 touch "$OUTPUT_PATH"
-                """))
+                """
+                )
+            )
             os.fchmod(f.fileno(), 0o755)
         self.openArchive("20060302.0120")
         self.tarfile.add_file("20060302.0120/list", b"a list")
@@ -122,7 +138,8 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
         self.process()
         self.assertThat(
             os.path.join(self.getUpgraderPath(), "20060302.0120"),
-            DirContains(["list", "foo.tar.gz", "foo.tar.gz.gpg"]))
+            DirContains(["list", "foo.tar.gz", "foo.tar.gz.gpg"]),
+        )
 
     def test_getSeriesKey_extracts_architecture(self):
         # getSeriesKey extracts the architecture from an upload's filename.
@@ -136,7 +153,9 @@ class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):
 
     def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
         # getSeriesKey requires exactly three fields.
-        self.assertIsNone(DistUpgraderUpload.getSeriesKey(
-            "package_1.0.tar.gz"))
-        self.assertIsNone(DistUpgraderUpload.getSeriesKey(
-            "one_two_three_four_5.tar.gz"))
+        self.assertIsNone(
+            DistUpgraderUpload.getSeriesKey("package_1.0.tar.gz")
+        )
+        self.assertIsNone(
+            DistUpgraderUpload.getSeriesKey("one_two_three_four_5.tar.gz")
+        )
diff --git a/lib/lp/archivepublisher/tests/test_dominator.py b/lib/lp/archivepublisher/tests/test_dominator.py
index 7d88122..acff7b4 100644
--- a/lib/lp/archivepublisher/tests/test_dominator.py
+++ b/lib/lp/archivepublisher/tests/test_dominator.py
@@ -8,43 +8,37 @@ from functools import cmp_to_key
 from operator import attrgetter
 
 import apt_pkg
-from testtools.matchers import (
-    GreaterThan,
-    LessThan,
-    )
 import transaction
+from testtools.matchers import GreaterThan, LessThan
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.archivepublisher.domination import (
+    STAY_OF_EXECUTION,
     ArchSpecificPublicationsCache,
-    contains_arch_indep,
     Dominator,
+    GeneralizedPublication,
+    contains_arch_indep,
     find_live_binary_versions_pass_1,
     find_live_binary_versions_pass_2,
     find_live_source_versions,
-    GeneralizedPublication,
-    STAY_OF_EXECUTION,
-    )
+)
 from lp.archivepublisher.publishing import Publisher
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.series import SeriesStatus
 from lp.services.log.logger import DevNullLogger
 from lp.soyuz.adapters.packagelocation import PackageLocation
-from lp.soyuz.enums import (
-    BinaryPackageFormat,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import BinaryPackageFormat, PackagePublishingStatus
 from lp.soyuz.interfaces.publishing import (
     IPublishingSet,
     ISourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.tests.test_publishing import TestNativePublishingBase
 from lp.testing import (
-    monkey_patch,
     StormStatementRecorder,
     TestCaseWithFactory,
-    )
+    monkey_patch,
+)
 from lp.testing.dbuser import lp_dbuser
 from lp.testing.fakemethod import FakeMethod
 from lp.testing.layers import ZopelessDatabaseLayer
@@ -54,15 +48,18 @@ from lp.testing.matchers import HasQueryCount
 class TestDominator(TestNativePublishingBase):
     """Test Dominator class."""
 
-    def createSourceAndBinaries(self, version, with_debug=False,
-                                archive=None):
+    def createSourceAndBinaries(self, version, with_debug=False, archive=None):
         """Create a source and binaries with the given version."""
         source = self.getPubSource(
-            version=version, archive=archive,
-            status=PackagePublishingStatus.PUBLISHED)
+            version=version,
+            archive=archive,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         binaries = self.getPubBinaries(
-            pub_source=source, with_debug=with_debug,
-            status=PackagePublishingStatus.PUBLISHED)
+            pub_source=source,
+            with_debug=with_debug,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         return (source, binaries)
 
     def createSimpleDominationContext(self):
@@ -80,20 +77,26 @@ class TestDominator(TestNativePublishingBase):
 
         Note that as an optimization the binaries list is already unpacked.
         """
-        foo_10_source, foo_10_binaries = self.createSourceAndBinaries('1.0')
-        foo_11_source, foo_11_binaries = self.createSourceAndBinaries('1.1')
-        return (foo_11_source, foo_11_binaries[0],
-                foo_10_source, foo_10_binaries[0])
+        foo_10_source, foo_10_binaries = self.createSourceAndBinaries("1.0")
+        foo_11_source, foo_11_binaries = self.createSourceAndBinaries("1.1")
+        return (
+            foo_11_source,
+            foo_11_binaries[0],
+            foo_10_source,
+            foo_10_binaries[0],
+        )
 
     def dominateAndCheck(self, dominant, dominated, supersededby):
         generalization = GeneralizedPublication(
-            is_source=ISourcePackagePublishingHistory.providedBy(dominant))
+            is_source=ISourcePackagePublishingHistory.providedBy(dominant)
+        )
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
 
         pubs = [dominant, dominated]
         live_versions = [generalization.getPackageVersion(dominant)]
         supersede, keep, delete = dominator.planPackageDomination(
-            pubs, live_versions, generalization)
+            pubs, live_versions, generalization
+        )
 
         # The dominant version will remain published, while the dominated
         # version will be superseded.
@@ -103,40 +106,55 @@ class TestDominator(TestNativePublishingBase):
 
     def testManualSourceDomination(self):
         """Test source domination procedure."""
-        [dominant_source, dominant_binary, dominated_source,
-         dominated_binary] = self.createSimpleDominationContext()
+        [
+            dominant_source,
+            dominant_binary,
+            dominated_source,
+            dominated_binary,
+        ] = self.createSimpleDominationContext()
 
         self.dominateAndCheck(
-            dominant_source, dominated_source,
-            dominant_source.sourcepackagerelease)
+            dominant_source,
+            dominated_source,
+            dominant_source.sourcepackagerelease,
+        )
 
     def testManualBinaryDomination(self):
         """Test binary domination procedure."""
-        [dominant_source, dominant, dominated_source,
-         dominated] = self.createSimpleDominationContext()
+        [
+            dominant_source,
+            dominant,
+            dominated_source,
+            dominated,
+        ] = self.createSimpleDominationContext()
 
         self.dominateAndCheck(
-            dominant, dominated, dominant.binarypackagerelease.build)
+            dominant, dominated, dominant.binarypackagerelease.build
+        )
 
     def testJudgeAndDominate(self):
         """Verify that judgeAndDominate correctly dominates everything."""
-        foo_10_source, foo_10_binaries = self.createSourceAndBinaries('1.0')
-        foo_11_source, foo_11_binaries = self.createSourceAndBinaries('1.1')
-        foo_12_source, foo_12_binaries = self.createSourceAndBinaries('1.2')
+        foo_10_source, foo_10_binaries = self.createSourceAndBinaries("1.0")
+        foo_11_source, foo_11_binaries = self.createSourceAndBinaries("1.1")
+        foo_12_source, foo_12_binaries = self.createSourceAndBinaries("1.2")
 
         dominator = Dominator(self.logger, foo_10_source.archive)
         dominator.judgeAndDominate(
-            foo_10_source.distroseries, foo_10_source.pocket)
+            foo_10_source.distroseries, foo_10_source.pocket
+        )
 
         self.checkPublications(
             [foo_12_source] + foo_12_binaries,
-            PackagePublishingStatus.PUBLISHED)
+            PackagePublishingStatus.PUBLISHED,
+        )
         self.checkPublications(
             [foo_11_source] + foo_11_binaries,
-            PackagePublishingStatus.SUPERSEDED)
+            PackagePublishingStatus.SUPERSEDED,
+        )
         self.checkPublications(
             [foo_10_source] + foo_10_binaries,
-            PackagePublishingStatus.SUPERSEDED)
+            PackagePublishingStatus.SUPERSEDED,
+        )
 
     def testJudgeAndDominateWithDDEBs(self):
         """Verify that judgeAndDominate ignores DDEBs correctly.
@@ -148,25 +166,32 @@ class TestDominator(TestNativePublishingBase):
         """
         ppa = self.factory.makeArchive()
         foo_10_source, foo_10_binaries = self.createSourceAndBinaries(
-            '1.0', with_debug=True, archive=ppa)
+            "1.0", with_debug=True, archive=ppa
+        )
         foo_11_source, foo_11_binaries = self.createSourceAndBinaries(
-            '1.1', with_debug=True, archive=ppa)
+            "1.1", with_debug=True, archive=ppa
+        )
         foo_12_source, foo_12_binaries = self.createSourceAndBinaries(
-            '1.2', with_debug=True, archive=ppa)
+            "1.2", with_debug=True, archive=ppa
+        )
 
         dominator = Dominator(self.logger, ppa)
         dominator.judgeAndDominate(
-            foo_10_source.distroseries, foo_10_source.pocket)
+            foo_10_source.distroseries, foo_10_source.pocket
+        )
 
         self.checkPublications(
             [foo_12_source] + foo_12_binaries,
-            PackagePublishingStatus.PUBLISHED)
+            PackagePublishingStatus.PUBLISHED,
+        )
         self.checkPublications(
             [foo_11_source] + foo_11_binaries,
-            PackagePublishingStatus.SUPERSEDED)
+            PackagePublishingStatus.SUPERSEDED,
+        )
         self.checkPublications(
             [foo_10_source] + foo_10_binaries,
-            PackagePublishingStatus.SUPERSEDED)
+            PackagePublishingStatus.SUPERSEDED,
+        )
 
     def test_dominateBinaries_rejects_empty_publication_list(self):
         """Domination asserts for non-empty input list."""
@@ -176,16 +201,17 @@ class TestDominator(TestNativePublishingBase):
         package = self.factory.makeBinaryPackageName()
         location = PackageLocation(
             archive=self.ubuntutest.main_archive,
-            distribution=distroseries.distribution, distroseries=distroseries,
-            pocket=pocket)
+            distribution=distroseries.distribution,
+            distroseries=distroseries,
+            pocket=pocket,
+        )
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
         dominator._sortPackages = FakeMethod({(package.name, location): []})
         # This isn't a really good exception. It should probably be
         # something more indicative of bad input.
         self.assertRaises(
-            AssertionError,
-            dominator.dominateBinaries,
-            distroseries, pocket)
+            AssertionError, dominator.dominateBinaries, distroseries, pocket
+        )
 
     def test_dominateSources_rejects_empty_publication_list(self):
         """Domination asserts for non-empty input list."""
@@ -195,16 +221,17 @@ class TestDominator(TestNativePublishingBase):
         package = self.factory.makeSourcePackageName()
         location = PackageLocation(
             archive=self.ubuntutest.main_archive,
-            distribution=distroseries.distribution, distroseries=distroseries,
-            pocket=pocket)
+            distribution=distroseries.distribution,
+            distroseries=distroseries,
+            pocket=pocket,
+        )
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
         dominator._sortPackages = FakeMethod({(package.name, location): []})
         # This isn't a really good exception. It should probably be
         # something more indicative of bad input.
         self.assertRaises(
-            AssertionError,
-            dominator.dominateSources,
-            distroseries, pocket)
+            AssertionError, dominator.dominateSources, distroseries, pocket
+        )
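
The rule these domination tests exercise can be stated compactly: among the live publications of one package, only the highest version stays PUBLISHED and every older one becomes SUPERSEDED, and domination refuses (via an assertion, as the two empty-list tests above show) to operate on an empty publication list. What follows is a minimal, self-contained sketch of that rule for readers following the tests; the names and the dotted-integer version comparison are simplifications for illustration, not Launchpad's implementation, which works on real publishing records and uses Debian version ordering.

from enum import Enum


class Status(Enum):
    PUBLISHED = "published"
    SUPERSEDED = "superseded"


def dominate(versions):
    """Map each live version of a package to its post-domination status.

    Only the highest version stays PUBLISHED; everything older becomes
    SUPERSEDED.  Versions are compared as dotted integers here, a
    simplification of the Debian ordering used by the real Dominator.
    """
    assert versions, "domination needs a non-empty publication list"
    newest = max(versions, key=lambda v: tuple(int(p) for p in v.split(".")))
    return {
        version: Status.PUBLISHED if version == newest else Status.SUPERSEDED
        for version in versions
    }


# Mirrors testJudgeAndDominate: 1.2 survives, 1.1 and 1.0 are superseded.
assert dominate(["1.0", "1.1", "1.2"]) == {
    "1.0": Status.SUPERSEDED,
    "1.1": Status.SUPERSEDED,
    "1.2": Status.PUBLISHED,
}
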
 
     def test_archall_domination(self):
         # Arch-all binaries should not be dominated when a new source
@@ -216,129 +243,198 @@ class TestDominator(TestNativePublishingBase):
         # Set up a source, "foo" which builds "foo-bin" and foo-common
         # (which is arch-all).
         foo_10_src = self.getPubSource(
-            sourcename="foo", version="1.0", architecturehintlist="i386",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.0",
+            architecturehintlist="i386",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         [foo_10_i386_bin] = self.getPubBinaries(
-            binaryname="foo-bin", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=True, version="1.0", pub_source=foo_10_src)
+            binaryname="foo-bin",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=True,
+            version="1.0",
+            pub_source=foo_10_src,
+        )
         [build] = foo_10_src.getBuilds()
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo-common", version="1.0", build=build,
-            architecturespecific=False)
+            binarypackagename="foo-common",
+            version="1.0",
+            build=build,
+            architecturespecific=False,
+        )
         foo_10_all_bins = self.publishBinaryInArchive(
-            bpr, self.ubuntutest.main_archive, pocket=foo_10_src.pocket,
-            status=PackagePublishingStatus.PUBLISHED)
+            bpr,
+            self.ubuntutest.main_archive,
+            pocket=foo_10_src.pocket,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
 
         # Now, make version 1.1 of foo and add a foo-common but not foo-bin
         # (imagine that it's not finished building yet).
         foo_11_src = self.getPubSource(
-            sourcename="foo", version="1.1", architecturehintlist="all",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.1",
+            architecturehintlist="all",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         # Generate binary publications for architecture "all" (actually,
         # one such publication per architecture).
         self.getPubBinaries(
-            binaryname="foo-common", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=False, version="1.1", pub_source=foo_11_src)
+            binaryname="foo-common",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=False,
+            version="1.1",
+            pub_source=foo_11_src,
+        )
 
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
-        dominator.judgeAndDominate(
-            foo_10_src.distroseries, foo_10_src.pocket)
+        dominator.judgeAndDominate(foo_10_src.distroseries, foo_10_src.pocket)
 
         # The source will be superseded.
         self.checkPublication(foo_10_src, PackagePublishingStatus.SUPERSEDED)
         # The arch-specific has no dominant, so it's still published.
         self.checkPublication(
-            foo_10_i386_bin, PackagePublishingStatus.PUBLISHED)
+            foo_10_i386_bin, PackagePublishingStatus.PUBLISHED
+        )
         # The arch-indep has a dominant but must not be superseded yet
         # since the arch-specific is still published.
         self.checkPublications(
-            foo_10_all_bins, PackagePublishingStatus.PUBLISHED)
+            foo_10_all_bins, PackagePublishingStatus.PUBLISHED
+        )
 
         # Now creating a newer foo-bin should see those last two
         # publications superseded.
         [build2] = foo_11_src.getBuilds()
         foo_11_bin = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo-bin", version="1.1", build=build2,
-            architecturespecific=True)
+            binarypackagename="foo-bin",
+            version="1.1",
+            build=build2,
+            architecturespecific=True,
+        )
         self.publishBinaryInArchive(
-            foo_11_bin, self.ubuntutest.main_archive,
+            foo_11_bin,
+            self.ubuntutest.main_archive,
             pocket=foo_10_src.pocket,
-            status=PackagePublishingStatus.PUBLISHED)
-        dominator.judgeAndDominate(
-            foo_10_src.distroseries, foo_10_src.pocket)
+            status=PackagePublishingStatus.PUBLISHED,
+        )
+        dominator.judgeAndDominate(foo_10_src.distroseries, foo_10_src.pocket)
         self.checkPublication(
-            foo_10_i386_bin, PackagePublishingStatus.SUPERSEDED)
+            foo_10_i386_bin, PackagePublishingStatus.SUPERSEDED
+        )
         self.checkPublications(
-            foo_10_all_bins, PackagePublishingStatus.SUPERSEDED)
+            foo_10_all_bins, PackagePublishingStatus.SUPERSEDED
+        )
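
The subtlety test_archall_domination pins down is the holdback of architecture-independent binaries: an arch-all binary from an old source release keeps its PUBLISHED status while any architecture-specific binary built from the same source is still live, and only becomes supersedable once all of those siblings have themselves been dominated. A rough, self-contained sketch of that condition follows; the function and argument names are invented for illustration and do not correspond to the Dominator's actual API.

from enum import Enum


class Status(Enum):
    PUBLISHED = "published"
    SUPERSEDED = "superseded"


def next_status_for_arch_all(current, sibling_statuses):
    """Decide whether an old arch-indep binary may be superseded yet.

    It is held back while any architecture-specific binary from the same
    source release is still PUBLISHED, and superseded only once every
    such sibling has been dominated.
    """
    if any(status is Status.PUBLISHED for status in sibling_statuses):
        return current
    return Status.SUPERSEDED


# foo-common 1.0 is held back while foo-bin 1.0 is still published ...
assert next_status_for_arch_all(
    Status.PUBLISHED, [Status.PUBLISHED]
) is Status.PUBLISHED
# ... and is superseded once foo-bin 1.1 has dominated foo-bin 1.0.
assert next_status_for_arch_all(
    Status.PUBLISHED, [Status.SUPERSEDED]
) is Status.SUPERSEDED
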
 
     def test_any_superseded_by_all(self):
         # Set up a source, foo, which builds an architecture-dependent
         # binary, foo-bin.
         foo_10_src = self.getPubSource(
-            sourcename="foo", version="1.0", architecturehintlist="i386",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.0",
+            architecturehintlist="i386",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         [foo_10_i386_bin] = self.getPubBinaries(
-            binaryname="foo-bin", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=True, version="1.0", pub_source=foo_10_src)
+            binaryname="foo-bin",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=True,
+            version="1.0",
+            pub_source=foo_10_src,
+        )
 
         # Now, make version 1.1 of foo, where foo-bin is now
         # architecture-independent.
         foo_11_src = self.getPubSource(
-            sourcename="foo", version="1.1", architecturehintlist="all",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.1",
+            architecturehintlist="all",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         [foo_10_all_bin, foo_10_all_bin_2] = self.getPubBinaries(
-            binaryname="foo-bin", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=False, version="1.1", pub_source=foo_11_src)
+            binaryname="foo-bin",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=False,
+            version="1.1",
+            pub_source=foo_11_src,
+        )
 
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
-        dominator.judgeAndDominate(
-            foo_10_src.distroseries, foo_10_src.pocket)
+        dominator.judgeAndDominate(foo_10_src.distroseries, foo_10_src.pocket)
 
         # The source will be superseded.
         self.checkPublication(foo_10_src, PackagePublishingStatus.SUPERSEDED)
         # The arch-specific is superseded by the new arch-indep.
         self.checkPublication(
-            foo_10_i386_bin, PackagePublishingStatus.SUPERSEDED)
+            foo_10_i386_bin, PackagePublishingStatus.SUPERSEDED
+        )
 
     def test_schitzoid_package(self):
         # Test domination of a source that produces an arch-specific and
         # an arch-indep binary, then switches both to the other arch type
         # in the next version.
         foo_10_src = self.getPubSource(
-            sourcename="foo", version="1.0", architecturehintlist="i386",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.0",
+            architecturehintlist="i386",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         [foo_10_i386_bin] = self.getPubBinaries(
-            binaryname="foo-bin", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=True, version="1.0", pub_source=foo_10_src)
+            binaryname="foo-bin",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=True,
+            version="1.0",
+            pub_source=foo_10_src,
+        )
         [build] = foo_10_src.getBuilds()
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo-common", version="1.0", build=build,
-            architecturespecific=False)
+            binarypackagename="foo-common",
+            version="1.0",
+            build=build,
+            architecturespecific=False,
+        )
         foo_10_all_bins = self.publishBinaryInArchive(
-            bpr, self.ubuntutest.main_archive, pocket=foo_10_src.pocket,
-            status=PackagePublishingStatus.PUBLISHED)
+            bpr,
+            self.ubuntutest.main_archive,
+            pocket=foo_10_src.pocket,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
 
         foo_11_src = self.getPubSource(
-            sourcename="foo", version="1.1", architecturehintlist="i386",
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="foo",
+            version="1.1",
+            architecturehintlist="i386",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         [foo_11_i386_bin] = self.getPubBinaries(
-            binaryname="foo-common", status=PackagePublishingStatus.PUBLISHED,
-            architecturespecific=True, version="1.1", pub_source=foo_11_src)
+            binaryname="foo-common",
+            status=PackagePublishingStatus.PUBLISHED,
+            architecturespecific=True,
+            version="1.1",
+            pub_source=foo_11_src,
+        )
         [build] = foo_11_src.getBuilds()
         bpr = self.factory.makeBinaryPackageRelease(
-            binarypackagename="foo-bin", version="1.1", build=build,
-            architecturespecific=False)
+            binarypackagename="foo-bin",
+            version="1.1",
+            build=build,
+            architecturespecific=False,
+        )
         # Generate binary publications for architecture "all" (actually,
         # one such publication per architecture).
         self.publishBinaryInArchive(
-            bpr, self.ubuntutest.main_archive, pocket=foo_11_src.pocket,
-            status=PackagePublishingStatus.PUBLISHED)
+            bpr,
+            self.ubuntutest.main_archive,
+            pocket=foo_11_src.pocket,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
 
         dominator = Dominator(self.logger, self.ubuntutest.main_archive)
         dominator.judgeAndDominate(foo_10_src.distroseries, foo_10_src.pocket)
 
-        self.checkPublications(foo_10_all_bins + [foo_10_i386_bin],
-                               PackagePublishingStatus.SUPERSEDED)
+        self.checkPublications(
+            foo_10_all_bins + [foo_10_i386_bin],
+            PackagePublishingStatus.SUPERSEDED,
+        )
 
     def test_supersedes_arch_indep_binaries