← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:black-archiveuploader into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:black-archiveuploader into launchpad:master.

Commit message:
lp.archiveuploader: Apply black

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/425089
-- 
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-archiveuploader into launchpad:master.
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 7a8022a..639fef0 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -60,3 +60,5 @@ c606443bdb2f342593c9a7c9437cb70c01f85f29
 8fd124775592a33c3d2ce9ef8111a9a5f1a5e089
 # apply black to lp.archivepublisher
 8885e7977012e4f376e23f52125784567aefebe4
+# apply black to lp.archiveuploader
+01c7f7112b20dab5c48373339d530a39f0dc859b
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 95a373b..a8e3f73 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -44,6 +44,7 @@ repos:
             answers
             |app
             |archivepublisher
+            |archiveuploader
           )/
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
@@ -64,6 +65,7 @@ repos:
             answers
             |app
             |archivepublisher
+            |archiveuploader
           )/
     -   id: isort
         alias: isort-black
@@ -74,6 +76,7 @@ repos:
             answers
             |app
             |archivepublisher
+            |archiveuploader
           )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
diff --git a/lib/lp/archiveuploader/buildinfofile.py b/lib/lp/archiveuploader/buildinfofile.py
index b7fd2ee..bd8851d 100644
--- a/lib/lp/archiveuploader/buildinfofile.py
+++ b/lib/lp/archiveuploader/buildinfofile.py
@@ -4,27 +4,43 @@
 """Build information files."""
 
 __all__ = [
-    'BuildInfoFile',
-    ]
+    "BuildInfoFile",
+]
 
 from lp.app.errors import NotFoundError
 from lp.archiveuploader.dscfile import SignableTagFile
 from lp.archiveuploader.nascentuploadfile import PackageUploadFile
-from lp.archiveuploader.utils import (
-    re_isbuildinfo,
-    re_no_epoch,
-    UploadError,
-    )
+from lp.archiveuploader.utils import UploadError, re_isbuildinfo, re_no_epoch
 
 
 class BuildInfoFile(PackageUploadFile, SignableTagFile):
     """Represents an uploaded build information file."""
 
-    def __init__(self, filepath, checksums, size, component_and_section,
-                 priority_name, package, version, changes, policy, logger):
+    def __init__(
+        self,
+        filepath,
+        checksums,
+        size,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+        policy,
+        logger,
+    ):
         super().__init__(
-            filepath, checksums, size, component_and_section, priority_name,
-            package, version, changes, policy, logger)
+            filepath,
+            checksums,
+            size,
+            component_and_section,
+            priority_name,
+            package,
+            version,
+            changes,
+            policy,
+            logger,
+        )
         self.parse(verify_signature=not policy.unsigned_buildinfo_ok)
         arch_match = re_isbuildinfo.match(self.filename)
         self.architecture = arch_match.group(3)
@@ -54,12 +70,14 @@ class BuildInfoFile(PackageUploadFile, SignableTagFile):
         """
         self.logger.debug("Verifying buildinfo file %s" % self.filename)
 
-        version_chopped = re_no_epoch.sub('', self.version)
+        version_chopped = re_no_epoch.sub("", self.version)
         buildinfo_match = re_isbuildinfo.match(self.filename)
         filename_version = buildinfo_match.group(2)
         if filename_version != version_chopped:
-            yield UploadError("%s: should be %s according to changes file."
-                % (filename_version, version_chopped))
+            yield UploadError(
+                "%s: should be %s according to changes file."
+                % (filename_version, version_chopped)
+            )
 
     def checkBuild(self, build):
         """See `PackageUploadFile`."""
@@ -67,21 +85,29 @@ class BuildInfoFile(PackageUploadFile, SignableTagFile):
             das = self.policy.distroseries[self.architecture]
         except NotFoundError:
             raise UploadError(
-                "Upload to unknown architecture %s for distroseries %s" %
-                (self.architecture, self.policy.distroseries))
+                "Upload to unknown architecture %s for distroseries %s"
+                % (self.architecture, self.policy.distroseries)
+            )
 
         # Sanity check; raise an error if the build we've been
         # told to link to makes no sense.
-        if (build.pocket != self.policy.pocket or
-            build.distro_arch_series != das or
-            build.archive != self.policy.archive):
+        if (
+            build.pocket != self.policy.pocket
+            or build.distro_arch_series != das
+            or build.archive != self.policy.archive
+        ):
             raise UploadError(
                 "Attempt to upload buildinfo specifying build %s, where it "
-                "doesn't fit." % build.id)
+                "doesn't fit." % build.id
+            )
 
     def storeInDatabase(self):
         """Create and return the corresponding `LibraryFileAlias` reference."""
         with open(self.filepath, "rb") as f:
             return self.librarian.create(
-                self.filename, self.size, f, self.content_type,
-                restricted=self.policy.archive.private)
+                self.filename,
+                self.size,
+                f,
+                self.content_type,
+                restricted=self.policy.archive.private,
+            )
diff --git a/lib/lp/archiveuploader/changesfile.py b/lib/lp/archiveuploader/changesfile.py
index 43ff662..6a7b3a7 100644
--- a/lib/lp/archiveuploader/changesfile.py
+++ b/lib/lp/archiveuploader/changesfile.py
@@ -8,30 +8,29 @@ files uploaded.
 """
 
 __all__ = [
-    'CannotDetermineFileTypeError',
-    'ChangesFile',
-    'determine_file_class_and_name',
-    ]
+    "CannotDetermineFileTypeError",
+    "ChangesFile",
+    "determine_file_class_and_name",
+]
 
 import os
 
 import six
 
 from lp.archiveuploader.buildinfofile import BuildInfoFile
-from lp.archiveuploader.dscfile import (
-    DSCFile,
-    SignableTagFile,
-    )
+from lp.archiveuploader.dscfile import DSCFile, SignableTagFile
 from lp.archiveuploader.nascentuploadfile import (
     BaseBinaryUploadFile,
     CustomUploadFile,
     DdebBinaryUploadFile,
     DebBinaryUploadFile,
     SourceUploadFile,
-    splitComponentAndSection,
     UdebBinaryUploadFile,
-    )
+    splitComponentAndSection,
+)
 from lp.archiveuploader.utils import (
+    UploadError,
+    UploadWarning,
     determine_binary_file_type,
     determine_source_file_type,
     parse_and_merge_file_lists,
@@ -40,13 +39,11 @@ from lp.archiveuploader.utils import (
     re_isbuildinfo,
     re_issource,
     rfc822_encode_address,
-    UploadError,
-    UploadWarning,
-    )
+)
 from lp.registry.interfaces.sourcepackage import (
     SourcePackageFileType,
     SourcePackageUrgency,
-    )
+)
 from lp.services.encoding import guess as guess_encoding
 from lp.soyuz.enums import BinaryPackageFileType
 
@@ -59,12 +56,19 @@ class ChangesFile(SignableTagFile):
     """Changesfile model."""
 
     mandatory_fields = {
-        "Source", "Architecture", "Version", "Distribution",
-        "Maintainer", "Files", "Changes", "Date",
+        "Source",
+        "Architecture",
+        "Version",
+        "Distribution",
+        "Maintainer",
+        "Files",
+        "Changes",
+        "Date",
         # Changed-By is not technically mandatory according to
         # Debian policy but Soyuz relies on it being set in
         # various places.
-        "Changed-By"}
+        "Changed-By",
+    }
 
     # Map urgencies to their dbschema values.
     # Debian policy only permits low, medium, high, emergency.
@@ -75,7 +79,7 @@ class ChangesFile(SignableTagFile):
         "high": SourcePackageUrgency.HIGH,
         "critical": SourcePackageUrgency.EMERGENCY,
         "emergency": SourcePackageUrgency.EMERGENCY,
-        }
+    }
 
     dsc = None
     buildinfo = None
@@ -116,7 +120,8 @@ class ChangesFile(SignableTagFile):
             if field not in self._dict:
                 yield UploadError(
                     "Unable to find mandatory field '%s' in the changes "
-                    "file." % field)
+                    "file." % field
+                )
                 return
 
         try:
@@ -128,7 +133,8 @@ class ChangesFile(SignableTagFile):
         if format < 1.5 or format > 2.0:
             yield UploadError(
                 "Format out of acceptable range for changes file. Range "
-                "1.5 - 2.0, format %g" % format)
+                "1.5 - 2.0, format %g" % format
+            )
 
     def checkFileName(self):
         """Make sure the changes file name is well-formed.
@@ -139,9 +145,10 @@ class ChangesFile(SignableTagFile):
         match_changes = re_changes_file_name.match(self.filename)
         if match_changes is None:
             yield UploadError(
-                '%s -> inappropriate changesfile name, '
+                "%s -> inappropriate changesfile name, "
                 'should follow "<pkg>_<version>_<arch>.changes" format'
-                % self.filename)
+                % self.filename
+            )
         else:
             self.filename_archtag = match_changes.group(3)
 
@@ -156,12 +163,12 @@ class ChangesFile(SignableTagFile):
             # signed upload.  This is desireable because it avoids us
             # doing ensurePerson() for buildds and sync owners.
             try:
-                self.maintainer = self.parseAddress(self._dict['Maintainer'])
+                self.maintainer = self.parseAddress(self._dict["Maintainer"])
             except UploadError as error:
                 yield error
 
         try:
-            self.changed_by = self.parseAddress(self._dict['Changed-By'])
+            self.changed_by = self.parseAddress(self._dict["Changed-By"])
         except UploadError as error:
             yield error
 
@@ -173,8 +180,9 @@ class ChangesFile(SignableTagFile):
         Further checks will be performed in CustomUploadFile class.
         """
         component_name, section_name = splitComponentAndSection(
-            component_and_section)
-        if section_name.startswith('raw-'):
+            component_and_section
+        )
+        if section_name.startswith("raw-"):
             return True
         return False
 
@@ -201,21 +209,36 @@ class ChangesFile(SignableTagFile):
                     # otherwise the tarballs in custom uploads match
                     # with source_match.
                     file_instance = CustomUploadFile(
-                        filepath, hashes, size, component_and_section,
-                        priority_name, self.policy, self.logger)
+                        filepath,
+                        hashes,
+                        size,
+                        component_and_section,
+                        priority_name,
+                        self.policy,
+                        self.logger,
+                    )
                 else:
                     try:
                         package, cls = determine_file_class_and_name(filename)
                     except CannotDetermineFileTypeError:
                         yield UploadError(
                             "Unable to identify file %s (%s) in changes."
-                            % (filename, component_and_section))
+                            % (filename, component_and_section)
+                        )
                         continue
 
                     file_instance = cls(
-                        filepath, hashes, size, component_and_section,
-                        priority_name, package, self.version, self,
-                        self.policy, self.logger)
+                        filepath,
+                        hashes,
+                        size,
+                        component_and_section,
+                        priority_name,
+                        package,
+                        self.version,
+                        self,
+                        self.policy,
+                        self.logger,
+                    )
 
                     if cls == DSCFile:
                         self.dsc = file_instance
@@ -240,20 +263,22 @@ class ChangesFile(SignableTagFile):
         if len(self.files) == 0:
             yield UploadError("No files found in the changes")
 
-        if 'Urgency' not in self._dict:
+        if "Urgency" not in self._dict:
             # Urgency is recommended but not mandatory. Default to 'low'
-            self._dict['Urgency'] = b"low"
+            self._dict["Urgency"] = b"low"
 
-        raw_urgency = six.ensure_text(self._dict['Urgency']).lower()
+        raw_urgency = six.ensure_text(self._dict["Urgency"]).lower()
         if raw_urgency not in self.urgency_map:
             yield UploadWarning(
                 "Unable to grok urgency %s, overriding with 'low'"
-                % (raw_urgency))
-            self._dict['Urgency'] = b"low"
+                % (raw_urgency)
+            )
+            self._dict["Urgency"] = b"low"
 
         if not self.policy.unsigned_changes_ok:
-            assert self.signer is not None, (
-                "Policy does not allow unsigned changesfile")
+            assert (
+                self.signer is not None
+            ), "Policy does not allow unsigned changesfile"
 
     #
     # useful properties
@@ -274,8 +299,11 @@ class ChangesFile(SignableTagFile):
         It ensure the files mentioned in the changes are already processed.
         """
         assert self.files is not None, "Files must but processed first."
-        return [upload_file for upload_file in self.files
-                if isinstance(upload_file, upload_filetype)]
+        return [
+            upload_file
+            for upload_file in self.files
+            if isinstance(upload_file, upload_filetype)
+        ]
 
     @property
     def binary_package_files(self):
@@ -298,7 +326,7 @@ class ChangesFile(SignableTagFile):
 
         For example, 'hoary' or 'hoary-security'.
         """
-        return six.ensure_text(self._dict['Distribution'])
+        return six.ensure_text(self._dict["Distribution"])
 
     @property
     def architectures(self):
@@ -307,23 +335,24 @@ class ChangesFile(SignableTagFile):
         For instance ['source', 'all'] or ['source', 'i386', 'amd64']
         or ['source'].
         """
-        return set(six.ensure_text(self._dict['Architecture']).split())
+        return set(six.ensure_text(self._dict["Architecture"]).split())
 
     @property
     def binaries(self):
         """Return set of binary package names listed."""
         return set(
-            six.ensure_text(self._dict.get('Binary', '')).strip().split())
+            six.ensure_text(self._dict.get("Binary", "")).strip().split()
+        )
 
     @property
     def converted_urgency(self):
         """Return the appropriate SourcePackageUrgency item."""
-        return self.urgency_map[six.ensure_text(self._dict['Urgency']).lower()]
+        return self.urgency_map[six.ensure_text(self._dict["Urgency"]).lower()]
 
     @property
     def version(self):
         """Return changesfile claimed version."""
-        return six.ensure_text(self._dict['Version'])
+        return six.ensure_text(self._dict["Version"])
 
     @classmethod
     def formatChangesComment(cls, comment):
@@ -333,31 +362,31 @@ class ChangesFile(SignableTagFile):
         # debian policy rules. First replacing the blank line
         # indicator '\n .' and then stripping one space from each
         # successive line.
-        comment = comment.replace('\n .', '\n')
-        comment = comment.replace('\n ', '\n')
+        comment = comment.replace("\n .", "\n")
+        comment = comment.replace("\n ", "\n")
         return comment
 
     @property
     def changes_comment(self):
         """Return changesfile 'change' comment."""
-        comment = guess_encoding(self._dict['Changes'])
+        comment = guess_encoding(self._dict["Changes"])
 
         return self.formatChangesComment(comment)
 
     @property
     def date(self):
         """Return changesfile date."""
-        return six.ensure_text(self._dict['Date'])
+        return six.ensure_text(self._dict["Date"])
 
     @property
     def source(self):
         """Return changesfile claimed source name."""
-        return six.ensure_text(self._dict['Source'])
+        return six.ensure_text(self._dict["Source"])
 
     @property
     def architecture_line(self):
         """Return changesfile archicteture line."""
-        return six.ensure_text(self._dict['Architecture'])
+        return six.ensure_text(self._dict["Architecture"])
 
     @property
     def simulated_changelog(self):
@@ -370,9 +399,12 @@ class ChangesFile(SignableTagFile):
         }}}
         """
         changes_author = rfc822_encode_address(
-            self.changed_by['name'], self.changed_by['email'])
-        return ('%s\n\n -- %s  %s' % (
-            self.changes_comment, changes_author, self.date)).encode('UTF-8')
+            self.changed_by["name"], self.changed_by["email"]
+        )
+        return (
+            "%s\n\n -- %s  %s"
+            % (self.changes_comment, changes_author, self.date)
+        ).encode("UTF-8")
 
 
 def determine_file_class_and_name(filename):
@@ -382,8 +414,7 @@ def determine_file_class_and_name(filename):
     buildinfo_match = re_isbuildinfo.match(filename)
     if source_match:
         package = source_match.group(1)
-        if (determine_source_file_type(filename) ==
-            SourcePackageFileType.DSC):
+        if determine_source_file_type(filename) == SourcePackageFileType.DSC:
             cls = DSCFile
         else:
             cls = SourceUploadFile
@@ -393,12 +424,13 @@ def determine_file_class_and_name(filename):
             BinaryPackageFileType.DEB: DebBinaryUploadFile,
             BinaryPackageFileType.DDEB: DdebBinaryUploadFile,
             BinaryPackageFileType.UDEB: UdebBinaryUploadFile,
-            }[determine_binary_file_type(filename)]
+        }[determine_binary_file_type(filename)]
     elif buildinfo_match:
         package = buildinfo_match.group(1)
         cls = BuildInfoFile
     else:
         raise CannotDetermineFileTypeError(
-            "Could not determine the type of %r" % filename)
+            "Could not determine the type of %r" % filename
+        )
 
     return package, cls
diff --git a/lib/lp/archiveuploader/charmrecipeupload.py b/lib/lp/archiveuploader/charmrecipeupload.py
index 5d08beb..5ae5fe2 100644
--- a/lib/lp/archiveuploader/charmrecipeupload.py
+++ b/lib/lp/archiveuploader/charmrecipeupload.py
@@ -5,7 +5,7 @@
 
 __all__ = [
     "CharmRecipeUpload",
-    ]
+]
 
 import os
 
@@ -51,10 +51,12 @@ class CharmRecipeUpload:
 
         for charm_path in charm_paths:
             libraryfile = self.librarian.create(
-                os.path.basename(charm_path), os.stat(charm_path).st_size,
+                os.path.basename(charm_path),
+                os.stat(charm_path).st_size,
                 open(charm_path, "rb"),
                 filenameToContentType(charm_path),
-                restricted=build.is_private)
+                restricted=build.is_private,
+            )
             build.addFile(libraryfile)
 
         # The master verifies the status to confirm successful upload.
diff --git a/lib/lp/archiveuploader/ciupload.py b/lib/lp/archiveuploader/ciupload.py
index 90a8d94..1a1641a 100644
--- a/lib/lp/archiveuploader/ciupload.py
+++ b/lib/lp/archiveuploader/ciupload.py
@@ -5,7 +5,7 @@
 
 __all__ = [
     "CIUpload",
-    ]
+]
 
 import os
 
@@ -40,7 +40,8 @@ class CIUpload:
         # CI builds, but we need to fit into how the rest of the build farm
         # works.
         upload_path = os.path.join(
-            self.upload_path, str(build.archive.id), build.distribution.name)
+            self.upload_path, str(build.archive.id), build.distribution.name
+        )
         # we assume first level directories are job directories
         if os.path.isdir(upload_path):
             job_directories = [
@@ -50,13 +51,13 @@ class CIUpload:
             job_directories = []
         for job_directory in job_directories:
             artifacts[job_directory] = []
-            for dirpath, _, filenames in os.walk(os.path.join(
-                upload_path, job_directory
-            )):
+            for dirpath, _, filenames in os.walk(
+                os.path.join(upload_path, job_directory)
+            ):
                 for filename in filenames:
-                    artifacts[job_directory].append(os.path.join(
-                        dirpath, filename
-                    ))
+                    artifacts[job_directory].append(
+                        os.path.join(dirpath, filename)
+                    )
 
         for job_id in build.results:
             report = build.getOrCreateRevisionStatusReport(job_id)
@@ -68,8 +69,8 @@ class CIUpload:
                     report.setLog(f.read())
             except FileNotFoundError as e:
                 raise UploadError(
-                    "log file `%s` for job `%s` not found" % (
-                        e.filename, job_id)
+                    "log file `%s` for job `%s` not found"
+                    % (e.filename, job_id)
                 ) from e
 
             # attach artifacts
diff --git a/lib/lp/archiveuploader/dscfile.py b/lib/lp/archiveuploader/dscfile.py
index ee18c03..c72c28f 100644
--- a/lib/lp/archiveuploader/dscfile.py
+++ b/lib/lp/archiveuploader/dscfile.py
@@ -8,13 +8,13 @@ files representing a source uploaded.
 """
 
 __all__ = [
-    'DSCFile',
-    'DSCUploadedFile',
-    'findFile',
-    'find_changelog',
-    'find_copyright',
-    'SignableTagFile',
-    ]
+    "DSCFile",
+    "DSCUploadedFile",
+    "findFile",
+    "find_changelog",
+    "find_copyright",
+    "SignableTagFile",
+]
 
 import errno
 import glob
@@ -24,60 +24,51 @@ import shutil
 import tempfile
 import warnings
 
-from debian.deb822 import (
-    Deb822Dict,
-    PkgRelation,
-    )
 import six
+from debian.deb822 import Deb822Dict, PkgRelation
 from zope.component import getUtility
 
 from lp.app.errors import NotFoundError
 from lp.archiveuploader.nascentuploadfile import (
     NascentUploadFile,
     SourceUploadFile,
-    )
+)
 from lp.archiveuploader.tagfiles import (
-    parse_tagfile_content,
     TagFileParseError,
-    )
+    parse_tagfile_content,
+)
 from lp.archiveuploader.utils import (
-    determine_source_file_type,
     DpkgSourceError,
+    ParseMaintError,
+    UploadError,
+    UploadWarning,
+    determine_source_file_type,
     extract_dpkg_source,
     get_source_file_extension,
     parse_and_merge_file_lists,
     parse_maintainer_bytes,
-    ParseMaintError,
     re_is_component_orig_tar_ext,
     re_is_component_orig_tar_ext_sig,
     re_issource,
     re_valid_pkg_name,
     re_valid_version,
-    UploadError,
-    UploadWarning,
-    )
+)
 from lp.registry.interfaces.gpg import IGPGKeySet
-from lp.registry.interfaces.person import (
-    IPersonSet,
-    PersonCreationRationale,
-    )
+from lp.registry.interfaces.person import IPersonSet, PersonCreationRationale
 from lp.registry.interfaces.sourcepackage import (
     SourcePackageFileType,
     SourcePackageType,
-    )
+)
 from lp.registry.interfaces.sourcepackagename import ISourcePackageNameSet
 from lp.services.encoding import guess as guess_encoding
 from lp.services.gpg.interfaces import (
     GPGKeyExpired,
     GPGVerificationError,
     IGPGHandler,
-    )
+)
 from lp.services.identity.interfaces.emailaddress import InvalidEmailAddress
 from lp.services.librarian.utils import copy_and_close
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    SourcePackageFormat,
-    )
+from lp.soyuz.enums import ArchivePurpose, SourcePackageFormat
 
 
 def unpack_source(dsc_filepath):
@@ -105,10 +96,11 @@ def cleanup_unpacked_dir(unpacked_dir):
     try:
         shutil.rmtree(unpacked_dir)
     except OSError as error:
-        if errno.errorcode[error.errno] != 'EACCES':
+        if errno.errorcode[error.errno] != "EACCES":
             raise UploadError(
-                "couldn't remove tmp dir %s: code %s" % (
-                unpacked_dir, error.errno))
+                "couldn't remove tmp dir %s: code %s"
+                % (unpacked_dir, error.errno)
+            )
         else:
             result = os.system("chmod -R u+rwx " + unpacked_dir)
             if result != 0:
@@ -138,11 +130,10 @@ class SignableTagFile:
         or if signature verification was requested but failed.
         """
         try:
-            with open(self.filepath, 'rb') as f:
+            with open(self.filepath, "rb") as f:
                 self.raw_content = f.read()
         except OSError as error:
-            raise UploadError(
-                "Unable to read %s: %s" % (self.filename, error))
+            raise UploadError("Unable to read %s: %s" % (self.filename, error))
 
         if verify_signature:
             # We set self.signingkey regardless of whether the key is
@@ -151,28 +142,34 @@ class SignableTagFile:
             # UploadError is enough to prevent the upload being accepted.
             try:
                 self.signingkey, self.parsed_content = self._verifySignature(
-                    self.raw_content, self.filepath)
+                    self.raw_content, self.filepath
+                )
                 if not self.signingkey.active:
                     raise UploadError(
-                        "File %s is signed with a deactivated key %s" %
-                        (self.filepath, self.signingkey.fingerprint))
+                        "File %s is signed with a deactivated key %s"
+                        % (self.filepath, self.signingkey.fingerprint)
+                    )
             except GPGKeyExpired as e:
                 # This may theoretically return None, but the "expired"
                 # error will take precedence anyway.
                 self.signingkey = getUtility(IGPGKeySet).getByFingerprint(
-                    e.key.fingerprint)
+                    e.key.fingerprint
+                )
                 raise UploadError(
-                    "File %s is signed with an expired key %s" %
-                    (self.filepath, e.key.fingerprint))
+                    "File %s is signed with an expired key %s"
+                    % (self.filepath, e.key.fingerprint)
+                )
         else:
             self.logger.debug("%s can be unsigned." % self.filename)
             self.parsed_content = self.raw_content
         try:
             self._dict = parse_tagfile_content(
-                self.parsed_content, filename=self.filepath)
+                self.parsed_content, filename=self.filepath
+            )
         except TagFileParseError as error:
             raise UploadError(
-                "Unable to parse %s: %s" % (self.filename, error))
+                "Unable to parse %s: %s" % (self.filename, error)
+            )
 
     def _verifySignature(self, content, filename):
         """Verify the signature on the file content.
@@ -184,20 +181,23 @@ class SignableTagFile:
         cleartext data.
         """
         self.logger.debug(
-            "Verifying signature on %s" % os.path.basename(filename))
+            "Verifying signature on %s" % os.path.basename(filename)
+        )
 
         try:
             sig = getUtility(IGPGHandler).getVerifiedSignatureResilient(
-                content)
+                content
+            )
         except GPGVerificationError as error:
             raise UploadError(
-                "GPG verification of %s failed: %s" % (
-                filename, str(error)))
+                "GPG verification of %s failed: %s" % (filename, str(error))
+            )
 
         key = getUtility(IGPGKeySet).getByFingerprint(sig.fingerprint)
         if key is None:
-            raise UploadError("Signing key %s not registered in launchpad."
-                              % sig.fingerprint)
+            raise UploadError(
+                "Signing key %s not registered in launchpad." % sig.fingerprint
+            )
 
         return (key, sig.plain_data)
 
@@ -223,31 +223,39 @@ class SignableTagFile:
             raise UploadError("Invalid Maintainer.")
 
         if person is None and self.policy.create_people:
-            package = six.ensure_text(self._dict['Source'])
-            version = six.ensure_text(self._dict['Version'])
+            package = six.ensure_text(self._dict["Source"])
+            version = six.ensure_text(self._dict["Version"])
             if self.policy.distroseries and self.policy.pocket:
-                policy_suite = ('%s/%s' % (self.policy.distroseries.name,
-                                           self.policy.pocket.name))
+                policy_suite = "%s/%s" % (
+                    self.policy.distroseries.name,
+                    self.policy.pocket.name,
+                )
             else:
-                policy_suite = '(unknown)'
+                policy_suite = "(unknown)"
             try:
                 person = getUtility(IPersonSet).ensurePerson(
-                    email, name, PersonCreationRationale.SOURCEPACKAGEUPLOAD,
-                    comment=('when the %s_%s package was uploaded to %s'
-                             % (package, version, policy_suite)))
+                    email,
+                    name,
+                    PersonCreationRationale.SOURCEPACKAGEUPLOAD,
+                    comment=(
+                        "when the %s_%s package was uploaded to %s"
+                        % (package, version, policy_suite)
+                    ),
+                )
             except InvalidEmailAddress:
                 self.logger.info("Invalid email address: '%s'", email)
                 person = None
 
         if person is None:
-            raise UploadError("Unable to identify '%s':<%s> in launchpad"
-                              % (name, email))
+            raise UploadError(
+                "Unable to identify '%s':<%s> in launchpad" % (name, email)
+            )
 
         return {
             "name": name,
             "email": email,
             "person": person,
-            }
+        }
 
 
 class DSCFile(SourceUploadFile, SignableTagFile):
@@ -259,20 +267,23 @@ class DSCFile(SourceUploadFile, SignableTagFile):
         "Binary",
         "Maintainer",
         "Architecture",
-        "Files"}
-
-    known_fields = mandatory_fields.union({
-        "Build-Depends",
-        "Build-Depends-Indep",
-        "Build-Conflicts",
-        "Build-Conflicts-Indep",
-        "Checksums-Sha1",
-        "Checksums-Sha256",
-        "Checksums-Sha512",
-        "Format",
-        "Homepage",
-        "Standards-Version",
-        })
+        "Files",
+    }
+
+    known_fields = mandatory_fields.union(
+        {
+            "Build-Depends",
+            "Build-Depends-Indep",
+            "Build-Conflicts",
+            "Build-Conflicts-Indep",
+            "Checksums-Sha1",
+            "Checksums-Sha256",
+            "Checksums-Sha512",
+            "Format",
+            "Homepage",
+            "Standards-Version",
+        }
+    )
 
     # Note that files is actually only set inside verify().
     files = None
@@ -280,8 +291,19 @@ class DSCFile(SourceUploadFile, SignableTagFile):
     copyright = None
     changelog = None
 
-    def __init__(self, filepath, checksums, size, component_and_section,
-                 priority, package, version, changes, policy, logger):
+    def __init__(
+        self,
+        filepath,
+        checksums,
+        size,
+        component_and_section,
+        priority,
+        package,
+        version,
+        changes,
+        policy,
+        logger,
+    ):
         """Construct a DSCFile instance.
 
         This takes all NascentUploadFile constructor parameters plus package
@@ -293,29 +315,41 @@ class DSCFile(SourceUploadFile, SignableTagFile):
         from lp.archiveuploader.nascentupload import EarlyReturnUploadError
 
         SourceUploadFile.__init__(
-            self, filepath, checksums, size, component_and_section, priority,
-            package, version, changes, policy, logger)
+            self,
+            filepath,
+            checksums,
+            size,
+            component_and_section,
+            priority,
+            package,
+            version,
+            changes,
+            policy,
+            logger,
+        )
         self.parse(verify_signature=not policy.unsigned_dsc_ok)
 
         self.logger.debug("Performing DSC verification.")
         for mandatory_field in self.mandatory_fields:
             if mandatory_field not in self._dict:
                 raise UploadError(
-                    "Unable to find mandatory field %s in %s" % (
-                    mandatory_field, self.filename))
+                    "Unable to find mandatory field %s in %s"
+                    % (mandatory_field, self.filename)
+                )
 
-        self.maintainer = self.parseAddress(self._dict['Maintainer'])
+        self.maintainer = self.parseAddress(self._dict["Maintainer"])
 
         # If format is not present, assume 1.0. At least one tool in
         # the wild generates dsc files with format missing, and we need
         # to accept them.
-        if 'Format' not in self._dict:
-            self._dict['Format'] = b"1.0"
+        if "Format" not in self._dict:
+            self._dict["Format"] = b"1.0"
 
         if self.format is None:
             raise EarlyReturnUploadError(
-                "Unsupported source format: %s" %
-                six.ensure_str(self._dict['Format']))
+                "Unsupported source format: %s"
+                % six.ensure_str(self._dict["Format"])
+            )
 
     #
     # Useful properties.
@@ -323,31 +357,32 @@ class DSCFile(SourceUploadFile, SignableTagFile):
     @property
     def source(self):
         """Return the DSC source name."""
-        return six.ensure_text(self._dict['Source'])
+        return six.ensure_text(self._dict["Source"])
 
     @property
     def dsc_version(self):
         """Return the DSC source version."""
-        return six.ensure_text(self._dict['Version'])
+        return six.ensure_text(self._dict["Version"])
 
     @property
     def format(self):
         """Return the DSC format."""
         try:
             return SourcePackageFormat.getTermByToken(
-                six.ensure_text(self._dict['Format'])).value
+                six.ensure_text(self._dict["Format"])
+            ).value
         except LookupError:
             return None
 
     @property
     def architecture(self):
         """Return the DSC source architecture."""
-        return six.ensure_text(self._dict['Architecture'])
+        return six.ensure_text(self._dict["Architecture"])
 
     @property
     def binary(self):
         """Return the DSC claimed binary line."""
-        return six.ensure_text(self._dict['Binary'])
+        return six.ensure_text(self._dict["Binary"])
 
     #
     # DSC file checks.
@@ -381,13 +416,16 @@ class DSCFile(SourceUploadFile, SignableTagFile):
                 # DSC files only really hold on references to source
                 # files; they are essentially a description of a source
                 # package. Anything else is crack.
-                yield UploadError("%s: File %s does not look sourceful." % (
-                                  self.filename, filename))
+                yield UploadError(
+                    "%s: File %s does not look sourceful."
+                    % (self.filename, filename)
+                )
                 continue
             filepath = os.path.join(self.dirname, filename)
             try:
                 file_instance = DSCUploadedFile(
-                    filepath, hashes, size, self.policy, self.logger)
+                    filepath, hashes, size, self.policy, self.logger
+                )
             except UploadError as error:
                 yield error
             else:
@@ -396,26 +434,31 @@ class DSCFile(SourceUploadFile, SignableTagFile):
 
         if not re_valid_pkg_name.match(self.source):
             yield UploadError(
-                "%s: invalid source name %s" % (self.filename, self.source))
+                "%s: invalid source name %s" % (self.filename, self.source)
+            )
         if not re_valid_version.match(self.dsc_version):
             yield UploadError(
-                "%s: invalid version %s" % (self.filename, self.dsc_version))
+                "%s: invalid version %s" % (self.filename, self.dsc_version)
+            )
 
         if not self.policy.distroseries.isSourcePackageFormatPermitted(
-            self.format):
+            self.format
+        ):
             yield UploadError(
-                "%s: format '%s' is not permitted in %s." %
-                (self.filename, self.format, self.policy.distroseries.name))
+                "%s: format '%s' is not permitted in %s."
+                % (self.filename, self.format, self.policy.distroseries.name)
+            )
 
         # Validate the build dependencies
-        for field_name in ['Build-Depends', 'Build-Depends-Indep']:
+        for field_name in ["Build-Depends", "Build-Depends-Indep"]:
             field = self._dict.get(field_name, None)
             if field is not None:
                 field = six.ensure_text(field)
                 if field.startswith("ARRAY"):
                     yield UploadError(
                         "%s: invalid %s field produced by a broken version "
-                        "of dpkg-dev (1.10.11)" % (self.filename, field_name))
+                        "of dpkg-dev (1.10.11)" % (self.filename, field_name)
+                    )
                 try:
                     with warnings.catch_warnings():
                         warnings.simplefilter("error")
@@ -423,7 +466,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
                 except Warning as error:
                     yield UploadError(
                         "%s: invalid %s field; cannot be parsed by deb822: %s"
-                        % (self.filename, field_name, error))
+                        % (self.filename, field_name, error)
+                    )
 
         # Verify if version declared in changesfile is the same than that
         # in DSC (including epochs).
@@ -431,7 +475,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             yield UploadError(
                 "%s: version ('%s') in .dsc does not match version "
                 "('%s') in .changes."
-                % (self.filename, self.dsc_version, self.version))
+                % (self.filename, self.dsc_version, self.version)
+            )
 
         for error in self.checkFiles():
             yield error
@@ -457,13 +502,16 @@ class DSCFile(SourceUploadFile, SignableTagFile):
 
         :raise: `NotFoundError` when the wanted file could not be found.
         """
-        if (self.policy.archive.purpose == ArchivePurpose.PPA and
-            determine_source_file_type(filename) in (
+        if (
+            self.policy.archive.purpose == ArchivePurpose.PPA
+            and determine_source_file_type(filename)
+            in (
                 SourcePackageFileType.ORIG_TARBALL,
                 SourcePackageFileType.COMPONENT_ORIG_TARBALL,
                 SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
                 SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
-                )):
+            )
+        ):
             archives = [self.policy.archive, self.policy.distro.main_archive]
         else:
             archives = [self.policy.archive]
@@ -475,7 +523,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             try:
                 library_file = archive.getFileByName(filename)
                 self.logger.debug(
-                    "%s found in %s" % (filename, archive.displayname))
+                    "%s found in %s" % (filename, archive.displayname)
+                )
                 return library_file, archive
             except NotFoundError:
                 pass
@@ -495,7 +544,7 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             SourcePackageFileType.ORIG_TARBALL_SIGNATURE: 0,
             SourcePackageFileType.DEBIAN_TARBALL: 0,
             SourcePackageFileType.NATIVE_TARBALL: 0,
-            }
+        }
         component_orig_tar_counts = {}
         component_orig_tar_signature_counts = {}
         bzip2_count = 0
@@ -506,36 +555,40 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             file_type = determine_source_file_type(sub_dsc_file.filename)
 
             if file_type is None:
-                yield UploadError('Unknown file: ' + sub_dsc_file.filename)
+                yield UploadError("Unknown file: " + sub_dsc_file.filename)
                 continue
 
             if file_type == SourcePackageFileType.COMPONENT_ORIG_TARBALL:
                 # Split the count by component name.
                 component = re_is_component_orig_tar_ext.match(
-                    get_source_file_extension(sub_dsc_file.filename)).group(1)
+                    get_source_file_extension(sub_dsc_file.filename)
+                ).group(1)
                 if component not in component_orig_tar_counts:
                     component_orig_tar_counts[component] = 0
                 component_orig_tar_counts[component] += 1
             elif (
-                file_type ==
-                    SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE):
+                file_type
+                == SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE
+            ):
                 # Split the count by component name.
                 component = re_is_component_orig_tar_ext_sig.match(
-                    get_source_file_extension(sub_dsc_file.filename)).group(1)
+                    get_source_file_extension(sub_dsc_file.filename)
+                ).group(1)
                 if component not in component_orig_tar_signature_counts:
                     component_orig_tar_signature_counts[component] = 0
                 component_orig_tar_signature_counts[component] += 1
             else:
                 file_type_counts[file_type] += 1
 
-            if sub_dsc_file.filename.endswith('.bz2'):
+            if sub_dsc_file.filename.endswith(".bz2"):
                 bzip2_count += 1
-            elif sub_dsc_file.filename.endswith('.xz'):
+            elif sub_dsc_file.filename.endswith(".xz"):
                 xz_count += 1
 
             try:
                 library_file, file_archive = self._getFileByName(
-                    sub_dsc_file.filename)
+                    sub_dsc_file.filename
+                )
             except NotFoundError:
                 library_file = None
                 file_archive = None
@@ -545,13 +598,14 @@ class DSCFile(SourceUploadFile, SignableTagFile):
                 # dismiss. It prevents us from having scary duplicated
                 # filenames in Librarian and misapplied files in archive,
                 # fixes bug # 38636 and friends.
-                if sub_dsc_file.checksums['MD5'] != library_file.content.md5:
+                if sub_dsc_file.checksums["MD5"] != library_file.content.md5:
                     yield UploadError(
                         "File %s already exists in %s, but uploaded version "
                         "has different contents. See more information about "
                         "this error in "
-                        "https://help.launchpad.net/Packaging/UploadErrors." %
-                        (sub_dsc_file.filename, file_archive.displayname))
+                        "https://help.launchpad.net/Packaging/UploadErrors."
+                        % (sub_dsc_file.filename, file_archive.displayname)
+                    )
                     files_missing = True
                     continue
 
@@ -562,13 +616,15 @@ class DSCFile(SourceUploadFile, SignableTagFile):
                     # context Distribution.
                     yield UploadError(
                         "Unable to find %s in upload or distribution."
-                        % (sub_dsc_file.filename))
+                        % (sub_dsc_file.filename)
+                    )
                     files_missing = True
                     continue
 
                 # Pump the file through.
-                self.logger.debug("Pumping %s out of the librarian" % (
-                    sub_dsc_file.filename))
+                self.logger.debug(
+                    "Pumping %s out of the librarian" % (sub_dsc_file.filename)
+                )
                 library_file.open()
                 target_file = open(sub_dsc_file.filepath, "wb")
                 copy_and_close(library_file, target_file)
@@ -581,17 +637,24 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             file_checker = format_to_file_checker_map[self.format]
         except KeyError:
             raise AssertionError(
-                "No file checker for source format %s." % self.format)
+                "No file checker for source format %s." % self.format
+            )
 
         for error in file_checker(
-            self.filename, file_type_counts, component_orig_tar_counts,
-            component_orig_tar_signature_counts, bzip2_count, xz_count):
+            self.filename,
+            file_type_counts,
+            component_orig_tar_counts,
+            component_orig_tar_signature_counts,
+            bzip2_count,
+            xz_count,
+        ):
             yield error
 
         if files_missing:
             yield UploadError(
                 "Files specified in DSC are broken or missing, "
-                "skipping package unpack verification.")
+                "skipping package unpack verification."
+            )
         else:
             for error in self.unpackAndCheckSource():
                 # Pass on errors found when unpacking the source.
@@ -599,8 +662,7 @@ class DSCFile(SourceUploadFile, SignableTagFile):
 
     def unpackAndCheckSource(self):
         """Verify uploaded source using dpkg-source."""
-        self.logger.debug(
-            "Verifying uploaded source package by unpacking it.")
+        self.logger.debug("Verifying uploaded source package by unpacking it.")
 
         try:
             unpacked_dir = unpack_source(self.filepath)
@@ -608,7 +670,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             yield UploadError(
                 "dpkg-source failed for %s [return: %s]\n"
                 "[dpkg-source output: %s]"
-                % (self.filename, e.result, e.output))
+                % (self.filename, e.result, e.output)
+            )
             return
 
         try:
@@ -617,12 +680,15 @@ class DSCFile(SourceUploadFile, SignableTagFile):
 
             # Check if 'dpkg-source' created only one directory.
             temp_directories = [
-                entry.name for entry in os.scandir(unpacked_dir)
-                if entry.is_dir()]
+                entry.name
+                for entry in os.scandir(unpacked_dir)
+                if entry.is_dir()
+            ]
             if len(temp_directories) > 1:
                 yield UploadError(
-                    'Unpacked source contains more than one directory: %r'
-                    % temp_directories)
+                    "Unpacked source contains more than one directory: %r"
+                    % temp_directories
+                )
 
             # XXX cprov 20070713: We should access only the expected directory
             # name (<sourcename>-<no_epoch(no_revision(version))>).
@@ -658,8 +724,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
         """
         # Organize all the parameters requiring encoding transformation.
         pending = self._dict.copy()
-        pending['simulated_changelog'] = self.changes.simulated_changelog
-        pending['copyright'] = self.copyright
+        pending["simulated_changelog"] = self.changes.simulated_changelog
+        pending["copyright"] = self.copyright
 
         # We have no way of knowing what encoding the original copyright
         # file is in, unfortunately, and there is no standard, so guess.
@@ -681,13 +747,16 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             len(self.changelog),
             io.BytesIO(self.changelog),
             "text/x-debian-source-changelog",
-            restricted=self.policy.archive.private)
+            restricted=self.policy.archive.private,
+        )
 
-        source_name = getUtility(
-            ISourcePackageNameSet).getOrCreateByName(self.source)
+        source_name = getUtility(ISourcePackageNameSet).getOrCreateByName(
+            self.source
+        )
 
-        user_defined_fields = self.extractUserDefinedFields([
-            (field, encoded[field]) for field in self._dict])
+        user_defined_fields = self.extractUserDefinedFields(
+            [(field, encoded[field]) for field in self._dict]
+        )
 
         if self.changes.buildinfo is not None:
             buildinfo_lfa = self.changes.buildinfo.storeInDatabase()
@@ -698,32 +767,32 @@ class DSCFile(SourceUploadFile, SignableTagFile):
             sourcepackagename=source_name,
             version=self.dsc_version,
             format=SourcePackageType.DPKG,
-            maintainer=self.maintainer['person'],
-            builddepends=encoded.get('Build-Depends', ''),
-            builddependsindep=encoded.get('Build-Depends-Indep', ''),
-            build_conflicts=encoded.get('Build-Conflicts', ''),
-            build_conflicts_indep=encoded.get('Build-Conflicts-Indep', ''),
-            architecturehintlist=encoded.get('Architecture', ''),
-            creator=self.changes.changed_by['person'],
+            maintainer=self.maintainer["person"],
+            builddepends=encoded.get("Build-Depends", ""),
+            builddependsindep=encoded.get("Build-Depends-Indep", ""),
+            build_conflicts=encoded.get("Build-Conflicts", ""),
+            build_conflicts_indep=encoded.get("Build-Conflicts-Indep", ""),
+            architecturehintlist=encoded.get("Architecture", ""),
+            creator=self.changes.changed_by["person"],
             urgency=self.changes.converted_urgency,
-            homepage=encoded.get('Homepage'),
+            homepage=encoded.get("Homepage"),
             dsc=encoded_raw_content,
             dscsigningkey=self.signingkey,
-            dsc_maintainer_rfc822=encoded['Maintainer'],
-            dsc_format=encoded['Format'],
-            dsc_binaries=encoded['Binary'],
-            dsc_standards_version=encoded.get('Standards-Version'),
+            dsc_maintainer_rfc822=encoded["Maintainer"],
+            dsc_format=encoded["Format"],
+            dsc_binaries=encoded["Binary"],
+            dsc_standards_version=encoded.get("Standards-Version"),
             component=self.component,
             changelog=changelog_lfa,
-            changelog_entry=encoded.get('simulated_changelog'),
+            changelog_entry=encoded.get("simulated_changelog"),
             section=self.section,
             archive=self.policy.archive,
             source_package_recipe_build=build,
-            copyright=encoded.get('copyright'),
+            copyright=encoded.get("copyright"),
             # dateuploaded by default is UTC:now in the database
             user_defined_fields=user_defined_fields,
             buildinfo=buildinfo_lfa,
-            )
+        )
 
         # SourcePackageFiles should contain also the DSC
         source_files = self.files + [self]
@@ -734,7 +803,8 @@ class DSCFile(SourceUploadFile, SignableTagFile):
                     uploaded_file.size,
                     uploaded_file_obj,
                     uploaded_file.content_type,
-                    restricted=self.policy.archive.private)
+                    restricted=self.policy.archive.private,
+                )
             release.addFile(library_file)
 
         return release
@@ -756,8 +826,15 @@ class DSCUploadedFile(NascentUploadFile):
     def __init__(self, filepath, checksums, size, policy, logger):
         component_and_section = priority = "--no-value--"
         NascentUploadFile.__init__(
-            self, filepath, checksums, size, component_and_section,
-            priority, policy, logger)
+            self,
+            filepath,
+            checksums,
+            size,
+            component_and_section,
+            priority,
+            policy,
+            logger,
+        )
 
     def verify(self):
         """Check Sub DSCFile mentioned size & checksum."""
@@ -783,14 +860,12 @@ def findFile(source_dir, filename):
         if not os.path.exists(fullpath):
             continue
         if os.path.islink(fullpath):
-            raise UploadError(
-                "Symbolic link for %s not allowed" % filename)
+            raise UploadError("Symbolic link for %s not allowed" % filename)
         # Anything returned by this method should be less than 10MiB since it
         # will be stored in the database assuming the source package isn't
         # rejected before hand
         if os.stat(fullpath).st_size > 10485760:
-            raise UploadError(
-                "%s file too large, 10MiB max" % filename)
+            raise UploadError("%s file too large, 10MiB max" % filename)
         else:
             return fullpath
     return None
@@ -803,7 +878,7 @@ def find_copyright(source_dir, logger):
     :param logger: A logger object for debug output.
     :return: Contents of copyright file
     """
-    copyright_file = findFile(source_dir, 'debian/copyright')
+    copyright_file = findFile(source_dir, "debian/copyright")
     if copyright_file is None:
         raise UploadWarning("No copyright file found.")
 
@@ -823,7 +898,7 @@ def find_changelog(source_dir, logger):
     :param logger: A logger object for debug output.
     :return: Changelog contents
     """
-    changelog_file = findFile(source_dir, 'debian/changelog')
+    changelog_file = findFile(source_dir, "debian/changelog")
     if changelog_file is None:
         # Policy requires debian/changelog to always exist.
         raise UploadError("No changelog file found.")
@@ -834,9 +909,14 @@ def find_changelog(source_dir, logger):
         return changelog.read()
 
 
-def check_format_1_0_files(filename, file_type_counts,
-                           component_counts, component_signature_counts,
-                           bzip2_count, xz_count):
+def check_format_1_0_files(
+    filename,
+    file_type_counts,
+    component_counts,
+    component_signature_counts,
+    bzip2_count,
+    xz_count,
+):
     """Check that the given counts of each file type suit format 1.0.
 
     A 1.0 source must be native (with only one tar.gz), or have an orig.tar.gz
@@ -845,12 +925,12 @@ def check_format_1_0_files(filename, file_type_counts,
     """
     if bzip2_count > 0:
         yield UploadError(
-            "%s: is format 1.0 but uses bzip2 compression."
-            % filename)
+            "%s: is format 1.0 but uses bzip2 compression." % filename
+        )
     if xz_count > 0:
         yield UploadError(
-            "%s: is format 1.0 but uses xz compression."
-            % filename)
+            "%s: is format 1.0 but uses xz compression." % filename
+        )
 
     valid_file_type_counts = [
         {
@@ -876,16 +956,25 @@ def check_format_1_0_files(filename, file_type_counts,
         },
     ]
 
-    if (file_type_counts not in valid_file_type_counts or
-        len(component_counts) > 0 or len(component_signature_counts) > 0):
+    if (
+        file_type_counts not in valid_file_type_counts
+        or len(component_counts) > 0
+        or len(component_signature_counts) > 0
+    ):
         yield UploadError(
             "%s: must have exactly one tar.gz, or an orig.tar.gz and diff.gz"
-            % filename)
-
-
-def check_format_3_0_native_files(filename, file_type_counts,
-                                  component_counts, component_signature_counts,
-                                  bzip2_count, xz_count):
+            % filename
+        )
+
+
+def check_format_3_0_native_files(
+    filename,
+    file_type_counts,
+    component_counts,
+    component_signature_counts,
+    bzip2_count,
+    xz_count,
+):
     """Check that the given counts of each file type suit format 3.0 (native).
 
     A 3.0 (native) source must have only one tar.*. Any of gzip, bzip2, and
@@ -902,14 +991,22 @@ def check_format_3_0_native_files(filename, file_type_counts,
         },
     ]
 
-    if (file_type_counts not in valid_file_type_counts or
-        len(component_counts) > 0 or len(component_signature_counts) > 0):
+    if (
+        file_type_counts not in valid_file_type_counts
+        or len(component_counts) > 0
+        or len(component_signature_counts) > 0
+    ):
         yield UploadError("%s: must have only a tar.*." % filename)
 
 
-def check_format_3_0_quilt_files(filename, file_type_counts,
-                                 component_counts, component_signature_counts,
-                                 bzip2_count, xz_count):
+def check_format_3_0_quilt_files(
+    filename,
+    file_type_counts,
+    component_counts,
+    component_signature_counts,
+    bzip2_count,
+    xz_count,
+):
     """Check that the given counts of each file type suit format 3.0 (native).
 
     A 3.0 (quilt) source must have exactly one orig.tar.*, one debian.tar.*,
@@ -938,22 +1035,24 @@ def check_format_3_0_quilt_files(filename, file_type_counts,
     if file_type_counts not in valid_file_type_counts:
         yield UploadError(
             "%s: must have only an orig.tar.*, a debian.tar.*, and "
-            "optionally orig-*.tar.*" % filename)
+            "optionally orig-*.tar.*" % filename
+        )
 
     for component in component_counts:
         if component_counts[component] > 1:
             yield UploadError(
-                "%s: has more than one orig-%s.tar.*."
-                % (filename, component))
+                "%s: has more than one orig-%s.tar.*." % (filename, component)
+            )
     for component in component_signature_counts:
         if component_signature_counts[component] > 1:
             yield UploadError(
                 "%s: has more than one orig-%s.tar.*.asc."
-                % (filename, component))
+                % (filename, component)
+            )
 
 
 format_to_file_checker_map = {
     SourcePackageFormat.FORMAT_1_0: check_format_1_0_files,
     SourcePackageFormat.FORMAT_3_0_NATIVE: check_format_3_0_native_files,
     SourcePackageFormat.FORMAT_3_0_QUILT: check_format_3_0_quilt_files,
-    }
+}
diff --git a/lib/lp/archiveuploader/livefsupload.py b/lib/lp/archiveuploader/livefsupload.py
index 4d12f8e..96504b3 100644
--- a/lib/lp/archiveuploader/livefsupload.py
+++ b/lib/lp/archiveuploader/livefsupload.py
@@ -41,10 +41,13 @@ class LiveFSUpload:
             for livefs_file in sorted(filenames):
                 livefs_path = os.path.join(dirpath, livefs_file)
                 libraryfile = self.librarian.create(
-                    livefs_file, os.stat(livefs_path).st_size,
+                    livefs_file,
+                    os.stat(livefs_path).st_size,
                     open(livefs_path, "rb"),
                     filenameToContentType(livefs_path),
-                    restricted=build.is_private, allow_zero_length=True)
+                    restricted=build.is_private,
+                    allow_zero_length=True,
+                )
                 build.addFile(libraryfile)
 
         # The master verifies the status to confirm successful upload.
diff --git a/lib/lp/archiveuploader/nascentupload.py b/lib/lp/archiveuploader/nascentupload.py
index cb6e144..e7ecb7c 100644
--- a/lib/lp/archiveuploader/nascentupload.py
+++ b/lib/lp/archiveuploader/nascentupload.py
@@ -32,12 +32,12 @@ from lp.archiveuploader.nascentuploadfile import (
     DebBinaryUploadFile,
     SourceUploadFile,
     UdebBinaryUploadFile,
-    )
+)
 from lp.archiveuploader.utils import (
-    determine_source_file_type,
     UploadError,
     UploadWarning,
-    )
+    determine_source_file_type,
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.sourcepackage import SourcePackageFileType
@@ -48,14 +48,13 @@ from lp.soyuz.adapters.overrides import (
     FallbackOverridePolicy,
     FromExistingOverridePolicy,
     SourceOverride,
-    )
+)
 from lp.soyuz.enums import PackageUploadStatus
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.queue import QueueInconsistentStateError
 
-
-PARTNER_COMPONENT_NAME = 'partner'
+PARTNER_COMPONENT_NAME = "partner"
 
 
 class EarlyReturnUploadError(Exception):
@@ -88,6 +87,7 @@ class NascentUpload:
     a nascent upload is something we're trying to get into a shape we can
     insert into the database as a queued upload to be processed.
     """
+
     recipients = None
 
     # Defined in check_changes_consistency()
@@ -146,7 +146,8 @@ class NascentUpload:
             policy.setDistroSeriesAndPocket(self.changes.suite_name)
         except NotFoundError:
             self.reject(
-                "Unable to find distroseries: %s" % self.changes.suite_name)
+                "Unable to find distroseries: %s" % self.changes.suite_name
+            )
 
         if policy.redirect_warning is not None:
             self.warn(policy.redirect_warning)
@@ -238,16 +239,16 @@ class NascentUpload:
         think_archdep = False
 
         changes_architectures = self.changes.architectures
-        if 'source' in changes_architectures:
+        if "source" in changes_architectures:
             think_sourceful = True
-            changes_architectures.remove('source')
+            changes_architectures.remove("source")
 
         if changes_architectures:
             think_binaryful = True
 
-        if 'all' in changes_architectures:
+        if "all" in changes_architectures:
             think_archindep = True
-            changes_architectures.remove('all')
+            changes_architectures.remove("all")
 
         if think_binaryful and len(changes_architectures) > 0:
             think_archdep = True
@@ -262,15 +263,12 @@ class NascentUpload:
                 files_binaryful = files_binaryful or True
             elif isinstance(uploaded_file, BaseBinaryUploadFile):
                 files_binaryful = files_binaryful or True
-                files_archindep = (
-                    files_archindep or uploaded_file.is_archindep)
-                files_archdep = (
-                    files_archdep or not uploaded_file.is_archindep)
+                files_archindep = files_archindep or uploaded_file.is_archindep
+                files_archdep = files_archdep or not uploaded_file.is_archindep
             elif isinstance(uploaded_file, SourceUploadFile):
                 files_sourceful = True
             elif isinstance(uploaded_file, BuildInfoFile):
-                files_sourceful = (
-                    files_sourceful or uploaded_file.is_sourceful)
+                files_sourceful = files_sourceful or uploaded_file.is_sourceful
                 if uploaded_file.is_binaryful:
                     files_binaryful = files_binaryful or True
             else:
@@ -278,19 +276,27 @@ class NascentUpload:
                 raise AssertionError("Unknown uploaded file type.")
 
         if files_sourceful != think_sourceful:
-            self.reject("Mismatch in sourcefulness. (arch) %s != (files) %s"
-                 % (think_sourceful, files_sourceful))
+            self.reject(
+                "Mismatch in sourcefulness. (arch) %s != (files) %s"
+                % (think_sourceful, files_sourceful)
+            )
         if files_binaryful != think_binaryful:
-            self.reject("Mismatch in binaryfulness. (arch) %s != (files) %s"
-                 % (think_binaryful, files_binaryful))
+            self.reject(
+                "Mismatch in binaryfulness. (arch) %s != (files) %s"
+                % (think_binaryful, files_binaryful)
+            )
 
         if files_archindep and not think_archindep:
-            self.reject("One or more files uploaded with architecture "
-                        "'all' but changes file does not list 'all'.")
+            self.reject(
+                "One or more files uploaded with architecture "
+                "'all' but changes file does not list 'all'."
+            )
 
         if files_archdep and not think_archdep:
-            self.reject("One or more files uploaded with specific "
-                        "architecture but changes file does not list it.")
+            self.reject(
+                "One or more files uploaded with specific "
+                "architecture but changes file does not list it."
+            )
 
         # Remember the information for later use in properties.
         self.sourceful = think_sourceful
@@ -305,13 +311,18 @@ class NascentUpload:
         Ensures a sourceful upload has exactly one DSC. All further source
         checks are performed later by the DSC.
         """
-        assert self.sourceful, (
-            "Source consistency check called for a non-source upload")
-
-        dsc = len([
-            file for file in self.changes.files
-            if determine_source_file_type(file.filename) ==
-                SourcePackageFileType.DSC])
+        assert (
+            self.sourceful
+        ), "Source consistency check called for a non-source upload"
+
+        dsc = len(
+            [
+                file
+                for file in self.changes.files
+                if determine_source_file_type(file.filename)
+                == SourcePackageFileType.DSC
+            ]
+        )
 
         # It is never sane to upload more than one source at a time.
         if dsc > 1:
@@ -328,14 +339,16 @@ class NascentUpload:
         upload contents.
         """
         considered_archs = [
-            arch_name for arch_name in self.changes.architectures
-            if not arch_name.endswith("_translations")]
+            arch_name
+            for arch_name in self.changes.architectures
+            if not arch_name.endswith("_translations")
+        ]
         max = 1
         if self.sourceful:
             # When sourceful, the tools add 'source' to the architecture
             # list in the upload.
             max = self.policy.distroseries.architecturecount + 1
-        if 'all' in considered_archs:
+        if "all" in considered_archs:
             # Sometimes we get 'i386 all' which would count as two archs
             # so if 'all' is present, we bump the permitted number up
             # by one.
@@ -356,11 +369,15 @@ class NascentUpload:
         unmatched_ddebs = {}
         for uploaded_file in self.changes.files:
             if isinstance(uploaded_file, DdebBinaryUploadFile):
-                ddeb_key = (uploaded_file.package, uploaded_file.version,
-                            uploaded_file.architecture)
+                ddeb_key = (
+                    uploaded_file.package,
+                    uploaded_file.version,
+                    uploaded_file.architecture,
+                )
                 if ddeb_key in unmatched_ddebs:
                     yield UploadError(
-                        "Duplicated debug packages: %s %s (%s)" % ddeb_key)
+                        "Duplicated debug packages: %s %s (%s)" % ddeb_key
+                    )
                 else:
                     unmatched_ddebs[ddeb_key] = uploaded_file
 
@@ -372,9 +389,12 @@ class NascentUpload:
             if (is_deb or is_udeb) and not is_ddeb:
                 try:
                     matching_ddeb = unmatched_ddebs.pop(
-                        (uploaded_file.package + '-dbgsym',
-                         uploaded_file.version,
-                         uploaded_file.architecture))
+                        (
+                            uploaded_file.package + "-dbgsym",
+                            uploaded_file.version,
+                            uploaded_file.architecture,
+                        )
+                    )
                 except KeyError:
                     continue
                 uploaded_file.ddeb_file = matching_ddeb
@@ -382,8 +402,9 @@ class NascentUpload:
 
         if len(unmatched_ddebs) > 0:
             yield UploadError(
-                "Orphaned debug packages: %s" % ', '.join(
-                    '%s %s (%s)' % d for d in unmatched_ddebs))
+                "Orphaned debug packages: %s"
+                % ", ".join("%s %s (%s)" % d for d in unmatched_ddebs)
+            )
 
     def _overrideDDEBSs(self):
         """Make sure that any DDEBs in the upload have the same overrides
@@ -394,9 +415,12 @@ class NascentUpload:
         lockstep.
         """
         for uploaded_file in self.changes.files:
-            if (isinstance(uploaded_file, DdebBinaryUploadFile)
-                    and uploaded_file.deb_file is not None):
+            if (
+                isinstance(uploaded_file, DdebBinaryUploadFile)
+                and uploaded_file.deb_file is not None
+            ):
                 self.overrideBinaryFile(uploaded_file, uploaded_file.deb_file)
+
     #
     # Helpers for warnings and rejections
     #
@@ -435,15 +459,15 @@ class NascentUpload:
             elif isinstance(error, UploadWarning):
                 self.warn("".join(error.args))
             else:
-                raise AssertionError(
-                    "Unknown error occurred: %s" % str(error))
+                raise AssertionError("Unknown error occurred: %s" % str(error))
 
     def run_and_reject_on_error(self, callable):
         """Run given callable and raise EarlyReturnUploadError on errors."""
         self.run_and_collect_errors(callable)
         if self.is_rejected:
             raise EarlyReturnUploadError(
-                "An error occurred that prevented further processing.")
+                "An error occurred that prevented further processing."
+            )
 
     @property
     def is_ppa(self):
@@ -467,7 +491,7 @@ class NascentUpload:
     @property
     def rejection_message(self):
         """Aggregate rejection messages."""
-        return '\n'.join(self.rejections)
+        return "\n".join(self.rejections)
 
     @property
     def is_rejected(self):
@@ -487,8 +511,7 @@ class NascentUpload:
         """
         if not self.warnings:
             return None
-        warning_header = (
-            "\nUpload Warnings:\n%s" % '\n'.join(self.warnings))
+        warning_header = "\nUpload Warnings:\n%s" % "\n".join(self.warnings)
         return warning_header
 
     #
@@ -522,12 +545,18 @@ class NascentUpload:
             self.logger.debug("No signer, therefore ACL not processed")
             return
 
-        source_name = getUtility(
-            ISourcePackageNameSet).queryByName(self.changes.dsc.package)
+        source_name = getUtility(ISourcePackageNameSet).queryByName(
+            self.changes.dsc.package
+        )
 
         rejection_reason = self.policy.archive.checkUpload(
-            uploader, self.policy.distroseries, source_name,
-            self.changes.dsc.component, self.policy.pocket, not self.is_new)
+            uploader,
+            self.policy.distroseries,
+            source_name,
+            self.changes.dsc.component,
+            self.policy.pocket,
+            not self.is_new,
+        )
 
         if rejection_reason is not None:
             self.reject(str(rejection_reason))
@@ -538,8 +567,10 @@ class NascentUpload:
     def checkVersion(self, proposed_version, archive_version, filename):
         """Check if the proposed version is higher than the one in archive."""
         if apt_pkg.version_compare(proposed_version, archive_version) < 0:
-            self.reject("%s: Version older than that in the archive. %s <= %s"
-                        % (filename, proposed_version, archive_version))
+            self.reject(
+                "%s: Version older than that in the archive. %s <= %s"
+                % (filename, proposed_version, archive_version)
+            )
 
     def overrideSourceFile(self, uploaded_file, override):
         """Overrides the uploaded source based on its override information.
@@ -591,37 +622,50 @@ class NascentUpload:
             check_version = False
 
         override_policy = self.policy.archive.getOverridePolicy(
-            self.policy.distroseries, self.policy.pocket)
+            self.policy.distroseries, self.policy.pocket
+        )
 
         if check_version:
-            version_policy = FallbackOverridePolicy([
-                FromExistingOverridePolicy(
-                    self.policy.archive, self.policy.distroseries, pocket)
-                for pocket in lookup_pockets])
+            version_policy = FallbackOverridePolicy(
+                [
+                    FromExistingOverridePolicy(
+                        self.policy.archive, self.policy.distroseries, pocket
+                    )
+                    for pocket in lookup_pockets
+                ]
+            )
         else:
             version_policy = None
 
         for uploaded_file in self.changes.files:
             upload_component = getUtility(IComponentSet)[
-                uploaded_file.component_name]
+                uploaded_file.component_name
+            ]
             if isinstance(uploaded_file, DSCFile):
                 self.logger.debug(
                     "Checking for %s/%s source ancestry"
-                    % (uploaded_file.package, uploaded_file.version))
+                    % (uploaded_file.package, uploaded_file.version)
+                )
                 spn = getUtility(ISourcePackageNameSet).getOrCreateByName(
-                        uploaded_file.package)
+                    uploaded_file.package
+                )
                 override = override_policy.calculateSourceOverrides(
-                    {spn: SourceOverride(component=upload_component)})[spn]
+                    {spn: SourceOverride(component=upload_component)}
+                )[spn]
                 if version_policy is not None:
                     ancestor = version_policy.calculateSourceOverrides(
-                        {spn: SourceOverride()}).get(spn)
+                        {spn: SourceOverride()}
+                    ).get(spn)
                     if ancestor is not None and ancestor.version is not None:
                         self.checkVersion(
-                            self.changes.dsc.dsc_version, ancestor.version,
-                            uploaded_file.filename)
+                            self.changes.dsc.dsc_version,
+                            ancestor.version,
+                            uploaded_file.filename,
+                        )
                 if override.new != False:
                     self.logger.debug(
-                        "%s: (source) NEW", uploaded_file.package)
+                        "%s: (source) NEW", uploaded_file.package
+                    )
                     uploaded_file.new = True
                 # XXX wgrant 2014-07-23: We want to preserve the upload
                 # component for PPA uploads, so we force the component
@@ -637,18 +681,22 @@ class NascentUpload:
                         uploaded_file.package,
                         uploaded_file.version,
                         uploaded_file.architecture,
-                        ))
+                    )
+                )
 
                 # If we are dealing with a DDEB, use the DEB's
                 # overrides. If there's no deb_file set, don't worry
                 # about it. Rejection is already guaranteed.
-                if (isinstance(uploaded_file, DdebBinaryUploadFile)
-                    and uploaded_file.deb_file):
+                if (
+                    isinstance(uploaded_file, DdebBinaryUploadFile)
+                    and uploaded_file.deb_file
+                ):
                     override_name = uploaded_file.deb_file.package
                 else:
                     override_name = uploaded_file.package
                 bpn = getUtility(IBinaryPackageNameSet).getOrCreateByName(
-                    override_name)
+                    override_name
+                )
 
                 if uploaded_file.architecture == "all":
                     archtag = None
@@ -662,20 +710,31 @@ class NascentUpload:
                     source_override = None
 
                 override = override_policy.calculateBinaryOverrides(
-                    {(bpn, archtag): BinaryOverride(
-                        component=upload_component,
-                        source_override=source_override)})[(bpn, archtag)]
+                    {
+                        (bpn, archtag): BinaryOverride(
+                            component=upload_component,
+                            source_override=source_override,
+                        )
+                    }
+                )[(bpn, archtag)]
                 if version_policy is not None:
                     ancestor = version_policy.calculateBinaryOverrides(
-                        {(bpn, archtag): BinaryOverride(
-                            component=upload_component)}).get((bpn, archtag))
+                        {
+                            (bpn, archtag): BinaryOverride(
+                                component=upload_component
+                            )
+                        }
+                    ).get((bpn, archtag))
                     if ancestor is not None and ancestor.version is not None:
                         self.checkVersion(
-                            uploaded_file.control_version, ancestor.version,
-                            uploaded_file.filename)
+                            uploaded_file.control_version,
+                            ancestor.version,
+                            uploaded_file.filename,
+                        )
                 if override.new != False:
                     self.logger.debug(
-                        "%s: (binary) NEW", uploaded_file.package)
+                        "%s: (binary) NEW", uploaded_file.package
+                    )
                     uploaded_file.new = True
                 # XXX wgrant 2014-07-23: We want to preserve the upload
                 # component for PPA uploads, so we force the component
@@ -717,7 +776,8 @@ class NascentUpload:
                     self.queue_root.notify(
                         summary_text=self.warning_message,
                         changes_file_object=changes_file_object,
-                        logger=self.logger)
+                        logger=self.logger,
+                    )
             return True
 
         except QueueInconsistentStateError as e:
@@ -737,7 +797,7 @@ class NascentUpload:
     def _reject_with_logging(self, error, notify, log_func):
         """Helper to reject an upload and log it using the logger function."""
         self.reject("%s" % error)
-        log_func('Exception while accepting:\n %s' % error, exc_info=True)
+        log_func("Exception while accepting:\n %s" % error, exc_info=True)
         self.do_reject(notify)
         return False
 
@@ -763,7 +823,9 @@ class NascentUpload:
             self.queue_root.notify(
                 status=PackageUploadStatus.REJECTED,
                 summary_text=self.rejection_message,
-                changes_file_object=changes_file_object, logger=self.logger)
+                changes_file_object=changes_file_object,
+                logger=self.logger,
+            )
 
     def _createQueueEntry(self):
         """Return a PackageUpload object."""
@@ -771,19 +833,25 @@ class NascentUpload:
         if not distroseries:
             # Upload was probably rejected with a bad distroseries, so we
             # can create a dummy one for the purposes of a rejection email.
-            assert self.is_rejected, (
-                "The upload is not rejected but distroseries is None.")
-            distroseries = getUtility(
-                IDistributionSet)['ubuntu'].currentseries
+            assert (
+                self.is_rejected
+            ), "The upload is not rejected but distroseries is None."
+            distroseries = getUtility(IDistributionSet)["ubuntu"].currentseries
             return distroseries.createQueueEntry(
                 PackagePublishingPocket.RELEASE,
-                self.policy.archive, self.changes.filename,
-                self.changes.raw_content, signing_key=self.changes.signingkey)
+                self.policy.archive,
+                self.changes.filename,
+                self.changes.raw_content,
+                signing_key=self.changes.signingkey,
+            )
         else:
             return distroseries.createQueueEntry(
-                self.policy.pocket, self.policy.archive,
-                self.changes.filename, self.changes.raw_content,
-                signing_key=self.changes.signingkey)
+                self.policy.pocket,
+                self.policy.archive,
+                self.changes.filename,
+                self.changes.raw_content,
+                signing_key=self.changes.signingkey,
+            )
 
     #
     # Inserting stuff in the database
@@ -808,21 +876,22 @@ class NascentUpload:
                 self.changes.dsc.checkBuild(build)
             sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
             package_upload_source = self.queue_root.addSource(
-                sourcepackagerelease)
+                sourcepackagerelease
+            )
             ancestry = package_upload_source.getSourceAncestryForDiffs()
             if ancestry is not None:
                 to_sourcepackagerelease = ancestry.sourcepackagerelease
                 diff = to_sourcepackagerelease.requestDiffTo(
-                    self.queue_root.findPersonToNotify(), sourcepackagerelease)
+                    self.queue_root.findPersonToNotify(), sourcepackagerelease
+                )
                 self.logger.debug(
-                    '%s %s requested' % (
-                        diff.from_source.name, diff.title))
+                    "%s %s requested" % (diff.from_source.name, diff.title)
+                )
 
         if self.binaryful:
             for custom_file in self.changes.custom_files:
                 libraryfile = custom_file.storeInDatabase()
-                self.queue_root.addCustom(
-                    libraryfile, custom_file.custom_type)
+                self.queue_root.addCustom(libraryfile, custom_file.custom_type)
 
             # Container for the build that will be processed.
             processed_builds = []
@@ -831,61 +900,75 @@ class NascentUpload:
             # storeInDatabase find and link DDEBs when creating DEBs.
             bpfs_to_create = sorted(
                 self.changes.binary_package_files,
-                key=lambda file: file.ddeb_file is not None)
+                key=lambda file: file.ddeb_file is not None,
+            )
             for binary_package_file in bpfs_to_create:
                 if self.sourceful:
                     # The reason we need to do this verification
                     # so late in the game is that in the
                     # mixed-upload case we only have a
                     # sourcepackagerelease to verify here!
-                    assert sourcepackagerelease, (
-                        "No sourcepackagerelease was found.")
+                    assert (
+                        sourcepackagerelease
+                    ), "No sourcepackagerelease was found."
                     binary_package_file.verifySourcePackageRelease(
-                        sourcepackagerelease)
+                        sourcepackagerelease
+                    )
                 else:
                     sourcepackagerelease = (
-                        binary_package_file.findSourcePackageRelease())
+                        binary_package_file.findSourcePackageRelease()
+                    )
 
                 # Find the build for this particular binary package file.
                 if build is None:
                     bpf_build = binary_package_file.findBuild(
-                        sourcepackagerelease)
+                        sourcepackagerelease
+                    )
                 else:
                     bpf_build = build
                 if bpf_build.source_package_release != sourcepackagerelease:
                     raise AssertionError(
                         "Attempt to upload binaries specifying build %s, "
-                        "where they don't fit." % bpf_build.id)
+                        "where they don't fit." % bpf_build.id
+                    )
                 binary_package_file.checkBuild(bpf_build)
-                assert self.queue_root.pocket == bpf_build.pocket, (
-                    "Binary was not build for the claimed pocket.")
+                assert (
+                    self.queue_root.pocket == bpf_build.pocket
+                ), "Binary was not build for the claimed pocket."
                 binary_package_file.storeInDatabase(bpf_build)
-                if (self.changes.buildinfo is not None and
-                        bpf_build.buildinfo is None):
+                if (
+                    self.changes.buildinfo is not None
+                    and bpf_build.buildinfo is None
+                ):
                     self.changes.buildinfo.checkBuild(bpf_build)
                     bpf_build.addBuildInfo(
-                        self.changes.buildinfo.storeInDatabase())
+                        self.changes.buildinfo.storeInDatabase()
+                    )
                 processed_builds.append(bpf_build)
 
             # Store the related builds after verifying they were built
             # from the same source.
             for considered_build in processed_builds:
-                attached_builds = [build.build.id
-                                   for build in self.queue_root.builds]
+                attached_builds = [
+                    build.build.id for build in self.queue_root.builds
+                ]
                 if considered_build.id in attached_builds:
                     continue
-                assert (considered_build.source_package_release.id ==
-                        sourcepackagerelease.id), (
-                    "Upload contains binaries of different sources.")
+                assert (
+                    considered_build.source_package_release.id
+                    == sourcepackagerelease.id
+                ), "Upload contains binaries of different sources."
                 self.queue_root.addBuild(considered_build)
 
         # Uploads always start in NEW. Try to autoapprove into ACCEPTED
         # if possible, otherwise just move to UNAPPROVED unless it's
         # new.
-        if ((self.is_new and self.policy.autoApproveNew(self)) or
-            (not self.is_new and self.policy.autoApprove(self))):
+        if (self.is_new and self.policy.autoApproveNew(self)) or (
+            not self.is_new and self.policy.autoApprove(self)
+        ):
             self.queue_root.acceptFromUploader(
-                self.changes.filepath, logger=self.logger)
+                self.changes.filepath, logger=self.logger
+            )
         elif not self.is_new:
             self.logger.debug("Setting it to UNAPPROVED")
             self.queue_root.setUnapproved()
@@ -923,14 +1006,17 @@ class NascentUpload:
             # See if there is an archive to override with.
             distribution = self.policy.distroseries.distribution
             archive = distribution.getArchiveByComponent(
-                PARTNER_COMPONENT_NAME)
+                PARTNER_COMPONENT_NAME
+            )
 
             # Check for data problems:
             if not archive:
                 # Don't override the archive to None here or the rest of the
                 # processing will throw exceptions.
-                self.reject("Partner archive for distro '%s' not found" %
-                    self.policy.distroseries.distribution.name)
+                self.reject(
+                    "Partner archive for distro '%s' not found"
+                    % self.policy.distroseries.distribution.name
+                )
             else:
                 # Reset the archive in the policy to the partner archive.
                 self.policy.archive = archive
diff --git a/lib/lp/archiveuploader/nascentuploadfile.py b/lib/lp/archiveuploader/nascentuploadfile.py
index 74fb3f0..cd18961 100644
--- a/lib/lp/archiveuploader/nascentuploadfile.py
+++ b/lib/lp/archiveuploader/nascentuploadfile.py
@@ -4,28 +4,28 @@
 """Specific models for uploaded files"""
 
 __all__ = [
-    'BaseBinaryUploadFile',
-    'CustomUploadFile',
-    'DdebBinaryUploadFile',
-    'DebBinaryUploadFile',
-    'NascentUploadFile',
-    'PackageUploadFile',
-    'SourceUploadFile',
-    'UdebBinaryUploadFile',
-    'splitComponentAndSection',
-    ]
+    "BaseBinaryUploadFile",
+    "CustomUploadFile",
+    "DdebBinaryUploadFile",
+    "DebBinaryUploadFile",
+    "NascentUploadFile",
+    "PackageUploadFile",
+    "SourceUploadFile",
+    "UdebBinaryUploadFile",
+    "splitComponentAndSection",
+]
 
-from collections import OrderedDict
 import hashlib
 import os
 import subprocess
 import sys
 import time
+from collections import OrderedDict
 
 import apt_inst
 import apt_pkg
-from debian.deb822 import Deb822Dict
 import six
+from debian.deb822 import Deb822Dict
 from zope.component import getUtility
 
 from lp.app.errors import NotFoundError
@@ -33,11 +33,9 @@ from lp.archivepublisher.ddtp_tarball import DdtpTarballUpload
 from lp.archivepublisher.debian_installer import DebianInstallerUpload
 from lp.archivepublisher.dist_upgrader import DistUpgraderUpload
 from lp.archivepublisher.rosetta_translations import RosettaTranslationsUpload
-from lp.archivepublisher.signing import (
-    SigningUpload,
-    UefiUpload,
-    )
+from lp.archivepublisher.signing import SigningUpload, UefiUpload
 from lp.archiveuploader.utils import (
+    UploadError,
     determine_source_file_type,
     prefix_multi_line_string,
     re_extract_src_version,
@@ -48,8 +46,7 @@ from lp.archiveuploader.utils import (
     re_taint_free,
     re_valid_pkg_name,
     re_valid_version,
-    UploadError,
-    )
+)
 from lp.buildmaster.enums import BuildStatus
 from lp.services.encoding import guess as guess_encoding
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
@@ -58,7 +55,7 @@ from lp.soyuz.enums import (
     BinaryPackageFormat,
     PackagePublishingPriority,
     PackageUploadCustomFormat,
-    )
+)
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
@@ -66,7 +63,6 @@ from lp.soyuz.interfaces.publishing import active_publishing_status
 from lp.soyuz.interfaces.section import ISectionSet
 from lp.soyuz.model.files import SourceFileMixin
 
-
 apt_pkg.init_system()
 
 
@@ -77,7 +73,7 @@ class TarFileDateChecker:
     """
 
     def __init__(self, future_cutoff, past_cutoff):
-        """Setup timestamp limits """
+        """Setup timestamp limits"""
         self.reset()
         self.future_cutoff = future_cutoff
         self.past_cutoff = past_cutoff
@@ -118,6 +114,7 @@ class NascentUploadFile:
 
     The filename, along with information about it, is kept here.
     """
+
     new = False
 
     # Files need their content type for creating in the librarian.
@@ -129,10 +126,18 @@ class NascentUploadFile:
         ".udeb": "application/x-micro-debian-package",
         ".diff.gz": "application/gzipped-patch",
         ".tar.gz": "application/gzipped-tar",
-        }
-
-    def __init__(self, filepath, checksums, size, component_and_section,
-                 priority_name, policy, logger):
+    }
+
+    def __init__(
+        self,
+        filepath,
+        checksums,
+        size,
+        component_and_section,
+        priority_name,
+        policy,
+        logger,
+    ):
         self.filepath = filepath
         self.checksums = checksums
         self.priority_name = priority_name
@@ -140,8 +145,9 @@ class NascentUploadFile:
         self.logger = logger
 
         self.size = int(size)
-        self.component_name, self.section_name = (
-            splitComponentAndSection(component_and_section))
+        self.component_name, self.section_name = splitComponentAndSection(
+            component_and_section
+        )
 
         self.librarian = getUtility(ILibraryFileAliasSet)
 
@@ -200,7 +206,8 @@ class NascentUploadFile:
         """Verify if the filename contains forbidden characters."""
         if not re_taint_free.match(self.filename):
             raise UploadError(
-                "Invalid character(s) in filename: '%s'." % self.filename)
+                "Invalid character(s) in filename: '%s'." % self.filename
+            )
 
     def checkSizeAndCheckSum(self):
         """Check the size and checksums of the nascent file.
@@ -211,7 +218,8 @@ class NascentUploadFile:
         if not self.exists_on_disk:
             raise UploadError(
                 "File %s mentioned in the changes file was not found."
-                % self.filename)
+                % self.filename
+            )
 
         # Read in the file and compute its md5 and sha1 checksums and remember
         # the size of the file as read-in.
@@ -228,13 +236,19 @@ class NascentUploadFile:
             if digesters[n].hexdigest() != self.checksums[n]:
                 raise UploadError(
                     "File %s mentioned in the changes has a %s mismatch. "
-                    "%s != %s" % (
-                        self.filename, n, digesters[n].hexdigest(),
-                        self.checksums[n]))
+                    "%s != %s"
+                    % (
+                        self.filename,
+                        n,
+                        digesters[n].hexdigest(),
+                        self.checksums[n],
+                    )
+                )
         if size != self.size:
             raise UploadError(
                 "File %s mentioned in the changes has a size mismatch. "
-                "%s != %s" % (self.filename, size, self.size))
+                "%s != %s" % (self.filename, size, self.size)
+            )
 
 
 class CustomUploadFile(NascentUploadFile):
@@ -253,27 +267,28 @@ class CustomUploadFile(NascentUploadFile):
     # the marker in the codebase and make sure the same changes are made
     # everywhere which needs them.
     custom_sections = {
-        'raw-installer': PackageUploadCustomFormat.DEBIAN_INSTALLER,
-        'raw-translations': PackageUploadCustomFormat.ROSETTA_TRANSLATIONS,
-        'raw-dist-upgrader': PackageUploadCustomFormat.DIST_UPGRADER,
-        'raw-ddtp-tarball': PackageUploadCustomFormat.DDTP_TARBALL,
-        'raw-translations-static':
-            PackageUploadCustomFormat.STATIC_TRANSLATIONS,
-        'raw-meta-data':
-            PackageUploadCustomFormat.META_DATA,
-        'raw-uefi': PackageUploadCustomFormat.UEFI,
-        'raw-signing': PackageUploadCustomFormat.SIGNING,
-        }
+        "raw-installer": PackageUploadCustomFormat.DEBIAN_INSTALLER,
+        "raw-translations": PackageUploadCustomFormat.ROSETTA_TRANSLATIONS,
+        "raw-dist-upgrader": PackageUploadCustomFormat.DIST_UPGRADER,
+        "raw-ddtp-tarball": PackageUploadCustomFormat.DDTP_TARBALL,
+        "raw-translations-static": (
+            PackageUploadCustomFormat.STATIC_TRANSLATIONS
+        ),
+        "raw-meta-data": PackageUploadCustomFormat.META_DATA,
+        "raw-uefi": PackageUploadCustomFormat.UEFI,
+        "raw-signing": PackageUploadCustomFormat.SIGNING,
+    }
 
     custom_handlers = {
         PackageUploadCustomFormat.DEBIAN_INSTALLER: DebianInstallerUpload,
         PackageUploadCustomFormat.DIST_UPGRADER: DistUpgraderUpload,
         PackageUploadCustomFormat.DDTP_TARBALL: DdtpTarballUpload,
-        PackageUploadCustomFormat.ROSETTA_TRANSLATIONS:
-            RosettaTranslationsUpload,
+        PackageUploadCustomFormat.ROSETTA_TRANSLATIONS: (
+            RosettaTranslationsUpload
+        ),
         PackageUploadCustomFormat.UEFI: UefiUpload,
         PackageUploadCustomFormat.SIGNING: SigningUpload,
-        }
+    }
 
     @property
     def custom_type(self):
@@ -288,41 +303,61 @@ class CustomUploadFile(NascentUploadFile):
         """
         if self.section_name not in self.custom_sections:
             yield UploadError(
-                "Unsupported custom section name %r" % self.section_name)
+                "Unsupported custom section name %r" % self.section_name
+            )
         else:
             handler = self.custom_handlers.get(
-                self.custom_sections[self.section_name])
+                self.custom_sections[self.section_name]
+            )
             if handler is not None:
                 try:
                     handler.parsePath(self.filename)
                 except ValueError:
                     yield UploadError(
-                        "Invalid filename %r for section name %r" % (
-                            self.filename, self.section_name))
+                        "Invalid filename %r for section name %r"
+                        % (self.filename, self.section_name)
+                    )
 
     def storeInDatabase(self):
         """Create and return the corresponding LibraryFileAlias reference."""
         with open(self.filepath, "rb") as f:
             libraryfile = self.librarian.create(
-                self.filename, self.size, f, self.content_type,
-                restricted=self.policy.archive.private)
+                self.filename,
+                self.size,
+                f,
+                self.content_type,
+                restricted=self.policy.archive.private,
+            )
         return libraryfile
 
     def autoApprove(self):
         """Return whether this custom upload can be automatically approved."""
         # Signing uploads will be signed, and must therefore be approved
         # by a human.
-        if self.custom_type in (PackageUploadCustomFormat.UEFI,
-                                PackageUploadCustomFormat.SIGNING):
+        if self.custom_type in (
+            PackageUploadCustomFormat.UEFI,
+            PackageUploadCustomFormat.SIGNING,
+        ):
             return False
         return True
 
 
 class PackageUploadFile(NascentUploadFile):
-    """Base class to model sources and binary files contained in a upload. """
-
-    def __init__(self, filepath, md5, size, component_and_section,
-                 priority_name, package, version, changes, policy, logger):
+    """Base class to model sources and binary files contained in a upload."""
+
+    def __init__(
+        self,
+        filepath,
+        md5,
+        size,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+        policy,
+        logger,
+    ):
         """Check presence of the component and section from an uploaded_file.
 
         They need to satisfy at least the NEW queue constraints that includes
@@ -330,25 +365,33 @@ class PackageUploadFile(NascentUploadFile):
         Even if they might be overridden in the future.
         """
         super().__init__(
-            filepath, md5, size, component_and_section, priority_name,
-            policy, logger)
+            filepath,
+            md5,
+            size,
+            component_and_section,
+            priority_name,
+            policy,
+            logger,
+        )
         self.package = package
         self.version = version
         self.changes = changes
 
-        valid_components = [component.name for component in
-                            getUtility(IComponentSet)]
+        valid_components = [
+            component.name for component in getUtility(IComponentSet)
+        ]
         valid_sections = [section.name for section in getUtility(ISectionSet)]
 
         if self.section_name not in valid_sections:
             raise UploadError(
-                "%s: Unknown section '%s'" % (
-                self.filename, self.section_name))
+                "%s: Unknown section '%s'" % (self.filename, self.section_name)
+            )
 
         if self.component_name not in valid_components:
             raise UploadError(
-                "%s: Unknown component '%s'" % (
-                self.filename, self.component_name))
+                "%s: Unknown component '%s'"
+                % (self.filename, self.component_name)
+            )
 
     @property
     def component(self):
@@ -368,12 +411,12 @@ class PackageUploadFile(NascentUploadFile):
         raise NotImplementedError(self.checkBuild)
 
     def extractUserDefinedFields(self, control):
-        """Extract the user defined fields out of a control file list.
-        """
+        """Extract the user defined fields out of a control file list."""
         return [
             (field, contents)
-            for (field, contents) in
-            control if field not in self.known_fields]
+            for (field, contents) in control
+            if field not in self.known_fields
+        ]
 
 
 class SourceUploadFile(SourceFileMixin, PackageUploadFile):
@@ -396,19 +439,23 @@ class SourceUploadFile(SourceFileMixin, PackageUploadFile):
         """
         self.logger.debug("Verifying source file %s" % self.filename)
 
-        if 'source' not in self.changes.architectures:
-            yield UploadError("%s: changes file doesn't list 'source' in "
-                "Architecture field." % (self.filename))
+        if "source" not in self.changes.architectures:
+            yield UploadError(
+                "%s: changes file doesn't list 'source' in "
+                "Architecture field." % (self.filename)
+            )
 
-        version_chopped = re_no_epoch.sub('', self.version)
+        version_chopped = re_no_epoch.sub("", self.version)
         if self.is_orig:
-            version_chopped = re_no_revision.sub('', version_chopped)
+            version_chopped = re_no_revision.sub("", version_chopped)
 
         source_match = re_issource.match(self.filename)
         filename_version = source_match.group(2)
         if filename_version != version_chopped:
-            yield UploadError("%s: should be %s according to changes file."
-                % (filename_version, version_chopped))
+            yield UploadError(
+                "%s: should be %s according to changes file."
+                % (filename_version, version_chopped)
+            )
 
     def checkBuild(self, build):
         """See PackageUploadFile."""
@@ -417,12 +464,15 @@ class SourceUploadFile(SourceFileMixin, PackageUploadFile):
 
         # Sanity check; raise an error if the build we've been
         # told to link to makes no sense.
-        if (build.pocket != self.policy.pocket or
-            build.distroseries != self.policy.distroseries or
-            build.archive != self.policy.archive):
+        if (
+            build.pocket != self.policy.pocket
+            or build.distroseries != self.policy.distroseries
+            or build.archive != self.policy.archive
+        ):
             raise UploadError(
                 "Attempt to upload source specifying "
-                "recipe build %s, where it doesn't fit." % build.id)
+                "recipe build %s, where it doesn't fit." % build.id
+            )
 
 
 class BaseBinaryUploadFile(PackageUploadFile):
@@ -434,31 +484,33 @@ class BaseBinaryUploadFile(PackageUploadFile):
     # Capitalised because we extract these directly from the control file.
     mandatory_fields = {"Package", "Architecture", "Version"}
 
-    known_fields = mandatory_fields.union({
-        "Depends",
-        "Conflicts",
-        "Breaks",
-        "Recommends",
-        "Suggests",
-        "Replaces",
-        "Provides",
-        "Pre-Depends",
-        "Enhances",
-        # Note that we intentionally don't include Built-Using here;
-        # although we parse it, we want to preserve its original form to
-        # make sure apt doesn't decide that it needs to keep re-upgrading
-        # the package to the same version because the metadata looks
-        # slightly out of sync.  This is most easily done by adding it to
-        # user_defined_fields.
-        "Essential",
-        "Description",
-        "Installed-Size",
-        "Priority",
-        "Section",
-        "Maintainer",
-        "Source",
-        "Homepage",
-        })
+    known_fields = mandatory_fields.union(
+        {
+            "Depends",
+            "Conflicts",
+            "Breaks",
+            "Recommends",
+            "Suggests",
+            "Replaces",
+            "Provides",
+            "Pre-Depends",
+            "Enhances",
+            # Note that we intentionally don't include Built-Using here;
+            # although we parse it, we want to preserve its original form to
+            # make sure apt doesn't decide that it needs to keep re-upgrading
+            # the package to the same version because the metadata looks
+            # slightly out of sync.  This is most easily done by adding it to
+            # user_defined_fields.
+            "Essential",
+            "Description",
+            "Installed-Size",
+            "Priority",
+            "Section",
+            "Maintainer",
+            "Source",
+            "Homepage",
+        }
+    )
 
     # Map priorities to their dbschema valuesa
     # We treat a priority of '-' as EXTRA since some packages in some distros
@@ -470,7 +522,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         "optional": PackagePublishingPriority.OPTIONAL,
         "extra": PackagePublishingPriority.EXTRA,
         "-": PackagePublishingPriority.EXTRA,
-        }
+    }
 
     # These are divined when parsing the package file in verify(), and
     # then used to locate or create the relevant sources and build.
@@ -480,18 +532,40 @@ class BaseBinaryUploadFile(PackageUploadFile):
     source_name = None
     source_version = None
 
-    def __init__(self, filepath, md5, size, component_and_section,
-                 priority_name, package, version, changes, policy, logger):
+    def __init__(
+        self,
+        filepath,
+        md5,
+        size,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+        policy,
+        logger,
+    ):
 
         PackageUploadFile.__init__(
-            self, filepath, md5, size, component_and_section,
-            priority_name, package, version, changes, policy, logger)
+            self,
+            filepath,
+            md5,
+            size,
+            component_and_section,
+            priority_name,
+            package,
+            version,
+            changes,
+            policy,
+            logger,
+        )
 
         if self.priority_name not in self.priority_map:
-            default_priority = 'extra'
+            default_priority = "extra"
             self.logger.warning(
-                 "Unable to grok priority %r, overriding it with %s"
-                 % (self.priority_name, default_priority))
+                "Unable to grok priority %r, overriding it with %s"
+                % (self.priority_name, default_priority)
+            )
             self.priority_name = default_priority
 
         # Yeah, this is weird. Where else can I discover this without
@@ -510,7 +584,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         They can be build in any architecture and the result will fit all
         architectures available.
         """
-        return self.architecture.lower() == 'all'
+        return self.architecture.lower() == "all"
 
     @property
     def archtag(self):
@@ -521,7 +595,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         name).
         """
         archtag = self.architecture
-        if archtag == 'all':
+        if archtag == "all":
             return self.changes.filename_archtag
         return archtag
 
@@ -548,7 +622,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         # Run mandatory and local checks and collect errors.
         mandatory_checks = [
             self.extractAndParseControl,
-            ]
+        ]
         checks = mandatory_checks + self.local_checks
         for check in checks:
             yield from check()
@@ -562,14 +636,16 @@ class BaseBinaryUploadFile(PackageUploadFile):
         except Exception as e:
             yield UploadError(
                 "%s: extracting control file raised %s: %s. giving up."
-                 % (self.filename, sys.exc_info()[0], e))
+                % (self.filename, sys.exc_info()[0], e)
+            )
             return
 
         for mandatory_field in self.mandatory_fields:
             if control_lines.find(mandatory_field) is None:
                 yield UploadError(
                     "%s: control file lacks mandatory field %r"
-                     % (self.filename, mandatory_field))
+                    % (self.filename, mandatory_field)
+                )
         control = {}
         for key in control_lines.keys():
             control[key] = control_lines.find(key)
@@ -606,22 +682,25 @@ class BaseBinaryUploadFile(PackageUploadFile):
 
     def verifyPackage(self):
         """Check if the binary is in changesfile and its name is valid."""
-        control_package = six.ensure_text(self.control.get("Package", b''))
+        control_package = six.ensure_text(self.control.get("Package", b""))
 
         # Since DDEBs are generated after the original DEBs are processed
         # and considered by `dpkg-genchanges` they are only half-incorporated
         # the binary upload changes file. DDEBs are only listed in the
         # Files/Checksums-Sha1/ChecksumsSha256 sections and missing from
         # Binary/Description.
-        if not self.filename.endswith('.ddeb'):
+        if not self.filename.endswith(".ddeb"):
             if control_package not in self.changes.binaries:
                 yield UploadError(
                     "%s: control file lists name as %r, which isn't in "
-                    "changes file." % (self.filename, control_package))
+                    "changes file." % (self.filename, control_package)
+                )
 
         if not re_valid_pkg_name.match(control_package):
-            yield UploadError("%s: invalid package name %r." % (
-                self.filename, control_package))
+            yield UploadError(
+                "%s: invalid package name %r."
+                % (self.filename, control_package)
+            )
 
         # Ensure the filename matches the contents of the .deb
         # First check the file package name matches the deb contents.
@@ -631,7 +710,8 @@ class BaseBinaryUploadFile(PackageUploadFile):
             yield UploadError(
                 "%s: package part of filename %r does not match "
                 "package name in the control fields %r"
-                % (self.filename, file_package, control_package))
+                % (self.filename, file_package, control_package)
+            )
 
     def verifyVersion(self):
         """Check if control version is valid matches the filename version.
@@ -640,69 +720,89 @@ class BaseBinaryUploadFile(PackageUploadFile):
         because the changesfile version refers to the SOURCE version.
         """
         if not re_valid_version.match(self.control_version):
-            yield UploadError("%s: invalid version number %r."
-                              % (self.filename, self.control_version))
+            yield UploadError(
+                "%s: invalid version number %r."
+                % (self.filename, self.control_version)
+            )
 
         binary_match = re_isadeb.match(self.filename)
         filename_version = binary_match.group(2)
-        control_version_chopped = re_no_epoch.sub('', self.control_version)
+        control_version_chopped = re_no_epoch.sub("", self.control_version)
         if filename_version != control_version_chopped:
-            yield UploadError("%s: should be %s according to control file."
-                              % (filename_version, control_version_chopped))
+            yield UploadError(
+                "%s: should be %s according to control file."
+                % (filename_version, control_version_chopped)
+            )
 
     def verifyArchitecture(self):
         """Check if the control architecture matches the changesfile.
 
         Also check if it is a valid architecture in LP context.
         """
-        control_arch = six.ensure_text(self.control.get("Architecture", b''))
-        valid_archs = [a.architecturetag
-                       for a in self.policy.distroseries.architectures]
+        control_arch = six.ensure_text(self.control.get("Architecture", b""))
+        valid_archs = [
+            a.architecturetag for a in self.policy.distroseries.architectures
+        ]
 
         if control_arch not in valid_archs and control_arch != "all":
             yield UploadError(
-                "%s: Unknown architecture: '%s'" % (
-                self.filename, control_arch))
+                "%s: Unknown architecture: '%s'"
+                % (self.filename, control_arch)
+            )
 
         if control_arch not in self.changes.architectures:
             yield UploadError(
                 "%s: control file lists arch as '%s' which isn't "
-                "in the changes file." % (self.filename, control_arch))
+                "in the changes file." % (self.filename, control_arch)
+            )
 
         if control_arch != self.architecture:
             yield UploadError(
                 "%s: control file lists arch as '%s' which doesn't "
                 "agree with version '%s' in the filename."
-                % (self.filename, control_arch, self.architecture))
+                % (self.filename, control_arch, self.architecture)
+            )
 
     def verifyDepends(self):
         """Check if control depends field is present and not empty."""
-        control_depends = self.control.get('Depends', b"--unset-marker--")
+        control_depends = self.control.get("Depends", b"--unset-marker--")
         if not control_depends:
             yield UploadError(
-                "%s: Depends field present and empty." % self.filename)
+                "%s: Depends field present and empty." % self.filename
+            )
 
     def verifySection(self):
         """Check the section & priority match those in changesfile."""
         control_section_and_component = six.ensure_text(
-            self.control.get('Section', b''))
+            self.control.get("Section", b"")
+        )
         control_component, control_section = splitComponentAndSection(
-            control_section_and_component)
-        if ((control_component, control_section) !=
-            (self.component_name, self.section_name)):
+            control_section_and_component
+        )
+        if (control_component, control_section) != (
+            self.component_name,
+            self.section_name,
+        ):
             yield UploadError(
                 "%s control file lists section as %s/%s but changes file "
-                "has %s/%s." % (self.filename, control_component,
-                                control_section, self.component_name,
-                                self.section_name))
+                "has %s/%s."
+                % (
+                    self.filename,
+                    control_component,
+                    control_section,
+                    self.component_name,
+                    self.section_name,
+                )
+            )
 
     def verifyPriority(self):
         """Check if priority matches changesfile."""
-        control_priority = six.ensure_text(self.control.get('Priority', b''))
+        control_priority = six.ensure_text(self.control.get("Priority", b""))
         if control_priority and self.priority_name != control_priority:
             yield UploadError(
                 "%s control file lists priority as %s but changes file has "
-                "%s." % (self.filename, control_priority, self.priority_name))
+                "%s." % (self.filename, control_priority, self.priority_name)
+            )
 
     def verifyFormat(self):
         """Check if the DEB format is sane.
@@ -712,25 +812,33 @@ class BaseBinaryUploadFile(PackageUploadFile):
         """
         try:
             subprocess.check_output(
-                ["dpkg-deb", "-I", self.filepath], stderr=subprocess.STDOUT)
+                ["dpkg-deb", "-I", self.filepath], stderr=subprocess.STDOUT
+            )
         except subprocess.CalledProcessError as e:
             yield UploadError(
-                "%s: 'dpkg-deb -I' invocation failed." % self.filename)
+                "%s: 'dpkg-deb -I' invocation failed." % self.filename
+            )
             yield UploadError(
                 prefix_multi_line_string(
                     six.ensure_text(e.output, errors="replace"),
-                    " [dpkg-deb output:] "))
+                    " [dpkg-deb output:] ",
+                )
+            )
 
         try:
             subprocess.check_output(
-                ["dpkg-deb", "-c", self.filepath], stderr=subprocess.STDOUT)
+                ["dpkg-deb", "-c", self.filepath], stderr=subprocess.STDOUT
+            )
         except subprocess.CalledProcessError as e:
             yield UploadError(
-                "%s: 'dpkg-deb -c' invocation failed." % self.filename)
+                "%s: 'dpkg-deb -c' invocation failed." % self.filename
+            )
             yield UploadError(
                 prefix_multi_line_string(
                     six.ensure_text(e.output, errors="replace"),
-                    " [dpkg-deb output:] "))
+                    " [dpkg-deb output:] ",
+                )
+            )
 
     def verifyDebTimestamp(self):
         """Check specific DEB format timestamp checks."""
@@ -760,8 +868,8 @@ class BaseBinaryUploadFile(PackageUploadFile):
                 yield UploadError(
                     "%s: has %s file(s) with a time stamp too "
                     "far into the future (e.g. %s [%s])."
-                     % (self.filename, len(future_files), first_file,
-                        timestamp))
+                    % (self.filename, len(future_files), first_file, timestamp)
+                )
 
             ancient_files = list(tar_checker.ancient_files)
             if ancient_files:
@@ -770,16 +878,23 @@ class BaseBinaryUploadFile(PackageUploadFile):
                 yield UploadError(
                     "%s: has %s file(s) with a time stamp too "
                     "far in the past (e.g. %s [%s])."
-                     % (self.filename, len(ancient_files), first_file,
-                        timestamp))
+                    % (
+                        self.filename,
+                        len(ancient_files),
+                        first_file,
+                        timestamp,
+                    )
+                )
         except Exception as error:
             # There is a very large number of places where we
             # might get an exception while checking the timestamps.
             # Many of them come from apt_inst/apt_pkg and they are
             # terrible in giving sane exceptions. We thusly capture
             # them all and make them into rejection messages instead
-            yield UploadError("%s: deb contents timestamp check failed: %s"
-                 % (self.filename, error))
+            yield UploadError(
+                "%s: deb contents timestamp check failed: %s"
+                % (self.filename, error)
+            )
 
     #
     #   Database relationship methods
@@ -796,17 +911,21 @@ class BaseBinaryUploadFile(PackageUploadFile):
         assert self.source_version is not None
         distroseries = self.policy.distroseries
         spphs = self.policy.archive.getPublishedSources(
-            name=self.source_name, version=self.source_version,
-            status=active_publishing_status, distroseries=distroseries,
-            exact_match=True)
+            name=self.source_name,
+            version=self.source_version,
+            status=active_publishing_status,
+            distroseries=distroseries,
+            exact_match=True,
+        )
         # Workaround storm bug in EmptyResultSet.
         spphs = list(spphs[:1])
         try:
             return spphs[0]
         except IndexError:
             raise UploadError(
-                "Unable to find source publication %s/%s in %s" % (
-                self.source_name, self.source_version, distroseries.name))
+                "Unable to find source publication %s/%s in %s"
+                % (self.source_name, self.source_version, distroseries.name)
+            )
 
     def findSourcePackageRelease(self):
         """Return the respective ISourcePackageRelease for this binary upload.
@@ -824,20 +943,27 @@ class BaseBinaryUploadFile(PackageUploadFile):
 
     def verifySourcePackageRelease(self, sourcepackagerelease):
         """Check if the given ISourcePackageRelease matches the context."""
-        assert 'source' in self.changes.architectures, (
-            "It should be a mixed upload, but no source part was found.")
+        assert (
+            "source" in self.changes.architectures
+        ), "It should be a mixed upload, but no source part was found."
 
         if self.source_version != sourcepackagerelease.version:
             raise UploadError(
                 "source version %r for %s does not match version %r "
-                "from control file" % (sourcepackagerelease.version,
-                self.source_version, self.filename))
+                "from control file"
+                % (
+                    sourcepackagerelease.version,
+                    self.source_version,
+                    self.filename,
+                )
+            )
 
         if self.source_name != sourcepackagerelease.name:
             raise UploadError(
                 "source name %r for %s does not match name %r in "
-                "control file" % (sourcepackagerelease.name, self.filename,
-                                  self.source_name))
+                "control file"
+                % (sourcepackagerelease.name, self.filename, self.source_name)
+            )
 
     def findBuild(self, sourcepackagerelease):
         """Find and return a build for the given archtag, cached on policy.
@@ -854,17 +980,23 @@ class BaseBinaryUploadFile(PackageUploadFile):
 
         # Check if there's a suitable existing build.
         build = getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
-            sourcepackagerelease, self.policy.archive, dar)
+            sourcepackagerelease, self.policy.archive, dar
+        )
         if build is not None:
             build.updateStatus(BuildStatus.FULLYBUILT)
-            self.logger.debug("Updating build for %s: %s" % (
-                dar.architecturetag, build.id))
+            self.logger.debug(
+                "Updating build for %s: %s" % (dar.architecturetag, build.id)
+            )
         else:
             # No luck. Make one.
             # Usually happen for security binary uploads.
             build = getUtility(IBinaryPackageBuildSet).new(
-                sourcepackagerelease, self.policy.archive, dar,
-                self.policy.pocket, status=BuildStatus.FULLYBUILT)
+                sourcepackagerelease,
+                self.policy.archive,
+                dar,
+                self.policy.pocket,
+                status=BuildStatus.FULLYBUILT,
+            )
             self.logger.debug("Build %s created" % build.id)
         return build
 
@@ -874,19 +1006,23 @@ class BaseBinaryUploadFile(PackageUploadFile):
             dar = self.policy.distroseries[self.archtag]
         except NotFoundError:
             raise UploadError(
-                "Upload to unknown architecture %s for distroseries %s" %
-                (self.archtag, self.policy.distroseries))
+                "Upload to unknown architecture %s for distroseries %s"
+                % (self.archtag, self.policy.distroseries)
+            )
 
         build.updateStatus(BuildStatus.FULLYBUILT)
 
         # Sanity check; raise an error if the build we've been
         # told to link to makes no sense.
-        if (build.pocket != self.policy.pocket or
-            build.distro_arch_series != dar or
-            build.archive != self.policy.archive):
+        if (
+            build.pocket != self.policy.pocket
+            or build.distro_arch_series != dar
+            or build.archive != self.policy.archive
+        ):
             raise UploadError(
                 "Attempt to upload binaries specifying "
-                "build %s, where they don't fit." % build.id)
+                "build %s, where they don't fit." % build.id
+            )
 
     def storeInDatabase(self, build):
         """Insert this binary release and build into the database."""
@@ -896,27 +1032,30 @@ class BaseBinaryUploadFile(PackageUploadFile):
         for key, value in self.control.items():
             encoded[key] = guess_encoding(value)
 
-        desclines = encoded['Description'].split("\n")
+        desclines = encoded["Description"].split("\n")
         summary = desclines[0]
         description = "\n".join(desclines[1:])
 
         # XXX: dsilvers 2005-10-14 bug 3160: erm, need to work shlibdeps out.
         shlibdeps = ""
 
-        is_essential = encoded.get('Essential', '').lower() == 'yes'
+        is_essential = encoded.get("Essential", "").lower() == "yes"
         architecturespecific = not self.is_archindep
-        installedsize = int(self.control.get('Installed-Size', b'0'))
-        binary_name = getUtility(
-            IBinaryPackageNameSet).getOrCreateByName(self.package)
+        installedsize = int(self.control.get("Installed-Size", b"0"))
+        binary_name = getUtility(IBinaryPackageNameSet).getOrCreateByName(
+            self.package
+        )
 
         if self.ddeb_file:
             debug_package = build.getBinaryPackageFileByName(
-                self.ddeb_file.filename).binarypackagerelease
+                self.ddeb_file.filename
+            ).binarypackagerelease
         else:
             debug_package = None
 
         user_defined_fields = self.extractUserDefinedFields(
-            [(field, encoded[field]) for field in self.control])
+            [(field, encoded[field]) for field in self.control]
+        )
 
         binary = build.createBinaryPackageRelease(
             binarypackagename=binary_name,
@@ -928,33 +1067,39 @@ class BaseBinaryUploadFile(PackageUploadFile):
             section=self.section,
             priority=self.priority,
             shlibdeps=shlibdeps,
-            depends=encoded.get('Depends', ''),
-            recommends=encoded.get('Recommends', ''),
-            suggests=encoded.get('Suggests', ''),
-            conflicts=encoded.get('Conflicts', ''),
-            replaces=encoded.get('Replaces', ''),
-            provides=encoded.get('Provides', ''),
-            pre_depends=encoded.get('Pre-Depends', ''),
-            enhances=encoded.get('Enhances', ''),
-            breaks=encoded.get('Breaks', ''),
-            built_using=encoded.get('Built-Using', ''),
-            homepage=encoded.get('Homepage'),
+            depends=encoded.get("Depends", ""),
+            recommends=encoded.get("Recommends", ""),
+            suggests=encoded.get("Suggests", ""),
+            conflicts=encoded.get("Conflicts", ""),
+            replaces=encoded.get("Replaces", ""),
+            provides=encoded.get("Provides", ""),
+            pre_depends=encoded.get("Pre-Depends", ""),
+            enhances=encoded.get("Enhances", ""),
+            breaks=encoded.get("Breaks", ""),
+            built_using=encoded.get("Built-Using", ""),
+            homepage=encoded.get("Homepage"),
             essential=is_essential,
             installedsize=installedsize,
             architecturespecific=architecturespecific,
             user_defined_fields=user_defined_fields,
-            debug_package=debug_package)
+            debug_package=debug_package,
+        )
 
         with open(self.filepath, "rb") as f:
-            library_file = self.librarian.create(self.filename,
-                 self.size, f, self.content_type,
-                 restricted=self.policy.archive.private)
+            library_file = self.librarian.create(
+                self.filename,
+                self.size,
+                f,
+                self.content_type,
+                restricted=self.policy.archive.private,
+            )
         binary.addFile(library_file)
         return binary
 
 
 class UdebBinaryUploadFile(BaseBinaryUploadFile):
     """Represents an uploaded binary package file in udeb format."""
+
     format = BinaryPackageFormat.UDEB
 
     @property
@@ -968,11 +1113,12 @@ class UdebBinaryUploadFile(BaseBinaryUploadFile):
             self.verifySection,
             self.verifyPriority,
             self.verifyFormat,
-            ]
+        ]
 
 
 class DebBinaryUploadFile(BaseBinaryUploadFile):
     """Represents an uploaded binary package file in deb format."""
+
     format = BinaryPackageFormat.DEB
 
     @property
@@ -992,10 +1138,11 @@ class DebBinaryUploadFile(BaseBinaryUploadFile):
             # can be slow on large files, so it's best to avoid running both
             # of them.
             self.verifyDebTimestamp,
-            ]
+        ]
 
 
 class DdebBinaryUploadFile(DebBinaryUploadFile):
     """Represents an uploaded binary package file in ddeb format."""
+
     format = BinaryPackageFormat.DDEB
     deb_file = None
diff --git a/lib/lp/archiveuploader/ocirecipeupload.py b/lib/lp/archiveuploader/ocirecipeupload.py
index ac655a5..6d692f9 100644
--- a/lib/lp/archiveuploader/ocirecipeupload.py
+++ b/lib/lp/archiveuploader/ocirecipeupload.py
@@ -3,7 +3,7 @@
 
 """Upload OCI build artifacts to the librarian."""
 
-__all__ = ['OCIRecipeUpload']
+__all__ = ["OCIRecipeUpload"]
 
 
 import json
@@ -41,10 +41,10 @@ class OCIRecipeUpload:
             if dirpath == self.upload_path:
                 # All relevant files will be in a subdirectory.
                 continue
-            if 'digests.json' not in filenames:
+            if "digests.json" not in filenames:
                 continue
             # Open the digest file
-            digest_path = os.path.join(dirpath, 'digests.json')
+            digest_path = os.path.join(dirpath, "digests.json")
             self.logger.debug("Digest path: {}".format(digest_path))
             with open(digest_path) as digest_fp:
                 digests = json.load(digest_fp)
@@ -55,35 +55,38 @@ class OCIRecipeUpload:
                     digest = data["digest"]
                     layer_id = data["layer_id"]
                     layer_path = os.path.join(
-                        dirpath,
-                        "{}.tar.gz".format(layer_id)
+                        dirpath, "{}.tar.gz".format(layer_id)
                     )
                     self.logger.debug("Layer path: {}".format(layer_path))
                     # If the file is already in the librarian,
                     # we can just reuse it.
                     existing_file = getUtility(IOCIFileSet).getByLayerDigest(
-                        digest)
+                        digest
+                    )
                     # XXX 2020-05-14 twom This will need to respect restricted
                     # when we do private builds.
                     if existing_file:
                         build.addFile(
                             existing_file.library_file,
-                            layer_file_digest=digest)
+                            layer_file_digest=digest,
+                        )
                         continue
                     if not os.path.exists(layer_path):
                         raise UploadError(
-                            "Missing layer file: {}.".format(layer_id))
+                            "Missing layer file: {}.".format(layer_id)
+                        )
                     # Upload layer
                     libraryfile = self.librarian.create(
                         os.path.basename(layer_path),
                         os.stat(layer_path).st_size,
                         open(layer_path, "rb"),
                         filenameToContentType(layer_path),
-                        restricted=build.is_private)
+                        restricted=build.is_private,
+                    )
                     build.addFile(libraryfile, layer_file_digest=digest)
             # Upload all json files
             for filename in filenames:
-                if filename.endswith('.json'):
+                if filename.endswith(".json"):
                     file_path = os.path.join(dirpath, filename)
                     self.logger.debug("JSON file: {}".format(file_path))
                     libraryfile = self.librarian.create(
@@ -91,7 +94,8 @@ class OCIRecipeUpload:
                         os.stat(file_path).st_size,
                         open(file_path, "rb"),
                         filenameToContentType(file_path),
-                        restricted=build.is_private)
+                        restricted=build.is_private,
+                    )
                     # This doesn't have a digest as it's not a layer file.
                     build.addFile(libraryfile, layer_file_digest=None)
             # We've found digest, we can stop now
diff --git a/lib/lp/archiveuploader/scripts/processupload.py b/lib/lp/archiveuploader/scripts/processupload.py
index 53969bc..ad87b87 100644
--- a/lib/lp/archiveuploader/scripts/processupload.py
+++ b/lib/lp/archiveuploader/scripts/processupload.py
@@ -3,7 +3,7 @@
 
 """Upload processor for Soyuz."""
 
-__all__ = ['ProcessUpload']
+__all__ = ["ProcessUpload"]
 
 import os
 
@@ -13,7 +13,7 @@ from lp.services.config import config
 from lp.services.scripts.base import (
     LaunchpadCronScript,
     LaunchpadScriptFailure,
-    )
+)
 from lp.services.timeout import default_timeout
 
 
@@ -22,60 +22,107 @@ class ProcessUpload(LaunchpadCronScript):
 
     def add_my_options(self):
         self.parser.add_option(
-            "-n", "--dry-run", action="store_true",
-            dest="dryrun", metavar="DRY_RUN", default=False,
-            help=("Whether to treat this as a dry-run or not."
-                  "Also implies -KM."))
+            "-n",
+            "--dry-run",
+            action="store_true",
+            dest="dryrun",
+            metavar="DRY_RUN",
+            default=False,
+            help=(
+                "Whether to treat this as a dry-run or not."
+                "Also implies -KM."
+            ),
+        )
 
         self.parser.add_option(
-            "-K", "--keep", action="store_true",
-            dest="keep", metavar="KEEP", default=False,
-            help="Whether to keep or not the uploads directory.")
+            "-K",
+            "--keep",
+            action="store_true",
+            dest="keep",
+            metavar="KEEP",
+            default=False,
+            help="Whether to keep or not the uploads directory.",
+        )
 
         self.parser.add_option(
-            "-M", "--no-mails", action="store_true",
-            dest="nomails", default=False,
-            help="Whether to suppress the sending of mails or not.")
+            "-M",
+            "--no-mails",
+            action="store_true",
+            dest="nomails",
+            default=False,
+            help="Whether to suppress the sending of mails or not.",
+        )
 
         self.parser.add_option(
-            "--builds", action="store_true",
-            dest="builds", default=False,
-            help="Whether to interpret leaf names as build ids.")
+            "--builds",
+            action="store_true",
+            dest="builds",
+            default=False,
+            help="Whether to interpret leaf names as build ids.",
+        )
 
         self.parser.add_option(
-            "-J", "--just-leaf", action="store", dest="leafname",
-            default=None, help="A specific leaf dir to limit to.",
-            metavar="LEAF")
+            "-J",
+            "--just-leaf",
+            action="store",
+            dest="leafname",
+            default=None,
+            help="A specific leaf dir to limit to.",
+            metavar="LEAF",
+        )
 
         self.parser.add_option(
-            "-C", "--context", action="store", dest="context",
-            metavar="CONTEXT", default="insecure",
-            help="The context in which to consider the upload.")
+            "-C",
+            "--context",
+            action="store",
+            dest="context",
+            metavar="CONTEXT",
+            default="insecure",
+            help="The context in which to consider the upload.",
+        )
 
         self.parser.add_option(
-            "-d", "--distro", action="store", dest="distro", metavar="DISTRO",
-            default="ubuntu", help="Distribution to give back from")
+            "-d",
+            "--distro",
+            action="store",
+            dest="distro",
+            metavar="DISTRO",
+            default="ubuntu",
+            help="Distribution to give back from",
+        )
 
         self.parser.add_option(
-            "-s", "--series", action="store", default=None,
-            dest="distroseries", metavar="DISTROSERIES",
-            help="Distro series to give back from.")
+            "-s",
+            "--series",
+            action="store",
+            default=None,
+            dest="distroseries",
+            metavar="DISTROSERIES",
+            help="Distro series to give back from.",
+        )
 
         self.parser.add_option(
-            "-a", "--announce", action="store", dest="announcelist",
-            metavar="ANNOUNCELIST", help="Override the announcement list")
+            "-a",
+            "--announce",
+            action="store",
+            dest="announcelist",
+            metavar="ANNOUNCELIST",
+            help="Override the announcement list",
+        )
 
     def main(self):
         if len(self.args) != 1:
             raise LaunchpadScriptFailure(
                 "Need to be given exactly one non-option "
-                "argument, namely the fsroot for the upload.")
+                "argument, namely the fsroot for the upload."
+            )
 
         self.options.base_fsroot = os.path.abspath(self.args[0])
 
         if not os.path.isdir(self.options.base_fsroot):
             raise LaunchpadScriptFailure(
-                "%s is not a directory" % self.options.base_fsroot)
+                "%s is not a directory" % self.options.base_fsroot
+            )
 
         self.logger.debug("Initializing connection.")
 
@@ -90,9 +137,16 @@ class ProcessUpload(LaunchpadCronScript):
                 policy.archive = build.archive
             return policy
 
-        processor = UploadProcessor(self.options.base_fsroot,
-            self.options.dryrun, self.options.nomails, self.options.builds,
-            self.options.keep, getPolicy, self.txn, self.logger)
+        processor = UploadProcessor(
+            self.options.base_fsroot,
+            self.options.dryrun,
+            self.options.nomails,
+            self.options.builds,
+            self.options.keep,
+            getPolicy,
+            self.txn,
+            self.logger,
+        )
         with default_timeout(config.uploader.timeout):
             processor.processUploadQueue(self.options.leafname)
 
diff --git a/lib/lp/archiveuploader/snapupload.py b/lib/lp/archiveuploader/snapupload.py
index 80f14ae..9cc00c1 100644
--- a/lib/lp/archiveuploader/snapupload.py
+++ b/lib/lp/archiveuploader/snapupload.py
@@ -50,12 +50,14 @@ class SnapUpload:
             raise UploadError("Build did not produce any snap packages.")
 
         for snap_path in snap_paths:
-            with open(snap_path, 'rb') as file_to_upload:
+            with open(snap_path, "rb") as file_to_upload:
                 libraryfile = self.librarian.create(
-                    os.path.basename(snap_path), os.stat(snap_path).st_size,
+                    os.path.basename(snap_path),
+                    os.stat(snap_path).st_size,
                     file_to_upload,
                     filenameToContentType(snap_path),
-                    restricted=build.is_private)
+                    restricted=build.is_private,
+                )
             build.addFile(libraryfile)
 
         # The master verifies the status to confirm successful upload.
diff --git a/lib/lp/archiveuploader/tagfiles.py b/lib/lp/archiveuploader/tagfiles.py
index 666c9c1..7470f27 100644
--- a/lib/lp/archiveuploader/tagfiles.py
+++ b/lib/lp/archiveuploader/tagfiles.py
@@ -3,11 +3,7 @@
 
 """Utility classes for parsing Debian tag files."""
 
-__all__ = [
-    'TagFileParseError',
-    'parse_tagfile',
-    'parse_tagfile_content'
-    ]
+__all__ = ["TagFileParseError", "parse_tagfile", "parse_tagfile_content"]
 
 
 import tempfile
@@ -19,6 +15,7 @@ from lp.services.mail.signedmessage import strip_pgp_signature
 
 class TagFileParseError(Exception):
     """This exception is raised if parse_changes encounters nastiness"""
+
     pass
 
 
@@ -43,7 +40,8 @@ def parse_tagfile_content(content, filename=None):
             raise TagFileParseError("%s: %s" % (filename, e))
     if len(stanzas) != 1:
         raise TagFileParseError(
-            "%s: multiple stanzas where only one is expected" % filename)
+            "%s: multiple stanzas where only one is expected" % filename
+        )
 
     [stanza] = stanzas
 
diff --git a/lib/lp/archiveuploader/tests/__init__.py b/lib/lp/archiveuploader/tests/__init__.py
index 2133cd0..16d6020 100644
--- a/lib/lp/archiveuploader/tests/__init__.py
+++ b/lib/lp/archiveuploader/tests/__init__.py
@@ -4,11 +4,11 @@
 """Tests for the archive uploader."""
 
 __all__ = [
-    'datadir',
-    'getPolicy',
-    'insertFakeChangesFile',
-    'insertFakeChangesFileForAllPackageUploads',
-    ]
+    "datadir",
+    "getPolicy",
+    "insertFakeChangesFile",
+    "insertFakeChangesFileForAllPackageUploads",
+]
 
 import os
 
@@ -16,13 +16,12 @@ from zope.component import getGlobalSiteManager
 
 from lp.archiveuploader.uploadpolicy import (
     AbstractUploadPolicy,
-    findPolicyByName,
     IArchiveUploadPolicy,
-    )
+    findPolicyByName,
+)
 from lp.services.librarianserver.testing.server import fillLibrarianFile
 from lp.soyuz.model.queue import PackageUploadSet
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 
@@ -30,7 +29,7 @@ def datadir(path):
     """Return fully-qualified path inside the test data directory."""
     if path.startswith("/"):
         raise ValueError("Path is not relative: %s" % path)
-    return os.path.join(here, 'data', path)
+    return os.path.join(here, "data", path)
 
 
 def insertFakeChangesFile(fileID, path=None):
@@ -42,7 +41,7 @@ def insertFakeChangesFile(fileID, path=None):
     """
     if path is None:
         path = datadir("ed-0.2-21/ed_0.2-21_source.changes")
-    with open(path, 'rb') as changes_file_obj:
+    with open(path, "rb") as changes_file_obj:
         test_changes_file = changes_file_obj.read()
     fillLibrarianFile(fileID, content=test_changes_file)
 
@@ -56,12 +55,12 @@ def insertFakeChangesFileForAllPackageUploads():
 class MockUploadOptions:
     """Mock upload policy options helper"""
 
-    def __init__(self, distro='ubuntutest', distroseries=None):
+    def __init__(self, distro="ubuntutest", distroseries=None):
         self.distro = distro
         self.distroseries = distroseries
 
 
-def getPolicy(name='anything', distro='ubuntu', distroseries=None):
+def getPolicy(name="anything", distro="ubuntu", distroseries=None):
     """Build and return an Upload Policy for the given context."""
     policy = findPolicyByName(name)
     options = MockUploadOptions(distro, distroseries)
@@ -75,7 +74,7 @@ class AnythingGoesUploadPolicy(AbstractUploadPolicy):
     We require a signed changes file but that's it.
     """
 
-    name = 'anything'
+    name = "anything"
 
     def __init__(self):
         AbstractUploadPolicy.__init__(self)
@@ -99,7 +98,7 @@ class AbsolutelyAnythingGoesUploadPolicy(AnythingGoesUploadPolicy):
     of dealing with inappropriate checks in tests.
     """
 
-    name = 'absolutely-anything'
+    name = "absolutely-anything"
 
     def __init__(self):
         AnythingGoesUploadPolicy.__init__(self)
@@ -111,9 +110,9 @@ class AbsolutelyAnythingGoesUploadPolicy(AnythingGoesUploadPolicy):
 
 
 def register_archive_upload_policy_adapters():
-    policies = [
-        AnythingGoesUploadPolicy, AbsolutelyAnythingGoesUploadPolicy]
+    policies = [AnythingGoesUploadPolicy, AbsolutelyAnythingGoesUploadPolicy]
     sm = getGlobalSiteManager()
     for policy in policies:
         sm.registerUtility(
-            component=policy, provided=IArchiveUploadPolicy, name=policy.name)
+            component=policy, provided=IArchiveUploadPolicy, name=policy.name
+        )
diff --git a/lib/lp/archiveuploader/tests/test_buildduploads.py b/lib/lp/archiveuploader/tests/test_buildduploads.py
index 473c7ee..3070282 100644
--- a/lib/lp/archiveuploader/tests/test_buildduploads.py
+++ b/lib/lp/archiveuploader/tests/test_buildduploads.py
@@ -9,16 +9,13 @@ from zope.component import getUtility
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
+)
 from lp.archiveuploader.uploadprocessor import UploadHandler
 from lp.buildmaster.interfaces.processor import IProcessorSet
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.database.constants import UTC_NOW
-from lp.soyuz.enums import (
-    PackagePublishingStatus,
-    PackageUploadStatus,
-    )
+from lp.soyuz.enums import PackagePublishingStatus, PackageUploadStatus
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.publishing import IPublishingSet
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
@@ -26,12 +23,12 @@ from lp.testing.gpgkeys import import_public_test_keys
 
 
 class TestStagedBinaryUploadBase(TestUploadProcessorBase):
-    name = 'baz'
-    version = '1.0-1'
+    name = "baz"
+    version = "1.0-1"
     distribution_name = None
     distroseries_name = None
     pocket = None
-    policy = 'buildd'
+    policy = "buildd"
     no_mails = True
 
     @property
@@ -78,7 +75,8 @@ class TestStagedBinaryUploadBase(TestUploadProcessorBase):
         # Set up the uploadprocessor with appropriate options and logger
         self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
         self.build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
         self.builds_before_upload = BinaryPackageBuild.select().count()
         self.source_queue = None
         self._uploadSource()
@@ -87,30 +85,35 @@ class TestStagedBinaryUploadBase(TestUploadProcessorBase):
     def assertBuildsCreated(self, amount):
         """Assert that a given 'amount' of build records was created."""
         builds_count = BinaryPackageBuild.select().count()
-        self.assertEqual(
-            self.builds_before_upload + amount, builds_count)
+        self.assertEqual(self.builds_before_upload + amount, builds_count)
 
     def _prepareUpload(self, upload_dir):
         """Place a copy of the upload directory into incoming queue."""
-        os.system("cp -a %s %s" %
-            (os.path.join(self.test_files_dir, upload_dir),
-             os.path.join(self.queue_folder, "incoming")))
+        os.system(
+            "cp -a %s %s"
+            % (
+                os.path.join(self.test_files_dir, upload_dir),
+                os.path.join(self.queue_folder, "incoming"),
+            )
+        )
 
     def _uploadSource(self):
         """Upload and Accept (if necessary) the base source."""
         self._prepareUpload(self.source_dir)
         fsroot = os.path.join(self.queue_folder, "incoming")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, fsroot, self.source_dir)
+            self.uploadprocessor, fsroot, self.source_dir
+        )
         handler.processChangesFile(self.source_changesfile)
         queue_item = self.uploadprocessor.last_processed_upload.queue_root
         self.assertTrue(
             queue_item is not None,
-            "Source Upload Failed\nGot: %s" % self.log.getLogBuffer())
+            "Source Upload Failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         acceptable_statuses = [
             PackageUploadStatus.NEW,
             PackageUploadStatus.UNAPPROVED,
-            ]
+        ]
         if queue_item.status in acceptable_statuses:
             queue_item.setAccepted()
         # Store source queue item for future use.
@@ -125,13 +128,15 @@ class TestStagedBinaryUploadBase(TestUploadProcessorBase):
         self._prepareUpload(self.binary_dir)
         fsroot = os.path.join(self.queue_folder, "incoming")
         handler = UploadHandler.forProcessor(
-            self.build_uploadprocessor, fsroot, self.binary_dir, build=build)
+            self.build_uploadprocessor, fsroot, self.binary_dir, build=build
+        )
         handler.processChangesFile(self.getBinaryChangesfileFor(archtag))
         last_processed = self.build_uploadprocessor.last_processed_upload
         queue_item = last_processed.queue_root
         self.assertTrue(
             queue_item is not None,
-            "Binary Upload Failed\nGot: %s" % self.log.getLogBuffer())
+            "Binary Upload Failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(1, len(queue_item.builds))
         return queue_item.builds[0].build
 
@@ -139,8 +144,11 @@ class TestStagedBinaryUploadBase(TestUploadProcessorBase):
         """Create a build record attached to the base source."""
         spr = self.source_queue.sources[0].sourcepackagerelease
         build = getUtility(IBinaryPackageBuildSet).new(
-            spr, self.distroseries.main_archive, self.distroseries[archtag],
-            self.pocket)
+            spr,
+            self.distroseries.main_archive,
+            self.distroseries[archtag],
+            self.pocket,
+        )
         self.layer.txn.commit()
         return build
 
@@ -165,12 +173,13 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
     This class allows uploads to ubuntu/breezy in i386 & powerpc
     architectures.
     """
-    name = 'foo'
-    version = '1.0-1'
-    distribution_name = 'ubuntu'
-    distroseries_name = 'breezy'
+
+    name = "foo"
+    version = "1.0-1"
+    distribution_name = "ubuntu"
+    distroseries_name = "breezy"
     pocket = PackagePublishingPocket.RELEASE
-    policy = 'buildd'
+    policy = "buildd"
     no_mails = True
 
     def setupBreezy(self):
@@ -178,9 +187,9 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
         TestStagedBinaryUploadBase.setupBreezy(self)
         self.switchToAdmin()
         ppc = getUtility(IProcessorSet).new(
-            name='powerpc', title='PowerPC', description='not yet')
-        self.breezy.newArch(
-            'powerpc', ppc, True, self.breezy.owner)
+            name="powerpc", title="PowerPC", description="not yet"
+        )
+        self.breezy.newArch("powerpc", ppc, True, self.breezy.owner)
         self.switchToUploader()
 
     def setUp(self):
@@ -197,15 +206,19 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
         self.layer.txn.commit()
 
         real_policy = self.policy
-        self.policy = 'insecure'
+        self.policy = "insecure"
         super().setUp()
         # Publish the source package release so it can be found by
         # NascentUploadFile.findSourcePackageRelease().
         spr = self.source_queue.sources[0].sourcepackagerelease
         getUtility(IPublishingSet).newSourcePublication(
-            self.distroseries.main_archive, spr,
-            self.distroseries, spr.component,
-            spr.section, PackagePublishingPocket.RELEASE)
+            self.distroseries.main_archive,
+            spr,
+            self.distroseries,
+            spr.component,
+            spr.section,
+            PackagePublishingPocket.RELEASE,
+        )
         self.policy = real_policy
 
     def _publishBuildQueueItem(self, queue_item):
@@ -221,8 +234,7 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
     def _setupUploadProcessorForBuild(self):
         """Setup an UploadProcessor instance for a given buildd context."""
         self.options.context = self.policy
-        self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn)
+        self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
     def testDelayedBinaryUpload(self):
         """Check if Soyuz copes with delayed binary uploads.
@@ -234,16 +246,17 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
         Find more information on bug #89846.
         """
         # Upload i386 binary.
-        build_candidate = self._createBuild('i386')
+        build_candidate = self._createBuild("i386")
         self._setupUploadProcessorForBuild()
-        build_used = self._uploadBinary('i386', build_candidate)
+        build_used = self._uploadBinary("i386", build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(1)
         self.assertEqual(
-            'i386 build of foo 1.0-1 in ubuntu breezy RELEASE',
-            build_used.title)
-        self.assertEqual('FULLYBUILT', build_used.status.name)
+            "i386 build of foo 1.0-1 in ubuntu breezy RELEASE",
+            build_used.title,
+        )
+        self.assertEqual("FULLYBUILT", build_used.status.name)
 
         # Force immediate publication.
         last_processed = self.build_uploadprocessor.last_processed_upload
@@ -251,13 +264,14 @@ class TestBuilddUploads(TestStagedBinaryUploadBase):
         self._publishBuildQueueItem(queue_item)
 
         # Upload powerpc binary
-        build_candidate = self._createBuild('powerpc')
+        build_candidate = self._createBuild("powerpc")
         self._setupUploadProcessorForBuild()
-        build_used = self._uploadBinary('powerpc', build_candidate)
+        build_used = self._uploadBinary("powerpc", build_candidate)
 
         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(2)
         self.assertEqual(
-            'powerpc build of foo 1.0-1 in ubuntu breezy RELEASE',
-            build_used.title)
-        self.assertEqual('FULLYBUILT', build_used.status.name)
+            "powerpc build of foo 1.0-1 in ubuntu breezy RELEASE",
+            build_used.title,
+        )
+        self.assertEqual("FULLYBUILT", build_used.status.name)
diff --git a/lib/lp/archiveuploader/tests/test_buildinfofile.py b/lib/lp/archiveuploader/tests/test_buildinfofile.py
index d14134f..b54e4e4 100644
--- a/lib/lp/archiveuploader/tests/test_buildinfofile.py
+++ b/lib/lp/archiveuploader/tests/test_buildinfofile.py
@@ -9,7 +9,7 @@ from lp.archiveuploader.buildinfofile import BuildInfoFile
 from lp.archiveuploader.nascentuploadfile import UploadError
 from lp.archiveuploader.tests.test_nascentuploadfile import (
     PackageUploadFileTestCase,
-    )
+)
 from lp.testing.layers import LaunchpadZopelessLayer
 
 
@@ -25,26 +25,49 @@ class TestBuildInfoFile(PackageUploadFileTestCase):
         buildinfo["Format"] = "1.0"
         return buildinfo
 
-    def makeBuildInfoFile(self, filename, buildinfo, component_and_section,
-                          priority_name, package, version, changes):
+    def makeBuildInfoFile(
+        self,
+        filename,
+        buildinfo,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+    ):
         path, md5, sha1, size = self.writeUploadFile(
-            filename, buildinfo.dump().encode("UTF-8"))
+            filename, buildinfo.dump().encode("UTF-8")
+        )
         return BuildInfoFile(
-            path, {"MD5": md5}, size, component_and_section, priority_name,
-            package, version, changes, self.policy, self.logger)
+            path,
+            {"MD5": md5},
+            size,
+            component_and_section,
+            priority_name,
+            package,
+            version,
+            changes,
+            self.policy,
+            self.logger,
+        )
 
     def test_properties(self):
         buildinfo = self.getBaseBuildInfo()
         changes = self.getBaseChanges()
         for (arch, is_sourceful, is_binaryful, is_archindep) in (
-                ("source", True, False, False),
-                ("all", False, True, True),
-                ("i386", False, True, False),
-                ):
+            ("source", True, False, False),
+            ("all", False, True, True),
+            ("i386", False, True, False),
+        ):
             buildinfofile = self.makeBuildInfoFile(
-                "foo_0.1-1_%s.buildinfo" % arch, buildinfo,
-                "main/net", "extra", "dulwich", "0.42",
-                self.createChangesFile("foo_0.1-1_%s.changes" % arch, changes))
+                "foo_0.1-1_%s.buildinfo" % arch,
+                buildinfo,
+                "main/net",
+                "extra",
+                "dulwich",
+                "0.42",
+                self.createChangesFile("foo_0.1-1_%s.changes" % arch, changes),
+            )
             self.assertEqual(arch, buildinfofile.architecture)
             self.assertEqual(is_sourceful, buildinfofile.is_sourceful)
             self.assertEqual(is_binaryful, buildinfofile.is_binaryful)
@@ -54,35 +77,54 @@ class TestBuildInfoFile(PackageUploadFileTestCase):
         buildinfo = self.getBaseBuildInfo()
         changes = self.getBaseChanges()
         buildinfofile = self.makeBuildInfoFile(
-            "foo_0.1-1_source.buildinfo", buildinfo,
-            "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo_0.1-1_source.changes", changes))
+            "foo_0.1-1_source.buildinfo",
+            buildinfo,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo_0.1-1_source.changes", changes),
+        )
         lfa = buildinfofile.storeInDatabase()
         self.layer.txn.commit()
-        self.assertEqual(buildinfo.dump().encode('UTF-8'), lfa.read())
+        self.assertEqual(buildinfo.dump().encode("UTF-8"), lfa.read())
 
     def test_checkBuild(self):
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="i386")
+            distroseries=self.policy.distroseries, architecturetag="i386"
+        )
         build = self.factory.makeBinaryPackageBuild(
-            distroarchseries=das, archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         buildinfo = self.getBaseBuildInfo()
         changes = self.getBaseChanges()
         buildinfofile = self.makeBuildInfoFile(
-            "foo_0.1-1_i386.buildinfo", buildinfo,
-            "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo_0.1-1_i386.changes", changes))
+            "foo_0.1-1_i386.buildinfo",
+            buildinfo,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo_0.1-1_i386.changes", changes),
+        )
         buildinfofile.checkBuild(build)
 
     def test_checkBuild_inconsistent(self):
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="amd64")
+            distroseries=self.policy.distroseries, architecturetag="amd64"
+        )
         build = self.factory.makeBinaryPackageBuild(
-            distroarchseries=das, archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         buildinfo = self.getBaseBuildInfo()
         changes = self.getBaseChanges()
         buildinfofile = self.makeBuildInfoFile(
-            "foo_0.1-1_i386.buildinfo", buildinfo,
-            "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo_0.1-1_i386.changes", changes))
+            "foo_0.1-1_i386.buildinfo",
+            buildinfo,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo_0.1-1_i386.changes", changes),
+        )
         self.assertRaises(UploadError, buildinfofile.checkBuild, build)
diff --git a/lib/lp/archiveuploader/tests/test_changesfile.py b/lib/lp/archiveuploader/tests/test_changesfile.py
index aeb87d1..16a4c42 100644
--- a/lib/lp/archiveuploader/tests/test_changesfile.py
+++ b/lib/lp/archiveuploader/tests/test_changesfile.py
@@ -5,13 +5,9 @@
 
 import os
 
-from debian.deb822 import Changes
 import six
-from testtools.matchers import (
-    Equals,
-    MatchesDict,
-    MatchesStructure,
-    )
+from debian.deb822 import Changes
+from testtools.matchers import Equals, MatchesDict, MatchesStructure
 from zope.component import getUtility
 
 from lp.archiveuploader.buildinfofile import BuildInfoFile
@@ -19,7 +15,7 @@ from lp.archiveuploader.changesfile import (
     CannotDetermineFileTypeError,
     ChangesFile,
     determine_file_class_and_name,
-    )
+)
 from lp.archiveuploader.dscfile import DSCFile
 from lp.archiveuploader.nascentuploadfile import (
     DdebBinaryUploadFile,
@@ -27,11 +23,11 @@ from lp.archiveuploader.nascentuploadfile import (
     SourceUploadFile,
     UdebBinaryUploadFile,
     UploadError,
-    )
+)
 from lp.archiveuploader.tests import (
     AbsolutelyAnythingGoesUploadPolicy,
     datadir,
-    )
+)
 from lp.archiveuploader.uploadpolicy import InsecureUploadPolicy
 from lp.archiveuploader.utils import merge_file_lists
 from lp.registry.interfaces.person import IPersonSet
@@ -39,43 +35,44 @@ from lp.services.log.logger import BufferLogger
 from lp.testing import TestCase
 from lp.testing.gpgkeys import import_public_test_keys
 from lp.testing.keyserver import KeyServerTac
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
 class TestDetermineFileClassAndName(TestCase):
-
     def testSourceFile(self):
         # A non-DSC source file is a SourceUploadFile.
         self.assertEqual(
-            ('foo', SourceUploadFile),
-            determine_file_class_and_name('foo_1.0.diff.gz'))
+            ("foo", SourceUploadFile),
+            determine_file_class_and_name("foo_1.0.diff.gz"),
+        )
 
     def testDSCFile(self):
         # A DSC is a DSCFile, since they're special.
         self.assertEqual(
-            ('foo', DSCFile),
-            determine_file_class_and_name('foo_1.0.dsc'))
+            ("foo", DSCFile), determine_file_class_and_name("foo_1.0.dsc")
+        )
 
     def testDEBFile(self):
         # A binary file is the appropriate PackageUploadFile subclass.
         self.assertEqual(
-            ('foo', DebBinaryUploadFile),
-            determine_file_class_and_name('foo_1.0_all.deb'))
+            ("foo", DebBinaryUploadFile),
+            determine_file_class_and_name("foo_1.0_all.deb"),
+        )
         self.assertEqual(
-            ('foo', DdebBinaryUploadFile),
-            determine_file_class_and_name('foo_1.0_all.ddeb'))
+            ("foo", DdebBinaryUploadFile),
+            determine_file_class_and_name("foo_1.0_all.ddeb"),
+        )
         self.assertEqual(
-            ('foo', UdebBinaryUploadFile),
-            determine_file_class_and_name('foo_1.0_all.udeb'))
+            ("foo", UdebBinaryUploadFile),
+            determine_file_class_and_name("foo_1.0_all.udeb"),
+        )
 
     def testBuildInfoFile(self):
         # A buildinfo file is a BuildInfoFile.
         self.assertEqual(
-            ('foo', BuildInfoFile),
-            determine_file_class_and_name('foo_1.0_all.buildinfo'))
+            ("foo", BuildInfoFile),
+            determine_file_class_and_name("foo_1.0_all.buildinfo"),
+        )
 
     def testUnmatchingFile(self):
         # Files with unknown extensions or none at all are not
@@ -83,84 +80,130 @@ class TestDetermineFileClassAndName(TestCase):
         self.assertRaises(
             CannotDetermineFileTypeError,
             determine_file_class_and_name,
-            'foo_1.0.notdsc')
+            "foo_1.0.notdsc",
+        )
         self.assertRaises(
-            CannotDetermineFileTypeError,
-            determine_file_class_and_name,
-            'foo')
+            CannotDetermineFileTypeError, determine_file_class_and_name, "foo"
+        )
 
 
 class TestMergeFileLists(TestCase):
-
     def test_all_hashes(self):
         # merge_file_lists returns a list of
         # (filename, {algo: hash}, size, component_and_section, priority).
         files = [
-            ('a', '1', 'd', 'e', 'foo.deb'), ('b', '2', 's', 'o', 'bar.dsc')]
-        checksums_sha1 = [('aa', '1', 'foo.deb'), ('bb', '2', 'bar.dsc')]
-        checksums_sha256 = [('aaa', '1', 'foo.deb'), ('bbb', '2', 'bar.dsc')]
+            ("a", "1", "d", "e", "foo.deb"),
+            ("b", "2", "s", "o", "bar.dsc"),
+        ]
+        checksums_sha1 = [("aa", "1", "foo.deb"), ("bb", "2", "bar.dsc")]
+        checksums_sha256 = [("aaa", "1", "foo.deb"), ("bbb", "2", "bar.dsc")]
         self.assertEqual(
-            [("foo.deb",
-              {'MD5': 'a', 'SHA1': 'aa', 'SHA256': 'aaa'}, '1', 'd', 'e'),
-             ("bar.dsc",
-              {'MD5': 'b', 'SHA1': 'bb', 'SHA256': 'bbb'}, '2', 's', 'o')],
-             merge_file_lists(files, checksums_sha1, checksums_sha256))
+            [
+                (
+                    "foo.deb",
+                    {"MD5": "a", "SHA1": "aa", "SHA256": "aaa"},
+                    "1",
+                    "d",
+                    "e",
+                ),
+                (
+                    "bar.dsc",
+                    {"MD5": "b", "SHA1": "bb", "SHA256": "bbb"},
+                    "2",
+                    "s",
+                    "o",
+                ),
+            ],
+            merge_file_lists(files, checksums_sha1, checksums_sha256),
+        )
 
     def test_all_hashes_for_dsc(self):
         # merge_file_lists in DSC mode returns a list of
         # (filename, {algo: hash}, size).
-        files = [
-            ('a', '1', 'foo.deb'), ('b', '2', 'bar.dsc')]
-        checksums_sha1 = [('aa', '1', 'foo.deb'), ('bb', '2', 'bar.dsc')]
-        checksums_sha256 = [('aaa', '1', 'foo.deb'), ('bbb', '2', 'bar.dsc')]
+        files = [("a", "1", "foo.deb"), ("b", "2", "bar.dsc")]
+        checksums_sha1 = [("aa", "1", "foo.deb"), ("bb", "2", "bar.dsc")]
+        checksums_sha256 = [("aaa", "1", "foo.deb"), ("bbb", "2", "bar.dsc")]
         self.assertEqual(
-            [("foo.deb", {'MD5': 'a', 'SHA1': 'aa', 'SHA256': 'aaa'}, '1'),
-             ("bar.dsc", {'MD5': 'b', 'SHA1': 'bb', 'SHA256': 'bbb'}, '2')],
-             merge_file_lists(
-                 files, checksums_sha1, checksums_sha256, changes=False))
+            [
+                ("foo.deb", {"MD5": "a", "SHA1": "aa", "SHA256": "aaa"}, "1"),
+                ("bar.dsc", {"MD5": "b", "SHA1": "bb", "SHA256": "bbb"}, "2"),
+            ],
+            merge_file_lists(
+                files, checksums_sha1, checksums_sha256, changes=False
+            ),
+        )
 
     def test_just_md5(self):
         # merge_file_lists copes with the omission of SHA1 or SHA256
         # hashes.
         files = [
-            ('a', '1', 'd', 'e', 'foo.deb'), ('b', '2', 's', 'o', 'bar.dsc')]
+            ("a", "1", "d", "e", "foo.deb"),
+            ("b", "2", "s", "o", "bar.dsc"),
+        ]
         self.assertEqual(
-            [("foo.deb", {'MD5': 'a'}, '1', 'd', 'e'),
-             ("bar.dsc", {'MD5': 'b'}, '2', 's', 'o')],
-             merge_file_lists(files, None, None))
+            [
+                ("foo.deb", {"MD5": "a"}, "1", "d", "e"),
+                ("bar.dsc", {"MD5": "b"}, "2", "s", "o"),
+            ],
+            merge_file_lists(files, None, None),
+        )
 
     def test_duplicate_filename_is_rejected(self):
         # merge_file_lists rejects fields with duplicated filenames.
         files = [
-            ('a', '1', 'd', 'e', 'foo.deb'), ('b', '2', 's', 'o', 'foo.deb')]
+            ("a", "1", "d", "e", "foo.deb"),
+            ("b", "2", "s", "o", "foo.deb"),
+        ]
         self.assertRaisesWithContent(
-            UploadError, "Duplicate filenames in Files field.",
-            merge_file_lists, files, None, None)
+            UploadError,
+            "Duplicate filenames in Files field.",
+            merge_file_lists,
+            files,
+            None,
+            None,
+        )
 
     def test_differing_file_lists_are_rejected(self):
         # merge_file_lists rejects Checksums-* fields which are present
         # but have a different set of filenames.
         files = [
-            ('a', '1', 'd', 'e', 'foo.deb'), ('b', '2', 's', 'o', 'bar.dsc')]
-        sha1s = [('aa', '1', 'foo.deb')]
-        sha256s = [('aaa', '1', 'foo.deb')]
+            ("a", "1", "d", "e", "foo.deb"),
+            ("b", "2", "s", "o", "bar.dsc"),
+        ]
+        sha1s = [("aa", "1", "foo.deb")]
+        sha256s = [("aaa", "1", "foo.deb")]
         self.assertRaisesWithContent(
-            UploadError, "Mismatch between Checksums-Sha1 and Files fields.",
-            merge_file_lists, files, sha1s, None)
+            UploadError,
+            "Mismatch between Checksums-Sha1 and Files fields.",
+            merge_file_lists,
+            files,
+            sha1s,
+            None,
+        )
         self.assertRaisesWithContent(
-            UploadError, "Mismatch between Checksums-Sha256 and Files fields.",
-            merge_file_lists, files, None, sha256s)
+            UploadError,
+            "Mismatch between Checksums-Sha256 and Files fields.",
+            merge_file_lists,
+            files,
+            None,
+            sha256s,
+        )
 
     def test_differing_file_sizes_are_rejected(self):
         # merge_file_lists rejects Checksums-* fields which are present
         # but have a different set of filenames.
-        files = [('a', '1', 'd', 'e', 'foo.deb')]
-        sha1s = [('aa', '1', 'foo.deb')]
-        sha1s_bad_size = [('aa', '2', 'foo.deb')]
+        files = [("a", "1", "d", "e", "foo.deb")]
+        sha1s = [("aa", "1", "foo.deb")]
+        sha1s_bad_size = [("aa", "2", "foo.deb")]
         self.assertEqual(1, len(merge_file_lists(files, sha1s, None)))
         self.assertRaisesWithContent(
-            UploadError, "Mismatch between Checksums-Sha1 and Files fields.",
-            merge_file_lists, files, sha1s_bad_size, None)
+            UploadError,
+            "Mismatch between Checksums-Sha1 and Files fields.",
+            merge_file_lists,
+            files,
+            sha1s_bad_size,
+            None,
+        )
 
 
 class ChangesFileTests(TestCase):
@@ -195,12 +238,15 @@ class ChangesFileTests(TestCase):
         contents["Changes"] = "Something changed"
         contents["Description"] = "\n An awesome package."
         contents["Changed-By"] = "Somebody <somebody@xxxxxxxxxx>"
-        contents["Files"] = [{
-            "md5sum": "d2bd347b3fed184fe28e112695be491c",
-            "size": "1791",
-            "section": "python",
-            "priority": "optional",
-            "name": "dulwich_0.4.1-1_i386.deb"}]
+        contents["Files"] = [
+            {
+                "md5sum": "d2bd347b3fed184fe28e112695be491c",
+                "size": "1791",
+                "section": "python",
+                "priority": "optional",
+                "name": "dulwich_0.4.1-1_i386.deb",
+            }
+        ]
         return contents
 
     def test_newline_in_Binary_field(self):
@@ -208,8 +254,7 @@ class ChangesFileTests(TestCase):
         contents = self.getBaseChanges()
         contents["Binary"] = "binary1\n binary2 \n binary3"
         changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
-        self.assertEqual(
-            {"binary1", "binary2", "binary3"}, changes.binaries)
+        self.assertEqual({"binary1", "binary2", "binary3"}, changes.binaries)
 
     def test_checkFileName(self):
         # checkFileName() yields an UploadError if the filename is invalid.
@@ -224,64 +269,73 @@ class ChangesFileTests(TestCase):
     def test_filename(self):
         # Filename gets set to the basename of the changes file on disk.
         changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", self.getBaseChanges())
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
         self.assertEqual("mypkg_0.1_i386.changes", changes.filename)
 
     def test_suite_name(self):
         # The suite name gets extracted from the changes file.
         changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", self.getBaseChanges())
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
         self.assertEqual("nifty", changes.suite_name)
 
     def test_version(self):
         # The version gets extracted from the changes file.
         changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", self.getBaseChanges())
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
         self.assertEqual("0.1", changes.version)
 
     def test_architectures(self):
         # The architectures get extracted from the changes file
         # and parsed correctly.
         changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", self.getBaseChanges())
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
         self.assertEqual("i386", changes.architecture_line)
         self.assertEqual({"i386"}, changes.architectures)
 
     def test_source(self):
         # The source package name gets extracted from the changes file.
         changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", self.getBaseChanges())
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
         self.assertEqual("mypkg", changes.source)
 
     def test_processAddresses(self):
         # processAddresses parses the changes file and sets the
         # changed_by field.
         contents = self.getBaseChanges()
-        changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", contents)
+        changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
         self.assertIsNone(changes.changed_by)
         errors = list(changes.processAddresses())
         self.assertEqual(0, len(errors), "Errors: %r" % errors)
         self.assertThat(
             changes.changed_by,
-            MatchesDict({
-                "name": Equals("Somebody"),
-                "email": Equals("somebody@xxxxxxxxxx"),
-                "person": MatchesStructure.byEquality(displayname="Somebody"),
-                }))
+            MatchesDict(
+                {
+                    "name": Equals("Somebody"),
+                    "email": Equals("somebody@xxxxxxxxxx"),
+                    "person": MatchesStructure.byEquality(
+                        displayname="Somebody"
+                    ),
+                }
+            ),
+        )
 
     def test_simulated_changelog(self):
         # The simulated_changelog property returns a changelog entry based on
         # the control fields.
         contents = self.getBaseChanges()
-        changes = self.createChangesFile(
-            "mypkg_0.1_i386.changes", contents)
+        changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
         self.assertEqual([], list(changes.processAddresses()))
         self.assertEqual(
             b"Something changed\n\n"
             b" -- Somebody <somebody@xxxxxxxxxx>  "
             b"Fri, 25 Jun 2010 11:20:22 -0600",
-            changes.simulated_changelog)
+            changes.simulated_changelog,
+        )
 
     def test_requires_changed_by(self):
         # A changes file is rejected if it does not have a Changed-By field.
@@ -289,7 +343,10 @@ class ChangesFileTests(TestCase):
         del contents["Changed-By"]
         self.assertRaises(
             UploadError,
-            self.createChangesFile, "mypkg_0.1_i386.changes", contents)
+            self.createChangesFile,
+            "mypkg_0.1_i386.changes",
+            contents,
+        )
 
     def test_processFiles(self):
         # processFiles sets self.files to a list of NascentUploadFiles.
@@ -303,8 +360,12 @@ class ChangesFileTests(TestCase):
             MatchesStructure.byEquality(
                 filepath=changes.dirname + "/dulwich_0.4.1-1_i386.deb",
                 checksums=dict(MD5="d2bd347b3fed184fe28e112695be491c"),
-                size=1791, priority_name="optional",
-                component_name="main", section_name="python"))
+                size=1791,
+                priority_name="optional",
+                component_name="main",
+                section_name="python",
+            ),
+        )
 
     def test_processFiles_additional_checksums(self):
         # processFiles parses the Checksums-Sha1 and Checksums-Sha256
@@ -314,13 +375,18 @@ class ChangesFileTests(TestCase):
         sha1 = "378b3498ead213d35a82033a6e9196014a5ef25c"
         sha256 = (
             "39bb3bad01bf931b34f3983536c0f331e4b4e3e38fb78abfc75e5b09"
-            "efd6507f")
-        contents["Checksums-Sha1"] = [{
-            "sha1": sha1, "size": "1791",
-            "name": "dulwich_0.4.1-1_i386.deb"}]
-        contents["Checksums-Sha256"] = [{
-            "sha256": sha256, "size": "1791",
-            "name": "dulwich_0.4.1-1_i386.deb"}]
+            "efd6507f"
+        )
+        contents["Checksums-Sha1"] = [
+            {"sha1": sha1, "size": "1791", "name": "dulwich_0.4.1-1_i386.deb"}
+        ]
+        contents["Checksums-Sha256"] = [
+            {
+                "sha256": sha256,
+                "size": "1791",
+                "name": "dulwich_0.4.1-1_i386.deb",
+            }
+        ]
         changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
         self.assertEqual([], list(changes.processFiles()))
         [file] = changes.files
@@ -330,24 +396,30 @@ class ChangesFileTests(TestCase):
             MatchesStructure.byEquality(
                 filepath=changes.dirname + "/dulwich_0.4.1-1_i386.deb",
                 checksums=dict(MD5=md5, SHA1=sha1, SHA256=sha256),
-                size=1791, priority_name="optional",
-                component_name="main", section_name="python"))
+                size=1791,
+                priority_name="optional",
+                component_name="main",
+                section_name="python",
+            ),
+        )
 
     def test_processFiles_additional_checksums_must_match(self):
         # processFiles ensures that Files, Checksums-Sha1 and
         # Checksums-Sha256 all list the same files.
         contents = self.getBaseChanges()
-        contents["Checksums-Sha1"] = [{
-            "sha1": "aaa", "size": "1791", "name": "doesnotexist.deb"}]
+        contents["Checksums-Sha1"] = [
+            {"sha1": "aaa", "size": "1791", "name": "doesnotexist.deb"}
+        ]
         changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
         [error] = list(changes.processFiles())
         self.assertEqual(
-            "Mismatch between Checksums-Sha1 and Files fields.", error.args[0])
+            "Mismatch between Checksums-Sha1 and Files fields.", error.args[0]
+        )
 
     def test_processFiles_rejects_duplicate_filenames(self):
         # processFiles ensures that Files lists each file only once.
         contents = self.getBaseChanges()
-        contents['Files'].append(contents['Files'][0])
+        contents["Files"].append(contents["Files"][0])
         changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
         [error] = list(changes.processFiles())
         self.assertEqual("Duplicate filenames in Files field.", error.args[0])
@@ -365,20 +437,21 @@ class TestSignatureVerification(TestCase):
     def test_valid_signature_accepted(self):
         # A correctly signed changes file is accepted, and all its
         # content is parsed.
-        path = datadir('signatures/signed.changes')
+        path = datadir("signatures/signed.changes")
         changesfile = ChangesFile(path, InsecureUploadPolicy(), BufferLogger())
         self.assertEqual([], list(changesfile.parseChanges()))
         self.assertEqual(
-            getUtility(IPersonSet).getByEmail('foo.bar@xxxxxxxxxxxxx'),
-            changesfile.signer)
+            getUtility(IPersonSet).getByEmail("foo.bar@xxxxxxxxxxxxx"),
+            changesfile.signer,
+        )
         expected = "\\AFormat: 1.7\n.*foo_1.0-1.diff.gz\\Z"
         self.assertTextMatchesExpressionIgnoreWhitespace(
-            expected,
-            six.ensure_text(changesfile.parsed_content))
+            expected, six.ensure_text(changesfile.parsed_content)
+        )
 
     def test_no_signature_rejected(self):
         # An unsigned changes file is rejected.
-        path = datadir('signatures/unsigned.changes')
+        path = datadir("signatures/unsigned.changes")
         changesfile = ChangesFile(path, InsecureUploadPolicy(), BufferLogger())
         errors = list(changesfile.parseChanges())
         self.assertIsInstance(errors[0], UploadError)
@@ -387,15 +460,16 @@ class TestSignatureVerification(TestCase):
     def test_prefix_ignored(self):
         # A signed changes file with an unsigned prefix has only the
         # signed part parsed.
-        path = datadir('signatures/prefixed.changes')
+        path = datadir("signatures/prefixed.changes")
         changesfile = ChangesFile(path, InsecureUploadPolicy(), BufferLogger())
         self.assertEqual([], list(changesfile.parseChanges()))
         self.assertEqual(
-            getUtility(IPersonSet).getByEmail('foo.bar@xxxxxxxxxxxxx'),
-            changesfile.signer)
+            getUtility(IPersonSet).getByEmail("foo.bar@xxxxxxxxxxxxx"),
+            changesfile.signer,
+        )
         expected = "\\AFormat: 1.7\n.*foo_1.0-1.diff.gz\\Z"
         self.assertTextMatchesExpressionIgnoreWhitespace(
-            expected,
-            six.ensure_text(changesfile.parsed_content))
+            expected, six.ensure_text(changesfile.parsed_content)
+        )
         self.assertEqual("breezy", changesfile.suite_name)
         self.assertNotIn("evil", changesfile.changes_comment)
diff --git a/lib/lp/archiveuploader/tests/test_charmrecipeupload.py b/lib/lp/archiveuploader/tests/test_charmrecipeupload.py
index 7fdf8d4..acf3bf8 100644
--- a/lib/lp/archiveuploader/tests/test_charmrecipeupload.py
+++ b/lib/lp/archiveuploader/tests/test_charmrecipeupload.py
@@ -10,11 +10,8 @@ from storm.store import Store
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.charms.interfaces.charmrecipe import CHARM_RECIPE_ALLOW_CREATE
 from lp.services.features.testing import FeatureFixture
@@ -32,29 +29,34 @@ class TestCharmRecipeUploads(TestUploadProcessorBase):
 
         self.switchToAdmin()
         self.build = self.factory.makeCharmRecipeBuild(
-            distro_arch_series=self.breezy["i386"])
+            distro_arch_series=self.breezy["i386"]
+        )
         self.build.updateStatus(BuildStatus.UPLOADING)
         Store.of(self.build).flush()
         self.switchToUploader()
         self.options.context = "buildd"
 
         self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
 
     def test_sets_build_and_state(self):
         # The upload processor uploads files and sets the correct status.
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "foo_0_all.charm"), b"charm")
-        write_file(
-            os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+        write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processCharmRecipe(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "Charm upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "Charm upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
 
@@ -63,15 +65,17 @@ class TestCharmRecipeUploads(TestUploadProcessorBase):
         # .charm files.
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
-        write_file(
-            os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
+        write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processCharmRecipe(self.log)
         self.assertEqual(UploadStatusEnum.REJECTED, result)
         self.assertIn(
-            "ERROR Build did not produce any charms.", self.log.getLogBuffer())
+            "ERROR Build did not produce any charms.", self.log.getLogBuffer()
+        )
         self.assertFalse(self.build.verifySuccessfulUpload())
 
     def test_triggers_store_uploads(self):
@@ -84,19 +88,24 @@ class TestCharmRecipeUploads(TestUploadProcessorBase):
         # "exchanged_encrypted" is present, so don't bother setting up
         # encryption keys here.
         self.build.recipe.store_secrets = {
-            "exchanged_encrypted": Macaroon().serialize()}
+            "exchanged_encrypted": Macaroon().serialize()
+        }
         Store.of(self.build.recipe).flush()
         self.switchToUploader()
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "foo_0_all.charm"), b"charm")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processCharmRecipe(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "Charm upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "Charm upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
         self.assertEqual(1, len(list(self.build.store_upload_jobs)))
diff --git a/lib/lp/archiveuploader/tests/test_ciupload.py b/lib/lp/archiveuploader/tests/test_ciupload.py
index 99dc808..6cf8f5b 100644
--- a/lib/lp/archiveuploader/tests/test_ciupload.py
+++ b/lib/lp/archiveuploader/tests/test_ciupload.py
@@ -12,11 +12,8 @@ from zope.security.proxy import removeSecurityProxy
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.code.enums import RevisionStatusArtifactType
 from lp.code.interfaces.revisionstatus import IRevisionStatusReportSet
@@ -52,13 +49,11 @@ class TestCIBuildUploads(TestUploadProcessorBase):
 
         result = handler.processCIResult(self.log)
 
-        self.assertEqual(
-            UploadStatusEnum.REJECTED, result
-        )
+        self.assertEqual(UploadStatusEnum.REJECTED, result)
 
     def test_requires_upload_path(self):
         removeSecurityProxy(self.build).results = {
-            'build:0': {'result': 'SUCCEEDED'},
+            "build:0": {"result": "SUCCEEDED"},
         }
         os.makedirs(os.path.join(self.incoming_folder, "test"))
         handler = UploadHandler.forProcessor(
@@ -71,13 +66,11 @@ class TestCIBuildUploads(TestUploadProcessorBase):
         result = handler.processCIResult(self.log)
 
         # we explicitly provided no log file, which causes a rejected upload
-        self.assertEqual(
-            UploadStatusEnum.REJECTED, result
-        )
+        self.assertEqual(UploadStatusEnum.REJECTED, result)
 
     def test_requires_log_file(self):
         removeSecurityProxy(self.build).results = {
-            'build:0': {'result': 'SUCCEEDED'},
+            "build:0": {"result": "SUCCEEDED"},
         }
         os.makedirs(os.path.join(self.incoming_folder, "test"))
         handler = UploadHandler.forProcessor(
@@ -87,28 +80,32 @@ class TestCIBuildUploads(TestUploadProcessorBase):
             self.build,
         )
         upload_path = os.path.join(
-            self.incoming_folder, "test", str(self.build.archive.id),
-            self.build.distribution.name)
+            self.incoming_folder,
+            "test",
+            str(self.build.archive.id),
+            self.build.distribution.name,
+        )
         os.makedirs(upload_path)
 
         result = handler.processCIResult(self.log)
 
         # we explicitly provided no log file, which causes a rejected upload
-        self.assertEqual(
-            UploadStatusEnum.REJECTED, result
-        )
+        self.assertEqual(UploadStatusEnum.REJECTED, result)
 
     def test_no_artifacts(self):
         # It is possible for a job to produce no artifacts.
         removeSecurityProxy(self.build).results = {
-            'build:0': {
-                'log': 'test_file_hash',
-                'result': 'SUCCEEDED',
+            "build:0": {
+                "log": "test_file_hash",
+                "result": "SUCCEEDED",
             },
         }
         upload_path = os.path.join(
-            self.incoming_folder, "test", str(self.build.archive.id),
-            self.build.distribution.name)
+            self.incoming_folder,
+            "test",
+            str(self.build.archive.id),
+            self.build.distribution.name,
+        )
         write_file(os.path.join(upload_path, "build:0.log"), b"log content")
         report = self.build.getOrCreateRevisionStatusReport("build:0")
         handler = UploadHandler.forProcessor(
@@ -125,7 +122,7 @@ class TestCIBuildUploads(TestUploadProcessorBase):
         log_urls = report.getArtifactURLs(RevisionStatusArtifactType.LOG)
         self.assertEqual(
             {quote("build:0-%s.txt" % self.build.commit_sha1)},
-            {url.rsplit("/")[-1] for url in log_urls}
+            {url.rsplit("/")[-1] for url in log_urls},
         )
         self.assertEqual(
             [], report.getArtifactURLs(RevisionStatusArtifactType.BINARY)
@@ -133,14 +130,17 @@ class TestCIBuildUploads(TestUploadProcessorBase):
 
     def test_triggers_store_upload_for_completed_ci_builds(self):
         removeSecurityProxy(self.build).results = {
-            'build:0': {
-                'log': 'test_file_hash',
-                'result': 'SUCCEEDED',
+            "build:0": {
+                "log": "test_file_hash",
+                "result": "SUCCEEDED",
             },
         }
         upload_path = os.path.join(
-            self.incoming_folder, "test", str(self.build.archive.id),
-            self.build.distribution.name)
+            self.incoming_folder,
+            "test",
+            str(self.build.archive.id),
+            self.build.distribution.name,
+        )
 
         # create log file
         path = os.path.join(upload_path, "build:0.log")
@@ -174,26 +174,29 @@ class TestCIBuildUploads(TestUploadProcessorBase):
         log_urls = report.getArtifactURLs(RevisionStatusArtifactType.LOG)
         self.assertEqual(
             {quote("build:0-%s.txt" % self.build.commit_sha1)},
-            {url.rsplit("/")[-1] for url in log_urls}
+            {url.rsplit("/")[-1] for url in log_urls},
         )
         artifact_urls = report.getArtifactURLs(
             RevisionStatusArtifactType.BINARY
         )
         self.assertEqual(
             {"ci.whl", "test.whl"},
-            {url.rsplit("/")[-1] for url in artifact_urls}
+            {url.rsplit("/")[-1] for url in artifact_urls},
         )
 
     def test_creates_revision_status_report_if_not_present(self):
         removeSecurityProxy(self.build).results = {
-            'build:0': {
-                'log': 'test_file_hash',
-                'result': 'SUCCEEDED',
+            "build:0": {
+                "log": "test_file_hash",
+                "result": "SUCCEEDED",
             },
         }
         upload_path = os.path.join(
-            self.incoming_folder, "test", str(self.build.archive.id),
-            self.build.distribution.name)
+            self.incoming_folder,
+            "test",
+            str(self.build.archive.id),
+            self.build.distribution.name,
+        )
 
         # create log file
         path = os.path.join(upload_path, "build:0.log")
@@ -216,9 +219,9 @@ class TestCIBuildUploads(TestUploadProcessorBase):
 
         self.assertEqual(
             self.build,
-            getUtility(
-                IRevisionStatusReportSet
-            ).getByCIBuildAndTitle(self.build, "build:0").ci_build
+            getUtility(IRevisionStatusReportSet)
+            .getByCIBuildAndTitle(self.build, "build:0")
+            .ci_build,
         )
         self.assertEqual(UploadStatusEnum.ACCEPTED, result)
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
diff --git a/lib/lp/archiveuploader/tests/test_dscfile.py b/lib/lp/archiveuploader/tests/test_dscfile.py
index c38577e..b3365a2 100644
--- a/lib/lp/archiveuploader/tests/test_dscfile.py
+++ b/lib/lp/archiveuploader/tests/test_dscfile.py
@@ -3,40 +3,27 @@
 
 """Test dscfile.py"""
 
-from collections import namedtuple
 import os
+from collections import namedtuple
 
 from lp.archiveuploader.dscfile import (
-    cleanup_unpacked_dir,
     DSCFile,
+    SignableTagFile,
+    cleanup_unpacked_dir,
     find_changelog,
     find_copyright,
     format_to_file_checker_map,
-    SignableTagFile,
     unpack_source,
-    )
+)
 from lp.archiveuploader.nascentuploadfile import UploadError
-from lp.archiveuploader.tests import (
-    datadir,
-    getPolicy,
-    )
+from lp.archiveuploader.tests import datadir, getPolicy
 from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy
 from lp.registry.interfaces.sourcepackage import SourcePackageFileType
 from lp.registry.model.person import Person
-from lp.services.log.logger import (
-    BufferLogger,
-    DevNullLogger,
-    )
+from lp.services.log.logger import BufferLogger, DevNullLogger
 from lp.soyuz.enums import SourcePackageFormat
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
-
+from lp.testing import TestCase, TestCaseWithFactory
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 ORIG_TARBALL = SourcePackageFileType.ORIG_TARBALL
 ORIG_TARBALL_SIGNATURE = SourcePackageFileType.ORIG_TARBALL_SIGNATURE
@@ -46,7 +33,6 @@ DIFF = SourcePackageFileType.DIFF
 
 
 class TestDscFile(TestCase):
-
     def setUp(self):
         super().setUp()
         self.tmpdir = self.makeTemporaryDirectory()
@@ -63,9 +49,11 @@ class TestDscFile(TestCase):
         processing the source packages."""
         os.symlink("/etc/passwd", self.copyright_path)
         error = self.assertRaises(
-            UploadError, find_copyright, self.tmpdir, DevNullLogger())
+            UploadError, find_copyright, self.tmpdir, DevNullLogger()
+        )
         self.assertEqual(
-            error.args[0], "Symbolic link for debian/copyright not allowed")
+            error.args[0], "Symbolic link for debian/copyright not allowed"
+        )
 
     def testGoodDebianCopyright(self):
         """Test that a proper copyright file will be accepted"""
@@ -74,7 +62,8 @@ class TestDscFile(TestCase):
             f.write(copyright)
 
         self.assertEqual(
-            copyright, find_copyright(self.tmpdir, DevNullLogger()))
+            copyright, find_copyright(self.tmpdir, DevNullLogger())
+        )
 
     def testBadDebianChangelog(self):
         """Test that a symlink as debian/changelog will fail.
@@ -84,9 +73,11 @@ class TestDscFile(TestCase):
         processing the source packages."""
         os.symlink("/etc/passwd", self.changelog_path)
         error = self.assertRaises(
-            UploadError, find_changelog, self.tmpdir, DevNullLogger())
+            UploadError, find_changelog, self.tmpdir, DevNullLogger()
+        )
         self.assertEqual(
-            error.args[0], "Symbolic link for debian/changelog not allowed")
+            error.args[0], "Symbolic link for debian/changelog not allowed"
+        )
 
     def testGoodDebianChangelog(self):
         """Test that a proper changelog file will be accepted"""
@@ -95,7 +86,8 @@ class TestDscFile(TestCase):
             f.write(changelog)
 
         self.assertEqual(
-            changelog, find_changelog(self.tmpdir, DevNullLogger()))
+            changelog, find_changelog(self.tmpdir, DevNullLogger())
+        )
 
     def testOversizedFile(self):
         """Test that a file larger than 10MiB will fail.
@@ -106,7 +98,7 @@ class TestDscFile(TestCase):
         which is incredibly unlikely to be hit by normal files in the
         archive"""
         dev_zero = open("/dev/zero")
-        ten_MiB = 10 * (2 ** 20)
+        ten_MiB = 10 * (2**20)
         empty_file = dev_zero.read(ten_MiB + 1)
         dev_zero.close()
 
@@ -115,13 +107,15 @@ class TestDscFile(TestCase):
         file.close()
 
         error = self.assertRaises(
-            UploadError, find_changelog, self.tmpdir, DevNullLogger())
+            UploadError, find_changelog, self.tmpdir, DevNullLogger()
+        )
         self.assertEqual(
-            error.args[0], "debian/changelog file too large, 10MiB max")
+            error.args[0], "debian/changelog file too large, 10MiB max"
+        )
 
 
 class FakeChangesFile:
-    architectures = ['source']
+    architectures = ["source"]
 
 
 class TestDSCFileWithDatabase(TestCaseWithFactory):
@@ -130,22 +124,34 @@ class TestDSCFileWithDatabase(TestCaseWithFactory):
 
     def test_checkFiles_verifies_additional_hashes(self):
         """Test that checkFiles detects SHA1 and SHA256 mismatches."""
-        policy = getPolicy(
-            name="sync", distro="ubuntu", distroseries="hoary")
-        path = datadir(os.path.join(
-            'suite', 'badhash_1.0-1_broken_dsc', 'badhash_1.0-1.dsc'))
+        policy = getPolicy(name="sync", distro="ubuntu", distroseries="hoary")
+        path = datadir(
+            os.path.join(
+                "suite", "badhash_1.0-1_broken_dsc", "badhash_1.0-1.dsc"
+            )
+        )
         dsc = DSCFile(
-            path, {}, 426, 'main/editors', 'priority',
-            'badhash', '1.0-1', FakeChangesFile(), policy, DevNullLogger())
+            path,
+            {},
+            426,
+            "main/editors",
+            "priority",
+            "badhash",
+            "1.0-1",
+            FakeChangesFile(),
+            policy,
+            DevNullLogger(),
+        )
         errors = [e.args[0] for e in dsc.verify()]
-        self.assertEqual(
-            ['File badhash_1.0-1.tar.gz mentioned in the changes has a SHA256'
-             ' mismatch. a29ec2370df83193c3fb2cc9e1287dbfe9feba04108ccfa490bb'
-             'e20ea66f3d08 != aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
-             'aaaaaaaaaaaaaaaaa',
-             'Files specified in DSC are broken or missing, skipping package '
-             'unpack verification.'],
-            errors)
+        expected_errors = [
+            "File badhash_1.0-1.tar.gz mentioned in the changes has a SHA256"
+            " mismatch. a29ec2370df83193c3fb2cc9e1287dbfe9feba04108ccfa490bb"
+            "e20ea66f3d08 != aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+            "aaaaaaaaaaaaaaaaa",
+            "Files specified in DSC are broken or missing, skipping package "
+            "unpack verification.",
+        ]
+        self.assertEqual(expected_errors, errors)
 
 
 class TestSignableTagFile(TestCaseWithFactory):
@@ -156,43 +162,46 @@ class TestSignableTagFile(TestCaseWithFactory):
     def makeSignableTagFile(self):
         """Create a minimal `SignableTagFile` object."""
         FakePolicy = namedtuple(
-            'FakePolicy',
-            ['pocket', 'distroseries', 'create_people'])
+            "FakePolicy", ["pocket", "distroseries", "create_people"]
+        )
         tagfile = SignableTagFile()
         tagfile.logger = DevNullLogger()
         tagfile.policy = FakePolicy(None, None, create_people=True)
         tagfile._dict = {
-            'Source': 'arbitrary-source-package-name',
-            'Version': '1.0',
-            }
+            "Source": "arbitrary-source-package-name",
+            "Version": "1.0",
+        }
         return tagfile
 
     def test_parseAddress_finds_addressee(self):
         tagfile = self.makeSignableTagFile()
         email = self.factory.getUniqueEmailAddress()
         person = self.factory.makePerson(email=email)
-        self.assertEqual(person, tagfile.parseAddress(email)['person'])
+        self.assertEqual(person, tagfile.parseAddress(email)["person"])
 
     def test_parseAddress_creates_addressee_for_unknown_address(self):
         unknown_email = self.factory.getUniqueEmailAddress()
         results = self.makeSignableTagFile().parseAddress(unknown_email)
-        self.assertEqual(unknown_email, results['email'])
-        self.assertIsInstance(results['person'], Person)
+        self.assertEqual(unknown_email, results["email"])
+        self.assertIsInstance(results["person"], Person)
 
     def test_parseAddress_raises_UploadError_if_address_is_malformed(self):
         self.assertRaises(
             UploadError,
-            self.makeSignableTagFile().parseAddress, "invalid@bad@address")
+            self.makeSignableTagFile().parseAddress,
+            "invalid@bad@address",
+        )
 
     def test_parseAddress_decodes_utf8(self):
-        name = 'B\u0105r'
-        email = 'bar@xxxxxxxxxxx'
+        name = "B\u0105r"
+        email = "bar@xxxxxxxxxxx"
         results = self.makeSignableTagFile().parseAddress(
-            ('%s <%s>' % (name, email)).encode('utf-8'))
-        self.assertEqual(email, results['email'])
-        self.assertEqual(name, results['name'])
-        self.assertEqual(name, results['person'].displayname)
-        self.assertEqual(email, results['person'].guessedemails[0].email)
+            ("%s <%s>" % (name, email)).encode("utf-8")
+        )
+        self.assertEqual(email, results["email"])
+        self.assertEqual(name, results["name"])
+        self.assertEqual(name, results["person"].displayname)
+        self.assertEqual(email, results["person"].guessedemails[0].email)
 
 
 class TestDscFileLibrarian(TestCaseWithFactory):
@@ -201,34 +210,50 @@ class TestDscFileLibrarian(TestCaseWithFactory):
     layer = LaunchpadZopelessLayer
 
     def getDscFile(self, name):
-        dsc_path = datadir(os.path.join('suite', name, name + '.dsc'))
+        dsc_path = datadir(os.path.join("suite", name, name + ".dsc"))
 
         class Changes:
-            architectures = ['source']
+            architectures = ["source"]
+
         logger = BufferLogger()
         policy = BuildDaemonUploadPolicy()
         policy.distroseries = self.factory.makeDistroSeries()
         policy.archive = self.factory.makeArchive()
         policy.distro = policy.distroseries.distribution
-        return DSCFile(dsc_path, {}, 0, 'main/editors',
-            'priority', 'package', 'version', Changes, policy, logger)
+        return DSCFile(
+            dsc_path,
+            {},
+            0,
+            "main/editors",
+            "priority",
+            "package",
+            "version",
+            Changes,
+            policy,
+            logger,
+        )
 
     def test_ReadOnlyCWD(self):
         """Processing a file should work when cwd is read-only."""
         tempdir = self.useTempDir()
         os.chmod(tempdir, 0o555)
         try:
-            dsc_file = self.getDscFile('bar_1.0-1')
+            dsc_file = self.getDscFile("bar_1.0-1")
             list(dsc_file.verify())
         finally:
             os.chmod(tempdir, 0o755)
 
 
 class BaseTestSourceFileVerification(TestCase):
-
-    def assertErrorsForFiles(self, expected, files,
-                             components={}, component_signatures={},
-                             bzip2_count=0, xz_count=0):
+    def assertErrorsForFiles(
+        self,
+        expected,
+        files,
+        components={},
+        component_signatures={},
+        bzip2_count=0,
+        xz_count=0,
+    ):
         """Check problems with the given set of files for the given format.
 
         :param expected: a list of expected errors, as strings.
@@ -246,16 +271,31 @@ class BaseTestSourceFileVerification(TestCase):
             ORIG_TARBALL_SIGNATURE: 0,
             DIFF: 0,
             DEBIAN_TARBALL: 0,
-            }
+        }
         full_files.update(files)
         self.assertEqual(
             expected,
-            [str(e) for e in format_to_file_checker_map[self.format](
-                'foo_1.dsc', full_files, components, component_signatures,
-                bzip2_count, xz_count)])
-
-    def assertFilesOK(self, files, components={}, component_signatures={},
-                      bzip2_count=0, xz_count=0):
+            [
+                str(e)
+                for e in format_to_file_checker_map[self.format](
+                    "foo_1.dsc",
+                    full_files,
+                    components,
+                    component_signatures,
+                    bzip2_count,
+                    xz_count,
+                )
+            ],
+        )
+
+    def assertFilesOK(
+        self,
+        files,
+        components={},
+        component_signatures={},
+        bzip2_count=0,
+        xz_count=0,
+    ):
         """Check that the given set of files is OK for the given format.
 
         :param format: the `SourcePackageFormat` to check against.
@@ -267,17 +307,20 @@ class BaseTestSourceFileVerification(TestCase):
         :param xz_count: number of files using xz compression.
         """
         self.assertErrorsForFiles(
-            [], files, components, component_signatures, bzip2_count, xz_count)
+            [], files, components, component_signatures, bzip2_count, xz_count
+        )
 
 
 class Test10SourceFormatVerification(BaseTestSourceFileVerification):
 
     format = SourcePackageFormat.FORMAT_1_0
 
-    wrong_files_error = ('foo_1.dsc: must have exactly one tar.gz, or an '
-                         'orig.tar.gz and diff.gz')
-    bzip2_error = 'foo_1.dsc: is format 1.0 but uses bzip2 compression.'
-    xz_error = 'foo_1.dsc: is format 1.0 but uses xz compression.'
+    wrong_files_error = (
+        "foo_1.dsc: must have exactly one tar.gz, or an "
+        "orig.tar.gz and diff.gz"
+    )
+    bzip2_error = "foo_1.dsc: is format 1.0 but uses bzip2 compression."
+    xz_error = "foo_1.dsc: is format 1.0 but uses xz compression."
 
     def testFormat10Debian(self):
         # A 1.0 source can contain an original tarball and a Debian diff
@@ -286,7 +329,8 @@ class Test10SourceFormatVerification(BaseTestSourceFileVerification):
     def testFormat10DebianWithOrigSignature(self):
         # A 1.0 source can contain an original tarball signature.
         self.assertFilesOK(
-            {ORIG_TARBALL: 1, ORIG_TARBALL_SIGNATURE: 1, DIFF: 1})
+            {ORIG_TARBALL: 1, ORIG_TARBALL_SIGNATURE: 1, DIFF: 1}
+        )
 
     def testFormat10Native(self):
         # A 1.0 source can contain a native tarball.
@@ -298,78 +342,96 @@ class Test10SourceFormatVerification(BaseTestSourceFileVerification):
         # files or an original tarball signature without an original
         # tarball.
         for combination in (
-            {DIFF: 1}, {ORIG_TARBALL: 1}, {ORIG_TARBALL_SIGNATURE: 1},
-            {ORIG_TARBALL_SIGNATURE: 1, DIFF: 1}):
+            {DIFF: 1},
+            {ORIG_TARBALL: 1},
+            {ORIG_TARBALL_SIGNATURE: 1},
+            {ORIG_TARBALL_SIGNATURE: 1, DIFF: 1},
+        ):
             {ORIG_TARBALL: 1, DIFF: 1, NATIVE_TARBALL: 1},
             self.assertErrorsForFiles([self.wrong_files_error], combination)
 
         # A 1.0 source with component tarballs is invalid.
         self.assertErrorsForFiles(
-            [self.wrong_files_error], {ORIG_TARBALL: 1, DIFF: 1},
-            components={'foo': 1})
+            [self.wrong_files_error],
+            {ORIG_TARBALL: 1, DIFF: 1},
+            components={"foo": 1},
+        )
 
         # A 1.0 source with component tarball signatures is invalid.
         self.assertErrorsForFiles(
-            [self.wrong_files_error], {ORIG_TARBALL: 1, DIFF: 1},
-            component_signatures={'foo': 1})
+            [self.wrong_files_error],
+            {ORIG_TARBALL: 1, DIFF: 1},
+            component_signatures={"foo": 1},
+        )
 
     def testFormat10CannotUseBzip2(self):
         # 1.0 sources cannot use bzip2 compression.
         self.assertErrorsForFiles(
-            [self.bzip2_error], {NATIVE_TARBALL: 1}, {}, bzip2_count=1)
+            [self.bzip2_error], {NATIVE_TARBALL: 1}, {}, bzip2_count=1
+        )
 
     def testFormat10CannotUseXz(self):
         # 1.0 sources cannot use xz compression.
         self.assertErrorsForFiles(
-            [self.xz_error], {NATIVE_TARBALL: 1}, {}, xz_count=1)
+            [self.xz_error], {NATIVE_TARBALL: 1}, {}, xz_count=1
+        )
 
 
 class Test30QuiltSourceFormatVerification(BaseTestSourceFileVerification):
 
     format = SourcePackageFormat.FORMAT_3_0_QUILT
 
-    wrong_files_error = ('foo_1.dsc: must have only an orig.tar.*, a '
-                         'debian.tar.*, and optionally orig-*.tar.*')
-    comp_conflict_error = 'foo_1.dsc: has more than one orig-bar.tar.*.'
+    wrong_files_error = (
+        "foo_1.dsc: must have only an orig.tar.*, a "
+        "debian.tar.*, and optionally orig-*.tar.*"
+    )
+    comp_conflict_error = "foo_1.dsc: has more than one orig-bar.tar.*."
 
     def testFormat30Quilt(self):
         # A 3.0 (quilt) source must contain an orig tarball and a debian
         # tarball. It may also contain at most one component tarball (with
         # optional signature) for each component, and can use gzip, bzip2,
         # or xz compression.
-        for components in ({}, {'foo': 1}, {'foo': 1, 'bar': 1}):
+        for components in ({}, {"foo": 1}, {"foo": 1, "bar": 1}):
             for component_signatures in ({}, components):
                 for orig_signature in (0, 1):
                     for bzip2_count in (0, 1):
                         for xz_count in (0, 1):
                             self.assertFilesOK(
-                                {ORIG_TARBALL: 1,
-                                 ORIG_TARBALL_SIGNATURE: orig_signature,
-                                 DEBIAN_TARBALL: 1},
+                                {
+                                    ORIG_TARBALL: 1,
+                                    ORIG_TARBALL_SIGNATURE: orig_signature,
+                                    DEBIAN_TARBALL: 1,
+                                },
                                 components=components,
                                 component_signatures=component_signatures,
-                                bzip2_count=bzip2_count, xz_count=xz_count)
+                                bzip2_count=bzip2_count,
+                                xz_count=xz_count,
+                            )
 
     def testFormat30QuiltCannotHaveConflictingComponentTarballs(self):
         # Multiple conflicting tarballs for a single component are
         # invalid.
         self.assertErrorsForFiles(
             [self.comp_conflict_error],
-            {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1}, {'foo': 1, 'bar': 2})
+            {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1},
+            {"foo": 1, "bar": 2},
+        )
 
     def testFormat30QuiltCannotHaveWrongFiles(self):
         # 3.0 (quilt) sources may not have a diff or native tarball.
         for filetype in (DIFF, NATIVE_TARBALL):
             self.assertErrorsForFiles(
                 [self.wrong_files_error],
-                {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1, filetype: 1})
+                {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1, filetype: 1},
+            )
 
 
 class Test30NativeSourceFormatVerification(BaseTestSourceFileVerification):
 
     format = SourcePackageFormat.FORMAT_3_0_NATIVE
 
-    wrong_files_error = 'foo_1.dsc: must have only a tar.*.'
+    wrong_files_error = "foo_1.dsc: must have only a tar.*."
 
     def testFormat30Native(self):
         # 3.0 (native) sources must contain just a native tarball. They
@@ -382,17 +444,26 @@ class Test30NativeSourceFormatVerification(BaseTestSourceFileVerification):
         # 3.0 (quilt) sources may not have a diff, Debian tarball, orig
         # tarball, orig tarball signature, or any component tarballs.
         for filetype in (
-                DIFF, DEBIAN_TARBALL, ORIG_TARBALL, ORIG_TARBALL_SIGNATURE):
+            DIFF,
+            DEBIAN_TARBALL,
+            ORIG_TARBALL,
+            ORIG_TARBALL_SIGNATURE,
+        ):
             self.assertErrorsForFiles(
-                [self.wrong_files_error], {NATIVE_TARBALL: 1, filetype: 1})
+                [self.wrong_files_error], {NATIVE_TARBALL: 1, filetype: 1}
+            )
         # A 3.0 (native) source with component tarballs is invalid.
         self.assertErrorsForFiles(
-            [self.wrong_files_error], {NATIVE_TARBALL: 1},
-            components={'foo': 1})
+            [self.wrong_files_error],
+            {NATIVE_TARBALL: 1},
+            components={"foo": 1},
+        )
         # A 3.0 (native) source with component tarball signatures is invalid.
         self.assertErrorsForFiles(
-            [self.wrong_files_error], {NATIVE_TARBALL: 1},
-            component_signatures={'foo': 1})
+            [self.wrong_files_error],
+            {NATIVE_TARBALL: 1},
+            component_signatures={"foo": 1},
+        )
 
 
 class UnpackedDirTests(TestCase):
@@ -402,12 +473,14 @@ class UnpackedDirTests(TestCase):
         # unpack_source unpacks in a temporary directory and returns the
         # path.
         unpacked_dir = unpack_source(
-            datadir(os.path.join('suite', 'bar_1.0-1', 'bar_1.0-1.dsc')))
+            datadir(os.path.join("suite", "bar_1.0-1", "bar_1.0-1.dsc"))
+        )
         try:
             self.assertEqual(["bar-1.0"], os.listdir(unpacked_dir))
             self.assertContentEqual(
                 ["THIS_IS_BAR", "debian"],
-                os.listdir(os.path.join(unpacked_dir, "bar-1.0")))
+                os.listdir(os.path.join(unpacked_dir, "bar-1.0")),
+            )
         finally:
             cleanup_unpacked_dir(unpacked_dir)
 
diff --git a/lib/lp/archiveuploader/tests/test_livefsupload.py b/lib/lp/archiveuploader/tests/test_livefsupload.py
index 869e077..dd2fea5 100644
--- a/lib/lp/archiveuploader/tests/test_livefsupload.py
+++ b/lib/lp/archiveuploader/tests/test_livefsupload.py
@@ -10,11 +10,8 @@ from zope.component import getUtility
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.features.testing import FeatureFixture
@@ -35,32 +32,40 @@ class TestLiveFSBuildUploads(TestUploadProcessorBase):
         self.switchToAdmin()
         self.livefs = self.factory.makeLiveFS()
         self.build = getUtility(ILiveFSBuildSet).new(
-            requester=self.livefs.owner, livefs=self.livefs,
+            requester=self.livefs.owner,
+            livefs=self.livefs,
             archive=self.factory.makeArchive(
-                distribution=self.ubuntu, owner=self.livefs.owner),
+                distribution=self.ubuntu, owner=self.livefs.owner
+            ),
             distro_arch_series=self.breezy["i386"],
-            pocket=PackagePublishingPocket.RELEASE)
+            pocket=PackagePublishingPocket.RELEASE,
+        )
         self.build.updateStatus(BuildStatus.UPLOADING)
         Store.of(self.build).flush()
         self.switchToUploader()
         self.options.context = "buildd"
 
         self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
 
     def test_sets_build_and_state(self):
         # The upload processor uploads files and sets the correct status.
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "ubuntu.squashfs"), b"squashfs")
         write_file(os.path.join(upload_dir, "ubuntu.manifest"), b"manifest")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processLiveFS(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "LiveFS upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "LiveFS upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
 
@@ -71,13 +76,17 @@ class TestLiveFSBuildUploads(TestUploadProcessorBase):
         # be empty.)
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "livecd.magic-proxy.log"), b"")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processLiveFS(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "LiveFS upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "LiveFS upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
diff --git a/lib/lp/archiveuploader/tests/test_nascentupload.py b/lib/lp/archiveuploader/tests/test_nascentupload.py
index b53fff4..0667e31 100644
--- a/lib/lp/archiveuploader/tests/test_nascentupload.py
+++ b/lib/lp/archiveuploader/tests/test_nascentupload.py
@@ -8,20 +8,13 @@ from testtools.matchers import MatchesStructure
 
 from lp.archiveuploader.changesfile import determine_file_class_and_name
 from lp.archiveuploader.nascentupload import NascentUpload
-from lp.archiveuploader.tests import (
-    datadir,
-    getPolicy,
-    )
+from lp.archiveuploader.tests import datadir, getPolicy
 from lp.archiveuploader.uploadpolicy import ArchiveUploadType
 from lp.services.log.logger import DevNullLogger
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
 class FakeChangesFile:
-
     def __init__(self):
         self.files = []
 
@@ -39,40 +32,51 @@ class TestMatchDDEBs(TestCase):
         self.changes = FakeChangesFile()
         self.upload = NascentUpload(self.changes, None, DevNullLogger())
 
-    def addFile(self, filename, comp_and_section='main/devel',
-                priority='extra'):
+    def addFile(
+        self, filename, comp_and_section="main/devel", priority="extra"
+    ):
         """Add a file of the right type to the upload."""
         package, cls = determine_file_class_and_name(filename)
         file = cls(
-            filename, None, 100, comp_and_section, priority, package, '666',
-            self.changes, None, self.upload.logger)
+            filename,
+            None,
+            100,
+            comp_and_section,
+            priority,
+            package,
+            "666",
+            self.changes,
+            None,
+            self.upload.logger,
+        )
         self.changes.files.append(file)
         return file
 
     def assertMatchDDEBErrors(self, error_list):
         self.assertEqual(
-            error_list, [str(e) for e in self.upload._matchDDEBs()])
+            error_list, [str(e) for e in self.upload._matchDDEBs()]
+        )
 
     def testNoLinksWithNoBinaries(self):
         # No links will be made if there are no binaries whatsoever.
-        self.addFile('something_1.0.diff.gz')
+        self.addFile("something_1.0.diff.gz")
         self.assertMatchDDEBErrors([])
 
     def testNoLinksWithJustDEBs(self):
         # No links will be made if there are no DDEBs.
-        self.addFile('blah_1.0_all.deb')
-        self.addFile('libblah_1.0_i386.deb')
+        self.addFile("blah_1.0_all.deb")
+        self.addFile("libblah_1.0_i386.deb")
         self.assertMatchDDEBErrors([])
         for file in self.changes.files:
             self.assertIs(None, file.ddeb_file)
 
     def testLinksMatchingDDEBs(self):
         # DDEBs will be linked to their matching DEBs.
-        self.addFile('blah_1.0_all.deb')
-        self.addFile('libblah_1.0_i386.deb')
-        self.addFile('libblah-dbgsym_1.0_i386.ddeb')
-        self.addFile('libfooble_1.0_i386.udeb')
-        self.addFile('libfooble-dbgsym_1.0_i386.ddeb')
+        self.addFile("blah_1.0_all.deb")
+        self.addFile("libblah_1.0_i386.deb")
+        self.addFile("libblah-dbgsym_1.0_i386.ddeb")
+        self.addFile("libfooble_1.0_i386.udeb")
+        self.addFile("libfooble-dbgsym_1.0_i386.ddeb")
         self.assertMatchDDEBErrors([])
         self.assertIs(None, self.changes.files[0].ddeb_file)
         self.assertIs(self.changes.files[2], self.changes.files[1].ddeb_file)
@@ -82,35 +86,37 @@ class TestMatchDDEBs(TestCase):
     def testDuplicateDDEBsCauseErrors(self):
         # An error will be raised if a DEB has more than one matching
         # DDEB.
-        self.addFile('libblah_1.0_i386.deb')
-        self.addFile('libblah-dbgsym_1.0_i386.ddeb')
-        self.addFile('libblah-dbgsym_1.0_i386.ddeb')
+        self.addFile("libblah_1.0_i386.deb")
+        self.addFile("libblah-dbgsym_1.0_i386.ddeb")
+        self.addFile("libblah-dbgsym_1.0_i386.ddeb")
         self.assertMatchDDEBErrors(
-            ['Duplicated debug packages: libblah-dbgsym 666 (i386)'])
+            ["Duplicated debug packages: libblah-dbgsym 666 (i386)"]
+        )
 
     def testMismatchedDDEBsCauseErrors(self):
         # An error will be raised if a DDEB has no matching DEB.
-        self.addFile('libblah_1.0_i386.deb')
-        self.addFile('libblah-dbgsym_1.0_amd64.ddeb')
+        self.addFile("libblah_1.0_i386.deb")
+        self.addFile("libblah-dbgsym_1.0_amd64.ddeb")
         self.assertMatchDDEBErrors(
-            ['Orphaned debug packages: libblah-dbgsym 666 (amd64)'])
+            ["Orphaned debug packages: libblah-dbgsym 666 (amd64)"]
+        )
 
 
 class TestOverrideDDEBs(TestMatchDDEBs):
-
     def test_DDEBsGetOverrideFromDEBs(self):
         # Test the basic case ensuring that DDEB files always match the
         # DEB's overrides.
         deb = self.addFile("foo_1.0_i386.deb", "main/devel", "extra")
-        ddeb = self.addFile(
-            "foo-dbgsym_1.0_i386.ddeb", "universe/web",  "low")
+        ddeb = self.addFile("foo-dbgsym_1.0_i386.ddeb", "universe/web", "low")
         self.assertMatchDDEBErrors([])
         self.upload._overrideDDEBSs()
 
         self.assertThat(
             ddeb,
             MatchesStructure.fromExample(
-                deb, "component_name", "section_name", "priority_name"))
+                deb, "component_name", "section_name", "priority_name"
+            ),
+        )
 
 
 class TestNascentUpload(TestCase):
@@ -120,16 +126,18 @@ class TestNascentUpload(TestCase):
     def test_hash_mismatch_rejects(self):
         # A hash mismatch for any uploaded file will cause the upload to
         # be rejected.
-        policy = getPolicy(
-            name="sync", distro="ubuntu", distroseries="hoary")
+        policy = getPolicy(name="sync", distro="ubuntu", distroseries="hoary")
         policy.accepted_type = ArchiveUploadType.BINARY_ONLY
         upload = NascentUpload.from_changesfile_path(
             datadir("suite/badhash_1.0-1/badhash_1.0-1_i386.changes"),
-            policy, DevNullLogger())
+            policy,
+            DevNullLogger(),
+        )
         upload.process()
         self.assertTrue(upload.is_rejected)
         self.assertEqual(
-            'File badhash_1.0-1_i386.deb mentioned in the changes has a SHA1 '
-            'mismatch. 2ca33cf32a45852c62b465aaf9063fb7deb31725 != '
-            '91556113ad38eb35d2fe03d27ae646e0ed487a3d',
-            upload.rejection_message)
+            "File badhash_1.0-1_i386.deb mentioned in the changes has a SHA1 "
+            "mismatch. 2ca33cf32a45852c62b465aaf9063fb7deb31725 != "
+            "91556113ad38eb35d2fe03d27ae646e0ed487a3d",
+            upload.rejection_message,
+        )
diff --git a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
index 0ce3f5d..1b27830 100644
--- a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
+++ b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
@@ -9,50 +9,44 @@ import unittest
 from zope.component import getUtility
 
 from lp.archiveuploader.nascentupload import NascentUpload
-from lp.archiveuploader.tests import (
-    datadir,
-    getPolicy,
-    )
+from lp.archiveuploader.tests import datadir, getPolicy
 from lp.archiveuploader.uploadpolicy import ArchiveUploadType
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.services.librarian.model import LibraryFileAlias
 from lp.services.log.logger import DevNullLogger
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.model.component import ComponentSelection
-from lp.testing import (
-    login,
-    logout,
-    )
+from lp.testing import login, logout
 from lp.testing.dbuser import switch_dbuser
 from lp.testing.gpgkeys import import_public_test_keys
 from lp.testing.layers import LaunchpadZopelessLayer
-from lp.testing.systemdocs import (
-    LayeredDocFileSuite,
-    setGlobs,
-    )
+from lp.testing.systemdocs import LayeredDocFileSuite, setGlobs
 
 
 def getUploadForSource(upload_path):
     """Return a NascentUpload object for a source."""
-    policy = getPolicy(name='sync', distro='ubuntu', distroseries='hoary')
+    policy = getPolicy(name="sync", distro="ubuntu", distroseries="hoary")
     return NascentUpload.from_changesfile_path(
-        datadir(upload_path), policy, DevNullLogger())
+        datadir(upload_path), policy, DevNullLogger()
+    )
 
 
 def getPPAUploadForSource(upload_path, ppa):
     """Return a NascentUpload object for a PPA source."""
-    policy = getPolicy(name='insecure', distro='ubuntu', distroseries='hoary')
+    policy = getPolicy(name="insecure", distro="ubuntu", distroseries="hoary")
     policy.archive = ppa
     return NascentUpload.from_changesfile_path(
-        datadir(upload_path), policy, DevNullLogger())
+        datadir(upload_path), policy, DevNullLogger()
+    )
 
 
 def getUploadForBinary(upload_path):
     """Return a NascentUpload object for binaries."""
-    policy = getPolicy(name='sync', distro='ubuntu', distroseries='hoary')
+    policy = getPolicy(name="sync", distro="ubuntu", distroseries="hoary")
     policy.accepted_type = ArchiveUploadType.BINARY_ONLY
     return NascentUpload.from_changesfile_path(
-        datadir(upload_path), policy, DevNullLogger())
+        datadir(upload_path), policy, DevNullLogger()
+    )
 
 
 def testGlobalsSetup(test):
@@ -62,9 +56,9 @@ def testGlobalsSetup(test):
     """
     import_public_test_keys()
     setGlobs(test)
-    test.globs['getUploadForSource'] = getUploadForSource
-    test.globs['getUploadForBinary'] = getUploadForBinary
-    test.globs['getPPAUploadForSource'] = getPPAUploadForSource
+    test.globs["getUploadForSource"] = getUploadForSource
+    test.globs["getUploadForBinary"] = getUploadForBinary
+    test.globs["getPPAUploadForSource"] = getPPAUploadForSource
 
 
 def prepareHoaryForUploads(test):
@@ -74,16 +68,16 @@ def prepareHoaryForUploads(test):
     the RELEASE pocket (they are auto-overridden to the 'universe'
     component).
     """
-    ubuntu = getUtility(IDistributionSet)['ubuntu']
-    hoary = ubuntu['hoary']
+    ubuntu = getUtility(IDistributionSet)["ubuntu"]
+    hoary = ubuntu["hoary"]
 
     # Allow uploads to the universe component.
-    universe = getUtility(IComponentSet)['universe']
+    universe = getUtility(IComponentSet)["universe"]
     ComponentSelection(distroseries=hoary, component=universe)
 
     # Create a fake hoary/i386 chroot.
     fake_chroot = LibraryFileAlias.get(1)
-    hoary['i386'].addOrUpdateChroot(fake_chroot)
+    hoary["i386"].addOrUpdateChroot(fake_chroot)
 
     LaunchpadZopelessLayer.txn.commit()
 
@@ -95,10 +89,10 @@ def setUp(test):
     Log in as a Launchpad admin (foo.bar@xxxxxxxxxxxxx).
     Setup test globals and prepare hoary for uploads
     """
-    login('foo.bar@xxxxxxxxxxxxx')
+    login("foo.bar@xxxxxxxxxxxxx")
     testGlobalsSetup(test)
     prepareHoaryForUploads(test)
-    switch_dbuser('uploader')
+    switch_dbuser("uploader")
 
 
 def tearDown(test):
@@ -112,13 +106,16 @@ def test_suite():
     filenames = [
         filename
         for filename in os.listdir(tests_dir)
-        if filename.lower().endswith('.rst')
-        ]
+        if filename.lower().endswith(".rst")
+    ]
 
     for filename in sorted(filenames):
         test = LayeredDocFileSuite(
-            filename, setUp=setUp, tearDown=tearDown,
-            layer=LaunchpadZopelessLayer)
+            filename,
+            setUp=setUp,
+            tearDown=tearDown,
+            layer=LaunchpadZopelessLayer,
+        )
         suite.addTest(test)
 
     return suite
diff --git a/lib/lp/archiveuploader/tests/test_nascentuploadfile.py b/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
index 25e97de..efc7c23 100644
--- a/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
+++ b/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
@@ -3,7 +3,6 @@
 
 """Test NascentUploadFile functionality."""
 
-from functools import partial
 import gzip
 import hashlib
 import io
@@ -11,14 +10,11 @@ import lzma
 import os
 import subprocess
 import tarfile
+from functools import partial
 from unittest import mock
 
-from debian.deb822 import (
-    Changes,
-    Deb822,
-    Dsc,
-    )
 import six
+from debian.deb822 import Changes, Deb822, Dsc
 from testtools.matchers import (
     Contains,
     Equals,
@@ -27,7 +23,7 @@ from testtools.matchers import (
     MatchesRegex,
     MatchesSetwise,
     MatchesStructure,
-    )
+)
 
 from lp.archiveuploader.changesfile import ChangesFile
 from lp.archiveuploader.dscfile import DSCFile
@@ -36,7 +32,7 @@ from lp.archiveuploader.nascentuploadfile import (
     DebBinaryUploadFile,
     NascentUploadFile,
     UploadError,
-    )
+)
 from lp.archiveuploader.tests import AbsolutelyAnythingGoesUploadPolicy
 from lp.buildmaster.enums import BuildStatus
 from lp.registry.interfaces.pocket import PackagePublishingPocket
@@ -46,12 +42,9 @@ from lp.soyuz.enums import (
     BinarySourceReferenceType,
     PackagePublishingStatus,
     PackageUploadCustomFormat,
-    )
+)
 from lp.testing import TestCaseWithFactory
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
 class NascentUploadFileTestCase(TestCaseWithFactory):
@@ -64,7 +57,8 @@ class NascentUploadFileTestCase(TestCaseWithFactory):
         self.distro = self.factory.makeDistribution()
         self.policy.pocket = PackagePublishingPocket.RELEASE
         self.policy.archive = self.factory.makeArchive(
-            distribution=self.distro)
+            distribution=self.distro
+        )
 
     def writeUploadFile(self, filename, contents):
         """Write a temporary file but with a specific filename.
@@ -74,11 +68,14 @@ class NascentUploadFileTestCase(TestCaseWithFactory):
         :return: Tuple with path, md5 and size
         """
         path = os.path.join(self.makeTemporaryDirectory(), filename)
-        with open(path, 'wb') as f:
+        with open(path, "wb") as f:
             f.write(contents)
         return (
-            path, hashlib.md5(contents).hexdigest(),
-            hashlib.sha1(contents).hexdigest(), len(contents))
+            path,
+            hashlib.md5(contents).hexdigest(),
+            hashlib.sha1(contents).hexdigest(),
+            len(contents),
+        )
 
 
 class TestNascentUploadFile(NascentUploadFileTestCase):
@@ -86,34 +83,45 @@ class TestNascentUploadFile(NascentUploadFileTestCase):
     layer = ZopelessDatabaseLayer
 
     def test_checkSizeAndCheckSum_validates_size(self):
-        (path, md5, sha1, size) = self.writeUploadFile('foo', b'bar')
+        (path, md5, sha1, size) = self.writeUploadFile("foo", b"bar")
         nuf = NascentUploadFile(
-            path, dict(MD5=md5), size - 1, 'main/devel', None, None, None)
+            path, dict(MD5=md5), size - 1, "main/devel", None, None, None
+        )
         self.assertRaisesWithContent(
             UploadError,
-            'File foo mentioned in the changes has a size mismatch. 3 != 2',
-            nuf.checkSizeAndCheckSum)
+            "File foo mentioned in the changes has a size mismatch. 3 != 2",
+            nuf.checkSizeAndCheckSum,
+        )
 
     def test_checkSizeAndCheckSum_validates_md5(self):
-        (path, md5, sha1, size) = self.writeUploadFile('foo', b'bar')
+        (path, md5, sha1, size) = self.writeUploadFile("foo", b"bar")
         nuf = NascentUploadFile(
-            path, dict(MD5='deadbeef'), size, 'main/devel', None, None, None)
+            path, dict(MD5="deadbeef"), size, "main/devel", None, None, None
+        )
         self.assertRaisesWithContent(
             UploadError,
-            'File foo mentioned in the changes has a MD5 mismatch. '
-            '37b51d194a7513e45b56f6524f2d51f2 != deadbeef',
-            nuf.checkSizeAndCheckSum)
+            "File foo mentioned in the changes has a MD5 mismatch. "
+            "37b51d194a7513e45b56f6524f2d51f2 != deadbeef",
+            nuf.checkSizeAndCheckSum,
+        )
 
     def test_checkSizeAndCheckSum_validates_sha1(self):
-        (path, md5, sha1, size) = self.writeUploadFile('foo', b'bar')
+        (path, md5, sha1, size) = self.writeUploadFile("foo", b"bar")
         nuf = NascentUploadFile(
-            path, dict(MD5=md5, SHA1='foobar'), size, 'main/devel', None,
-            None, None)
+            path,
+            dict(MD5=md5, SHA1="foobar"),
+            size,
+            "main/devel",
+            None,
+            None,
+            None,
+        )
         self.assertRaisesWithContent(
             UploadError,
-            'File foo mentioned in the changes has a SHA1 mismatch. '
-            '62cdb7020ff920e5aa642c3d4066950dd1f01f4d != foobar',
-            nuf.checkSizeAndCheckSum)
+            "File foo mentioned in the changes has a SHA1 mismatch. "
+            "62cdb7020ff920e5aa642c3d4066950dd1f01f4d != foobar",
+            nuf.checkSizeAndCheckSum,
+        )
 
 
 class CustomUploadFileTests(NascentUploadFileTestCase):
@@ -121,27 +129,36 @@ class CustomUploadFileTests(NascentUploadFileTestCase):
 
     layer = LaunchpadZopelessLayer
 
-    def createCustomUploadFile(self, filename, contents,
-                               component_and_section, priority_name):
+    def createCustomUploadFile(
+        self, filename, contents, component_and_section, priority_name
+    ):
         """Simple wrapper to create a CustomUploadFile."""
         (path, md5, sha1, size) = self.writeUploadFile(filename, contents)
         uploadfile = CustomUploadFile(
-            path, dict(MD5=md5), size, component_and_section, priority_name,
-            self.policy, self.logger)
+            path,
+            dict(MD5=md5),
+            size,
+            component_and_section,
+            priority_name,
+            self.policy,
+            self.logger,
+        )
         return uploadfile
 
     def test_custom_type(self):
         # The mime type gets set according to PackageUploadCustomFormat.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-installer", "extra")
+            "bla.txt", b"data", "main/raw-installer", "extra"
+        )
         self.assertEqual(
-            PackageUploadCustomFormat.DEBIAN_INSTALLER,
-            uploadfile.custom_type)
+            PackageUploadCustomFormat.DEBIAN_INSTALLER, uploadfile.custom_type
+        )
 
     def test_storeInDatabase(self):
         # storeInDatabase creates a library file.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-installer", "extra")
+            "bla.txt", b"data", "main/raw-installer", "extra"
+        )
         self.assertEqual("application/octet-stream", uploadfile.content_type)
         libraryfile = uploadfile.storeInDatabase()
         self.assertEqual("bla.txt", libraryfile.filename)
@@ -150,11 +167,15 @@ class CustomUploadFileTests(NascentUploadFileTestCase):
     def test_debian_installer_verify(self):
         # debian-installer uploads are required to have sensible filenames.
         uploadfile = self.createCustomUploadFile(
-            "debian-installer-images_20120627_i386.tar.gz", b"data",
-            "main/raw-installer", "extra")
+            "debian-installer-images_20120627_i386.tar.gz",
+            b"data",
+            "main/raw-installer",
+            "extra",
+        )
         self.assertEqual([], list(uploadfile.verify()))
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-installer", "extra")
+            "bla.txt", b"data", "main/raw-installer", "extra"
+        )
         errors = list(uploadfile.verify())
         self.assertEqual(1, len(errors))
         self.assertIsInstance(errors[0], UploadError)
@@ -162,25 +183,29 @@ class CustomUploadFileTests(NascentUploadFileTestCase):
     def test_no_handler_no_verify(self):
         # Uploads without special handlers have no filename checks.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-meta-data", "extra")
+            "bla.txt", b"data", "main/raw-meta-data", "extra"
+        )
         self.assertEqual([], list(uploadfile.verify()))
 
     def test_debian_installer_auto_approved(self):
         # debian-installer uploads are auto-approved.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-installer", "extra")
+            "bla.txt", b"data", "main/raw-installer", "extra"
+        )
         self.assertTrue(uploadfile.autoApprove())
 
     def test_uefi_not_auto_approved(self):
         # UEFI uploads are auto-approved.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-uefi", "extra")
+            "bla.txt", b"data", "main/raw-uefi", "extra"
+        )
         self.assertFalse(uploadfile.autoApprove())
 
     def test_signing_not_auto_approved(self):
         # UEFI uploads are auto-approved.
         uploadfile = self.createCustomUploadFile(
-            "bla.txt", b"data", "main/raw-signing", "extra")
+            "bla.txt", b"data", "main/raw-signing", "extra"
+        )
         self.assertFalse(uploadfile.autoApprove())
 
 
@@ -190,7 +215,8 @@ class PackageUploadFileTestCase(NascentUploadFileTestCase):
     def setUp(self):
         super().setUp()
         self.policy.distroseries = self.factory.makeDistroSeries(
-            distribution=self.distro)
+            distribution=self.distro
+        )
 
     def getBaseChanges(self):
         contents = Changes()
@@ -205,12 +231,15 @@ class PackageUploadFileTestCase(NascentUploadFileTestCase):
         contents["Date"] = "Fri, 25 Jun 2010 11:20:22 -0600"
         contents["Urgency"] = "low"
         contents["Changed-By"] = "Seombody Else <somebody@xxxxxxxxxxx>"
-        contents["Files"] = [{
-            "md5sum": "d2bd347b3fed184fe28e112695be491c",
-            "size": "1791",
-            "section": "python",
-            "priority": "optional",
-            "name": "dulwich_0.4.1-1.dsc"}]
+        contents["Files"] = [
+            {
+                "md5sum": "d2bd347b3fed184fe28e112695be491c",
+                "size": "1791",
+                "section": "python",
+                "priority": "optional",
+                "name": "dulwich_0.4.1-1.dsc",
+            }
+        ]
         return contents
 
     def createChangesFile(self, filename, changes):
@@ -236,29 +265,52 @@ class DSCFileTests(PackageUploadFileTestCase):
         dsc["Binary"] = "python-dulwich"
         dsc["Standards-Version"] = "0.2.2"
         dsc["Maintainer"] = "Jelmer Vernooij <jelmer@xxxxxxxxxx>"
-        dsc["Files"] = [{
-            "md5sum": "5e8ba79b4074e2f305ddeaf2543afe83",
-            "size": "182280",
-            "name": "dulwich_0.42.tar.gz"}]
+        dsc["Files"] = [
+            {
+                "md5sum": "5e8ba79b4074e2f305ddeaf2543afe83",
+                "size": "182280",
+                "name": "dulwich_0.42.tar.gz",
+            }
+        ]
         return dsc
 
-    def createDSCFile(self, filename, dsc, component_and_section,
-                      priority_name, package, version, changes):
+    def createDSCFile(
+        self,
+        filename,
+        dsc,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+    ):
         (path, md5, sha1, size) = self.writeUploadFile(
-            filename, dsc.dump().encode("UTF-8"))
+            filename, dsc.dump().encode("UTF-8")
+        )
         if changes:
             self.assertEqual([], list(changes.processAddresses()))
         return DSCFile(
-            path, dict(MD5=md5), size, component_and_section, priority_name,
-            package, version, changes, self.policy, self.logger)
+            path,
+            dict(MD5=md5),
+            size,
+            component_and_section,
+            priority_name,
+            package,
+            version,
+            changes,
+            self.policy,
+            self.logger,
+        )
 
     def test_filetype(self):
         # The filetype attribute is set based on the file extension.
         dsc = self.getBaseDsc()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42", None)
+            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42", None
+        )
         self.assertEqual(
-            "text/x-debian-source-package", uploadfile.content_type)
+            "text/x-debian-source-package", uploadfile.content_type
+        )
 
     def test_storeInDatabase(self):
         # storeInDatabase creates a SourcePackageRelease.
@@ -266,8 +318,14 @@ class DSCFileTests(PackageUploadFileTestCase):
         dsc["Build-Depends"] = "dpkg, bzr"
         changes = self.getBaseChanges()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", changes))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", changes),
+        )
         uploadfile.changelog = b"DUMMY"
         uploadfile.files = []
         release = uploadfile.storeInDatabase(None)
@@ -281,8 +339,14 @@ class DSCFileTests(PackageUploadFileTestCase):
         dsc["buIld-depends"] = "dpkg, bzr"
         changes = self.getBaseChanges()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", changes))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", changes),
+        )
         uploadfile.files = []
         uploadfile.changelog = b"DUMMY"
         release = uploadfile.storeInDatabase(None)
@@ -294,14 +358,21 @@ class DSCFileTests(PackageUploadFileTestCase):
         dsc["Python-Version"] = "2.5"
         changes = self.getBaseChanges()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", changes))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", changes),
+        )
         uploadfile.changelog = b"DUMMY"
         uploadfile.files = []
         release = uploadfile.storeInDatabase(None)
         # DSCFile lowercases the field names
         self.assertEqual(
-            [["Python-Version", "2.5"]], release.user_defined_fields)
+            [["Python-Version", "2.5"]], release.user_defined_fields
+        )
 
     def test_homepage(self):
         # storeInDatabase updates homepage.
@@ -309,8 +380,14 @@ class DSCFileTests(PackageUploadFileTestCase):
         dsc["Homepage"] = "http://samba.org/~jelmer/bzr";
         changes = self.getBaseChanges()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", changes))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", changes),
+        )
         uploadfile.changelog = b"DUMMY"
         uploadfile.files = []
         release = uploadfile.storeInDatabase(None)
@@ -320,14 +397,24 @@ class DSCFileTests(PackageUploadFileTestCase):
         # checkBuild() verifies consistency with a build.
         self.policy.distroseries.nominatedarchindep = (
             self.factory.makeDistroArchSeries(
-                distroseries=self.policy.distroseries))
+                distroseries=self.policy.distroseries
+            )
+        )
         build = self.factory.makeSourcePackageRecipeBuild(
-            pocket=self.policy.pocket, distroseries=self.policy.distroseries,
-            archive=self.policy.archive)
+            pocket=self.policy.pocket,
+            distroseries=self.policy.distroseries,
+            archive=self.policy.archive,
+        )
         dsc = self.getBaseDsc()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", self.getBaseChanges()))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()),
+        )
         uploadfile.checkBuild(build)
         # checkBuild() sets the build status to FULLYBUILT and
         # removes the upload log.
@@ -339,14 +426,23 @@ class DSCFileTests(PackageUploadFileTestCase):
         # and upload file are found.
         distroseries = self.factory.makeDistroSeries()
         distroseries.nominatedarchindep = self.factory.makeDistroArchSeries(
-            distroseries=distroseries)
+            distroseries=distroseries
+        )
         build = self.factory.makeSourcePackageRecipeBuild(
-            pocket=self.policy.pocket, distroseries=distroseries,
-            archive=self.policy.archive)
+            pocket=self.policy.pocket,
+            distroseries=distroseries,
+            archive=self.policy.archive,
+        )
         dsc = self.getBaseDsc()
         uploadfile = self.createDSCFile(
-            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
-            self.createChangesFile("foo.changes", self.getBaseChanges()))
+            "foo.dsc",
+            dsc,
+            "main/net",
+            "extra",
+            "dulwich",
+            "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()),
+        )
         self.assertRaises(UploadError, uploadfile.checkBuild, build)
 
 
@@ -369,9 +465,9 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
             "Priority": b"optional",
             "Homepage": b"http://samba.org/~jelmer/dulwich";,
             "Description": b"Pure-python Git library\n"
-                b" Dulwich is a Python implementation of the file formats and"
-                b" protocols",
-            }
+            b" Dulwich is a Python implementation of the file formats and"
+            b" protocols",
+        }
 
     def _writeCompressedFile(self, filename, data):
         if filename.endswith(".gz"):
@@ -380,12 +476,14 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
             open_func = partial(lzma.LZMAFile, format=lzma.FORMAT_XZ)
         else:
             raise ValueError(
-                "Unhandled compression extension in '%s'" % filename)
+                "Unhandled compression extension in '%s'" % filename
+            )
         with open_func(filename, "wb") as f:
             f.write(data)
 
-    def createDeb(self, filename, control, control_format, data_format,
-                  members=None):
+    def createDeb(
+        self, filename, control, control_format, data_format, members=None
+    ):
         """Return the contents of a dummy .deb file."""
         tempdir = self.makeTemporaryDirectory()
         control = {k: six.ensure_text(v) for k, v in control.items()}
@@ -394,69 +492,102 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
                 "debian-binary",
                 "control.tar.%s" % control_format,
                 "data.tar.%s" % data_format,
-                ]
+            ]
         for member in members:
             if member == "debian-binary":
                 write_file(os.path.join(tempdir, member), b"2.0\n")
             elif member.startswith("control.tar."):
                 with io.BytesIO() as control_tar_buf:
                     with tarfile.open(
-                            mode="w", fileobj=control_tar_buf) as control_tar:
+                        mode="w", fileobj=control_tar_buf
+                    ) as control_tar:
                         with io.BytesIO() as control_buf:
                             Deb822(control).dump(
-                                fd=control_buf, encoding="UTF-8")
+                                fd=control_buf, encoding="UTF-8"
+                            )
                             control_buf.seek(0)
                             tarinfo = tarfile.TarInfo(name="control")
                             tarinfo.size = len(control_buf.getvalue())
                             control_tar.addfile(tarinfo, fileobj=control_buf)
                     control_tar_bytes = control_tar_buf.getvalue()
                 self._writeCompressedFile(
-                    os.path.join(tempdir, member), control_tar_bytes)
+                    os.path.join(tempdir, member), control_tar_bytes
+                )
             elif member.startswith("data.tar."):
                 with io.BytesIO() as data_tar_buf:
                     with tarfile.open(mode="w", fileobj=data_tar_buf):
                         pass
                     data_tar_bytes = data_tar_buf.getvalue()
                 self._writeCompressedFile(
-                    os.path.join(tempdir, member), data_tar_bytes)
+                    os.path.join(tempdir, member), data_tar_bytes
+                )
             else:
                 raise ValueError("Unhandled .deb member '%s'" % member)
         retcode = subprocess.call(
-            ["ar", "rc", filename] + members, cwd=tempdir)
+            ["ar", "rc", filename] + members, cwd=tempdir
+        )
         self.assertEqual(0, retcode)
         with open(os.path.join(tempdir, filename), "rb") as f:
             return f.read()
 
-    def createDebBinaryUploadFile(self, filename, component_and_section,
-                                  priority_name, package, version, changes,
-                                  control=None, control_format=None,
-                                  data_format=None, members=None):
+    def createDebBinaryUploadFile(
+        self,
+        filename,
+        component_and_section,
+        priority_name,
+        package,
+        version,
+        changes,
+        control=None,
+        control_format=None,
+        data_format=None,
+        members=None,
+    ):
         """Create a DebBinaryUploadFile."""
-        if (control is not None or control_format is not None or
-                data_format is not None or members is not None):
+        if (
+            control is not None
+            or control_format is not None
+            or data_format is not None
+            or members is not None
+        ):
             if control is None:
                 control = self.getBaseControl()
             data = self.createDeb(
-                filename, control, control_format, data_format,
-                members=members)
+                filename, control, control_format, data_format, members=members
+            )
         else:
             data = b"DUMMY DATA"
         (path, md5, sha1, size) = self.writeUploadFile(filename, data)
         return DebBinaryUploadFile(
-            path, dict(MD5=md5), size, component_and_section, priority_name,
-            package, version, changes, self.policy, self.logger)
+            path,
+            dict(MD5=md5),
+            size,
+            component_and_section,
+            priority_name,
+            package,
+            version,
+            changes,
+            self.policy,
+            self.logger,
+        )
 
     def test_unknown_priority(self):
         # Unknown priorities automatically get changed to 'extra'.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/net", "unknown", "mypkg", "0.42", None)
+            "foo_0.42_i386.deb", "main/net", "unknown", "mypkg", "0.42", None
+        )
         self.assertEqual("extra", uploadfile.priority_name)
 
     def test_parseControl(self):
         # parseControl sets various fields on DebBinaryUploadFile.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         uploadfile.parseControl(control)
         self.assertEqual("python", uploadfile.section_name)
@@ -467,53 +598,91 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
     def test_verifyFormat_missing_control(self):
         # verifyFormat rejects .debs with no control member.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None, members=["debian-binary", "data.tar.gz"])
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+            members=["debian-binary", "data.tar.gz"],
+        )
         self.assertThat(
             ["".join(error.args) for error in uploadfile.verifyFormat()],
-            MatchesListwise([
-                Equals(
-                    "%s: 'dpkg-deb -I' invocation failed." %
-                    uploadfile.filename),
-                MatchesRegex(
-                    r"^ \[dpkg-deb output:\] .* has premature member "
-                    r"'data\.tar\.gz'"),
-                Equals(
-                    "%s: 'dpkg-deb -c' invocation failed." %
-                    uploadfile.filename),
-                MatchesRegex(
-                    r"^ \[dpkg-deb output:\] .* has premature member "
-                    r"'data\.tar\.gz'"),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(
+                        "%s: 'dpkg-deb -I' invocation failed."
+                        % uploadfile.filename
+                    ),
+                    MatchesRegex(
+                        r"^ \[dpkg-deb output:\] .* has premature member "
+                        r"'data\.tar\.gz'"
+                    ),
+                    Equals(
+                        "%s: 'dpkg-deb -c' invocation failed."
+                        % uploadfile.filename
+                    ),
+                    MatchesRegex(
+                        r"^ \[dpkg-deb output:\] .* has premature member "
+                        r"'data\.tar\.gz'"
+                    ),
+                ]
+            ),
+        )
 
     def test_verifyFormat_missing_data(self):
         # verifyFormat rejects .debs with no data member.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None, members=["debian-binary", "control.tar.gz"])
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+            members=["debian-binary", "control.tar.gz"],
+        )
         self.assertThat(
             ["".join(error.args) for error in uploadfile.verifyFormat()],
-            MatchesListwise([
-                Equals(
-                    "%s: 'dpkg-deb -c' invocation failed." %
-                    uploadfile.filename),
-                MatchesRegex(
-                    r"^ \[dpkg-deb output:\] .* unexpected end of file"),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(
+                        "%s: 'dpkg-deb -c' invocation failed."
+                        % uploadfile.filename
+                    ),
+                    MatchesRegex(
+                        r"^ \[dpkg-deb output:\] .* unexpected end of file"
+                    ),
+                ]
+            ),
+        )
 
     def test_verifyFormat_control_xz(self):
         # verifyFormat accepts .debs with an xz-compressed control member.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None, control_format="xz", data_format="gz")
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+            control_format="xz",
+            data_format="gz",
+        )
         uploadfile.extractAndParseControl()
         self.assertEqual([], list(uploadfile.verifyFormat()))
 
     def test_verifyFormat_data_xz(self):
         # verifyFormat accepts .debs with an xz-compressed data member.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None, control_format="gz", data_format="xz")
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+            control_format="gz",
+            data_format="xz",
+        )
         uploadfile.extractAndParseControl()
         self.assertEqual([], list(uploadfile.verifyFormat()))
 
@@ -523,41 +692,68 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         # apt_inst.DebFile raises an exception
         m_apt_inst.DebFile.side_effect = KeyError("banana not found")
         uploadfile = self.createDebBinaryUploadFile(
-            "empty_0.1_all.deb", "main/admin", "extra", "empty", "0.1", None,
-            members=[])
+            "empty_0.1_all.deb",
+            "main/admin",
+            "extra",
+            "empty",
+            "0.1",
+            None,
+            members=[],
+        )
         errors = list(uploadfile.extractAndParseControl())
         self.assertEqual(1, len(errors))
         error = errors[0]
         self.assertIsInstance(error, UploadError)
         self.assertEqual(
             "empty_0.1_all.deb: extracting control file raised "
-            "%s: %r. giving up." % (KeyError, "banana not found"), str(error))
+            "%s: %r. giving up." % (KeyError, "banana not found"),
+            str(error),
+        )
 
     def test_verifyDebTimestamp_SystemError(self):
         # verifyDebTimestamp produces a reasonable error if we provoke a
         # SystemError from apt_inst.DebFile.
         uploadfile = self.createDebBinaryUploadFile(
-            "empty_0.1_all.deb", "main/admin", "extra", "empty", "0.1", None,
-            members=[])
+            "empty_0.1_all.deb",
+            "main/admin",
+            "extra",
+            "empty",
+            "0.1",
+            None,
+            members=[],
+        )
         self.assertThat(
             ["".join(error.args) for error in uploadfile.verifyDebTimestamp()],
-            MatchesListwise([MatchesAny(
-                Equals("No debian archive, missing control.tar.gz"),
-                Contains("could not locate member control.tar."))]))
+            MatchesListwise(
+                [
+                    MatchesAny(
+                        Equals("No debian archive, missing control.tar.gz"),
+                        Contains("could not locate member control.tar."),
+                    )
+                ]
+            ),
+        )
 
     def test_storeInDatabase(self):
         # storeInDatabase creates a BinaryPackageRelease.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         uploadfile.parseControl(control)
         build = self.factory.makeBinaryPackageBuild()
         bpr = uploadfile.storeInDatabase(build)
-        self.assertEqual('python (<< 2.7), python (>= 2.5)', bpr.depends)
+        self.assertEqual("python (<< 2.7), python (>= 2.5)", bpr.depends)
         self.assertEqual(
             " Dulwich is a Python implementation of the file formats and"
-            " protocols", bpr.description)
+            " protocols",
+            bpr.description,
+        )
         self.assertEqual(False, bpr.essential)
         self.assertEqual(524, bpr.installedsize)
         self.assertEqual(True, bpr.architecturespecific)
@@ -567,21 +763,30 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
     def test_user_defined_fields(self):
         # storeInDatabase stores user defined fields.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         control["Python-Version"] = b"2.5"
         uploadfile.parseControl(control)
         build = self.factory.makeBinaryPackageBuild()
         bpr = uploadfile.storeInDatabase(build)
-        self.assertEqual(
-            [["Python-Version", "2.5"]], bpr.user_defined_fields)
+        self.assertEqual([["Python-Version", "2.5"]], bpr.user_defined_fields)
 
     def test_user_defined_fields_newlines(self):
         # storeInDatabase stores user defined fields and keeps newlines.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         control["RandomData"] = b"Foo\nbar\nbla\n"
         uploadfile.parseControl(control)
@@ -590,21 +795,32 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         self.assertEqual(
             [
                 ["RandomData", "Foo\nbar\nbla\n"],
-            ], bpr.user_defined_fields)
+            ],
+            bpr.user_defined_fields,
+        )
 
     def test_built_using(self):
         # storeInDatabase parses Built-Using into BinarySourceReference
         # rows, and also adds the unparsed contents to user_defined_fields.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         control["Built-Using"] = b"bar (= 0.1)"
         uploadfile.parseControl(control)
         build = self.factory.makeBinaryPackageBuild()
         spph = self.factory.makeSourcePackagePublishingHistory(
-            archive=build.archive, distroseries=build.distro_series,
-            pocket=build.pocket, sourcepackagename="bar", version="0.1")
+            archive=build.archive,
+            distroseries=build.distro_series,
+            pocket=build.pocket,
+            sourcepackagename="bar",
+            version="0.1",
+        )
         bpr = uploadfile.storeInDatabase(build)
         self.assertThat(
             bpr.built_using_references,
@@ -613,33 +829,46 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
                     binary_package_release=bpr,
                     source_package_release=spph.sourcepackagerelease,
                     reference_type=BinarySourceReferenceType.BUILT_USING,
-                    )))
+                )
+            ),
+        )
         self.assertEqual(
-            [["Built-Using", "bar (= 0.1)"]], bpr.user_defined_fields)
+            [["Built-Using", "bar (= 0.1)"]], bpr.user_defined_fields
+        )
 
     def test_homepage(self):
         # storeInDatabase stores homepage field.
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         control["Python-Version"] = b"2.5"
         uploadfile.parseControl(control)
         build = self.factory.makeBinaryPackageBuild()
         bpr = uploadfile.storeInDatabase(build)
-        self.assertEqual(
-            "http://samba.org/~jelmer/dulwich";, bpr.homepage)
+        self.assertEqual("http://samba.org/~jelmer/dulwich";, bpr.homepage)
 
     def test_checkBuild(self):
         # checkBuild() verifies consistency with a build.
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="i386")
+            distroseries=self.policy.distroseries, architecturetag="i386"
+        )
         build = self.factory.makeBinaryPackageBuild(
-            distroarchseries=das,
-            archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         uploadfile.checkBuild(build)
         # checkBuild() sets the build status to FULLYBUILT and
         # removes the upload log.
@@ -650,46 +879,67 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         # checkBuild() raises UploadError if inconsistencies between build
         # and upload file are found.
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="amd64")
+            distroseries=self.policy.distroseries, architecturetag="amd64"
+        )
         build = self.factory.makeBinaryPackageBuild(
-            distroarchseries=das,
-            archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         self.assertRaises(UploadError, uploadfile.checkBuild, build)
 
     def test_findSourcePackageRelease(self):
         # findSourcePackageRelease finds the matching SourcePackageRelease.
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="i386")
+            distroseries=self.policy.distroseries, architecturetag="i386"
+        )
         self.factory.makeBinaryPackageBuild(
-            distroarchseries=das,
-            archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         spph = self.factory.makeSourcePackagePublishingHistory(
             sourcepackagename=self.factory.makeSourcePackageName("foo"),
             distroseries=self.policy.distroseries,
-            version="0.42", archive=self.policy.archive)
+            version="0.42",
+            archive=self.policy.archive,
+        )
         control = self.getBaseControl()
         control["Source"] = b"foo"
         uploadfile.parseControl(control)
         self.assertEqual(
-            spph.sourcepackagerelease, uploadfile.findSourcePackageRelease())
+            spph.sourcepackagerelease, uploadfile.findSourcePackageRelease()
+        )
 
     def test_findSourcePackageRelease_no_spph(self):
         # findSourcePackageRelease raises UploadError if there is no
         # SourcePackageRelease.
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="i386")
+            distroseries=self.policy.distroseries, architecturetag="i386"
+        )
         self.factory.makeBinaryPackageBuild(
-            distroarchseries=das,
-            archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         control = self.getBaseControl()
         control["Source"] = b"foo"
         uploadfile.parseControl(control)
@@ -700,26 +950,37 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         # SourcePackageRelease and can deal with multiple pending source
         # package releases.
         das = self.factory.makeDistroArchSeries(
-            distroseries=self.policy.distroseries, architecturetag="i386")
+            distroseries=self.policy.distroseries, architecturetag="i386"
+        )
         self.factory.makeBinaryPackageBuild(
-            distroarchseries=das,
-            archive=self.policy.archive)
+            distroarchseries=das, archive=self.policy.archive
+        )
         uploadfile = self.createDebBinaryUploadFile(
-            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
-            None)
+            "foo_0.42_i386.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+        )
         spn = self.factory.makeSourcePackageName("foo")
         self.factory.makeSourcePackagePublishingHistory(
             sourcepackagename=spn,
             distroseries=self.policy.distroseries,
-            version="0.42", archive=self.policy.archive,
-            status=PackagePublishingStatus.PUBLISHED)
+            version="0.42",
+            archive=self.policy.archive,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         spph2 = self.factory.makeSourcePackagePublishingHistory(
             sourcepackagename=spn,
             distroseries=self.policy.distroseries,
-            version="0.42", archive=self.policy.archive,
-            status=PackagePublishingStatus.PENDING)
+            version="0.42",
+            archive=self.policy.archive,
+            status=PackagePublishingStatus.PENDING,
+        )
         control = self.getBaseControl()
         control["Source"] = b"foo"
         uploadfile.parseControl(control)
         self.assertEqual(
-            spph2.sourcepackagerelease, uploadfile.findSourcePackageRelease())
+            spph2.sourcepackagerelease, uploadfile.findSourcePackageRelease()
+        )
diff --git a/lib/lp/archiveuploader/tests/test_ocirecipeupload.py b/lib/lp/archiveuploader/tests/test_ocirecipeupload.py
index f46785f..70ae29e 100644
--- a/lib/lp/archiveuploader/tests/test_ocirecipeupload.py
+++ b/lib/lp/archiveuploader/tests/test_ocirecipeupload.py
@@ -10,11 +10,8 @@ from storm.store import Store
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.oci.interfaces.ocirecipe import OCI_RECIPE_ALLOW_CREATE
 from lp.oci.tests.helpers import OCIConfigHelperMixin
@@ -24,10 +21,9 @@ from lp.services.propertycache import get_property_cache
 
 
 class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
-
     def setUp(self):
         super().setUp()
-        self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: 'on'}))
+        self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
 
         self.setupBreezy()
 
@@ -38,20 +34,23 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         self.options.context = "buildd"
 
         self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
-
-        self.digests = [{
-            "diff_id_1": {
-                "digest": "digest_1",
-                "source": "test/base_1",
-                "layer_id": "layer_1"
-            },
-            "diff_id_2": {
-                "digest": "digest_2",
-                "source": "",
-                "layer_id": "layer_2"
+            self.layer.txn, builds=True
+        )
+
+        self.digests = [
+            {
+                "diff_id_1": {
+                    "digest": "digest_1",
+                    "source": "test/base_1",
+                    "layer_id": "layer_1",
+                },
+                "diff_id_2": {
+                    "digest": "digest_2",
+                    "source": "",
+                    "layer_id": "layer_2",
+                },
             }
-        }]
+        ]
 
     def test_sets_build_and_state(self):
         # The upload processor uploads files and sets the correct status.
@@ -59,19 +58,24 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         del get_property_cache(self.build).manifest
         del get_property_cache(self.build).digests
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "layer_1.tar.gz"), b"layer_1")
         write_file(os.path.join(upload_dir, "layer_2.tar.gz"), b"layer_2")
         write_file(
             os.path.join(upload_dir, "digests.json"),
-            json.dumps(self.digests).encode("UTF-8"))
+            json.dumps(self.digests).encode("UTF-8"),
+        )
         write_file(os.path.join(upload_dir, "manifest.json"), b"manifest")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processOCIRecipe(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "OCI upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "OCI upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
 
@@ -82,7 +86,8 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         self.setConfig()
         distribution = self.factory.makeDistribution()
         distribution.oci_registry_credentials = (
-            self.factory.makeOCIRegistryCredentials())
+            self.factory.makeOCIRegistryCredentials()
+        )
         oci_project = self.factory.makeOCIProject(pillar=distribution)
         recipe = self.factory.makeOCIRecipe(oci_project=oci_project)
         build = self.factory.makeOCIRecipeBuild(recipe=recipe)
@@ -94,19 +99,24 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         del get_property_cache(build).manifest
         del get_property_cache(build).digests
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(build.id), "ubuntu")
+            self.incoming_folder, "test", str(build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "layer_1.tar.gz"), b"layer_1")
         write_file(os.path.join(upload_dir, "layer_2.tar.gz"), b"layer_2")
         write_file(
             os.path.join(upload_dir, "digests.json"),
-            json.dumps(self.digests).encode("UTF-8"))
+            json.dumps(self.digests).encode("UTF-8"),
+        )
         write_file(os.path.join(upload_dir, "manifest.json"), b"manifest")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", build)
+            self.uploadprocessor, self.incoming_folder, "test", build
+        )
         result = handler.processOCIRecipe(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "OCI upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "OCI upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, build.status)
         self.assertTrue(build.verifySuccessfulUpload())
 
@@ -117,15 +127,18 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         del get_property_cache(self.build).manifest
         del get_property_cache(self.build).digests
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "layer_1.tar.gz"), b"layer_1")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processOCIRecipe(self.log)
         self.assertEqual(UploadStatusEnum.REJECTED, result)
         self.assertIn(
             "ERROR Build did not produce a digests.json.",
-            self.log.getLogBuffer())
+            self.log.getLogBuffer(),
+        )
         self.assertFalse(self.build.verifySuccessfulUpload())
 
     def test_missing_layer_file(self):
@@ -134,18 +147,21 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         del get_property_cache(self.build).manifest
         del get_property_cache(self.build).digests
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "layer_1.tar.gz"), b"layer_1")
         write_file(
             os.path.join(upload_dir, "digests.json"),
-            json.dumps(self.digests).encode("UTF-8"))
+            json.dumps(self.digests).encode("UTF-8"),
+        )
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processOCIRecipe(self.log)
         self.assertEqual(UploadStatusEnum.REJECTED, result)
         self.assertIn(
-            "ERROR Missing layer file: layer_2.",
-            self.log.getLogBuffer())
+            "ERROR Missing layer file: layer_2.", self.log.getLogBuffer()
+        )
         self.assertFalse(self.build.verifySuccessfulUpload())
 
     def test_reuse_existing_file(self):
@@ -155,12 +171,14 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         del get_property_cache(self.build).manifest
         del get_property_cache(self.build).digests
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "layer_1.tar.gz"), b"layer_1")
         write_file(os.path.join(upload_dir, "manifest.json"), b"manifest")
         write_file(
             os.path.join(upload_dir, "digests.json"),
-            json.dumps(self.digests).encode("UTF-8"))
+            json.dumps(self.digests).encode("UTF-8"),
+        )
 
         # create the existing file
         self.switchToAdmin()
@@ -169,10 +187,13 @@ class TestOCIRecipeUploads(OCIConfigHelperMixin, TestUploadProcessorBase):
         self.switchToUploader()
 
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processOCIRecipe(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "OCI upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "OCI upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
diff --git a/lib/lp/archiveuploader/tests/test_ppauploadprocessor.py b/lib/lp/archiveuploader/tests/test_ppauploadprocessor.py
index 767324d..1091e56 100644
--- a/lib/lp/archiveuploader/tests/test_ppauploadprocessor.py
+++ b/lib/lp/archiveuploader/tests/test_ppauploadprocessor.py
@@ -3,9 +3,9 @@
 
 """Functional tests for uploadprocessor.py."""
 
-from operator import itemgetter
 import os
 import shutil
+from operator import itemgetter
 
 import transaction
 from zope.component import getUtility
@@ -14,7 +14,7 @@ from zope.security.proxy import removeSecurityProxy
 from lp.app.errors import NotFoundError
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.person import IPersonSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
@@ -26,12 +26,12 @@ from lp.soyuz.enums import (
     PackagePublishingStatus,
     PackageUploadStatus,
     SourcePackageFormat,
-    )
+)
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.queue import NonBuildableSourceUploadError
 from lp.soyuz.interfaces.sourcepackageformat import (
     ISourcePackageFormatSelectionSet,
-    )
+)
 from lp.soyuz.model.component import Component
 from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
 from lp.soyuz.tests.fakepackager import FakePackager
@@ -51,8 +51,9 @@ class TestPPAUploadProcessorBase(TestUploadProcessorBase):
         """
         super().setUp()
         self.build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
-        self.ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
+            self.layer.txn, builds=True
+        )
+        self.ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
 
         # create name16 PPA
         self.name16 = getUtility(IPersonSet).getByName("name16")
@@ -62,11 +63,11 @@ class TestPPAUploadProcessorBase(TestUploadProcessorBase):
         transaction.commit()
 
         # Set up the uploadprocessor with appropriate options and logger
-        self.options.context = 'insecure'
+        self.options.context = "insecure"
         self.uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
     def makeArchive(self, owner):
-        return self.factory.makeArchive(owner=owner, name='ppa')
+        return self.factory.makeArchive(owner=owner, name="ppa")
 
     def assertEmails(self, expected):
         """Check recent email upload notification attributes.
@@ -83,7 +84,8 @@ class TestPPAUploadProcessorBase(TestUploadProcessorBase):
             explicitly set to None for non-PPA or rejection notifications.
         """
         notifications = self.assertEmailQueueLength(
-            len(expected), sort_key=itemgetter('X-Envelope-To'))
+            len(expected), sort_key=itemgetter("X-Envelope-To")
+        )
 
         for item, msg in zip(expected, notifications):
             recipient = item.get("recipient", self.name16_recipient)
@@ -94,17 +96,17 @@ class TestPPAUploadProcessorBase(TestUploadProcessorBase):
             self.assertFalse(msg.is_multipart())
             body = msg.get_payload(decode=True).decode("UTF-8")
 
-            self.assertEqual(recipient, msg['X-Envelope-To'])
+            self.assertEqual(recipient, msg["X-Envelope-To"])
 
-            subject = "Subject: %s\n" % msg['Subject']
+            subject = "Subject: %s\n" % msg["Subject"]
             body = subject + body
 
             for content in list(contents):
                 self.assertIn(content, body)
 
             if ppa_header is not None:
-                self.assertIn('X-Launchpad-PPA', msg.keys())
-                self.assertEqual(msg['X-Launchpad-PPA'], ppa_header)
+                self.assertIn("X-Launchpad-PPA", msg.keys())
+                self.assertEqual(msg["X-Launchpad-PPA"], ppa_header)
 
     def checkFilesRestrictedInLibrarian(self, queue_item, condition):
         """Check the libraryfilealias restricted flag.
@@ -116,14 +118,14 @@ class TestPPAUploadProcessorBase(TestUploadProcessorBase):
 
         for source in queue_item.sources:
             for source_file in source.sourcepackagerelease.files:
-                self.assertEqual(
-                    source_file.libraryfile.restricted, condition)
+                self.assertEqual(source_file.libraryfile.restricted, condition)
 
         for build in queue_item.builds:
             for binarypackage in build.build.binarypackages:
                 for binary_file in binarypackage.files:
                     self.assertEqual(
-                        binary_file.libraryfile.restricted, condition)
+                        binary_file.libraryfile.restricted, condition
+                    )
 
         for custom in queue_item.customfiles:
             custom_file = custom.libraryfilealias
@@ -155,11 +157,16 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         self.assertEqual(queue_item.archive, self.name16.archive)
         self.assertEqual(queue_item.pocket, PackagePublishingPocket.RELEASE)
@@ -172,16 +179,17 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         self.assertEqual(pending_ppas.count(), 1)
         self.assertEqual(pending_ppas[0], self.name16.archive)
 
-        pub_bar = self.name16.archive.getPublishedSources(name='bar').one()
+        pub_bar = self.name16.archive.getPublishedSources(name="bar").one()
 
-        self.assertEqual(pub_bar.sourcepackagerelease.version, '1.0-1')
+        self.assertEqual(pub_bar.sourcepackagerelease.version, "1.0-1")
         self.assertEqual(pub_bar.status, PackagePublishingStatus.PENDING)
-        self.assertEqual(pub_bar.component.name, 'main')
+        self.assertEqual(pub_bar.component.name, "main")
 
-        [build] = self.name16.archive.getBuildRecords(name='bar')
+        [build] = self.name16.archive.getBuildRecords(name="bar")
         self.assertEqual(
-            build.title, 'i386 build of bar 1.0-1 in ubuntu breezy RELEASE')
-        self.assertEqual(build.status.name, 'NEEDSBUILD')
+            build.title, "i386 build of bar 1.0-1 in ubuntu breezy RELEASE"
+        )
+        self.assertEqual(build.status.name, "NEEDSBUILD")
         self.assertNotEqual(0, build.buildqueue_record.lastscore)
 
         #
@@ -195,19 +203,21 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
-        pub_sources = self.name16.archive.getPublishedSources(name='bar')
+        pub_sources = self.name16.archive.getPublishedSources(name="bar")
         [pub_bar_10, pub_bar] = pub_sources
 
-        self.assertEqual(pub_bar_10.sourcepackagerelease.version, '1.0-10')
+        self.assertEqual(pub_bar_10.sourcepackagerelease.version, "1.0-10")
         self.assertEqual(pub_bar_10.status, PackagePublishingStatus.PENDING)
-        self.assertEqual(pub_bar_10.component.name, 'main')
+        self.assertEqual(pub_bar_10.component.name, "main")
 
-        [build, build_old] = self.name16.archive.getBuildRecords(name='bar')
+        [build, build_old] = self.name16.archive.getBuildRecords(name="bar")
         self.assertEqual(
-            build.title, 'i386 build of bar 1.0-10 in ubuntu breezy RELEASE')
-        self.assertEqual(build.status.name, 'NEEDSBUILD')
+            build.title, "i386 build of bar 1.0-10 in ubuntu breezy RELEASE"
+        )
+        self.assertEqual(build.status.name, "NEEDSBUILD")
         self.assertNotEqual(0, build.buildqueue_record.lastscore)
 
         #
@@ -219,8 +229,9 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.rejection_message,
-            'bar_1.0-2.dsc: Version older than that in the archive. '
-            '1.0-2 <= 1.0-10')
+            "bar_1.0-2.dsc: Version older than that in the archive. "
+            "1.0-2 <= 1.0-10",
+        )
 
     def testNamedPPAUploadDefault(self):
         """Test PPA uploads to the default PPA."""
@@ -236,12 +247,13 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         # Subject and PPA emails header contain the owner name since
         # it's the default PPA.
         contents = [
-            "Subject: [~name16/ubuntu/ppa/breezy] bar 1.0-1 (Accepted)"]
+            "Subject: [~name16/ubuntu/ppa/breezy] bar 1.0-1 (Accepted)"
+        ]
         self.assertEmails([{"contents": contents, "ppa_header": "name16"}])
 
     def testNamedPPAUploadNonDefault(self):
         """Test PPA uploads to a named PPA."""
-        other_ppa = self.factory.makeArchive(owner=self.name16, name='testing')
+        other_ppa = self.factory.makeArchive(owner=self.name16, name="testing")
 
         # Upload to a named PPA.
         upload_dir = self.queueUpload("bar_1.0-1", "~name16/testing/ubuntu")
@@ -254,9 +266,11 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         # Subject and PPA email-header are specific for this named-ppa.
         contents = [
-            "Subject: [~name16/ubuntu/testing/breezy] bar 1.0-1 (Accepted)"]
+            "Subject: [~name16/ubuntu/testing/breezy] bar 1.0-1 (Accepted)"
+        ]
         self.assertEmails(
-            [{"contents": contents, "ppa_header": "name16-testing"}])
+            [{"contents": contents, "ppa_header": "name16-testing"}]
+        )
 
     def testNamedPPAUploadWithSeries(self):
         """Test PPA uploads to a named PPA location and with a distroseries.
@@ -289,33 +303,39 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
         # Consume the test email so the assertion further down does not fail.
         pop_notifications()
 
         # The SourcePackageRelease still has a component of universe:
         pub_foo = self.name16.archive.getPublishedSources(name="bar").one()
         self.assertEqual(
-            pub_foo.sourcepackagerelease.component.name, "universe")
+            pub_foo.sourcepackagerelease.component.name, "universe"
+        )
 
         # But the publishing record has main:
-        self.assertEqual(pub_foo.component.name, 'main')
+        self.assertEqual(pub_foo.component.name, "main")
 
         # Continue with a binary upload:
         [build] = self.name16.archive.getBuildRecords(name="bar")
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary_universe", "~name16/ubuntu")
-        self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build)
+            "bar_1.0-1_binary_universe", "~name16/ubuntu"
+        )
+        self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
 
         # No mails are sent for successful binary uploads.
         self.assertEmailQueueLength(0)
 
         # Publish the binary.
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
         self.switchToAdmin()
         queue_item.realiseUpload()
         self.switchToUploader()
@@ -325,7 +345,8 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
             [binary_pub] = IStore(BinaryPackagePublishingHistory).find(
                 BinaryPackagePublishingHistory,
                 binarypackagerelease=binary_package,
-                archive=self.name16.archive)
+                archive=self.name16.archive,
+            )
             self.assertEqual(binary_pub.component.name, "main")
 
     def testPPABinaryUploads(self):
@@ -336,32 +357,37 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Source publication and build record for breezy-i386
         # distroarchseries were created as expected. The source is ready
         # to receive the binary upload.
-        pub_bar = self.name16.archive.getPublishedSources(name='bar').one()
-        self.assertEqual(pub_bar.sourcepackagerelease.version, '1.0-1')
+        pub_bar = self.name16.archive.getPublishedSources(name="bar").one()
+        self.assertEqual(pub_bar.sourcepackagerelease.version, "1.0-1")
         self.assertEqual(pub_bar.status, PackagePublishingStatus.PENDING)
-        self.assertEqual(pub_bar.component.name, 'main')
+        self.assertEqual(pub_bar.component.name, "main")
 
-        [build] = self.name16.archive.getBuildRecords(name='bar')
+        [build] = self.name16.archive.getBuildRecords(name="bar")
         self.assertEqual(
-            build.title, 'i386 build of bar 1.0-1 in ubuntu breezy RELEASE')
-        self.assertEqual(build.status.name, 'NEEDSBUILD')
+            build.title, "i386 build of bar 1.0-1 in ubuntu breezy RELEASE"
+        )
+        self.assertEqual(build.status.name, "NEEDSBUILD")
         self.assertNotEqual(0, build.buildqueue_record.lastscore)
 
         # Binary upload to the just-created build record.
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build)
+        self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
         self.assertEqual(queue_items.count(), 1)
 
         # All the files associated with this binary upload must be in the
@@ -388,34 +414,46 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Copy source uploaded to name16 PPA to cprov's PPA.
         name16_pub_bar = self.name16.archive.getPublishedSources(
-            name='bar').one()
+            name="bar"
+        ).one()
         cprov = getUtility(IPersonSet).getByName("cprov")
         cprov_pub_bar = name16_pub_bar.copyTo(
-            self.breezy, PackagePublishingPocket.RELEASE, cprov.archive)
+            self.breezy, PackagePublishingPocket.RELEASE, cprov.archive
+        )
         self.assertEqual(
             cprov_pub_bar.sourcepackagerelease.upload_archive.displayname,
-            'PPA for Foo Bar')
+            "PPA for Foo Bar",
+        )
 
         # Create a build record for source bar for breezy-i386
         # distroarchseries in cprov PPA.
         build_bar_i386 = getUtility(IBinaryPackageBuildSet).new(
-            cprov_pub_bar.sourcepackagerelease, cprov.archive,
-            self.breezy['i386'], PackagePublishingPocket.RELEASE)
+            cprov_pub_bar.sourcepackagerelease,
+            cprov.archive,
+            self.breezy["i386"],
+            PackagePublishingPocket.RELEASE,
+        )
 
         # Binary upload to the just-created build record.
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
         self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build_bar_i386)
+            self.build_uploadprocessor, upload_dir, build=build_bar_i386
+        )
 
         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=cprov.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=cprov.archive,
+        )
         self.assertEqual(queue_items.count(), 1)
 
     def testUploadDoesNotEmailMaintainerOrChangedBy(self):
@@ -425,7 +463,8 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         it's the uploader only who gets emailed.
         """
         upload_dir = self.queueUpload(
-            "bar_1.0-1_valid_maintainer", "~name16/ubuntu")
+            "bar_1.0-1_valid_maintainer", "~name16/ubuntu"
+        )
         self.processUpload(self.uploadprocessor, upload_dir)
         # name16 is Foo Bar, who signed the upload.  The package that was
         # uploaded also contains two other valid (in sampledata) email
@@ -464,10 +503,14 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         expected_recipients = (
             self.name16_recipient,
             team.preferredemail.email,
-            name12.preferredemail.email)
-        self.assertEmails([
-            {"ppa_header": "cprov", "recipient": expected_recipient}
-            for expected_recipient in sorted(expected_recipients)])
+            name12.preferredemail.email,
+        )
+        self.assertEmails(
+            [
+                {"ppa_header": "cprov", "recipient": expected_recipient}
+                for expected_recipient in sorted(expected_recipients)
+            ]
+        )
 
     def testPPADistroSeriesOverrides(self):
         """It's possible to override target distroseries of PPA uploads.
@@ -479,26 +522,29 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
          * The modified PPA is found by getPendingPublicationPPA() lookup.
         """
         self.switchToAdmin()
-        hoary = self.ubuntu['hoary']
-        fake_chroot = self.addMockFile('fake_chroot.tar.gz')
-        hoary['i386'].addOrUpdateChroot(fake_chroot)
+        hoary = self.ubuntu["hoary"]
+        fake_chroot = self.addMockFile("fake_chroot.tar.gz")
+        hoary["i386"].addOrUpdateChroot(fake_chroot)
         self.switchToUploader()
 
-        upload_dir = self.queueUpload(
-            "bar_1.0-1", "~name16/ubuntu/ppa/hoary")
+        upload_dir = self.queueUpload("bar_1.0-1", "~name16/ubuntu/ppa/hoary")
         self.processUpload(self.uploadprocessor, upload_dir)
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         [queue_item] = hoary.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         self.assertEqual(queue_item.archive, self.name16.archive)
-        self.assertEqual(
-            queue_item.pocket, PackagePublishingPocket.RELEASE)
+        self.assertEqual(queue_item.pocket, PackagePublishingPocket.RELEASE)
 
         pending_ppas = self.ubuntu.getPendingPublicationPPAs()
         self.assertEqual(pending_ppas.count(), 1)
@@ -518,21 +564,27 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-1", exact_match=True, archive=ubuntu_team.archive)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=ubuntu_team.archive,
+        )
         self.assertEqual(queue_items.count(), 1)
 
         pending_ppas = self.ubuntu.getPendingPublicationPPAs()
         self.assertEqual(pending_ppas.count(), 1)
         self.assertEqual(pending_ppas[0], ubuntu_team.archive)
 
-        [build] = ubuntu_team.archive.getBuildRecords(name='bar')
+        [build] = ubuntu_team.archive.getBuildRecords(name="bar")
         self.assertEqual(
-            build.title, 'i386 build of bar 1.0-1 in ubuntu breezy RELEASE')
-        self.assertEqual(build.status.name, 'NEEDSBUILD')
+            build.title, "i386 build of bar 1.0-1 in ubuntu breezy RELEASE"
+        )
+        self.assertEqual(build.status.name, "NEEDSBUILD")
         self.assertNotEqual(0, build.buildqueue_record.lastscore)
 
     def testNotMemberUploadToTeamPPA(self):
@@ -542,12 +594,14 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         be returned since nothing was accepted.
         """
         ubuntu_translators = getUtility(IPersonSet).getByName(
-            "ubuntu-translators")
+            "ubuntu-translators"
+        )
         self.makeArchive(owner=ubuntu_translators)
         transaction.commit()
 
         upload_dir = self.queueUpload(
-            "bar_1.0-1", "~ubuntu-translators/ubuntu")
+            "bar_1.0-1", "~ubuntu-translators/ubuntu"
+        )
         self.processUpload(self.uploadprocessor, upload_dir)
 
         pending_ppas = self.ubuntu.getPendingPublicationPPAs()
@@ -564,7 +618,8 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.rejection_message,
-            "Signer has no upload rights to this PPA.")
+            "Signer has no upload rights to this PPA.",
+        )
 
     def testPPAPartnerUpload(self):
         """Upload a partner package to a PPA and ensure it's not rejected."""
@@ -574,7 +629,8 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         # Check it's been successfully accepted.
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # We rely on the fact that the component on the source package
         # release is unmodified, only the publishing component is
@@ -583,20 +639,22 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         # source's component if the standard auto-overrides don't match
         # an existing publication.
         pub_foocomm = self.name16.archive.getPublishedSources(
-            name='foocomm').one()
+            name="foocomm"
+        ).one()
         self.assertEqual(
-            pub_foocomm.sourcepackagerelease.component.name, 'partner')
-        self.assertEqual(pub_foocomm.component.name, 'main')
+            pub_foocomm.sourcepackagerelease.component.name, "partner"
+        )
+        self.assertEqual(pub_foocomm.component.name, "main")
 
     def testMixedUpload(self):
         """Mixed PPA uploads are rejected with a appropriate message."""
-        upload_dir = self.queueUpload(
-            "bar_1.0-1-mixed", "~name16/ubuntu")
+        upload_dir = self.queueUpload("bar_1.0-1-mixed", "~name16/ubuntu")
         self.processUpload(self.uploadprocessor, upload_dir)
 
         self.assertIn(
-            'Source/binary (i.e. mixed) uploads are not allowed.',
-            self.uploadprocessor.last_processed_upload.rejection_message)
+            "Source/binary (i.e. mixed) uploads are not allowed.",
+            self.uploadprocessor.last_processed_upload.rejection_message,
+        )
 
     def testPGPSignatureNotPreserved(self):
         """PGP signatures should be removed from PPA .changes files.
@@ -618,16 +676,21 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         [build] = self.name16.archive.getBuildRecords(name="bar")
 
         test_files_dir = os.path.join(
-            config.root, "lib/lp/archiveuploader/tests/data/")
+            config.root, "lib/lp/archiveuploader/tests/data/"
+        )
         self.options.context = "buildd"
         upload_dir = self.queueUpload(
-            "debian-installer", "~name16/ubuntu/ppa/breezy",
-            test_files_dir=test_files_dir)
+            "debian-installer",
+            "~name16/ubuntu/ppa/breezy",
+            test_files_dir=test_files_dir,
+        )
         self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
 
         [queue_item] = self.breezy.getPackageUploads(
-            name="debian-installer", status=PackageUploadStatus.ACCEPTED,
-            archive=self.name16.archive)
+            name="debian-installer",
+            status=PackageUploadStatus.ACCEPTED,
+            archive=self.name16.archive,
+        )
         return queue_item
 
     def testCustomUploadToPPA(self):
@@ -659,22 +722,29 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         self.processUpload(self.uploadprocessor, upload_dir)
 
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         self.checkFilesRestrictedInLibrarian(queue_item, True)
 
         # Now that we have source uploaded, we can upload a build.
-        [build] = self.name16.archive.getBuildRecords(name='bar')
-        self.options.context = 'buildd'
+        [build] = self.name16.archive.getBuildRecords(name="bar")
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build)
+        self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted and it's waiting in the queue.
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         # All the files associated with this binary upload must be in the
         # restricted librarian as the PPA is private.
@@ -688,32 +758,42 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         """
         # The component contrib does not exist in the sample data, so
         # add it here.
-        Component(name='contrib')
+        Component(name="contrib")
 
         # Upload a source package first.
         upload_dir = self.queueUpload(
-            "bar_1.0-1_contrib_component", "~name16/ubuntu")
+            "bar_1.0-1_contrib_component", "~name16/ubuntu"
+        )
         self.processUpload(self.uploadprocessor, upload_dir)
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         # The upload was accepted despite the fact that it does
         # not have a valid component:
         self.assertTrue(
-            queue_item.sourcepackagerelease.component not in
-            self.breezy.upload_components)
+            queue_item.sourcepackagerelease.component
+            not in self.breezy.upload_components
+        )
 
         # Binary uploads should exhibit the same behaviour:
         [build] = self.name16.archive.getBuildRecords(name="bar")
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload(
-            "bar_1.0-1_contrib_binary", "~name16/ubuntu")
-        self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build)
+            "bar_1.0-1_contrib_binary", "~name16/ubuntu"
+        )
+        self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
 
         # The binary is accepted despite the fact that it does not have
         # a valid component:
@@ -722,7 +802,8 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         [build] = queue_item.builds
         for binary in build.build.binarypackages:
             self.assertTrue(
-                binary.component not in self.breezy.upload_components)
+                binary.component not in self.breezy.upload_components
+            )
 
     def testPPAUploadResultingInNoBuilds(self):
         """Source uploads resulting in no builds are rejected.
@@ -737,29 +818,33 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         """
         # First upload gets in because breezy/i386 is supported in PPA.
         packager = FakePackager(
-            'biscuit', '1.0', 'foo.bar@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
+            "biscuit", "1.0", "foo.bar@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+        )
         packager.buildUpstream(suite=self.breezy.name, arch="i386")
         packager.buildSource()
         biscuit_pub = packager.uploadSourceVersion(
-            '1.0-1', archive=self.name16.archive)
+            "1.0-1", archive=self.name16.archive
+        )
         self.assertEqual(biscuit_pub.status, PackagePublishingStatus.PENDING)
 
         # Remove breezy/i386 PPA support.
         self.switchToAdmin()
-        self.breezy['i386'].processor.supports_virtualized = False
+        self.breezy["i386"].processor.supports_virtualized = False
         self.switchToUploader()
 
         # Next version can't be accepted because it can't be built.
-        packager.buildVersion('1.0-2', suite=self.breezy.name, arch="i386")
+        packager.buildVersion("1.0-2", suite=self.breezy.name, arch="i386")
         packager.buildSource()
         upload = packager.uploadSourceVersion(
-            '1.0-2', archive=self.name16.archive, auto_accept=False)
+            "1.0-2", archive=self.name16.archive, auto_accept=False
+        )
 
         error = self.assertRaisesAndReturnError(
-            NonBuildableSourceUploadError, upload.storeObjectsInDatabase)
+            NonBuildableSourceUploadError, upload.storeObjectsInDatabase
+        )
         self.assertEqual(
-            str(error),
-            "Cannot build any of the architectures requested: i386")
+            str(error), "Cannot build any of the architectures requested: i386"
+        )
 
     def testUploadPathErrorIntendedForHumans(self):
         # PPA upload path errors are augmented with documentation
@@ -770,35 +855,44 @@ class TestPPAUploadProcessor(TestPPAUploadProcessorBase):
         upload_dir = self.queueUpload("bar_1.0-1", "~boing/ppa")
         self.processUpload(self.uploadprocessor, upload_dir)
         rejection_message = (
-            self.uploadprocessor.last_processed_upload.rejection_message)
+            self.uploadprocessor.last_processed_upload.rejection_message
+        )
         self.assertEqual(
-            ["Launchpad failed to process the upload path '~boing/ppa':",
-             '',
-             "Could not find person or team named 'boing'.",
-             '',
-             'It is likely that you have a configuration problem with '
-                 'dput/dupload.',
-             'Please check the documentation at '
-                 'https://help.launchpad.net/Packaging/PPA/Uploading '
-                 'and update your configuration.',
-             '',
-             'Further error processing not possible because of a critical '
-                 'previous error.'], rejection_message.splitlines())
+            [
+                "Launchpad failed to process the upload path '~boing/ppa':",
+                "",
+                "Could not find person or team named 'boing'.",
+                "",
+                "It is likely that you have a configuration problem with "
+                "dput/dupload.",
+                "Please check the documentation at "
+                "https://help.launchpad.net/Packaging/PPA/Uploading "
+                "and update your configuration.",
+                "",
+                "Further error processing not possible because of a critical "
+                "previous error.",
+            ],
+            rejection_message.splitlines(),
+        )
 
         contents = [
             "Subject: [~cprov/ubuntu/ppa] bar_1.0-1_source.changes (Rejected)",
             "Could not find person or team named 'boing'",
            "https://help.launchpad.net/Packaging/PPA/Uploading",
             "If you don't understand why your files were rejected please "
-                "send an email",
-            ("to %s for help (requires membership)."
-             % config.launchpad.users_address),
-            "You are receiving this email because you made this upload."]
+            "send an email",
+            (
+                "to %s for help (requires membership)."
+                % config.launchpad.users_address
+            ),
+            "You are receiving this email because you made this upload.",
+        ]
         self.assertEmails([{"contents": contents, "ppa_header": None}])
 
 
 class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
     """Functional test for uploadprocessor.py file-lookups in PPA."""
+
     # XXX cprov 20071204: the DSCFile tests are not yet implemented, this
     # issue should be addressed by bug #106084, while implementing those
     # tests we should revisit this test-suite checking if we have a
@@ -815,11 +909,15 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.NEW)
+            PackageUploadStatus.NEW,
+        )
 
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         queue_item.setAccepted()
         queue_item.realiseUpload()
         transaction.commit()
@@ -831,9 +929,9 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         system.
         """
         try:
-            self.ubuntu.main_archive.getFileByName('bar_1.0.orig.tar.gz')
+            self.ubuntu.main_archive.getFileByName("bar_1.0.orig.tar.gz")
         except NotFoundError:
-            self.fail('bar_1.0.orig.tar.gz is not yet published.')
+            self.fail("bar_1.0.orig.tar.gz is not yet published.")
 
         # Please note: this upload goes to the Ubuntu main archive.
         upload_dir = self.queueUpload("bar_1.0-10")
@@ -842,7 +940,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
     def testPPAReusingOrigFromUbuntu(self):
         """Official 'orig.tar.gz' can be reused for PPA uploads."""
@@ -857,11 +956,13 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Cleanup queue directory in order to re-upload the same source.
         shutil.rmtree(
-            os.path.join(self.queue_folder, 'incoming', 'bar_1.0-10'))
+            os.path.join(self.queue_folder, "incoming", "bar_1.0-10")
+        )
 
         # Upload a higher version of bar that relies on the official
         # orig.tar.gz availability.
@@ -874,9 +975,13 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         publisher.getPubSource(
-            sourcename="bar", version="1.0-1", section="web",
-            archive=self.name16_ppa, distroseries=self.breezy,
-            status=PackagePublishingStatus.PUBLISHED)
+            sourcename="bar",
+            version="1.0-1",
+            section="web",
+            archive=self.name16_ppa,
+            distroseries=self.breezy,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         self.switchToUploader()
 
         # Now upload bar 1.0-3, which has section "devel".
@@ -887,16 +992,19 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # The published section should be "devel" and not "web".
-        pub_sources = self.name16.archive.getPublishedSources(name='bar')
+        pub_sources = self.name16.archive.getPublishedSources(name="bar")
         [pub_bar2, pub_bar1] = pub_sources
 
         section = pub_bar2.section.name
         self.assertEqual(
-            section, 'devel',
-            "Expected a section of 'devel', actually got '%s'" % section)
+            section,
+            "devel",
+            "Expected a section of 'devel', actually got '%s'" % section,
+        )
 
     def testPPAOrigGetsPrecedence(self):
         """When available, the PPA overridden 'orig.tar.gz' gets precedence.
@@ -918,7 +1026,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Make the official bar orig.tar.gz available in the system.
         self.uploadNewBarToUbuntu()
@@ -931,7 +1040,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Upload a higher version of bar that relies on the official
         # orig.tar.gz availability.
@@ -955,7 +1065,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         self.processUpload(self.uploadprocessor, upload_dir)
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # The same 'bar' version will fail due to the conflicting
         # file contents.
@@ -966,16 +1077,17 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         # errors.
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.rejection_message,
-            'File bar_1.0.orig.tar.gz already exists in unicode PPA name: '
-            'áří, but uploaded version has different '
-            'contents. See more information about this error in '
-            'https://help.launchpad.net/Packaging/UploadErrors.\n'
-            'File bar_1.0-1.diff.gz already exists in unicode PPA name: '
-            'áří, but uploaded version has different contents. See more '
-            'information about this error in '
-            'https://help.launchpad.net/Packaging/UploadErrors.\n'
-            'Files specified in DSC are broken or missing, skipping package '
-            'unpack verification.')
+            "File bar_1.0.orig.tar.gz already exists in unicode PPA name: "
+            "áří, but uploaded version has different "
+            "contents. See more information about this error in "
+            "https://help.launchpad.net/Packaging/UploadErrors.\n"
+            "File bar_1.0-1.diff.gz already exists in unicode PPA name: "
+            "áří, but uploaded version has different contents. See more "
+            "information about this error in "
+            "https://help.launchpad.net/Packaging/UploadErrors.\n"
+            "Files specified in DSC are broken or missing, skipping package "
+            "unpack verification.",
+        )
 
         # Also, the email generated should be sane.  Any of the multiple
         # notifications will do.
@@ -984,7 +1096,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertTrue(
             "File bar_1.0.orig.tar.gz already exists in unicode PPA name: "
-            "áří" in body)
+            "áří" in body
+        )
 
     def testErrorMessagesWithArchiveDisplayNameUnicodeArchiveDisabled(self):
         """Check that unicode errors messages are handled correctly.
@@ -1014,7 +1127,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
             "https://help.launchpad.net/Packaging/PPA/Uploading and update "
             "your configuration.\n\n"
             "Further error processing not possible because of a critical "
-            "previous error." in body)
+            "previous error." in body
+        )
 
     def testPPAConflictingOrigFiles(self):
         """When available, the official 'orig.tar.gz' restricts PPA uploads.
@@ -1036,12 +1150,13 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.rejection_message,
-            'File bar_1.0.orig.tar.gz already exists in Primary Archive '
-            'for Ubuntu Linux, but uploaded version has different '
-            'contents. See more information about this error in '
-            'https://help.launchpad.net/Packaging/UploadErrors.\nFiles '
-            'specified in DSC are broken or missing, skipping package '
-            'unpack verification.')
+            "File bar_1.0.orig.tar.gz already exists in Primary Archive "
+            "for Ubuntu Linux, but uploaded version has different "
+            "contents. See more information about this error in "
+            "https://help.launchpad.net/Packaging/UploadErrors.\nFiles "
+            "specified in DSC are broken or missing, skipping package "
+            "unpack verification.",
+        )
 
         # The same happens with higher versions of 'bar' depending on the
         # unofficial 'orig.tar.gz'.
@@ -1050,16 +1165,16 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.rejection_message,
-            'File bar_1.0.orig.tar.gz already exists in Primary Archive for '
-            'Ubuntu Linux, but uploaded version has different contents. See '
-            'more information about this error in '
-            'https://help.launchpad.net/Packaging/UploadErrors.\nFiles '
-            'specified in DSC are broken or missing, skipping package unpack '
-            'verification.')
+            "File bar_1.0.orig.tar.gz already exists in Primary Archive for "
+            "Ubuntu Linux, but uploaded version has different contents. See "
+            "more information about this error in "
+            "https://help.launchpad.net/Packaging/UploadErrors.\nFiles "
+            "specified in DSC are broken or missing, skipping package unpack "
+            "verification.",
+        )
 
         # Cleanup queue directory in order to re-upload the same source.
-        shutil.rmtree(
-            os.path.join(self.queue_folder, 'incoming', 'bar_1.0-1'))
+        shutil.rmtree(os.path.join(self.queue_folder, "incoming", "bar_1.0-1"))
 
         # Only versions of 'bar' matching the official 'orig.tar.gz' will
         # be accepted.
@@ -1068,14 +1183,16 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         upload_dir = self.queueUpload("bar_1.0-10", "~name16/ubuntu")
         self.processUpload(self.uploadprocessor, upload_dir)
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
     def test_conflicting_deleted_orig_file(self):
         # Uploading a conflicting orig file should be disallowed even if
@@ -1084,7 +1201,8 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         self.processUpload(self.uploadprocessor, upload_dir)
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Delete the published file.
         self.switchToAdmin()
@@ -1097,11 +1215,11 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         # different contents than the one we previously uploaded.
         upload_dir = self.queueUpload("bar_1.0-3", "~name16/ubuntu")
         self.processUpload(self.uploadprocessor, upload_dir)
-        self.assertTrue(
-            self.uploadprocessor.last_processed_upload.is_rejected)
+        self.assertTrue(self.uploadprocessor.last_processed_upload.is_rejected)
         self.assertIn(
-            'File bar_1.0.orig.tar.gz already exists in ',
-            self.uploadprocessor.last_processed_upload.rejection_message)
+            "File bar_1.0.orig.tar.gz already exists in ",
+            self.uploadprocessor.last_processed_upload.rejection_message,
+        )
 
     def test30QuiltMultipleReusedOrigs(self):
         """Official orig*.tar.* can be reused for PPA uploads.
@@ -1113,9 +1231,10 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         # We need to accept unsigned .changes and .dscs, and 3.0 (quilt)
         # sources.
         self.switchToAdmin()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         getUtility(ISourcePackageFormatSelectionSet).add(
-            self.breezy, SourcePackageFormat.FORMAT_3_0_QUILT)
+            self.breezy, SourcePackageFormat.FORMAT_3_0_QUILT
+        )
         self.switchToUploader()
 
         # First upload a complete 3.0 (quilt) source to the primary
@@ -1125,11 +1244,15 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.NEW)
+            PackageUploadStatus.NEW,
+        )
 
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         queue_item.setAccepted()
         queue_item.realiseUpload()
         transaction.commit()
@@ -1139,16 +1262,18 @@ class TestPPAUploadProcessorFileLookups(TestPPAUploadProcessorBase):
         # PPA. All of the missing files will be retrieved from the
         # primary archive.
         upload_dir = self.queueUpload(
-            "bar_1.0-2_3.0-quilt_without_orig", "~name16/ubuntu")
+            "bar_1.0-2_3.0-quilt_without_orig", "~name16/ubuntu"
+        )
         self.assertEqual(
-            self.processUpload(self.uploadprocessor, upload_dir),
-            ['accepted'])
+            self.processUpload(self.uploadprocessor, upload_dir), ["accepted"]
+        )
 
         queue_item = self.uploadprocessor.last_processed_upload.queue_root
 
         self.assertEqual(queue_item.status, PackageUploadStatus.DONE)
         self.assertEqual(
-            len(queue_item.sources[0].sourcepackagerelease.files), 7)
+            len(queue_item.sources[0].sourcepackagerelease.files), 7
+        )
 
 
 class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
@@ -1168,11 +1293,13 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         pub_src = publisher.getPubSource(
-            archive=archive, distroseries=self.breezy,
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=archive,
+            distroseries=self.breezy,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         alias_id = pub_src.sourcepackagerelease.files[0].libraryfile.id
 
-        switch_dbuser('librariangc')
+        switch_dbuser("librariangc")
         content = getUtility(ILibraryFileAliasSet)[alias_id].content
         # Decrement the archive index cruft automatically added by
         # IArchive.estimated_size.
@@ -1192,13 +1319,13 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
         """
         # Stuff 2048 MiB in name16 PPA, so anything will be above the
         # default quota limit, 2048 MiB.
-        self._fillArchive(self.name16.archive, 2048 * (2 ** 20))
+        self._fillArchive(self.name16.archive, 2048 * (2**20))
 
         upload_dir = self.queueUpload("bar_1.0-1", "~name16/ubuntu")
         upload_results = self.processUpload(self.uploadprocessor, upload_dir)
 
         # Upload got rejected.
-        self.assertEqual(upload_results, ['rejected'])
+        self.assertEqual(upload_results, ["rejected"])
 
         # An email communicating the rejection and the reason why it was
         # rejected is sent to the uploaders.
@@ -1208,7 +1335,8 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
             "Rejected:",
             "PPA exceeded its size limit (2048.00 of 2048.00 MiB). "
             "Ask a question in https://answers.launchpad.net/soyuz/ "
-            "if you need more space."]
+            "if you need more space.",
+        ]
         self.assertEmails([{"contents": contents}])
 
     def testPPASizeNoQuota(self):
@@ -1216,11 +1344,13 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
         upload_dir = self.queueUpload("bar_1.0-1", "~name16/ubuntu")
         self.processUpload(self.uploadprocessor, upload_dir)
         contents = [
-            "Subject: [~name16/ubuntu/ppa/breezy] bar 1.0-1 (Accepted)"]
+            "Subject: [~name16/ubuntu/ppa/breezy] bar 1.0-1 (Accepted)"
+        ]
         self.assertEmails([{"contents": contents}])
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
     def testPPASizeQuotaSourceWarning(self):
         """Verify the size quota warning for PPA near size limit.
@@ -1230,7 +1360,7 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
         """
         # Stuff 1945 MiB into name16 PPA, approximately 95 % of
         # the default quota limit, 2048 MiB.
-        self._fillArchive(self.name16.archive, 2000 * (2 ** 20))
+        self._fillArchive(self.name16.archive, 2000 * (2**20))
 
         # Ensure the warning is sent in the acceptance notification.
         upload_dir = self.queueUpload("bar_1.0-1", "~name16/ubuntu")
@@ -1240,14 +1370,16 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
             "Upload Warnings:",
             "PPA exceeded 95 % of its size limit (2000.00 of 2048.00 MiB). "
             "Ask a question in https://answers.launchpad.net/soyuz/ "
-            "if you need more space."]
+            "if you need more space.",
+        ]
         self.assertEmails([{"contents": contents}])
 
         # User was warned about quota limits but the source was accepted
         # as informed in the upload notification.
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
     def testPPADoNotCheckSizeQuotaForBinary(self):
         """Verify the size quota check for internal binary PPA uploads.
@@ -1261,26 +1393,30 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
 
         self.assertEqual(
             self.uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.DONE)
+            PackageUploadStatus.DONE,
+        )
 
         # Retrieve the build record for source bar in breezy-i386
         # distroarchseries, and setup a appropriate upload policy
         # in preparation to the corresponding binary upload.
-        [build] = self.name16.archive.getBuildRecords(name='bar')
-        self.options.context = 'buildd'
+        [build] = self.name16.archive.getBuildRecords(name="bar")
+        self.options.context = "buildd"
 
         # Stuff 2048 MiB in name16 PPA, so anything will be above the
         # default quota limit, 2048 MiB.
-        self._fillArchive(self.name16.archive, 2048 * (2 ** 20))
+        self._fillArchive(self.name16.archive, 2048 * (2**20))
 
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(
-            self.build_uploadprocessor, upload_dir, build=build)
+        self.processUpload(self.build_uploadprocessor, upload_dir, build=build)
 
         # The binary upload was accepted, and it's waiting in the queue.
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED, name="bar",
-            version="1.0-1", exact_match=True, archive=self.name16.archive)
+            status=PackageUploadStatus.ACCEPTED,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+            archive=self.name16.archive,
+        )
         self.assertEqual(queue_items.count(), 1)
 
     def testArchiveBinarySize(self):
@@ -1298,19 +1434,23 @@ class TestPPAUploadProcessorQuotaChecks(TestPPAUploadProcessorBase):
 
         # Publish To Breezy:
         publisher.getPubBinaries(
-            archive=self.name16.archive, distroseries=self.breezy,
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=self.name16.archive,
+            distroseries=self.breezy,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
 
         # Create chroot for warty/i386, allowing binaries to build and
         # thus be published in this architecture.
-        warty = self.ubuntu['warty']
-        fake_chroot = self.addMockFile('fake_chroot.tar.gz')
-        warty['i386'].addOrUpdateChroot(fake_chroot)
+        warty = self.ubuntu["warty"]
+        fake_chroot = self.addMockFile("fake_chroot.tar.gz")
+        warty["i386"].addOrUpdateChroot(fake_chroot)
 
         # Publish To Warty:
         publisher.getPubBinaries(
-            archive=self.name16.archive, distroseries=warty,
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=self.name16.archive,
+            distroseries=warty,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
 
         self.switchToUploader()
 
diff --git a/lib/lp/archiveuploader/tests/test_private_maintainers.py b/lib/lp/archiveuploader/tests/test_private_maintainers.py
index 9500ec2..7576d2c 100644
--- a/lib/lp/archiveuploader/tests/test_private_maintainers.py
+++ b/lib/lp/archiveuploader/tests/test_private_maintainers.py
@@ -4,10 +4,7 @@
 from lp.archiveuploader.dscfile import SignableTagFile
 from lp.archiveuploader.nascentuploadfile import UploadError
 from lp.registry.interfaces.person import PersonVisibility
-from lp.testing import (
-    celebrity_logged_in,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, celebrity_logged_in
 from lp.testing.layers import DatabaseFunctionalLayer
 
 
@@ -17,10 +14,14 @@ class TestPrivateMaintainers(TestCaseWithFactory):
 
     def test_private_team_maintainer(self):
         # Maintainers can not be private teams.
-        with celebrity_logged_in('admin'):
+        with celebrity_logged_in("admin"):
             self.factory.makeTeam(
-                email="foo@xxxxxxx", visibility=PersonVisibility.PRIVATE)
+                email="foo@xxxxxxx", visibility=PersonVisibility.PRIVATE
+            )
         sigfile = SignableTagFile()
         self.assertRaisesWithContent(
-            UploadError, 'Invalid Maintainer.', sigfile.parseAddress,
-            "foo@xxxxxxx")
+            UploadError,
+            "Invalid Maintainer.",
+            sigfile.parseAddress,
+            "foo@xxxxxxx",
+        )
diff --git a/lib/lp/archiveuploader/tests/test_processupload.py b/lib/lp/archiveuploader/tests/test_processupload.py
index 6bfed0f..e3f14de 100644
--- a/lib/lp/archiveuploader/tests/test_processupload.py
+++ b/lib/lp/archiveuploader/tests/test_processupload.py
@@ -16,6 +16,7 @@ from lp.testing.layers import LaunchpadZopelessLayer
 
 class TestProcessUpload(unittest.TestCase):
     """Test the process-upload.py script."""
+
     layer = LaunchpadZopelessLayer
 
     def setUp(self):
@@ -32,7 +33,8 @@ class TestProcessUpload(unittest.TestCase):
         args = [script, "-vvv", self.queue_location]
         args.extend(extra_args)
         process = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
         stdout, stderr = process.communicate()
         return (process.returncode, stdout, stderr)
 
@@ -40,7 +42,8 @@ class TestProcessUpload(unittest.TestCase):
         """Check if given path exists within the current queue_location."""
         probe_path = os.path.join(self.queue_location, path)
         self.assertTrue(
-            os.path.exists(probe_path), "'%s' does not exist." % path)
+            os.path.exists(probe_path), "'%s' does not exist." % path
+        )
 
     def testSimpleRun(self):
         """Try a simple process-upload run.
@@ -51,24 +54,26 @@ class TestProcessUpload(unittest.TestCase):
         It should also generate some scriptactivity.
         """
         # No scriptactivity should exist before it's run.
-        activity = getUtility(
-            IScriptActivitySet).getLastActivity('process-upload')
+        activity = getUtility(IScriptActivitySet).getLastActivity(
+            "process-upload"
+        )
         self.assertTrue(activity is None, "'activity' should be None")
 
         returncode, out, err = self.runProcessUpload()
         self.assertEqual(0, returncode)
 
         # There should now be some scriptactivity.
-        activity = getUtility(
-            IScriptActivitySet).getLastActivity('process-upload')
+        activity = getUtility(IScriptActivitySet).getLastActivity(
+            "process-upload"
+        )
         self.assertFalse(activity is None, "'activity' should not be None")
 
         # directory tree in place.
-        for directory in ['incoming', 'accepted', 'rejected', 'failed']:
+        for directory in ["incoming", "accepted", "rejected", "failed"]:
             self.assertQueuePath(directory)
 
         # just to check if local assertion is working as expect.
-        self.assertRaises(AssertionError, self.assertQueuePath, 'foobar')
+        self.assertRaises(AssertionError, self.assertQueuePath, "foobar")
 
         # Explicitly mark the database dirty.
         self.layer.force_dirty_database()
diff --git a/lib/lp/archiveuploader/tests/test_recipeuploads.py b/lib/lp/archiveuploader/tests/test_recipeuploads.py
index 013f5ef..7702c81 100644
--- a/lib/lp/archiveuploader/tests/test_recipeuploads.py
+++ b/lib/lp/archiveuploader/tests/test_recipeuploads.py
@@ -10,19 +10,15 @@ from zope.component import getUtility
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
-    )
+)
 
 
 class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
-
     def setUp(self):
         super().setUp()
 
@@ -34,14 +30,17 @@ class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
             distroseries=self.breezy,
             recipe=self.recipe,
             archive=self.factory.makeArchive(
-                distribution=self.ubuntu, owner=self.recipe.owner),
-            requester=self.recipe.owner)
+                distribution=self.ubuntu, owner=self.recipe.owner
+            ),
+            requester=self.recipe.owner,
+        )
         Store.of(self.build).flush()
         self.switchToUploader()
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
 
         self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
 
     def testSetsBuildAndState(self):
         # Ensure that the upload processor correctly links the SPR to
@@ -50,16 +49,21 @@ class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
         # (it does not matter who) on SPRB.{status,upload_log}.
         self.assertIs(None, self.build.source_package_release)
         self.assertEqual(False, self.build.verifySuccessfulUpload())
-        self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
+        self.queueUpload("bar_1.0-1", "%d/ubuntu" % self.build.archive.id)
         fsroot = os.path.join(self.queue_folder, "incoming")
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, fsroot, 'bar_1.0-1', self.build)
+            self.uploadprocessor, fsroot, "bar_1.0-1", self.build
+        )
         result = handler.processChangesFile(
-            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
+            "%d/ubuntu/bar_1.0-1_source.changes" % self.build.archive.id
+        )
         self.layer.txn.commit()
 
-        self.assertEqual(UploadStatusEnum.ACCEPTED, result,
-            "Source upload failed\nGot: %s" % self.log.getLogBuffer())
+        self.assertEqual(
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "Source upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
 
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertEqual(True, self.build.verifySuccessfulUpload())
diff --git a/lib/lp/archiveuploader/tests/test_snapupload.py b/lib/lp/archiveuploader/tests/test_snapupload.py
index f7a9e14..08421fd 100644
--- a/lib/lp/archiveuploader/tests/test_snapupload.py
+++ b/lib/lp/archiveuploader/tests/test_snapupload.py
@@ -11,11 +11,8 @@ from zope.component import getUtility
 
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
-    )
-from lp.archiveuploader.uploadprocessor import (
-    UploadHandler,
-    UploadStatusEnum,
-    )
+)
+from lp.archiveuploader.uploadprocessor import UploadHandler, UploadStatusEnum
 from lp.buildmaster.enums import BuildStatus
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.osutils import write_file
@@ -33,35 +30,45 @@ class TestSnapBuildUploads(TestUploadProcessorBase):
         self.switchToAdmin()
         self.snap = self.factory.makeSnap()
         self.build = getUtility(ISnapBuildSet).new(
-            requester=self.snap.owner, snap=self.snap,
+            requester=self.snap.owner,
+            snap=self.snap,
             archive=self.factory.makeArchive(
-                distribution=self.ubuntu, owner=self.snap.owner),
+                distribution=self.ubuntu, owner=self.snap.owner
+            ),
             distro_arch_series=self.breezy["i386"],
-            pocket=PackagePublishingPocket.RELEASE)
+            pocket=PackagePublishingPocket.RELEASE,
+        )
         self.build.updateStatus(BuildStatus.UPLOADING)
         Store.of(self.build).flush()
         self.switchToUploader()
         self.options.context = "buildd"
 
         self.uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
 
     def test_sets_build_and_state(self):
         # The upload processor uploads files and sets the correct status.
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "wget_0_all.snap"), b"snap")
         write_file(
-            os.path.join(upload_dir, "wget_0_all.manifest"), b"manifest")
-        write_file(os.path.join(upload_dir, "wget_0_all.dpkg.yaml"),
-                   b"yaml file")
+            os.path.join(upload_dir, "wget_0_all.manifest"), b"manifest"
+        )
+        write_file(
+            os.path.join(upload_dir, "wget_0_all.dpkg.yaml"), b"yaml file"
+        )
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processSnap(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "Snap upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "Snap upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
         self.assertEqual(3, len(list(self.build.getFiles())))
@@ -71,26 +78,33 @@ class TestSnapBuildUploads(TestUploadProcessorBase):
         # .snap files.
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(
-            os.path.join(upload_dir, "wget_0_all.manifest"), b"manifest")
+            os.path.join(upload_dir, "wget_0_all.manifest"), b"manifest"
+        )
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processSnap(self.log)
         self.assertEqual(UploadStatusEnum.REJECTED, result)
         self.assertIn(
             "ERROR Build did not produce any snap packages.",
-            self.log.getLogBuffer())
+            self.log.getLogBuffer(),
+        )
         self.assertFalse(self.build.verifySuccessfulUpload())
 
     def test_triggers_store_uploads(self):
         # The upload processor triggers store uploads if appropriate.
         self.pushConfig(
-            "snappy", store_url="http://sca.example/";,
-            store_upload_url="http://updown.example/";)
+            "snappy",
+            store_url="http://sca.example/";,
+            store_upload_url="http://updown.example/";,
+        )
         self.switchToAdmin()
         self.snap.store_series = self.factory.makeSnappySeries(
-            usable_distro_series=[self.snap.distro_series])
+            usable_distro_series=[self.snap.distro_series]
+        )
         self.snap.store_name = self.snap.name
         self.snap.store_upload = True
         self.snap.store_secrets = {"root": Macaroon().serialize()}
@@ -98,16 +112,21 @@ class TestSnapBuildUploads(TestUploadProcessorBase):
         self.switchToUploader()
         self.assertFalse(self.build.verifySuccessfulUpload())
         upload_dir = os.path.join(
-            self.incoming_folder, "test", str(self.build.id), "ubuntu")
+            self.incoming_folder, "test", str(self.build.id), "ubuntu"
+        )
         write_file(os.path.join(upload_dir, "wget_0_all.snap"), b"snap")
-        write_file(os.path.join(upload_dir, "wget_0_all.dpkg.yaml"),
-                   b"yaml file")
+        write_file(
+            os.path.join(upload_dir, "wget_0_all.dpkg.yaml"), b"yaml file"
+        )
         handler = UploadHandler.forProcessor(
-            self.uploadprocessor, self.incoming_folder, "test", self.build)
+            self.uploadprocessor, self.incoming_folder, "test", self.build
+        )
         result = handler.processSnap(self.log)
         self.assertEqual(
-            UploadStatusEnum.ACCEPTED, result,
-            "Snap upload failed\nGot: %s" % self.log.getLogBuffer())
+            UploadStatusEnum.ACCEPTED,
+            result,
+            "Snap upload failed\nGot: %s" % self.log.getLogBuffer(),
+        )
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertTrue(self.build.verifySuccessfulUpload())
         self.assertEqual(1, len(list(self.build.store_upload_jobs)))
diff --git a/lib/lp/archiveuploader/tests/test_sync_notification.py b/lib/lp/archiveuploader/tests/test_sync_notification.py
index 893e391..7e5ca9f 100644
--- a/lib/lp/archiveuploader/tests/test_sync_notification.py
+++ b/lib/lp/archiveuploader/tests/test_sync_notification.py
@@ -7,25 +7,16 @@ import os.path
 
 from zope.component import getUtility
 
-from lp.archiveuploader.nascentupload import (
-    NascentUpload,
-    UploadError,
-    )
+from lp.archiveuploader.nascentupload import NascentUpload, UploadError
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.log.logger import DevNullLogger
-from lp.soyuz.enums import (
-    ArchivePermissionType,
-    SourcePackageFormat,
-    )
+from lp.soyuz.enums import ArchivePermissionType, SourcePackageFormat
 from lp.soyuz.interfaces.sourcepackageformat import (
     ISourcePackageFormatSelectionSet,
-    )
+)
 from lp.soyuz.model.archivepermission import ArchivePermission
 from lp.soyuz.scripts.packagecopier import do_copy
-from lp.testing import (
-    login,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, login
 from lp.testing.fakemethod import FakeMethod
 from lp.testing.layers import LaunchpadZopelessLayer
 from lp.testing.mail_helpers import pop_notifications
@@ -48,9 +39,9 @@ class FakeChangesFile:
         self.files = []
         self.filepath = file_path
         self.filename = os.path.basename(file_path)
-        self.architectures = ['i386']
-        self.suite_name = '-'.join([spph.distroseries.name, spph.pocket.name])
-        with open(file_path, 'rb') as f:
+        self.architectures = ["i386"]
+        self.suite_name = "-".join([spph.distroseries.name, spph.pocket.name])
+        with open(file_path, "rb") as f:
             self.raw_content = f.read()
         self.signingkey = None
 
@@ -74,32 +65,45 @@ class TestSyncNotification(TestCaseWithFactory):
     def makeSPPH(self, distroseries, maintainer_address):
         """Create a `SourcePackagePublishingHistory`."""
         return self.factory.makeSourcePackagePublishingHistory(
-            distroseries=distroseries, pocket=PackagePublishingPocket.RELEASE,
-            dsc_maintainer_rfc822=maintainer_address)
+            distroseries=distroseries,
+            pocket=PackagePublishingPocket.RELEASE,
+            dsc_maintainer_rfc822=maintainer_address,
+        )
 
     def makeUploader(self, person, archive, component):
         """Grant a person upload privileges for archive/component."""
         ArchivePermission(
-            person=person, archive=archive, component=component,
-            permission=ArchivePermissionType.UPLOAD)
+            person=person,
+            archive=archive,
+            component=component,
+            permission=ArchivePermissionType.UPLOAD,
+        )
 
     def syncSource(self, spph, target_distroseries, requester):
         """Sync `spph` into `target_distroseries`."""
         getUtility(ISourcePackageFormatSelectionSet).add(
-            target_distroseries, SourcePackageFormat.FORMAT_1_0)
+            target_distroseries, SourcePackageFormat.FORMAT_1_0
+        )
         target_archive = target_distroseries.main_archive
         self.makeUploader(requester, target_archive, spph.component)
         [synced_spph] = do_copy(
-            [spph], target_archive, target_distroseries,
-            pocket=spph.pocket, person=requester, close_bugs=False)
+            [spph],
+            target_archive,
+            target_distroseries,
+            pocket=spph.pocket,
+            person=requester,
+            close_bugs=False,
+        )
         return synced_spph
 
-    def makeChangesFile(self, spph, maintainer, maintainer_address,
-                        changer, changer_address):
+    def makeChangesFile(
+        self, spph, maintainer, maintainer_address, changer, changer_address
+    ):
         temp_dir = self.makeTemporaryDirectory()
         changes_file = os.path.join(
-            temp_dir, "%s.changes" % spph.source_package_name)
-        with open(changes_file, 'w') as changes:
+            temp_dir, "%s.changes" % spph.source_package_name
+        )
+        with open(changes_file, "w") as changes:
             changes.write(
                 "Maintainer: %s <%s>\n"
                 "Changed-By: %s <%s>\n"
@@ -108,35 +112,41 @@ class TestSyncNotification(TestCaseWithFactory):
                     maintainer_address,
                     changer.name,
                     changer_address,
-                    ))
+                )
+            )
         return FakeChangesFile(spph, changes_file)
 
-    def makeNascentUpload(self, spph, maintainer, maintainer_address,
-                          changer, changer_address):
+    def makeNascentUpload(
+        self, spph, maintainer, maintainer_address, changer, changer_address
+    ):
         """Create a `NascentUpload` for `spph`."""
         changes = self.makeChangesFile(
-            spph, maintainer, maintainer_address, changer, changer_address)
+            spph, maintainer, maintainer_address, changer, changer_address
+        )
         upload = NascentUpload(
-            changes, FakeUploadPolicy(spph), DevNullLogger())
+            changes, FakeUploadPolicy(spph), DevNullLogger()
+        )
         upload.queue_root = upload._createQueueEntry()
-        das = self.factory.makeDistroArchSeries(
-            distroseries=spph.distroseries)
+        das = self.factory.makeDistroArchSeries(distroseries=spph.distroseries)
         bpb = self.factory.makeBinaryPackageBuild(
             source_package_release=spph.sourcepackagerelease,
-            archive=spph.archive, distroarchseries=das, pocket=spph.pocket,
-            sourcepackagename=spph.sourcepackagename)
+            archive=spph.archive,
+            distroarchseries=das,
+            pocket=spph.pocket,
+            sourcepackagename=spph.sourcepackagename,
+        )
         upload.queue_root.addBuild(bpb)
         return upload
 
     def processAndRejectUpload(self, nascent_upload):
         nascent_upload.process()
         # Obtain the required privileges for do_reject.
-        login('foo.bar@xxxxxxxxxxxxx')
+        login("foo.bar@xxxxxxxxxxxxx")
         nascent_upload.do_reject(notify=True)
 
     def getNotifiedAddresses(self):
         """Get email addresses that were notified."""
-        return [message['to'] for message in pop_notifications()]
+        return [message["to"] for message in pop_notifications()]
 
     def test_failed_copy_builds_do_not_spam_upstream(self):
         """Failed builds do not spam people who are not responsible for them.
@@ -164,14 +174,19 @@ class TestSyncNotification(TestCaseWithFactory):
         original_spph = self.makeSPPH(dsp.parent_series, maintainer_address)
         sync_requester, syncer_address = self.makePersonWithEmail()
         synced_spph = self.syncSource(
-            original_spph, dsp.derived_series, sync_requester)
+            original_spph, dsp.derived_series, sync_requester
+        )
         nascent_upload = self.makeNascentUpload(
-            synced_spph, maintainer, maintainer_address,
-            changer, changer_address)
+            synced_spph,
+            maintainer,
+            maintainer_address,
+            changer,
+            changer_address,
+        )
         pop_notifications()
         self.processAndRejectUpload(nascent_upload)
 
-        notified_addresses = '\n'.join(self.getNotifiedAddresses())
+        notified_addresses = "\n".join(self.getNotifiedAddresses())
 
         self.assertNotIn(maintainer_address, notified_addresses)
         self.assertNotIn(changer_address, notified_addresses)
diff --git a/lib/lp/archiveuploader/tests/test_tagfiles.py b/lib/lp/archiveuploader/tests/test_tagfiles.py
index 522ad64..108d9ee 100755
--- a/lib/lp/archiveuploader/tests/test_tagfiles.py
+++ b/lib/lp/archiveuploader/tests/test_tagfiles.py
@@ -7,18 +7,14 @@ import unittest
 
 import apt_pkg
 
-from lp.archiveuploader.tagfiles import (
-    parse_tagfile,
-    TagFileParseError,
-    )
+from lp.archiveuploader.tagfiles import TagFileParseError, parse_tagfile
 from lp.archiveuploader.tests import datadir
 
 
 class Testtagfiles(unittest.TestCase):
-
     def testCheckParseChangesOkay(self):
         """lp.archiveuploader.tagfiles.parse_tagfile should work on a good
-           changes file
+        changes file
         """
         parse_tagfile(datadir("good-signed-changes"))
 
@@ -29,7 +25,7 @@ class Testtagfiles(unittest.TestCase):
         reject them if it can't understand.
         """
         parsed = parse_tagfile(datadir("bad-multiline-changes"))
-        self.assertEqual(b'unstable', parsed['Distribution'])
+        self.assertEqual(b"unstable", parsed["Distribution"])
 
     def testCheckParseMalformedMultiline(self):
         """Malformed but somewhat readable files do not raise an exception.
@@ -38,34 +34,38 @@ class Testtagfiles(unittest.TestCase):
         reject them if it can't understand.
         """
         parsed = parse_tagfile(datadir("bad-multiline-changes"))
-        self.assertEqual(b'unstable', parsed['Distribution'])
-        self.assertRaises(KeyError, parsed.__getitem__, 'Fish')
+        self.assertEqual(b"unstable", parsed["Distribution"])
+        self.assertRaises(KeyError, parsed.__getitem__, "Fish")
 
     def testCheckParseEmptyChangesRaises(self):
         """lp.archiveuploader.tagfiles.parse_chantges should raise
-           TagFileParseError on empty
+        TagFileParseError on empty
         """
-        self.assertRaises(TagFileParseError,
-                          parse_tagfile, datadir("empty-file"))
+        self.assertRaises(
+            TagFileParseError, parse_tagfile, datadir("empty-file")
+        )
 
     def testCheckParseMalformedSigRaises(self):
         """lp.archiveuploader.tagfiles.parse_chantges should raise
-           TagFileParseError on malformed signatures
+        TagFileParseError on malformed signatures
         """
-        self.assertRaises(TagFileParseError,
-                          parse_tagfile, datadir("malformed-sig-changes"))
+        self.assertRaises(
+            TagFileParseError, parse_tagfile, datadir("malformed-sig-changes")
+        )
 
     def testCheckParseUnterminatedSigRaises(self):
         """lp.archiveuploader.tagfiles.parse_changes should raise
-           TagFileParseError on unterminated signatures
+        TagFileParseError on unterminated signatures
         """
-        self.assertRaises(TagFileParseError,
-                          parse_tagfile,
-                          datadir("unterminated-sig-changes"))
+        self.assertRaises(
+            TagFileParseError,
+            parse_tagfile,
+            datadir("unterminated-sig-changes"),
+        )
 
     def testParseChangesNotVulnerableToArchExploit(self):
         """lp.archiveuploader.tagfiles.parse_tagfile should not be vulnerable
-           to tags outside of the signed portion
+        to tags outside of the signed portion
         """
         tf = parse_tagfile(datadir("changes-with-exploit-top"))
         self.assertRaises(KeyError, tf.__getitem__, "you")
@@ -74,7 +74,6 @@ class Testtagfiles(unittest.TestCase):
 
 
 class TestTagFileDebianPolicyCompat(unittest.TestCase):
-
     def setUp(self):
         """Parse the test file using apt_pkg for comparison."""
 
@@ -96,17 +95,16 @@ class TestTagFileDebianPolicyCompat(unittest.TestCase):
         """
 
         expected_bytes = (
-            b'test75874 anotherbinary\n'
-            b' andanother andonemore\n'
-            b'\tlastone')
+            b"test75874 anotherbinary\n"
+            b" andanother andonemore\n"
+            b"\tlastone"
+        )
 
         self.assertEqual(
-            expected_bytes,
-            self.apt_pkg_parsed_version.section['Binary'])
+            expected_bytes, self.apt_pkg_parsed_version.section["Binary"]
+        )
 
-        self.assertEqual(
-            expected_bytes,
-            self.parse_tagfile_version['Binary'])
+        self.assertEqual(expected_bytes, self.parse_tagfile_version["Binary"])
 
     def test_parse_tagfile_with_newline_delimited_field(self):
         """parse_tagfile should not leave leading or tailing '\n' when
@@ -122,19 +120,18 @@ class TestTagFileDebianPolicyCompat(unittest.TestCase):
         """  # noqa: E501
 
         expected_bytes = (
-            b'f26bb9b29b1108e53139da3584a4dc92 1511 test75874_0.1.tar.gz\n '
-            b'29c955ff520cea32ab3e0316306d0ac1 393742 '
-                b'pmount_0.9.7.orig.tar.gz\n'
-            b' 91a8f46d372c406fadcb57c6ff7016f3 5302 '
-                b'pmount_0.9.7-2ubuntu2.diff.gz')
+            b"f26bb9b29b1108e53139da3584a4dc92 1511 test75874_0.1.tar.gz\n "
+            b"29c955ff520cea32ab3e0316306d0ac1 393742 "
+            b"pmount_0.9.7.orig.tar.gz\n"
+            b" 91a8f46d372c406fadcb57c6ff7016f3 5302 "
+            b"pmount_0.9.7-2ubuntu2.diff.gz"
+        )
 
         self.assertEqual(
-            expected_bytes,
-            self.apt_pkg_parsed_version.section['Files'])
+            expected_bytes, self.apt_pkg_parsed_version.section["Files"]
+        )
 
-        self.assertEqual(
-            expected_bytes,
-            self.parse_tagfile_version['Files'])
+        self.assertEqual(expected_bytes, self.parse_tagfile_version["Files"])
 
     def test_parse_description_field(self):
         """Apt-pkg preserves the blank-line indicator and does not strip
@@ -151,16 +148,17 @@ class TestTagFileDebianPolicyCompat(unittest.TestCase):
             b" it will be displayed verbatim. Like this one:\n"
             b"  Example verbatim line.\n"
             b"    Another verbatim line.\n"
-            b" OK, back to normal.")
+            b" OK, back to normal."
+        )
 
         self.assertEqual(
-            expected_bytes,
-            self.apt_pkg_parsed_version.section['Description'])
+            expected_bytes, self.apt_pkg_parsed_version.section["Description"]
+        )
 
         # In the past our parse_tagfile function replaced blank-line
         # indicators in the description (' .\n') with new lines ('\n'),
         # but it is now compatible with ParseTagFiles (and ready to be
         # replaced by ParseTagFiles).
         self.assertEqual(
-            expected_bytes,
-            self.parse_tagfile_version['Description'])
+            expected_bytes, self.parse_tagfile_version["Description"]
+        )
diff --git a/lib/lp/archiveuploader/tests/test_uploadpolicy.py b/lib/lp/archiveuploader/tests/test_uploadpolicy.py
index 6de69d5..042901f 100644
--- a/lib/lp/archiveuploader/tests/test_uploadpolicy.py
+++ b/lib/lp/archiveuploader/tests/test_uploadpolicy.py
@@ -11,25 +11,24 @@ from lp.archiveuploader.nascentuploadfile import CustomUploadFile
 from lp.archiveuploader.uploadpolicy import (
     AbstractUploadPolicy,
     ArchiveUploadType,
-    findPolicyByName,
     IArchiveUploadPolicy,
     InsecureUploadPolicy,
-    )
+    findPolicyByName,
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.series import SeriesStatus
 from lp.services.database.sqlbase import flush_database_updates
 from lp.testing import (
-    celebrity_logged_in,
-    person_logged_in,
     TestCase,
     TestCaseWithFactory,
-    )
+    celebrity_logged_in,
+    person_logged_in,
+)
 from lp.testing.layers import DatabaseFunctionalLayer
 
 
 class FakeNascentUpload:
-
     def __init__(self, sourceful, binaryful, is_ppa):
         self.sourceful = sourceful
         self.binaryful = binaryful
@@ -95,8 +94,9 @@ class TestUploadPolicy_validateUploadType(TestCase):
         policy.validateUploadType(upload)
 
         self.assertIn(
-            'Sourceful uploads are not accepted by this policy.',
-            upload.rejections)
+            "Sourceful uploads are not accepted by this policy.",
+            upload.rejections,
+        )
 
     def test_binaryful_not_accepted(self):
         policy = make_policy(accepted_type=ArchiveUploadType.SOURCE_ONLY)
@@ -106,8 +106,9 @@ class TestUploadPolicy_validateUploadType(TestCase):
 
         self.assertTrue(len(upload.rejections) > 0)
         self.assertIn(
-            'Upload rejected because it contains binary packages.',
-            upload.rejections[0])
+            "Upload rejected because it contains binary packages.",
+            upload.rejections[0],
+        )
 
     def test_mixed_not_accepted(self):
         policy = make_policy(accepted_type=ArchiveUploadType.SOURCE_ONLY)
@@ -116,8 +117,9 @@ class TestUploadPolicy_validateUploadType(TestCase):
         policy.validateUploadType(upload)
 
         self.assertIn(
-            'Source/binary (i.e. mixed) uploads are not allowed.',
-            upload.rejections)
+            "Source/binary (i.e. mixed) uploads are not allowed.",
+            upload.rejections,
+        )
 
     def test_sourceful_when_only_mixed_accepted(self):
         policy = make_policy(accepted_type=ArchiveUploadType.MIXED_ONLY)
@@ -126,8 +128,9 @@ class TestUploadPolicy_validateUploadType(TestCase):
         policy.validateUploadType(upload)
 
         self.assertIn(
-            'Sourceful uploads are not accepted by this policy.',
-            upload.rejections)
+            "Sourceful uploads are not accepted by this policy.",
+            upload.rejections,
+        )
 
     def test_binaryful_when_only_mixed_accepted(self):
         policy = make_policy(accepted_type=ArchiveUploadType.MIXED_ONLY)
@@ -137,8 +140,9 @@ class TestUploadPolicy_validateUploadType(TestCase):
 
         self.assertTrue(len(upload.rejections) > 0)
         self.assertIn(
-            'Upload rejected because it contains binary packages.',
-            upload.rejections[0])
+            "Upload rejected because it contains binary packages.",
+            upload.rejections[0],
+        )
 
 
 class TestUploadPolicy(TestCaseWithFactory):
@@ -194,8 +198,10 @@ class TestUploadPolicy(TestCaseWithFactory):
         policy = AbstractUploadPolicy()
         policy.distro = self.factory.makeDistribution()
         self.assertRaises(
-            NotFoundError, policy.setDistroSeriesAndPocket,
-            'nonexistent_security')
+            NotFoundError,
+            policy.setDistroSeriesAndPocket,
+            "nonexistent_security",
+        )
 
     def test_setDistroSeriesAndPocket_honours_aliases(self):
         # setDistroSeriesAndPocket honours uploads to the development series
@@ -203,9 +209,11 @@ class TestUploadPolicy(TestCaseWithFactory):
         policy = AbstractUploadPolicy()
         policy.distro = self.factory.makeDistribution()
         series = self.factory.makeDistroSeries(
-            distribution=policy.distro, status=SeriesStatus.DEVELOPMENT)
+            distribution=policy.distro, status=SeriesStatus.DEVELOPMENT
+        )
         self.assertRaises(
-            NotFoundError, policy.setDistroSeriesAndPocket, "devel")
+            NotFoundError, policy.setDistroSeriesAndPocket, "devel"
+        )
         with person_logged_in(policy.distro.owner):
             policy.distro.development_series_alias = "devel"
         policy.setDistroSeriesAndPocket("devel")
@@ -223,7 +231,8 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy.setOptions(FakeOptions(distroseries="hoary"))
         self.assertEqual("hoary", insecure_policy.distroseries.name)
         self.assertEqual(
-            PackagePublishingPocket.PROPOSED, insecure_policy.pocket)
+            PackagePublishingPocket.PROPOSED, insecure_policy.pocket
+        )
 
     def test_redirect_release_uploads_ppa(self):
         # The Distribution.redirect_release_uploads flag does not affect PPA
@@ -237,7 +246,8 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy.setOptions(FakeOptions(distroseries="hoary"))
         self.assertEqual("hoary", insecure_policy.distroseries.name)
         self.assertEqual(
-            PackagePublishingPocket.RELEASE, insecure_policy.pocket)
+            PackagePublishingPocket.RELEASE, insecure_policy.pocket
+        )
 
     def setHoaryStatus(self, status):
         ubuntu = getUtility(IDistributionSet)["ubuntu"]
@@ -251,10 +261,12 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy = findPolicyByName("insecure")
         insecure_policy.setOptions(FakeOptions(distroseries="hoary"))
         self.assertEqual(
-            SeriesStatus.DEVELOPMENT, insecure_policy.distroseries.status)
+            SeriesStatus.DEVELOPMENT, insecure_policy.distroseries.status
+        )
         self.assertTrue(insecure_policy.autoApprove(make_fake_upload()))
-        self.assertTrue(insecure_policy.autoApprove(
-            make_fake_upload(is_ppa=True)))
+        self.assertTrue(
+            insecure_policy.autoApprove(make_fake_upload(is_ppa=True))
+        )
 
     def test_insecure_approves_proposed(self):
         # Uploads to the PROPOSED pocket of non-FROZEN distroseries are
@@ -262,7 +274,8 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy = findPolicyByName("insecure")
         insecure_policy.setOptions(FakeOptions(distroseries="hoary-proposed"))
         self.assertEqual(
-            SeriesStatus.DEVELOPMENT, insecure_policy.distroseries.status)
+            SeriesStatus.DEVELOPMENT, insecure_policy.distroseries.status
+        )
         self.assertTrue(insecure_policy.autoApprove(make_fake_upload()))
 
     def test_insecure_does_not_approve_proposed_post_release(self):
@@ -280,8 +293,9 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy = findPolicyByName("insecure")
         insecure_policy.setOptions(FakeOptions(distroseries="hoary-proposed"))
         self.assertFalse(insecure_policy.autoApprove(make_fake_upload()))
-        self.assertTrue(insecure_policy.autoApprove(
-            make_fake_upload(is_ppa=True)))
+        self.assertTrue(
+            insecure_policy.autoApprove(make_fake_upload(is_ppa=True))
+        )
 
     def test_insecure_does_not_approve_updates(self):
         # Uploads to the UPDATES pocket are not auto-approved by the
@@ -292,16 +306,23 @@ class TestUploadPolicy(TestCaseWithFactory):
         insecure_policy = findPolicyByName("insecure")
         insecure_policy.setOptions(FakeOptions(distroseries="hoary-updates"))
         self.assertFalse(insecure_policy.autoApprove(make_fake_upload()))
-        self.assertTrue(insecure_policy.autoApprove(
-            make_fake_upload(is_ppa=True)))
+        self.assertTrue(
+            insecure_policy.autoApprove(make_fake_upload(is_ppa=True))
+        )
 
     def test_buildd_does_not_approve_uefi(self):
         # Uploads to the primary archive containing UEFI custom files are
         # not approved.
         buildd_policy = findPolicyByName("buildd")
         uploadfile = CustomUploadFile(
-            "uefi.tar.gz", None, 0, "main/raw-uefi", "extra", buildd_policy,
-            None)
+            "uefi.tar.gz",
+            None,
+            0,
+            "main/raw-uefi",
+            "extra",
+            buildd_policy,
+            None,
+        )
         upload = make_fake_upload(binaryful=True)
         upload.changes = FakeChangesFile(custom_files=[uploadfile])
         self.assertFalse(buildd_policy.autoApprove(upload))
@@ -311,8 +332,14 @@ class TestUploadPolicy(TestCaseWithFactory):
         # not approved.
         buildd_policy = findPolicyByName("buildd")
         uploadfile = CustomUploadFile(
-            "uefi.tar.gz", None, 0, "main/raw-signing", "extra", buildd_policy,
-            None)
+            "uefi.tar.gz",
+            None,
+            0,
+            "main/raw-signing",
+            "extra",
+            buildd_policy,
+            None,
+        )
         upload = make_fake_upload(binaryful=True)
         upload.changes = FakeChangesFile(custom_files=[uploadfile])
         self.assertFalse(buildd_policy.autoApprove(upload))
@@ -321,8 +348,14 @@ class TestUploadPolicy(TestCaseWithFactory):
         # Uploads to PPAs containing UEFI custom files are auto-approved.
         buildd_policy = findPolicyByName("buildd")
         uploadfile = CustomUploadFile(
-            "uefi.tar.gz", None, 0, "main/raw-uefi", "extra", buildd_policy,
-            None)
+            "uefi.tar.gz",
+            None,
+            0,
+            "main/raw-uefi",
+            "extra",
+            buildd_policy,
+            None,
+        )
         upload = make_fake_upload(binaryful=True, is_ppa=True)
         upload.changes = FakeChangesFile(custom_files=[uploadfile])
         self.assertTrue(buildd_policy.autoApprove(upload))
@@ -331,8 +364,14 @@ class TestUploadPolicy(TestCaseWithFactory):
         # Uploads to PPAs containing UEFI custom files are auto-approved.
         buildd_policy = findPolicyByName("buildd")
         uploadfile = CustomUploadFile(
-            "uefi.tar.gz", None, 0, "main/raw-signing", "extra", buildd_policy,
-            None)
+            "uefi.tar.gz",
+            None,
+            0,
+            "main/raw-signing",
+            "extra",
+            buildd_policy,
+            None,
+        )
         upload = make_fake_upload(binaryful=True, is_ppa=True)
         upload.changes = FakeChangesFile(custom_files=[uploadfile])
         self.assertTrue(buildd_policy.autoApprove(upload))
diff --git a/lib/lp/archiveuploader/tests/test_uploadprocessor.py b/lib/lp/archiveuploader/tests/test_uploadprocessor.py
index 11e739d..4b931d9 100644
--- a/lib/lp/archiveuploader/tests/test_uploadprocessor.py
+++ b/lib/lp/archiveuploader/tests/test_uploadprocessor.py
@@ -6,51 +6,42 @@
 __all__ = [
     "MockOptions",
     "TestUploadProcessorBase",
-    ]
+]
 
 import io
 import os
 import shutil
 import tempfile
 
-from fixtures import MonkeyPatch
 import six
+from fixtures import MonkeyPatch
 from storm.locals import Store
 from testtools.matchers import LessThan
-from zope.component import (
-    getGlobalSiteManager,
-    getUtility,
-    )
+from zope.component import getGlobalSiteManager, getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.app.errors import NotFoundError
 from lp.archiveuploader.nascentupload import NascentUpload
 from lp.archiveuploader.nascentuploadfile import DdebBinaryUploadFile
-from lp.archiveuploader.tests import (
-    datadir,
-    getPolicy,
-    )
+from lp.archiveuploader.tests import datadir, getPolicy
 from lp.archiveuploader.uploadpolicy import (
     AbstractUploadPolicy,
     ArchiveUploadType,
-    findPolicyByName,
     IArchiveUploadPolicy,
-    )
+    findPolicyByName,
+)
 from lp.archiveuploader.uploadprocessor import (
     BuildUploadHandler,
     CannotGetBuild,
-    parse_build_upload_leaf_name,
     UploadHandler,
     UploadProcessor,
     UploadStatusEnum,
-    )
-from lp.buildmaster.enums import (
-    BuildFarmJobType,
-    BuildStatus,
-    )
+    parse_build_upload_leaf_name,
+)
+from lp.buildmaster.enums import BuildFarmJobType, BuildStatus
 from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
     IBuildFarmJobBehaviour,
-    )
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.gpg import IGPGKeySet
 from lp.registry.interfaces.person import IPersonSet
@@ -63,16 +54,13 @@ from lp.services.config import config
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.interfaces import IStore
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
-from lp.services.log.logger import (
-    BufferLogger,
-    DevNullLogger,
-    )
+from lp.services.log.logger import BufferLogger, DevNullLogger
 from lp.soyuz.enums import (
     ArchivePermissionType,
     ArchivePurpose,
     PackageUploadStatus,
     SourcePackageFormat,
-    )
+)
 from lp.soyuz.interfaces.archive import IArchiveSet
 from lp.soyuz.interfaces.archivepermission import IArchivePermissionSet
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
@@ -81,11 +69,11 @@ from lp.soyuz.interfaces.packageset import IPackagesetSet
 from lp.soyuz.interfaces.publishing import (
     IPublishingSet,
     PackagePublishingStatus,
-    )
+)
 from lp.soyuz.interfaces.queue import QueueInconsistentStateError
 from lp.soyuz.interfaces.sourcepackageformat import (
     ISourcePackageFormatSelectionSet,
-    )
+)
 from lp.soyuz.model.archivepermission import ArchivePermission
 from lp.soyuz.model.binarypackagename import BinaryPackageName
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
@@ -93,26 +81,21 @@ from lp.soyuz.model.component import Component
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 from lp.soyuz.scripts.initialize_distroseries import InitializeDistroSeries
 from lp.soyuz.tests.fakepackager import FakePackager
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCase, TestCaseWithFactory
 from lp.testing.dbuser import switch_dbuser
 from lp.testing.fakemethod import FakeMethod
-from lp.testing.gpgkeys import (
-    import_public_key,
-    import_public_test_keys,
-    )
+from lp.testing.gpgkeys import import_public_key, import_public_test_keys
 from lp.testing.layers import LaunchpadZopelessLayer
 from lp.testing.mail_helpers import pop_notifications
 
 
 class MockOptions:
     """Use in place of an options object, adding more attributes if needed."""
+
     keep = False
     dryrun = False
 
@@ -134,6 +117,7 @@ class BrokenUploadPolicy(AbstractUploadPolicy):
 
 class TestUploadProcessorBase(TestCaseWithFactory):
     """Base class for functional tests over uploadprocessor.py."""
+
     layer = LaunchpadZopelessLayer
 
     def switchToUploader(self):
@@ -150,8 +134,9 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         self.failed_folder = os.path.join(self.queue_folder, "failed")
         os.makedirs(self.incoming_folder)
 
-        self.test_files_dir = os.path.join(config.root,
-            "lib/lp/archiveuploader/tests/data/suite")
+        self.test_files_dir = os.path.join(
+            config.root, "lib/lp/archiveuploader/tests/data/suite"
+        )
 
         import_public_test_keys()
 
@@ -162,7 +147,7 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         self.options.distro = "ubuntu"
         self.options.distroseries = None
         self.options.nomails = False
-        self.options.context = 'insecure'
+        self.options.context = "insecure"
 
         # common recipient
         self.name16_recipient = "foo.bar@xxxxxxxxxxxxx"
@@ -193,14 +178,26 @@ class TestUploadProcessorBase(TestCaseWithFactory):
             return policy
 
         upload_processor = UploadProcessor(
-            self.options.base_fsroot, self.options.dryrun,
-            self.options.nomails, builds, self.options.keep, getUploadPolicy,
-            txn, self.log)
+            self.options.base_fsroot,
+            self.options.dryrun,
+            self.options.nomails,
+            builds,
+            self.options.keep,
+            getUploadPolicy,
+            txn,
+            self.log,
+        )
         self.switchToUploader()
         return upload_processor
 
-    def publishPackage(self, packagename, version, source=True, archive=None,
-                       component_override=None):
+    def publishPackage(
+        self,
+        packagename,
+        version,
+        source=True,
+        archive=None,
+        component_override=None,
+    ):
         """Publish a single package that is currently NEW in the queue."""
         self.switchToAdmin()
 
@@ -208,8 +205,12 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         if version is not None:
             version = six.ensure_text(version)
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name=packagename,
-            version=version, exact_match=True, archive=archive)
+            status=PackageUploadStatus.NEW,
+            name=packagename,
+            version=version,
+            exact_match=True,
+            archive=archive,
+        )
         self.assertEqual(queue_items.count(), 1)
         queue_item = queue_items[0]
         queue_item.setAccepted()
@@ -226,11 +227,13 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         """Assert if a given line is present in the log messages."""
         log_lines = self.log.getLogBuffer()
         self.assertTrue(
-            line in log_lines, "'%s' is not in logged output\n\n%s" % (
-                line, log_lines))
+            line in log_lines,
+            "'%s' is not in logged output\n\n%s" % (line, log_lines),
+        )
 
-    def assertRaisesAndReturnError(self, excClass, callableObj, *args,
-                                   **kwargs):
+    def assertRaisesAndReturnError(
+        self, excClass, callableObj, *args, **kwargs
+    ):
         """See `TestCase.assertRaises`.
 
         Unlike `TestCase.assertRaises`, this method returns the exception
@@ -242,7 +245,7 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         except excClass as error:
             return error
         else:
-            if getattr(excClass, '__name__', None) is not None:
+            if getattr(excClass, "__name__", None) is not None:
                 excName = excClass.__name__
             else:
                 excName = str(excClass)
@@ -264,20 +267,26 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         """
         self.switchToAdmin()
 
-        self.ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
-        bat = self.ubuntu['breezy-autotest']
+        self.ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
+        bat = self.ubuntu["breezy-autotest"]
         self.breezy = self.ubuntu.newSeries(
-            name, 'Breezy Badger',
-            'The Breezy Badger', 'Black and White', 'Someone',
-            '5.10', None, bat.owner)
+            name,
+            "Breezy Badger",
+            "The Breezy Badger",
+            "Black and White",
+            "Someone",
+            "5.10",
+            None,
+            bat.owner,
+        )
 
         self.breezy.previous_series = bat
-        self.breezy.changeslist = 'breezy-changes@xxxxxxxxxx'
+        self.breezy.changeslist = "breezy-changes@xxxxxxxxxx"
         ids = InitializeDistroSeries(self.breezy, [bat.id])
         ids.initialize()
 
-        fake_chroot = self.addMockFile('fake_chroot.tar.gz')
-        self.breezy['i386'].addOrUpdateChroot(fake_chroot)
+        fake_chroot = self.addMockFile("fake_chroot.tar.gz")
+        self.breezy["i386"].addOrUpdateChroot(fake_chroot)
 
         if permitted_formats is None:
             permitted_formats = [SourcePackageFormat.FORMAT_1_0]
@@ -285,18 +294,24 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         for format in permitted_formats:
             if not self.breezy.isSourcePackageFormatPermitted(format):
                 getUtility(ISourcePackageFormatSelectionSet).add(
-                    self.breezy, format)
+                    self.breezy, format
+                )
 
         self.switchToUploader()
 
     def addMockFile(self, filename, content=b"anything"):
         """Return a librarian file."""
         return getUtility(ILibraryFileAliasSet).create(
-            filename, len(content), io.BytesIO(content),
-            'application/x-gtar')
-
-    def queueUpload(self, upload_name, relative_path="", test_files_dir=None,
-            queue_entry=None):
+            filename, len(content), io.BytesIO(content), "application/x-gtar"
+        )
+
+    def queueUpload(
+        self,
+        upload_name,
+        relative_path="",
+        test_files_dir=None,
+        queue_entry=None,
+    ):
         """Queue one of our test uploads.
 
         upload_name is the name of the test upload directory. If there
@@ -310,7 +325,8 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         if queue_entry is None:
             queue_entry = upload_name
         target_path = os.path.join(
-            self.incoming_folder, queue_entry, relative_path)
+            self.incoming_folder, queue_entry, relative_path
+        )
         if test_files_dir is None:
             test_files_dir = self.test_files_dir
         upload_dir = os.path.join(test_files_dir, upload_name)
@@ -328,8 +344,7 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         """
         results = []
         self.assertEqual(processor.builds, build is not None)
-        handler = UploadHandler.forProcessor(
-            processor, '.', upload_dir, build)
+        handler = UploadHandler.forProcessor(processor, ".", upload_dir, build)
         changes_files = handler.locateChangesFiles()
         for changes_file in changes_files:
             result = handler.processChangesFile(changes_file)
@@ -379,15 +394,15 @@ class TestUploadProcessorBase(TestCaseWithFactory):
             # Only check the recipient if the caller didn't explicitly pass
             # "recipient": None.
             if recipient is not None:
-                self.assertEqual(recipient, msg['X-Envelope-To'])
+                self.assertEqual(recipient, msg["X-Envelope-To"])
 
-            subject = "Subject: %s\n" % msg['Subject']
+            subject = "Subject: %s\n" % msg["Subject"]
             body = subject + body
 
             for content in list(contents):
                 self.assertTrue(
-                    content in body,
-                    "Expect: '%s'\nGot:\n%s" % (content, body))
+                    content in body, "Expect: '%s'\nGot:\n%s" % (content, body)
+                )
 
     def PGPSignatureNotPreserved(self, archive=None):
         """PGP signatures should be removed from .changes files.
@@ -396,21 +411,27 @@ class TestUploadProcessorBase(TestCaseWithFactory):
         both have the PGP signature removed.
         """
         bar = archive.getPublishedSources(
-            name='bar', version="1.0-1", exact_match=True)
+            name="bar", version="1.0-1", exact_match=True
+        )
         changes_lfa = getUtility(IPublishingSet).getChangesFileLFA(
-            bar.first().sourcepackagerelease)
+            bar.first().sourcepackagerelease
+        )
         changes_file = changes_lfa.read()
         self.assertTrue(
-            b"Format: " in changes_file, "Does not look like a changes file")
+            b"Format: " in changes_file, "Does not look like a changes file"
+        )
         self.assertTrue(
             b"-----BEGIN PGP SIGNED MESSAGE-----" not in changes_file,
-            "Unexpected PGP header found")
+            "Unexpected PGP header found",
+        )
         self.assertTrue(
             b"-----BEGIN PGP SIGNATURE-----" not in changes_file,
-            "Unexpected start of PGP signature found")
+            "Unexpected start of PGP signature found",
+        )
         self.assertTrue(
             b"-----END PGP SIGNATURE-----" not in changes_file,
-            "Unexpected end of PGP signature found")
+            "Unexpected end of PGP signature found",
+        )
 
 
 class TestUploadProcessor(TestUploadProcessorBase):
@@ -430,7 +451,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.assertEqual("foo.bar@xxxxxxxxxxxxx", msg["X-Envelope-To"])
         self.assertTrue(
             "rejected" not in str(msg),
-            "Expected acceptance email not rejection. Actually Got:\n%s" % msg)
+            "Expected acceptance email not rejection. Actually Got:\n%s" % msg,
+        )
 
     def testInstantiate(self):
         """UploadProcessor should instantiate"""
@@ -446,7 +468,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
             up = self.getUploadProcessor(None)
             located_dirs = up.locateDirectories(testdir)
-            self.assertEqual(located_dirs, ['dir1', 'dir2', 'dir3'])
+            self.assertEqual(located_dirs, ["dir1", "dir2", "dir3"])
         finally:
             shutil.rmtree(testdir)
 
@@ -467,7 +489,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
             # Move it
             self.options.base_fsroot = testdir
             up = self.getUploadProcessor(None)
-            handler = UploadHandler(up, '.', upload)
+            handler = UploadHandler(up, ".", upload)
             handler.moveUpload(target_name, self.log)
 
             # Check it moved
@@ -485,7 +507,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
             # Remove it
             self.options.base_fsroot = testdir
             up = self.getUploadProcessor(None)
-            handler = UploadHandler(up, '.', upload)
+            handler = UploadHandler(up, ".", upload)
             handler.moveProcessedUpload("accepted", self.log)
 
             # Check it was removed, not moved
@@ -505,12 +527,13 @@ class TestUploadProcessor(TestUploadProcessorBase):
             # Move it
             self.options.base_fsroot = testdir
             up = self.getUploadProcessor(None)
-            handler = UploadHandler(up, '.', upload)
+            handler = UploadHandler(up, ".", upload)
             handler.moveProcessedUpload("rejected", self.log)
 
             # Check it moved
-            self.assertTrue(os.path.exists(os.path.join(
-                testdir, "rejected", upload_name)))
+            self.assertTrue(
+                os.path.exists(os.path.join(testdir, "rejected", upload_name))
+            )
             self.assertFalse(os.path.exists(upload))
         finally:
             shutil.rmtree(testdir)
@@ -526,7 +549,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
             # Remove it
             self.options.base_fsroot = testdir
             up = self.getUploadProcessor(None)
-            handler = UploadHandler(up, '.', upload)
+            handler = UploadHandler(up, ".", upload)
             handler.removeUpload(self.log)
 
             # Check it was removed, not moved
@@ -551,11 +574,13 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
         See bug 35965.
         """
-        self.options.context = 'broken'
+        self.options.context = "broken"
         # Register our broken upload policy.
         getGlobalSiteManager().registerUtility(
-            component=BrokenUploadPolicy, provided=IArchiveUploadPolicy,
-            name=self.options.context)
+            component=BrokenUploadPolicy,
+            provided=IArchiveUploadPolicy,
+            name=self.options.context,
+        )
 
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
@@ -570,10 +595,12 @@ class TestUploadProcessor(TestUploadProcessorBase):
         body = body.get_payload(decode=True).decode("UTF-8")
 
         self.assertEqual(
-            "daniel.silverstone@xxxxxxxxxxxxx", msg["X-Envelope-To"])
-        self.assertTrue("Unhandled exception processing upload: Exception "
-                        "raised by BrokenUploadPolicy for testing."
-                        in body)
+            "daniel.silverstone@xxxxxxxxxxxxx", msg["X-Envelope-To"]
+        )
+        self.assertTrue(
+            "Unhandled exception processing upload: Exception "
+            "raised by BrokenUploadPolicy for testing." in body
+        )
 
     def testUploadToFrozenDistro(self):
         """Uploads to a frozen distroseries should work, but be unapproved.
@@ -607,15 +634,19 @@ class TestUploadProcessor(TestUploadProcessorBase):
             self.assertEqual(expected_to_addr, msg["X-Envelope-To"])
             self.assertTrue(
                 "NEW" in str(msg),
-                "Expected email containing 'NEW', got:\n%s" % msg)
+                "Expected email containing 'NEW', got:\n%s" % msg,
+            )
 
         # Accept and publish the upload.
         # This is required so that the next upload of a later version of
         # the same package will work correctly.
         self.switchToAdmin()
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         self.assertEqual(queue_items.count(), 1)
         queue_item = queue_items[0]
 
@@ -640,20 +671,27 @@ class TestUploadProcessor(TestUploadProcessorBase):
             self.assertEqual(expected_to_addr, msg["X-Envelope-To"])
             self.assertTrue(
                 "Waiting for approval" in str(msg),
-                "Expected an 'upload awaits approval' email.\nGot:\n%s" % msg)
+                "Expected an 'upload awaits approval' email.\nGot:\n%s" % msg,
+            )
 
         # And verify that the queue item is in the unapproved state.
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.UNAPPROVED, name="bar",
-            version="1.0-2", exact_match=True)
+            status=PackageUploadStatus.UNAPPROVED,
+            name="bar",
+            version="1.0-2",
+            exact_match=True,
+        )
         self.assertEqual(queue_items.count(), 1)
         queue_item = queue_items[0]
         self.assertEqual(
-            queue_item.status, PackageUploadStatus.UNAPPROVED,
-            "Expected queue item to be in UNAPPROVED status.")
-
-    def _checkCopyArchiveUploadToDistro(self, pocket_to_check,
-                                        status_to_check):
+            queue_item.status,
+            PackageUploadStatus.UNAPPROVED,
+            "Expected queue item to be in UNAPPROVED status.",
+        )
+
+    def _checkCopyArchiveUploadToDistro(
+        self, pocket_to_check, status_to_check
+    ):
         """Check binary copy archive uploads for given pocket and status.
 
         This helper method tests that buildd binary uploads to copy
@@ -669,28 +707,30 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        bar_source_pub = self.publishPackage('bar', '1.0-1')
+        bar_source_pub = self.publishPackage("bar", "1.0-1")
         [bar_original_build] = bar_source_pub.createMissingBuilds()
 
         # Move the source from the accepted queue.
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1",
-            name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
 
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         self.layer.txn.commit()
         upload_dir = self.queueUpload("bar_1.0-1_binary")
         build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
-        self.processUpload(build_uploadprocessor, upload_dir,
-            build=bar_original_build)
+            self.layer.txn, builds=True
+        )
+        self.processUpload(
+            build_uploadprocessor, upload_dir, build=bar_original_build
+        )
         self.assertEqual(
-            uploadprocessor.last_processed_upload.is_rejected, False)
-        bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
+            uploadprocessor.last_processed_upload.is_rejected, False
+        )
+        bar_bin_pubs = self.publishPackage("bar", "1.0-1", source=False)
         # Mangle its publishing component to "restricted" so we can check
         # the copy archive ancestry override later.
         self.switchToAdmin()
@@ -700,15 +740,19 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.switchToUploader()
 
         # Create a COPY archive for building in non-virtual builds.
-        uploader = getUtility(IPersonSet).getByName('name16')
+        uploader = getUtility(IPersonSet).getByName("name16")
         copy_archive = getUtility(IArchiveSet).new(
-            owner=uploader, purpose=ArchivePurpose.COPY,
-            distribution=self.ubuntu, name='the-copy-archive')
+            owner=uploader,
+            purpose=ArchivePurpose.COPY,
+            distribution=self.ubuntu,
+            name="the-copy-archive",
+        )
         copy_archive.require_virtualized = False
 
         # Copy 'bar-1.0-1' source to the COPY archive.
         bar_copied_source = bar_source_pub.copyTo(
-            bar_source_pub.distroseries, pocket_to_check, copy_archive)
+            bar_source_pub.distroseries, pocket_to_check, copy_archive
+        )
         [bar_copied_build] = bar_copied_source.createMissingBuilds()
 
         # Make ubuntu/breezy the current distro.
@@ -716,15 +760,18 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.layer.txn.commit()
 
         shutil.rmtree(upload_dir)
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(build_uploadprocessor, upload_dir,
-             build=bar_copied_build)
+            "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id
+        )
+        self.processUpload(
+            build_uploadprocessor, upload_dir, build=bar_copied_build
+        )
 
         # Make sure the upload succeeded.
         self.assertEqual(
-            build_uploadprocessor.last_processed_upload.is_rejected, False)
+            build_uploadprocessor.last_processed_upload.is_rejected, False
+        )
 
         # The upload should also be auto-accepted even though there's no
         # ancestry.  This means items should go to ACCEPTED and not NEW.
@@ -732,10 +779,13 @@ class TestUploadProcessor(TestUploadProcessorBase):
             status=PackageUploadStatus.ACCEPTED,
             version="1.0-1",
             name="bar",
-            archive=copy_archive)
+            archive=copy_archive,
+        )
         self.assertEqual(
-            queue_items.count(), 1,
-            "Binary upload was not accepted when it should have been.")
+            queue_items.count(),
+            1,
+            "Binary upload was not accepted when it should have been.",
+        )
 
         # The copy archive binary published component should have been
         # inherited from the main archive's.
@@ -755,7 +805,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         See bug 369512.
         """
         self._checkCopyArchiveUploadToDistro(
-            PackagePublishingPocket.RELEASE, SeriesStatus.CURRENT)
+            PackagePublishingPocket.RELEASE, SeriesStatus.CURRENT
+        )
 
     def testCopyArchiveUploadToSupportedDistro(self):
         """Check binary copy archive uploads to RELEASE pockets.
@@ -767,7 +818,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         See bug 369512.
         """
         self._checkCopyArchiveUploadToDistro(
-            PackagePublishingPocket.RELEASE, SeriesStatus.SUPPORTED)
+            PackagePublishingPocket.RELEASE, SeriesStatus.SUPPORTED
+        )
 
     def testDuplicatedBinaryUploadGetsRejected(self):
         """The upload processor rejects duplicated binary uploads.
@@ -787,23 +839,25 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        bar_source_pub = self.publishPackage('bar', '1.0-1')
+        bar_source_pub = self.publishPackage("bar", "1.0-1")
         [bar_original_build] = bar_source_pub.createMissingBuilds()
 
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-1_binary")
         build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
         self.processUpload(
-            build_uploadprocessor, upload_dir, build=bar_original_build)
+            build_uploadprocessor, upload_dir, build=bar_original_build
+        )
         [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
 
         # Prepare ubuntu/breezy-autotest to build sources in i386.
         self.switchToAdmin()
-        breezy_autotest = self.ubuntu['breezy-autotest']
-        breezy_autotest_i386 = breezy_autotest['i386']
+        breezy_autotest = self.ubuntu["breezy-autotest"]
+        breezy_autotest_i386 = breezy_autotest["i386"]
         breezy_autotest.nominatedarchindep = breezy_autotest_i386
-        fake_chroot = self.addMockFile('fake_chroot.tar.gz')
+        fake_chroot = self.addMockFile("fake_chroot.tar.gz")
         breezy_autotest_i386.addOrUpdateChroot(fake_chroot)
         self.layer.txn.commit()
         self.switchToUploader()
@@ -812,33 +866,43 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # create a build there (this would never happen in reality, it
         # just suits the purposes of this test).
         bar_copied_source = bar_source_pub.copyTo(
-            breezy_autotest, PackagePublishingPocket.RELEASE,
-            self.ubuntu.main_archive)
+            breezy_autotest,
+            PackagePublishingPocket.RELEASE,
+            self.ubuntu.main_archive,
+        )
         bar_copied_build = getUtility(IBinaryPackageBuildSet).new(
-            bar_copied_source.sourcepackagerelease, self.ubuntu.main_archive,
-            breezy_autotest_i386, PackagePublishingPocket.RELEASE)
+            bar_copied_source.sourcepackagerelease,
+            self.ubuntu.main_archive,
+            breezy_autotest_i386,
+            PackagePublishingPocket.RELEASE,
+        )
 
         # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
         # in breezy-autotest context.
         shutil.rmtree(upload_dir)
         self.options.distroseries = breezy_autotest.name
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(build_uploadprocessor, upload_dir,
-            build=bar_copied_build)
+        self.processUpload(
+            build_uploadprocessor, upload_dir, build=bar_copied_build
+        )
         [duplicated_binary_upload] = breezy_autotest.getPackageUploads(
-            status=PackageUploadStatus.NEW, name='bar',
-            version='1.0-1', exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
 
         # The just uploaded binary cannot be accepted because its
         # filename 'bar_1.0-1_i386.deb' is already published in the
         # archive.
         error = self.assertRaisesAndReturnError(
-            QueueInconsistentStateError,
-            duplicated_binary_upload.setAccepted)
+            QueueInconsistentStateError, duplicated_binary_upload.setAccepted
+        )
         self.assertEqual(
             str(error),
             "The following files are already published in Primary "
-            "Archive for Ubuntu Linux:\nbar_1.0-1_i386.deb")
+            "Archive for Ubuntu Linux:\nbar_1.0-1_i386.deb",
+        )
 
     def testBinaryUploadToCopyArchive(self):
         """Copy archive binaries are not checked against the primary archive.
@@ -852,47 +916,56 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        bar_source_old = self.publishPackage('bar', '1.0-1')
+        bar_source_old = self.publishPackage("bar", "1.0-1")
 
         # Upload 'bar-1.0-1' source and binary to ubuntu/breezy.
         upload_dir = self.queueUpload("bar_1.0-2")
         self.processUpload(uploadprocessor, upload_dir)
         bar_source_pub = self.ubuntu.main_archive.getPublishedSources(
-            name='bar', version='1.0-2', exact_match=True).one()
+            name="bar", version="1.0-2", exact_match=True
+        ).one()
         [bar_original_build] = bar_source_pub.getBuilds()
 
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-2_binary")
         build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
-        self.processUpload(build_uploadprocessor, upload_dir,
-            build=bar_original_build)
+            self.layer.txn, builds=True
+        )
+        self.processUpload(
+            build_uploadprocessor, upload_dir, build=bar_original_build
+        )
         [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
 
         # Create a COPY archive for building in non-virtual builds.
-        uploader = getUtility(IPersonSet).getByName('name16')
+        uploader = getUtility(IPersonSet).getByName("name16")
         copy_archive = getUtility(IArchiveSet).new(
-            owner=uploader, purpose=ArchivePurpose.COPY,
-            distribution=self.ubuntu, name='no-source-uploads')
+            owner=uploader,
+            purpose=ArchivePurpose.COPY,
+            distribution=self.ubuntu,
+            name="no-source-uploads",
+        )
         copy_archive.require_virtualized = False
 
         # Copy 'bar-1.0-1' source to the COPY archive.
         bar_copied_source = bar_source_old.copyTo(
-            bar_source_pub.distroseries, bar_source_pub.pocket,
-            copy_archive)
+            bar_source_pub.distroseries, bar_source_pub.pocket, copy_archive
+        )
         [bar_copied_build] = bar_copied_source.createMissingBuilds()
 
         shutil.rmtree(upload_dir)
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(build_uploadprocessor, upload_dir,
-            build=bar_copied_build)
+            "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id
+        )
+        self.processUpload(
+            build_uploadprocessor, upload_dir, build=bar_copied_build
+        )
 
         # The binary just uploaded is accepted because it's destined for a
         # copy archive and the PRIMARY and the COPY archives are isolated
         # from each other.
         self.assertEqual(
-            build_uploadprocessor.last_processed_upload.is_rejected, False)
+            build_uploadprocessor.last_processed_upload.is_rejected, False
+        )
 
     def testPartnerArchiveMissingForPartnerUploadFails(self):
         """A missing partner archive should produce a rejection email.
@@ -902,12 +975,14 @@ class TestUploadProcessor(TestUploadProcessorBase):
         error email should be generated.
         """
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='anything')
+            policy="anything"
+        )
 
         # Fudge the partner archive in the sample data temporarily so that
         # it's now a PPA instead.
         archive = getUtility(IArchiveSet).getByDistroPurpose(
-            distribution=self.ubuntu, purpose=ArchivePurpose.PARTNER)
+            distribution=self.ubuntu, purpose=ArchivePurpose.PARTNER
+        )
         removeSecurityProxy(archive).purpose = ArchivePurpose.PPA
 
         self.layer.txn.commit()
@@ -920,7 +995,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         [msg] = pop_notifications()
         self.assertIn(
             "Partner archive for distro '%s' not found" % self.ubuntu.name,
-            str(msg))
+            str(msg),
+        )
 
     def testMixedPartnerUploadFails(self):
         """Uploads with partner and non-partner files are rejected.
@@ -929,7 +1005,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         is rejected.  Partner uploads should be entirely partner.
         """
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='anything')
+            policy="anything"
+        )
 
         # Upload a package for Breezy.
         upload_dir = self.queueUpload("foocomm_1.0-1-illegal-component-mix")
@@ -941,34 +1018,41 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.assertTrue(
             "Cannot mix partner files with non-partner." in str(msg),
             "Expected email containing 'Cannot mix partner files with "
-            "non-partner.', got:\n%s" % msg)
+            "non-partner.', got:\n%s" % msg,
+        )
 
     def testPartnerReusingOrigFromPartner(self):
         """Partner uploads reuse 'orig.tar.gz' from the partner archive."""
         # Make the official bar orig.tar.gz available in the system.
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='absolutely-anything')
+            policy="absolutely-anything"
+        )
 
         upload_dir = self.queueUpload("foocomm_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
 
         self.assertEqual(
             uploadprocessor.last_processed_upload.queue_root.status,
-            PackageUploadStatus.NEW)
+            PackageUploadStatus.NEW,
+        )
 
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="foocomm",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="foocomm",
+            version="1.0-1",
+            exact_match=True,
+        )
         queue_item.setAccepted()
         queue_item.realiseUpload()
         self.layer.commit()
 
         archive = getUtility(IArchiveSet).getByDistroPurpose(
-            distribution=self.ubuntu, purpose=ArchivePurpose.PARTNER)
+            distribution=self.ubuntu, purpose=ArchivePurpose.PARTNER
+        )
         try:
-            archive.getFileByName('foocomm_1.0.orig.tar.gz')
+            archive.getFileByName("foocomm_1.0.orig.tar.gz")
         except NotFoundError:
-            self.fail('foocomm_1.0.orig.tar.gz is not yet published.')
+            self.fail("foocomm_1.0.orig.tar.gz is not yet published.")
 
         # Please note: this upload goes to the Ubuntu main archive.
         upload_dir = self.queueUpload("foocomm_1.0-3")
@@ -981,11 +1065,13 @@ class TestUploadProcessor(TestUploadProcessorBase):
         body = body.get_payload(decode=True).decode("UTF-8")
 
         self.assertEqual(
-            '[ubuntu/partner/breezy] foocomm 1.0-3 (Accepted)', msg['Subject'])
+            "[ubuntu/partner/breezy] foocomm 1.0-3 (Accepted)", msg["Subject"]
+        )
         self.assertFalse(
-            'Unable to find foocomm_1.0.orig.tar.gz in upload or '
-            'distribution.' in body,
-            'Unable to find foocomm_1.0.orig.tar.gz')
+            "Unable to find foocomm_1.0.orig.tar.gz in upload or "
+            "distribution." in body,
+            "Unable to find foocomm_1.0.orig.tar.gz",
+        )
 
     def testPartnerUpload(self):
         """Partner packages should be uploaded to the partner archive.
@@ -995,7 +1081,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         ArchivePurpose.PARTNER.
         """
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='anything')
+            policy="anything"
+        )
 
         # Upload a package for Breezy.
         upload_dir = self.queueUpload("foocomm_1.0-1")
@@ -1007,45 +1094,57 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Find the sourcepackagerelease and check its component.
         foocomm_name = SourcePackageName.selectOneBy(name="foocomm")
         foocomm_spr = SourcePackageRelease.selectOneBy(
-           sourcepackagename=foocomm_name)
-        self.assertEqual(foocomm_spr.component.name, 'partner')
+            sourcepackagename=foocomm_name
+        )
+        self.assertEqual(foocomm_spr.component.name, "partner")
 
         # Check that the right archive was picked.
-        self.assertEqual(foocomm_spr.upload_archive.description,
-            'Partner archive')
+        self.assertEqual(
+            foocomm_spr.upload_archive.description, "Partner archive"
+        )
 
         # Accept and publish the upload.
         partner_archive = getUtility(IArchiveSet).getByDistroPurpose(
-            self.ubuntu, ArchivePurpose.PARTNER)
+            self.ubuntu, ArchivePurpose.PARTNER
+        )
         self.assertTrue(partner_archive)
         self.publishPackage("foocomm", "1.0-1", archive=partner_archive)
 
         # Check the publishing record's archive and component.
-        foocomm_spph = IStore(SourcePackagePublishingHistory).find(
-            SourcePackagePublishingHistory,
-            sourcepackagerelease=foocomm_spr).one()
-        self.assertEqual(foocomm_spph.archive.description,
-            'Partner archive')
-        self.assertEqual(foocomm_spph.component.name,
-            'partner')
+        foocomm_spph = (
+            IStore(SourcePackagePublishingHistory)
+            .find(
+                SourcePackagePublishingHistory,
+                sourcepackagerelease=foocomm_spr,
+            )
+            .one()
+        )
+        self.assertEqual(foocomm_spph.archive.description, "Partner archive")
+        self.assertEqual(foocomm_spph.component.name, "partner")
 
         # Fudge a build for foocomm so that it's not in the partner archive.
         # We can then test that uploading a binary package must match the
         # build's archive.
         foocomm_build = getUtility(IBinaryPackageBuildSet).new(
-            foocomm_spr, self.ubuntu.main_archive, self.breezy['i386'],
-            PackagePublishingPocket.RELEASE)
+            foocomm_spr,
+            self.ubuntu.main_archive,
+            self.breezy["i386"],
+            PackagePublishingPocket.RELEASE,
+        )
         self.layer.txn.commit()
         upload_dir = self.queueUpload("foocomm_1.0-1_binary")
         build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
         self.processUpload(
-            build_uploadprocessor, upload_dir, build=foocomm_build)
+            build_uploadprocessor, upload_dir, build=foocomm_build
+        )
 
         contents = [
             "Subject: [ubuntu/partner] foocomm_1.0-1_i386.changes (Rejected)",
             "Attempt to upload binaries specifying build %d, "
-            "where they don't fit." % foocomm_build.id]
+            "where they don't fit." % foocomm_build.id,
+        ]
         self.assertEmails([{"contents": contents}])
 
         # Reset upload queue directory for a new upload.
@@ -1059,20 +1158,24 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Find the binarypackagerelease and check its component.
         foocomm_binname = BinaryPackageName.selectOneBy(name="foocomm")
         foocomm_bpr = BinaryPackageRelease.selectOneBy(
-            binarypackagename=foocomm_binname)
-        self.assertEqual(foocomm_bpr.component.name, 'partner')
+            binarypackagename=foocomm_binname
+        )
+        self.assertEqual(foocomm_bpr.component.name, "partner")
 
         # Publish the upload so we can check the publishing record.
         self.publishPackage("foocomm", "1.0-1", source=False)
 
         # Check the publishing record's archive and component.
-        foocomm_bpph = IStore(BinaryPackagePublishingHistory).find(
-            BinaryPackagePublishingHistory,
-            binarypackagerelease=foocomm_bpr).one()
-        self.assertEqual(foocomm_bpph.archive.description,
-            'Partner archive')
-        self.assertEqual(foocomm_bpph.component.name,
-            'partner')
+        foocomm_bpph = (
+            IStore(BinaryPackagePublishingHistory)
+            .find(
+                BinaryPackagePublishingHistory,
+                binarypackagerelease=foocomm_bpr,
+            )
+            .one()
+        )
+        self.assertEqual(foocomm_bpph.archive.description, "Partner archive")
+        self.assertEqual(foocomm_bpph.component.name, "partner")
 
     def testUploadAncestry(self):
         """Check that an upload correctly finds any file ancestors.
@@ -1087,7 +1190,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Use the 'absolutely-anything' policy which allows unsigned
         # DSC and changes files.
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='absolutely-anything')
+            policy="absolutely-anything"
+        )
 
         # Upload a package for Breezy.
         upload_dir = self.queueUpload("foocomm_1.0-1")
@@ -1097,11 +1201,13 @@ class TestUploadProcessor(TestUploadProcessorBase):
         [msg] = pop_notifications()
         self.assertTrue(
             "NEW" in str(msg),
-            "Expected email containing 'NEW', got:\n%s" % msg)
+            "Expected email containing 'NEW', got:\n%s" % msg,
+        )
 
         # Accept and publish the upload.
         partner_archive = getUtility(IArchiveSet).getByDistroPurpose(
-            self.ubuntu, ArchivePurpose.PARTNER)
+            self.ubuntu, ArchivePurpose.PARTNER
+        )
         self.publishPackage("foocomm", "1.0-1", archive=partner_archive)
 
         # Now do the same thing with a binary package.
@@ -1109,8 +1215,9 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.processUpload(uploadprocessor, upload_dir)
 
         # Accept and publish the upload.
-        self.publishPackage("foocomm", "1.0-1", source=False,
-                             archive=partner_archive)
+        self.publishPackage(
+            "foocomm", "1.0-1", source=False, archive=partner_archive
+        )
 
         # Upload the next source version of the package.
         upload_dir = self.queueUpload("foocomm_1.0-2")
@@ -1119,21 +1226,21 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Check the upload is in the DONE queue since single source uploads
         # with ancestry (previously uploaded) will skip the ACCEPTED state.
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE,
-            version="1.0-2",
-            name="foocomm")
+            status=PackageUploadStatus.DONE, version="1.0-2", name="foocomm"
+        )
         self.assertEqual(queue_items.count(), 1)
 
         # Single source uploads also get their corrsponding builds created
         # at upload-time. 'foocomm' only builds in 'i386', thus only one
         # build gets created.
         foocomm_source = partner_archive.getPublishedSources(
-            name='foocomm', version='1.0-2').one()
+            name="foocomm", version="1.0-2"
+        ).one()
         [build] = foocomm_source.sourcepackagerelease.builds
         self.assertEqual(
-            build.title,
-            'i386 build of foocomm 1.0-2 in ubuntu breezy RELEASE')
-        self.assertEqual(build.status.name, 'NEEDSBUILD')
+            build.title, "i386 build of foocomm 1.0-2 in ubuntu breezy RELEASE"
+        )
+        self.assertEqual(build.status.name, "NEEDSBUILD")
         self.assertTrue(build.buildqueue_record.lastscore is not None)
 
         # Upload the next binary version of the package.
@@ -1144,7 +1251,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         queue_items = self.breezy.getPackageUploads(
             status=PackageUploadStatus.ACCEPTED,
             version="1.0-2",
-            name="foocomm")
+            name="foocomm",
+        )
         self.assertEqual(queue_items.count(), 1)
 
     def testPartnerUploadToProposedPocket(self):
@@ -1152,7 +1260,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.setupBreezy()
         self.breezy.status = SeriesStatus.CURRENT
         self.layer.txn.commit()
-        self.options.context = 'insecure'
+        self.options.context = "insecure"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload a package for Breezy.
@@ -1170,7 +1278,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.setupBreezy()
         self.breezy.status = SeriesStatus.CURRENT
         self.layer.txn.commit()
-        self.options.context = 'insecure'
+        self.options.context = "insecure"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload a package for Breezy.
@@ -1186,7 +1294,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         pocket and ensure it fails."""
         # Set up the uploadprocessor with appropriate options and logger.
         new_index = len(self.oopses)
-        self.options.context = 'insecure'
+        self.options.context = "insecure"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload a package for Breezy.
@@ -1194,18 +1302,21 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.processUpload(uploadprocessor, upload_dir)
 
         # Check it is rejected.
-        expect_msg = ("Partner uploads must be for the RELEASE or "
-                      "PROPOSED pocket.")
+        expect_msg = (
+            "Partner uploads must be for the RELEASE or " "PROPOSED pocket."
+        )
         [msg] = pop_notifications()
         self.assertTrue(
             expect_msg in str(msg),
-            "Expected email with %s, got:\n%s" % (expect_msg, msg))
+            "Expected email with %s, got:\n%s" % (expect_msg, msg),
+        )
 
         # And an oops should be filed for the error.
         error_report = self.oopses[new_index]
         expected_explanation = (
-            "Verification failed 3 times: ['No data', 'No data', 'No data']")
-        self.assertIn(expected_explanation, error_report['value'])
+            "Verification failed 3 times: ['No data', 'No data', 'No data']"
+        )
+        self.assertIn(expected_explanation, error_report["value"])
 
         # Housekeeping so the next test won't fail.
         shutil.rmtree(upload_dir)
@@ -1238,45 +1349,52 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self._uploadPartnerToNonReleasePocketAndCheckFail()
 
     def assertRejectionMessage(self, uploadprocessor, msg, with_file=True):
-        expected = ''
-        for part in ('-1.dsc', '.orig.tar.gz', '-1.diff.gz'):
+        expected = ""
+        for part in ("-1.dsc", ".orig.tar.gz", "-1.diff.gz"):
             if with_file:
-                expected += 'bar_1.0%s: %s\n' % (part, msg)
+                expected += "bar_1.0%s: %s\n" % (part, msg)
             else:
-                expected += '%s\n' % msg
-        expected += ('Further error processing not possible because of a '
-            'critical previous error.')
+                expected += "%s\n" % msg
+        expected += (
+            "Further error processing not possible because of a "
+            "critical previous error."
+        )
         self.assertEqual(
-            expected, uploadprocessor.last_processed_upload.rejection_message)
+            expected, uploadprocessor.last_processed_upload.rejection_message
+        )
 
     def testUploadWithUnknownSectionIsRejected(self):
         uploadprocessor = self.setupBreezyAndGetUploadProcessor()
         upload_dir = self.queueUpload("bar_1.0-1_bad_section")
         self.processUpload(uploadprocessor, upload_dir)
         self.assertRejectionMessage(
-            uploadprocessor, "Unknown section 'badsection'")
+            uploadprocessor, "Unknown section 'badsection'"
+        )
 
     def testUploadWithMalformedSectionIsRejected(self):
         uploadprocessor = self.setupBreezyAndGetUploadProcessor()
         upload_dir = self.queueUpload("bar_1.0-1_malformed_section")
         self.processUpload(uploadprocessor, upload_dir)
         expected = (
-            'Wrong number of fields in Files field line.\n'
-            'Further error processing not possible because of a '
-            'critical previous error.')
+            "Wrong number of fields in Files field line.\n"
+            "Further error processing not possible because of a "
+            "critical previous error."
+        )
         self.assertEqual(
-            expected, uploadprocessor.last_processed_upload.rejection_message)
+            expected, uploadprocessor.last_processed_upload.rejection_message
+        )
 
     def testUploadWithUnknownComponentIsRejected(self):
         uploadprocessor = self.setupBreezyAndGetUploadProcessor()
         upload_dir = self.queueUpload("bar_1.0-1_contrib_component")
         self.processUpload(uploadprocessor, upload_dir)
         self.assertRejectionMessage(
-            uploadprocessor, "Unknown component 'contrib'")
+            uploadprocessor, "Unknown component 'contrib'"
+        )
 
     def testSourceUploadToBuilddPath(self):
         """Source uploads to buildd upload paths are not permitted."""
-        ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
+        ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
         primary = ubuntu.main_archive
 
         uploadprocessor = self.setupBreezyAndGetUploadProcessor()
@@ -1285,9 +1403,11 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
         # Check that the sourceful upload to the copy archive is rejected.
         contents = [
-            "Invalid upload path (1/ubuntu) for this policy (insecure)"]
+            "Invalid upload path (1/ubuntu) for this policy (insecure)"
+        ]
         self.assertEmails(
-            [{"contents": contents, "recipient": None}], allow_leftover=True)
+            [{"contents": contents, "recipient": None}], allow_leftover=True
+        )
 
     # Uploads that are new should have the component overridden
     # such that:
@@ -1299,8 +1419,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
     # the default action taken anyway.
     #
     # The following three tests check this.
-    def checkComponentOverride(self, upload_dir_name,
-                               expected_component_name):
+    def checkComponentOverride(self, upload_dir_name, expected_component_name):
         """Helper function to check overridden source component names.
 
         Upload a 'bar' package from upload_dir_name, then
@@ -1315,28 +1434,34 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.processUpload(uploadprocessor, upload_dir)
 
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         [queue_item] = queue_items
         self.assertEqual(
             queue_item.sourcepackagerelease.component.name,
-            expected_component_name)
+            expected_component_name,
+        )
 
     def testUploadContribComponentOverride(self):
         """Test the overriding of the contrib component on uploads."""
         # The component contrib does not exist in the sample data, so
         # add it here.
-        Component(name='contrib')
+        Component(name="contrib")
         self.checkComponentOverride(
-            "bar_1.0-1_contrib_component", "multiverse")
+            "bar_1.0-1_contrib_component", "multiverse"
+        )
 
     def testUploadNonfreeComponentOverride(self):
         """Test the overriding of the non-free component on uploads."""
         # The component non-free does not exist in the sample data, so
         # add it here.
-        Component(name='non-free')
+        Component(name="non-free")
         self.checkComponentOverride(
-            "bar_1.0-1_nonfree_component", "multiverse")
+            "bar_1.0-1_nonfree_component", "multiverse"
+        )
 
     def testUploadDefaultComponentOverride(self):
         """Test the overriding of the component on uploads.
@@ -1346,8 +1471,9 @@ class TestUploadProcessor(TestUploadProcessorBase):
         """
         self.checkComponentOverride("bar_1.0-1", "universe")
 
-    def checkBinaryComponentOverride(self, component_override=None,
-                                     expected_component_name='universe'):
+    def checkBinaryComponentOverride(
+        self, component_override=None, expected_component_name="universe"
+    ):
         """Helper function to check overridden binary component names.
 
         Upload a 'bar' package from upload_dir_name, publish it, and then
@@ -1363,21 +1489,26 @@ class TestUploadProcessor(TestUploadProcessorBase):
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
         bar_source_pub = self.publishPackage(
-            'bar', '1.0-1', component_override=component_override)
+            "bar", "1.0-1", component_override=component_override
+        )
         [bar_original_build] = bar_source_pub.createMissingBuilds()
 
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         upload_dir = self.queueUpload("bar_1.0-1_binary")
         build_uploadprocessor = self.getUploadProcessor(
-            self.layer.txn, builds=True)
+            self.layer.txn, builds=True
+        )
         self.processUpload(
-            build_uploadprocessor, upload_dir, build=bar_original_build)
+            build_uploadprocessor, upload_dir, build=bar_original_build
+        )
         [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
         self.assertEqual(
-            bar_binary_pub.component.name, expected_component_name)
+            bar_binary_pub.component.name, expected_component_name
+        )
         self.assertEqual(
             bar_binary_pub.binarypackagerelease.component.name,
-            expected_component_name)
+            expected_component_name,
+        )
 
     def testBinaryPackageDefaultComponent(self):
         """The default component is universe."""
@@ -1398,17 +1529,20 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Inject an unhandled exception into the upload processor.
         class SomeException(Exception):
             pass
+
         self.useFixture(
             MonkeyPatch(
-                'lp.archiveuploader.nascentupload.NascentUpload.'
-                'from_changesfile_path',
-                FakeMethod(failure=SomeException("I am an explanation."))))
+                "lp.archiveuploader.nascentupload.NascentUpload."
+                "from_changesfile_path",
+                FakeMethod(failure=SomeException("I am an explanation.")),
+            )
+        )
 
         processor.processUploadQueue()
 
         error_report = self.oopses[0]
-        self.assertEqual('SomeException', error_report['type'])
-        self.assertIn("I am an explanation", error_report['tb_text'])
+        self.assertEqual("SomeException", error_report["type"])
+        self.assertIn("I am an explanation", error_report["tb_text"])
 
     def testOopsTimeline(self):
         """Each upload has an independent OOPS timeline."""
@@ -1421,11 +1555,14 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Inject an unhandled exception into the upload processor.
         class SomeException(Exception):
             pass
+
         self.useFixture(
             MonkeyPatch(
                 "lp.archiveuploader.nascentupload.NascentUpload."
                 "from_changesfile_path",
-                FakeMethod(failure=SomeException("I am an explanation."))))
+                FakeMethod(failure=SomeException("I am an explanation.")),
+            )
+        )
 
         processor.processUploadQueue()
 
@@ -1444,7 +1581,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Setup the test.
         self.setupBreezy()
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source first to enable the binary later:
@@ -1453,7 +1590,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it went ok:
         [msg] = pop_notifications()
         self.assertFalse(
-            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         self.publishPackage("bar", "1.0-1")
         # Clear out emails generated during upload.
         pop_notifications()
@@ -1467,12 +1605,17 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
         # Check in the queue to see if it really made it:
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         self.assertEqual(
-            queue_items.count(), 1,
+            queue_items.count(),
+            1,
             "Expected one 'bar' item in the queue, actually got %d."
-                % queue_items.count())
+            % queue_items.count(),
+        )
 
     def testXZDebUpload(self):
         """Make sure that data files compressed with xz in Debs work.
@@ -1483,7 +1626,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Setup the test.
         self.setupBreezy()
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source first to enable the binary later:
@@ -1492,7 +1635,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it went ok:
         [msg] = pop_notifications()
         self.assertFalse(
-            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         self.publishPackage("bar", "1.0-1")
         # Clear out emails generated during upload.
         pop_notifications()
@@ -1506,12 +1650,17 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
         # Check in the queue to see if it really made it:
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         self.assertEqual(
-            queue_items.count(), 1,
+            queue_items.count(),
+            1,
             "Expected one 'bar' item in the queue, actually got %d."
-                % queue_items.count())
+            % queue_items.count(),
+        )
 
     def testSourceUploadWithoutBinaryField(self):
         """Source uploads may omit the Binary field.
@@ -1521,26 +1670,28 @@ class TestUploadProcessor(TestUploadProcessorBase):
         """
         self.setupBreezy()
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         upload_dir = self.queueUpload("bar_1.0-1_no_binary_field")
         self.processUpload(uploadprocessor, upload_dir)
         [msg] = pop_notifications()
         self.assertNotIn(
-            "rejected", str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected", str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         spph = self.publishPackage("bar", "1.0-1")
 
         self.assertEqual(
-            sorted((sprf.libraryfile.filename, sprf.filetype)
-                   for sprf in spph.sourcepackagerelease.files),
-            [('bar_1.0-1.diff.gz',
-              SourcePackageFileType.DIFF),
-             ('bar_1.0-1.dsc',
-              SourcePackageFileType.DSC),
-             ('bar_1.0.orig.tar.gz',
-              SourcePackageFileType.ORIG_TARBALL),
-             ])
+            sorted(
+                (sprf.libraryfile.filename, sprf.filetype)
+                for sprf in spph.sourcepackagerelease.files
+            ),
+            [
+                ("bar_1.0-1.diff.gz", SourcePackageFileType.DIFF),
+                ("bar_1.0-1.dsc", SourcePackageFileType.DSC),
+                ("bar_1.0.orig.tar.gz", SourcePackageFileType.ORIG_TARBALL),
+            ],
+        )
 
     def testUploadResultingInNoBuilds(self):
         """Source uploads resulting in no builds.
@@ -1568,11 +1719,11 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # New 'biscuit' source building in 'm68k' only can't be accepted.
         # The archive-admin will be forced to reject it manually.
         packager = FakePackager(
-            'biscuit', '1.0', 'foo.bar@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
+            "biscuit", "1.0", "foo.bar@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+        )
         packager.buildUpstream(suite=self.breezy.name, arch="m68k")
         packager.buildSource()
-        upload = packager.uploadSourceVersion(
-            '1.0-1', auto_accept=False)
+        upload = packager.uploadSourceVersion("1.0-1", auto_accept=False)
         upload.do_accept(notify=False)
 
         # Let's commit because acceptFromQueue needs to access the
@@ -1582,17 +1733,17 @@ class TestUploadProcessor(TestUploadProcessorBase):
         upload.queue_root.acceptFromQueue()
 
         # 'biscuit_1.0-2' building on i386 get accepted and published.
-        packager.buildVersion('1.0-2', suite=self.breezy.name, arch="i386")
+        packager.buildVersion("1.0-2", suite=self.breezy.name, arch="i386")
         packager.buildSource()
-        biscuit_pub = packager.uploadSourceVersion('1.0-2')
+        biscuit_pub = packager.uploadSourceVersion("1.0-2")
         self.assertEqual(biscuit_pub.status, PackagePublishingStatus.PENDING)
 
         # A auto-accepted version building only in m68k, which also doesn't
         # exist in breezy gets rejected yet in upload time (meaning, the
         # uploader will receive a rejection email).
-        packager.buildVersion('1.0-3', suite=self.breezy.name, arch="m68k")
+        packager.buildVersion("1.0-3", suite=self.breezy.name, arch="m68k")
         packager.buildSource()
-        upload = packager.uploadSourceVersion('1.0-3', auto_accept=False)
+        upload = packager.uploadSourceVersion("1.0-3", auto_accept=False)
 
         upload.storeObjectsInDatabase()
 
@@ -1606,8 +1757,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.setupBreezy()
         # Remove our favourite uploader from the team that has
         # permissions to all components at upload time.
-        uploader = getUtility(IPersonSet).getByName('name16')
-        distro_team = getUtility(IPersonSet).getByName('ubuntu-team')
+        uploader = getUtility(IPersonSet).getByName("name16")
+        distro_team = getUtility(IPersonSet).getByName("ubuntu-team")
         self.switchToAdmin()
         uploader.leave(distro_team)
 
@@ -1615,8 +1766,10 @@ class TestUploadProcessor(TestUploadProcessorBase):
         restricted = getUtility(IComponentSet)["restricted"]
         ArchivePermission(
             archive=self.ubuntu.main_archive,
-            permission=ArchivePermissionType.UPLOAD, person=uploader,
-            component=restricted)
+            permission=ArchivePermissionType.UPLOAD,
+            person=uploader,
+            component=restricted,
+        )
         self.switchToUploader()
 
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
@@ -1626,7 +1779,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # components that they do not have direct rights to.
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        self.publishPackage('bar', '1.0-1')
+        self.publishPackage("bar", "1.0-1")
         # Clear out emails generated during upload.
         pop_notifications()
 
@@ -1637,15 +1790,18 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it failed.
         self.assertEqual(
             uploadprocessor.last_processed_upload.rejection_message,
-            "Signer is not permitted to upload to the component 'universe'.")
+            "Signer is not permitted to upload to the component 'universe'.",
+        )
 
         # Now add permission to upload "bar" for name16.
         self.switchToAdmin()
         bar_package = getUtility(ISourcePackageNameSet).queryByName("bar")
         ArchivePermission(
             archive=self.ubuntu.main_archive,
-            permission=ArchivePermissionType.UPLOAD, person=uploader,
-            sourcepackagename=bar_package)
+            permission=ArchivePermissionType.UPLOAD,
+            person=uploader,
+            sourcepackagename=bar_package,
+        )
         self.switchToUploader()
 
         # Upload the package again.
@@ -1654,16 +1810,18 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Check that it worked,
         status = uploadprocessor.last_processed_upload.queue_root.status
         self.assertEqual(
-            status, PackageUploadStatus.DONE,
-            "Expected NEW status, got %s" % status.value)
+            status,
+            PackageUploadStatus.DONE,
+            "Expected NEW status, got %s" % status.value,
+        )
 
     def testPackagesetUploadPermissions(self):
         """Test package set based upload permissions."""
         self.setupBreezy()
         # Remove our favourite uploader from the team that has
         # permissions to all components at upload time.
-        uploader = getUtility(IPersonSet).getByName('name16')
-        distro_team = getUtility(IPersonSet).getByName('ubuntu-team')
+        uploader = getUtility(IPersonSet).getByName("name16")
+        distro_team = getUtility(IPersonSet).getByName("ubuntu-team")
         self.switchToAdmin()
         uploader.leave(distro_team)
 
@@ -1671,8 +1829,10 @@ class TestUploadProcessor(TestUploadProcessorBase):
         restricted = getUtility(IComponentSet)["restricted"]
         ArchivePermission(
             archive=self.ubuntu.main_archive,
-            permission=ArchivePermissionType.UPLOAD, person=uploader,
-            component=restricted)
+            permission=ArchivePermissionType.UPLOAD,
+            person=uploader,
+            component=restricted,
+        )
         self.switchToUploader()
 
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
@@ -1682,7 +1842,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # components that they do not have direct rights to.
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        self.publishPackage('bar', '1.0-1')
+        self.publishPackage("bar", "1.0-1")
         # Clear out emails generated during upload.
         pop_notifications()
 
@@ -1693,7 +1853,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it failed.
         self.assertEqual(
             uploadprocessor.last_processed_upload.rejection_message,
-            "Signer is not permitted to upload to the component 'universe'.")
+            "Signer is not permitted to upload to the component 'universe'.",
+        )
 
         # Now put in place a package set, add 'bar' to it and define a
         # permission for the former.
@@ -1702,24 +1863,34 @@ class TestUploadProcessor(TestUploadProcessorBase):
         ap_set = getUtility(IArchivePermissionSet)
         ps_set = getUtility(IPackagesetSet)
         foo_ps = ps_set.new(
-            'foo-pkg-set', 'Packages that require special care.', uploader,
-            distroseries=self.ubuntu['grumpy'])
+            "foo-pkg-set",
+            "Packages that require special care.",
+            uploader,
+            distroseries=self.ubuntu["grumpy"],
+        )
         self.layer.txn.commit()
 
-        foo_ps.add((bar_package, ))
+        foo_ps.add((bar_package,))
         ap_set.newPackagesetUploader(
-            self.ubuntu.main_archive, uploader, foo_ps)
+            self.ubuntu.main_archive, uploader, foo_ps
+        )
         self.switchToUploader()
 
         # The uploader now does have a package set based upload permissions
         # to 'bar' in 'grumpy' but not in 'breezy'.
         self.assertTrue(
             ap_set.isSourceUploadAllowed(
-                self.ubuntu.main_archive, 'bar', uploader,
-                self.ubuntu['grumpy']))
+                self.ubuntu.main_archive,
+                "bar",
+                uploader,
+                self.ubuntu["grumpy"],
+            )
+        )
         self.assertFalse(
             ap_set.isSourceUploadAllowed(
-                self.ubuntu.main_archive, 'bar', uploader, self.breezy))
+                self.ubuntu.main_archive, "bar", uploader, self.breezy
+            )
+        )
 
         # Upload the package again.
         self.processUpload(uploadprocessor, upload_dir)
@@ -1728,29 +1899,38 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Any of the multiple notifications will do.
         msg = pop_notifications()[-1]
         self.assertEqual(
-            msg['Subject'], '[ubuntu] bar_1.0-2_source.changes (Rejected)')
+            msg["Subject"], "[ubuntu] bar_1.0-2_source.changes (Rejected)"
+        )
 
         # Grant the permissions in the proper series.
         self.switchToAdmin()
         breezy_ps = ps_set.new(
-            'foo-pkg-set-breezy', 'Packages that require special care.',
-            uploader, distroseries=self.breezy)
-        breezy_ps.add((bar_package, ))
+            "foo-pkg-set-breezy",
+            "Packages that require special care.",
+            uploader,
+            distroseries=self.breezy,
+        )
+        breezy_ps.add((bar_package,))
         ap_set.newPackagesetUploader(
-            self.ubuntu.main_archive, uploader, breezy_ps)
+            self.ubuntu.main_archive, uploader, breezy_ps
+        )
         self.switchToUploader()
         # The uploader now does have a package set based upload permission
         # to 'bar' in 'breezy'.
         self.assertTrue(
             ap_set.isSourceUploadAllowed(
-                self.ubuntu.main_archive, 'bar', uploader, self.breezy))
+                self.ubuntu.main_archive, "bar", uploader, self.breezy
+            )
+        )
         # Upload the package again.
         self.processUpload(uploadprocessor, upload_dir)
         # Check that it worked.
         status = uploadprocessor.last_processed_upload.queue_root.status
         self.assertEqual(
-            status, PackageUploadStatus.DONE,
-            "Expected DONE status, got %s" % status.value)
+            status,
+            PackageUploadStatus.DONE,
+            "Expected DONE status, got %s" % status.value,
+        )
 
     def testUploadPathErrorIntendedForHumans(self):
         # Distribution upload path errors are augmented with a hint
@@ -1764,41 +1944,53 @@ class TestUploadProcessor(TestUploadProcessorBase):
         upload_dir = self.queueUpload("bar_1.0-1", "boing")
         self.processUpload(uploadprocessor, upload_dir)
         rejection_message = (
-            uploadprocessor.last_processed_upload.rejection_message)
+            uploadprocessor.last_processed_upload.rejection_message
+        )
         self.assertEqual(
-            ["Launchpad failed to process the upload path 'boing':",
-             '',
-             "Could not find distribution 'boing'.",
-             '',
-             'It is likely that you have a configuration problem with '
-                'dput/dupload.',
-             'Please update your dput/dupload configuration and then '
-                're-upload.',
-             '',
-             'Further error processing not possible because of a critical '
-                'previous error.',
-             ],
-            rejection_message.splitlines())
+            [
+                "Launchpad failed to process the upload path 'boing':",
+                "",
+                "Could not find distribution 'boing'.",
+                "",
+                "It is likely that you have a configuration problem with "
+                "dput/dupload.",
+                "Please update your dput/dupload configuration and then "
+                "re-upload.",
+                "",
+                "Further error processing not possible because of a critical "
+                "previous error.",
+            ],
+            rejection_message.splitlines(),
+        )
 
         base_contents = [
             "Subject: [ubuntu] bar_1.0-1_source.changes (Rejected)",
             "Could not find distribution 'boing'",
             "If you don't understand why your files were rejected",
             "http://answers.launchpad.net/soyuz";,
-            ]
+        ]
         expected = []
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you are the most "
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you are the most "
                     "recent person",
-                "listed in this package's changelog."],
-            "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
-            })
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you made this upload."],
-            "recipient": "foo.bar@xxxxxxxxxxxxx",
-            })
+                    "listed in this package's changelog.",
+                ],
+                "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
+            }
+        )
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you made this "
+                    "upload."
+                ],
+                "recipient": "foo.bar@xxxxxxxxxxxxx",
+            }
+        )
         self.assertEmails(expected)
 
     def test30QuiltUploadToUnsupportingSeriesIsRejected(self):
@@ -1809,7 +2001,7 @@ class TestUploadProcessor(TestUploadProcessorBase):
         """
         self.setupBreezy()
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source.
@@ -1820,14 +2012,16 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.assertTrue(
             "bar_1.0-1.dsc: format '3.0 (quilt)' is not permitted in "
             "breezy." in str(msg),
-            "Source was not rejected properly:\n%s" % msg)
+            "Source was not rejected properly:\n%s" % msg,
+        )
 
     def test30QuiltUpload(self):
-        """Ensure that 3.0 (quilt) uploads work properly. """
+        """Ensure that 3.0 (quilt) uploads work properly."""
         self.setupBreezy(
-            permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT])
+            permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT]
+        )
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source.
@@ -1836,37 +2030,52 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it went ok:
         [msg] = pop_notifications()
         self.assertFalse(
-            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         spph = self.publishPackage("bar", "1.0-1")
 
         self.assertEqual(
-            sorted((sprf.libraryfile.filename, sprf.filetype)
-                   for sprf in spph.sourcepackagerelease.files),
-            [('bar_1.0-1.debian.tar.bz2',
-              SourcePackageFileType.DEBIAN_TARBALL),
-             ('bar_1.0-1.dsc',
-              SourcePackageFileType.DSC),
-             ('bar_1.0.orig-comp1.tar.gz',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL),
-             ('bar_1.0.orig-comp1.tar.gz.asc',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE),
-             ('bar_1.0.orig-comp2.tar.bz2',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL),
-             ('bar_1.0.orig-comp3.tar.xz',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL),
-             ('bar_1.0.orig.tar.gz',
-              SourcePackageFileType.ORIG_TARBALL),
-             ('bar_1.0.orig.tar.gz.asc',
-              SourcePackageFileType.ORIG_TARBALL_SIGNATURE),
-             ])
+            sorted(
+                (sprf.libraryfile.filename, sprf.filetype)
+                for sprf in spph.sourcepackagerelease.files
+            ),
+            [
+                (
+                    "bar_1.0-1.debian.tar.bz2",
+                    SourcePackageFileType.DEBIAN_TARBALL,
+                ),
+                ("bar_1.0-1.dsc", SourcePackageFileType.DSC),
+                (
+                    "bar_1.0.orig-comp1.tar.gz",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+                ),
+                (
+                    "bar_1.0.orig-comp1.tar.gz.asc",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
+                ),
+                (
+                    "bar_1.0.orig-comp2.tar.bz2",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+                ),
+                (
+                    "bar_1.0.orig-comp3.tar.xz",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+                ),
+                ("bar_1.0.orig.tar.gz", SourcePackageFileType.ORIG_TARBALL),
+                (
+                    "bar_1.0.orig.tar.gz.asc",
+                    SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
+                ),
+            ],
+        )
 
     def test30QuiltUploadWithSameComponentOrig(self):
-        """Ensure that 3.0 (quilt) uploads with shared component origs work.
-        """
+        """Ensure that 3.0 (quilt) uploads with shared component origs work."""
         self.setupBreezy(
-            permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT])
+            permitted_formats=[SourcePackageFormat.FORMAT_3_0_QUILT]
+        )
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the first source.
@@ -1875,40 +2084,55 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it went ok:
         [msg] = pop_notifications()
         self.assertFalse(
-            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         self.publishPackage("bar", "1.0-1")
 
         # Upload another source sharing the same (component) orig.
         upload_dir = self.queueUpload("bar_1.0-2_3.0-quilt_without_orig")
         self.assertEqual(
-            self.processUpload(uploadprocessor, upload_dir), ['accepted'])
+            self.processUpload(uploadprocessor, upload_dir), ["accepted"]
+        )
 
         queue_item = uploadprocessor.last_processed_upload.queue_root
         self.assertEqual(
-            sorted((sprf.libraryfile.filename, sprf.filetype) for sprf
-                   in queue_item.sources[0].sourcepackagerelease.files),
-            [('bar_1.0-2.debian.tar.bz2',
-              SourcePackageFileType.DEBIAN_TARBALL),
-             ('bar_1.0-2.dsc',
-              SourcePackageFileType.DSC),
-             ('bar_1.0.orig-comp1.tar.gz',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL),
-             ('bar_1.0.orig-comp1.tar.gz.asc',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE),
-             ('bar_1.0.orig-comp2.tar.bz2',
-              SourcePackageFileType.COMPONENT_ORIG_TARBALL),
-             ('bar_1.0.orig.tar.gz',
-              SourcePackageFileType.ORIG_TARBALL),
-             ('bar_1.0.orig.tar.gz.asc',
-              SourcePackageFileType.ORIG_TARBALL_SIGNATURE),
-             ])
+            sorted(
+                (sprf.libraryfile.filename, sprf.filetype)
+                for sprf in queue_item.sources[0].sourcepackagerelease.files
+            ),
+            [
+                (
+                    "bar_1.0-2.debian.tar.bz2",
+                    SourcePackageFileType.DEBIAN_TARBALL,
+                ),
+                ("bar_1.0-2.dsc", SourcePackageFileType.DSC),
+                (
+                    "bar_1.0.orig-comp1.tar.gz",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+                ),
+                (
+                    "bar_1.0.orig-comp1.tar.gz.asc",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
+                ),
+                (
+                    "bar_1.0.orig-comp2.tar.bz2",
+                    SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+                ),
+                ("bar_1.0.orig.tar.gz", SourcePackageFileType.ORIG_TARBALL),
+                (
+                    "bar_1.0.orig.tar.gz.asc",
+                    SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
+                ),
+            ],
+        )
 
     def test30NativeUpload(self):
-        """Ensure that 3.0 (native) uploads work properly. """
+        """Ensure that 3.0 (native) uploads work properly."""
         self.setupBreezy(
-            permitted_formats=[SourcePackageFormat.FORMAT_3_0_NATIVE])
+            permitted_formats=[SourcePackageFormat.FORMAT_3_0_NATIVE]
+        )
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source.
@@ -1917,22 +2141,26 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Make sure it went ok:
         [msg] = pop_notifications()
         self.assertFalse(
-            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg)
+            "rejected" in str(msg), "Failed to upload bar source:\n%s" % msg
+        )
         spph = self.publishPackage("bar", "1.0")
 
         self.assertEqual(
-            sorted((sprf.libraryfile.filename, sprf.filetype)
-                   for sprf in spph.sourcepackagerelease.files),
-            [('bar_1.0.dsc',
-              SourcePackageFileType.DSC),
-             ('bar_1.0.tar.bz2',
-              SourcePackageFileType.NATIVE_TARBALL)])
+            sorted(
+                (sprf.libraryfile.filename, sprf.filetype)
+                for sprf in spph.sourcepackagerelease.files
+            ),
+            [
+                ("bar_1.0.dsc", SourcePackageFileType.DSC),
+                ("bar_1.0.tar.bz2", SourcePackageFileType.NATIVE_TARBALL),
+            ],
+        )
 
     def test10Bzip2UploadIsRejected(self):
         """Ensure that 1.0 sources with bzip2 compression are rejected."""
         self.setupBreezy()
         self.layer.txn.commit()
-        self.options.context = 'absolutely-anything'
+        self.options.context = "absolutely-anything"
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         # Upload the source.
@@ -1943,23 +2171,26 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.assertTrue(
             "bar_1.0-1.dsc: is format 1.0 but uses bzip2 compression."
             in str(msg),
-            "Source was not rejected properly:\n%s" % msg)
+            "Source was not rejected properly:\n%s" % msg,
+        )
 
     def testUploadToWrongPocketIsRejected(self):
         # Uploads to the wrong pocket are rejected.
         self.setupBreezy()
-        breezy = self.ubuntu['breezy']
+        breezy = self.ubuntu["breezy"]
         breezy.status = SeriesStatus.CURRENT
         uploadprocessor = self.getUploadProcessor(self.layer.txn)
 
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
         rejection_message = (
-            uploadprocessor.last_processed_upload.rejection_message)
+            uploadprocessor.last_processed_upload.rejection_message
+        )
         self.assertEqual(
             "Not permitted to upload to the RELEASE pocket in a series in "
             "the 'CURRENT' state.",
-            rejection_message)
+            rejection_message,
+        )
 
         base_contents = [
             "Subject: [ubuntu] bar_1.0-1_source.changes (Rejected)",
@@ -1967,20 +2198,29 @@ class TestUploadProcessor(TestUploadProcessorBase):
             "in the 'CURRENT' state.",
             "If you don't understand why your files were rejected",
             "http://answers.launchpad.net/soyuz";,
-            ]
+        ]
         expected = []
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you are the most "
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you are the most "
                     "recent person",
-                "listed in this package's changelog."],
-            "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
-            })
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you made this upload."],
-            "recipient": "foo.bar@xxxxxxxxxxxxx",
-            })
+                    "listed in this package's changelog.",
+                ],
+                "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
+            }
+        )
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you made this "
+                    "upload."
+                ],
+                "recipient": "foo.bar@xxxxxxxxxxxxx",
+            }
+        )
         self.assertEmails(expected)
 
     def testPGPSignatureNotPreserved(self):
@@ -1990,13 +2230,17 @@ class TestUploadProcessor(TestUploadProcessorBase):
         should both have the PGP signature removed.
         """
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy='insecure')
+            policy="insecure"
+        )
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
         # ACCEPT the upload
         queue_items = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         self.assertEqual(queue_items.count(), 1)
         self.switchToAdmin()
         queue_item = queue_items[0]
@@ -2017,7 +2261,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
 
         self.assertEqual(UploadStatusEnum.REJECTED, result)
         self.assertLogContains(
-            "INFO Not sending rejection notice without a signing key.")
+            "INFO Not sending rejection notice without a signing key."
+        )
         self.assertEmailQueueLength(0)
         self.assertEqual([], self.oopses)
 
@@ -2039,22 +2284,31 @@ class TestUploadProcessor(TestUploadProcessorBase):
         base_contents = [
             "Subject: [ubuntu] netapplet_1.0-1_source.changes (Rejected)",
             "File %s/netapplet_1.0-1-signed/netapplet_1.0-1_source.changes "
-            "is signed with a deactivated key %s" % (
-                self.incoming_folder, fingerprint),
-            ]
+            "is signed with a deactivated key %s"
+            % (self.incoming_folder, fingerprint),
+        ]
         expected = []
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you are the most "
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you are the most "
                     "recent person",
-                "listed in this package's changelog."],
-            "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
-            })
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you made this upload."],
-            "recipient": "foo.bar@xxxxxxxxxxxxx",
-            })
+                    "listed in this package's changelog.",
+                ],
+                "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
+            }
+        )
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you made this "
+                    "upload."
+                ],
+                "recipient": "foo.bar@xxxxxxxxxxxxx",
+            }
+        )
         self.assertEmails(expected)
         self.assertEqual([], self.oopses)
 
@@ -2080,34 +2334,44 @@ class TestUploadProcessor(TestUploadProcessorBase):
             "Subject: [ubuntu] netapplet_1.0-1_source.changes (Rejected)",
             "File "
             "%s/netapplet_1.0-1-expiredkey/netapplet_1.0-1_source.changes "
-            "is signed with an expired key %s" % (
-                self.incoming_folder, fingerprint),
-            ]
+            "is signed with an expired key %s"
+            % (self.incoming_folder, fingerprint),
+        ]
         expected = []
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you are the most "
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you are the most "
                     "recent person",
-                "listed in this package's changelog."],
-            "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
-            })
-        expected.append({
-            "contents": base_contents + [
-                "You are receiving this email because you made this upload."],
-            "recipient": email,
-            })
+                    "listed in this package's changelog.",
+                ],
+                "recipient": "daniel.silverstone@xxxxxxxxxxxxx",
+            }
+        )
+        expected.append(
+            {
+                "contents": base_contents
+                + [
+                    "You are receiving this email because you made this "
+                    "upload."
+                ],
+                "recipient": email,
+            }
+        )
         self.assertEmails(expected)
         self.assertEqual([], self.oopses)
 
     def test_ddeb_upload_overrides(self):
         # DDEBs should always be overridden to the same values as their
         # counterpart DEB's.
-        policy = getPolicy(
-            name="sync", distro="ubuntu", distroseries="hoary")
+        policy = getPolicy(name="sync", distro="ubuntu", distroseries="hoary")
         policy.accepted_type = ArchiveUploadType.BINARY_ONLY
         uploader = NascentUpload.from_changesfile_path(
             datadir("suite/debug_1.0-1/debug_1.0-1_i386.changes"),
-            policy, DevNullLogger())
+            policy,
+            DevNullLogger(),
+        )
         uploader.process()
 
         # The package data on disk that we just uploaded has a different
@@ -2126,7 +2390,8 @@ class TestUploadProcessor(TestUploadProcessorBase):
         # Setting the Distribution.redirect_release_uploads flag causes
         # release pocket uploads to be redirected to proposed.
         uploadprocessor = self.setupBreezyAndGetUploadProcessor(
-            policy="insecure")
+            policy="insecure"
+        )
         self.switchToAdmin()
         self.ubuntu.redirect_release_uploads = True
         # Don't bother with announcements.
@@ -2134,42 +2399,62 @@ class TestUploadProcessor(TestUploadProcessorBase):
         self.switchToUploader()
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(uploadprocessor, upload_dir)
-        self.assertEmails([{
-            "contents":
-                ["Redirecting ubuntu breezy to ubuntu breezy-proposed."],
-            "recipient": None,
-            }], allow_leftover=True)
+        self.assertEmails(
+            [
+                {
+                    "contents": [
+                        "Redirecting ubuntu breezy to ubuntu breezy-proposed."
+                    ],
+                    "recipient": None,
+                }
+            ],
+            allow_leftover=True,
+        )
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.NEW, name="bar",
-            version="1.0-1", exact_match=True)
+            status=PackageUploadStatus.NEW,
+            name="bar",
+            version="1.0-1",
+            exact_match=True,
+        )
         self.assertEqual(PackagePublishingPocket.PROPOSED, queue_item.pocket)
 
         queue_item.acceptFromQueue()
         pop_notifications()
         upload_dir = self.queueUpload("bar_1.0-2")
         self.processUpload(uploadprocessor, upload_dir)
-        self.assertEmails([{
-            "contents":
-                ["Redirecting ubuntu breezy to ubuntu breezy-proposed."],
-            "recipient": None,
-            }], allow_leftover=True)
+        self.assertEmails(
+            [
+                {
+                    "contents": [
+                        "Redirecting ubuntu breezy to ubuntu breezy-proposed."
+                    ],
+                    "recipient": None,
+                }
+            ],
+            allow_leftover=True,
+        )
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.DONE, name="bar",
-            version="1.0-2", exact_match=True)
+            status=PackageUploadStatus.DONE,
+            name="bar",
+            version="1.0-2",
+            exact_match=True,
+        )
         self.assertEqual(PackagePublishingPocket.PROPOSED, queue_item.pocket)
 
     def test_source_buildinfo(self):
         # A buildinfo file is attached to the SPR.
         uploadprocessor = self.setupBreezyAndGetUploadProcessor()
         upload_dir = self.queueUpload("bar_1.0-1_buildinfo")
-        with open(os.path.join(upload_dir, "bar_1.0-1_source.buildinfo"),
-                  "rb") as f:
+        with open(
+            os.path.join(upload_dir, "bar_1.0-1_source.buildinfo"), "rb"
+        ) as f:
             buildinfo_contents = f.read()
         self.processUpload(uploadprocessor, upload_dir)
         source_pub = self.publishPackage("bar", "1.0-1")
         self.assertEqual(
             buildinfo_contents,
-            source_pub.sourcepackagerelease.buildinfo.read())
+            source_pub.sourcepackagerelease.buildinfo.read(),
+        )
 
     def test_binary_buildinfo(self):
         # A buildinfo file is attached to the BPB.
@@ -2180,26 +2465,30 @@ class TestUploadProcessor(TestUploadProcessorBase):
         [build] = source_pub.createMissingBuilds()
         self.switchToAdmin()
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1", name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
         build.buildqueue_record.markAsBuilding(self.factory.makeBuilder())
         build.updateStatus(
-            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder)
+            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder
+        )
         self.switchToUploader()
         shutil.rmtree(upload_dir)
         self.layer.txn.commit()
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary_buildinfo", queue_entry=leaf_name)
-        with open(os.path.join(upload_dir, "bar_1.0-1_i386.buildinfo"),
-                  "rb") as f:
+            "bar_1.0-1_binary_buildinfo", queue_entry=leaf_name
+        )
+        with open(
+            os.path.join(upload_dir, "bar_1.0-1_i386.buildinfo"), "rb"
+        ) as f:
             buildinfo_contents = f.read()
         self.options.context = "buildd"
         self.options.builds = True
         BuildUploadHandler(
-            uploadprocessor, self.incoming_folder, leaf_name).process()
+            uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.assertEqual(BuildStatus.FULLYBUILT, build.status)
         self.assertEqual(buildinfo_contents, build.buildinfo.read())
 
@@ -2212,32 +2501,35 @@ class TestUploadProcessor(TestUploadProcessorBase):
         [build] = source_pub.createMissingBuilds()
         self.switchToAdmin()
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1", name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
         build.buildqueue_record.markAsBuilding(self.factory.makeBuilder())
         build.updateStatus(
-            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder)
+            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder
+        )
         self.switchToUploader()
         shutil.rmtree(upload_dir)
         self.layer.txn.commit()
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary_buildinfo_indep", queue_entry=leaf_name)
-        with open(os.path.join(upload_dir, "bar_1.0-1_i386.buildinfo"),
-                  "rb") as f:
+            "bar_1.0-1_binary_buildinfo_indep", queue_entry=leaf_name
+        )
+        with open(
+            os.path.join(upload_dir, "bar_1.0-1_i386.buildinfo"), "rb"
+        ) as f:
             buildinfo_contents = f.read()
         self.options.context = "buildd"
         self.options.builds = True
         BuildUploadHandler(
-            uploadprocessor, self.incoming_folder, leaf_name).process()
+            uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.assertEqual(BuildStatus.FULLYBUILT, build.status)
         self.assertEqual(buildinfo_contents, build.buildinfo.read())
 
 
 class TestUploadHandler(TestUploadProcessorBase):
-
     def setUp(self):
         super().setUp()
         self.uploadprocessor = self.setupBreezyAndGetUploadProcessor()
@@ -2247,11 +2539,15 @@ class TestUploadHandler(TestUploadProcessorBase):
         # and a warning logged.
         upload_dir = self.queueUpload("bar_1.0-1", queue_entry="bar")
         e = self.assertRaises(
-            CannotGetBuild, BuildUploadHandler, self.uploadprocessor,
-            upload_dir, "bar")
+            CannotGetBuild,
+            BuildUploadHandler,
+            self.uploadprocessor,
+            upload_dir,
+            "bar",
+        )
         self.assertIn(
-            'Unable to extract build id from leaf name bar, skipping.',
-            str(e))
+            "Unable to extract build id from leaf name bar, skipping.", str(e)
+        )
 
     def testNoBuildEntry(self):
         # Directories with that refer to a nonexistent build
@@ -2260,9 +2556,14 @@ class TestUploadHandler(TestUploadProcessorBase):
         upload_dir = self.queueUpload("bar_1.0-1", queue_entry=cookie)
         e = self.assertRaises(
             CannotGetBuild,
-            BuildUploadHandler, self.uploadprocessor, upload_dir, cookie)
+            BuildUploadHandler,
+            self.uploadprocessor,
+            upload_dir,
+            cookie,
+        )
         self.assertIn(
-            "Unable to find PACKAGEBUILD with id 42. Skipping.", str(e))
+            "Unable to find PACKAGEBUILD with id 42. Skipping.", str(e)
+        )
 
     def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
@@ -2271,20 +2572,21 @@ class TestUploadHandler(TestUploadProcessorBase):
         # Upload a source package
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(self.uploadprocessor, upload_dir)
-        source_pub = self.publishPackage('bar', '1.0-1')
+        source_pub = self.publishPackage("bar", "1.0-1")
         [build] = source_pub.createMissingBuilds()
 
         # Move the source from the accepted queue.
         self.switchToAdmin()
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1", name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
 
         builder = self.factory.makeBuilder()
         build.buildqueue_record.markAsBuilding(builder)
         build.updateStatus(
-            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder)
+            BuildStatus.UPLOADING, builder=build.buildqueue_record.builder
+        )
         self.switchToUploader()
 
         # Upload and accept a binary for the primary archive source.
@@ -2295,18 +2597,20 @@ class TestUploadHandler(TestUploadProcessorBase):
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         self.options.builds = True
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.assertEqual(1, len(self.oopses))
         self.assertEqual(BuildStatus.FAILEDTOUPLOAD, build.status)
         self.assertEqual(builder, build.builder)
         self.assertIsNot(None, build.duration)
         log_contents = build.upload_log.read()
         self.assertIn(
-            b'ERROR Exception while processing upload ', log_contents)
-        self.assertNotIn(b'DEBUG Moving upload directory ', log_contents)
+            b"ERROR Exception while processing upload ", log_contents
+        )
+        self.assertNotIn(b"DEBUG Moving upload directory ", log_contents)
 
     def testBinaryPackageBuilds(self):
         # Properly uploaded binaries should result in the
@@ -2314,14 +2618,14 @@ class TestUploadHandler(TestUploadProcessorBase):
         # Upload a source package
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(self.uploadprocessor, upload_dir)
-        source_pub = self.publishPackage('bar', '1.0-1')
+        source_pub = self.publishPackage("bar", "1.0-1")
         [build] = source_pub.createMissingBuilds()
 
         # Move the source from the accepted queue.
         self.switchToAdmin()
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1", name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
 
         build.buildqueue_record.markAsBuilding(self.factory.makeBuilder())
@@ -2335,13 +2639,15 @@ class TestUploadHandler(TestUploadProcessorBase):
         self.layer.txn.commit()
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
-        upload_dir = self.queueUpload("bar_1.0-1_binary",
-                queue_entry=leaf_name)
-        self.options.context = 'buildd'
+        upload_dir = self.queueUpload(
+            "bar_1.0-1_binary", queue_entry=leaf_name
+        )
+        self.options.context = "buildd"
         self.options.builds = True
         pop_notifications()
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.layer.txn.commit()
         # No emails are sent on success
         self.assertEmailQueueLength(0)
@@ -2354,9 +2660,12 @@ class TestUploadHandler(TestUploadProcessorBase):
         self.switchToAdmin()
         archive = self.factory.makeArchive()
         archive.require_virtualized = False
-        build = self.factory.makeSourcePackageRecipeBuild(sourcename="bar",
-            distroseries=self.breezy, archive=archive,
-            requester=archive.owner)
+        build = self.factory.makeSourcePackageRecipeBuild(
+            sourcename="bar",
+            distroseries=self.breezy,
+            archive=archive,
+            requester=archive.owner,
+        )
         self.assertEqual(archive.owner, build.requester)
         self.switchToUploader()
         # Commit so the build cookie has the right ids.
@@ -2364,17 +2673,22 @@ class TestUploadHandler(TestUploadProcessorBase):
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         relative_path = "~%s/%s/%s/%s" % (
-            archive.owner.name, archive.name, self.breezy.distribution.name,
-            self.breezy.name)
+            archive.owner.name,
+            archive.name,
+            self.breezy.distribution.name,
+            self.breezy.name,
+        )
         self.queueUpload(
-            "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path)
-        self.options.context = 'buildd'
+            "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path
+        )
+        self.options.context = "buildd"
         self.options.builds = True
         build.updateStatus(BuildStatus.BUILDING)
         build.updateStatus(BuildStatus.UPLOADING)
         self.switchToUploader()
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.layer.txn.commit()
         return build
 
@@ -2403,20 +2717,22 @@ class TestUploadHandler(TestUploadProcessorBase):
         self.switchToAdmin()
         archive = self.factory.makeArchive()
         archive.require_virtualized = False
-        build = self.factory.makeSourcePackageRecipeBuild(sourcename="bar",
-            distroseries=self.breezy, archive=archive)
+        build = self.factory.makeSourcePackageRecipeBuild(
+            sourcename="bar", distroseries=self.breezy, archive=archive
+        )
         # Commit so the build cookie has the right ids.
         Store.of(build).flush()
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         self.options.builds = True
         build.updateStatus(BuildStatus.BUILDING)
         build.updateStatus(BuildStatus.UPLOADING)
         self.switchToUploader()
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.layer.txn.commit()
         return build
 
@@ -2432,10 +2748,10 @@ class TestUploadHandler(TestUploadProcessorBase):
         self.doFailureRecipeBuild()
         (mail,) = pop_notifications()
         # Unfold continuation lines.
-        subject = mail['Subject'].replace('\n ', ' ')
-        self.assertIn('Failed to upload', subject)
-        body = mail.get_payload(decode=True).decode('UTF-8')
-        self.assertIn('Upload Log: http', body)
+        subject = mail["Subject"].replace("\n ", " ")
+        self.assertIn("Failed to upload", subject)
+        body = mail.get_payload(decode=True).decode("UTF-8")
+        self.assertIn("Upload Log: http", body)
 
     def doDeletedRecipeBuild(self):
         # A source package recipe build will fail if the recipe is deleted.
@@ -2444,15 +2760,16 @@ class TestUploadHandler(TestUploadProcessorBase):
         self.switchToAdmin()
         archive = self.factory.makeArchive()
         archive.require_virtualized = False
-        build = self.factory.makeSourcePackageRecipeBuild(sourcename="bar",
-            distroseries=self.breezy, archive=archive)
+        build = self.factory.makeSourcePackageRecipeBuild(
+            sourcename="bar", distroseries=self.breezy, archive=archive
+        )
         self.switchToUploader()
         # Commit so the build cookie has the right ids.
         Store.of(build).flush()
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         self.options.builds = True
         build.updateStatus(BuildStatus.BUILDING)
         self.switchToAdmin()
@@ -2462,8 +2779,9 @@ class TestUploadHandler(TestUploadProcessorBase):
         # violations later.
         Store.of(build).flush()
         build.updateStatus(BuildStatus.UPLOADING)
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.layer.txn.commit()
         return build
 
@@ -2483,30 +2801,33 @@ class TestUploadHandler(TestUploadProcessorBase):
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
-        self.options.context = 'buildd'
+        self.options.context = "buildd"
         self.options.builds = True
         build.updateStatus(BuildStatus.UPLOADING)
         self.switchToAdmin()
         build.snap.destroySelf()
         self.switchToUploader()
         BuildUploadHandler(
-            self.uploadprocessor, self.incoming_folder, leaf_name).process()
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.assertFalse(
-            os.path.exists(os.path.join(self.incoming_folder, leaf_name)))
+            os.path.exists(os.path.join(self.incoming_folder, leaf_name))
+        )
         self.assertTrue(
-            os.path.exists(os.path.join(self.failed_folder, leaf_name)))
+            os.path.exists(os.path.join(self.failed_folder, leaf_name))
+        )
 
     def processUploadWithBuildStatus(self, status):
         upload_dir = self.queueUpload("bar_1.0-1")
         self.processUpload(self.uploadprocessor, upload_dir)
-        source_pub = self.publishPackage('bar', '1.0-1')
+        source_pub = self.publishPackage("bar", "1.0-1")
         [build] = source_pub.createMissingBuilds()
 
         # Move the source from the accepted queue.
         self.switchToAdmin()
         [queue_item] = self.breezy.getPackageUploads(
-            status=PackageUploadStatus.ACCEPTED,
-            version="1.0-1", name="bar")
+            status=PackageUploadStatus.ACCEPTED, version="1.0-1", name="bar"
+        )
         queue_item.setDone()
         pop_notifications()
 
@@ -2521,12 +2842,14 @@ class TestUploadHandler(TestUploadProcessorBase):
         behaviour = IBuildFarmJobBehaviour(build)
         leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
         upload_dir = self.queueUpload(
-            "bar_1.0-1_binary", queue_entry=leaf_name)
-        self.options.context = 'buildd'
+            "bar_1.0-1_binary", queue_entry=leaf_name
+        )
+        self.options.context = "buildd"
         self.options.builds = True
         self.assertEmailQueueLength(0)
-        BuildUploadHandler(self.uploadprocessor, self.incoming_folder,
-            leaf_name).process()
+        BuildUploadHandler(
+            self.uploadprocessor, self.incoming_folder, leaf_name
+        ).process()
         self.layer.txn.commit()
 
         return build, leaf_name
@@ -2536,10 +2859,12 @@ class TestUploadHandler(TestUploadProcessorBase):
         # upload directory may already be in place, but buildd-manager
         # will set the status to UPLOADING when it's handed off.
         build, leaf_name = self.processUploadWithBuildStatus(
-            BuildStatus.BUILDING)
+            BuildStatus.BUILDING
+        )
         # The build status is not changed
         self.assertTrue(
-            os.path.exists(os.path.join(self.incoming_folder, leaf_name)))
+            os.path.exists(os.path.join(self.incoming_folder, leaf_name))
+        )
         self.assertEqual(BuildStatus.BUILDING, build.status)
         self.assertLogContains("Build status is BUILDING. Ignoring.")
 
@@ -2548,21 +2873,27 @@ class TestUploadHandler(TestUploadProcessorBase):
         # should trigger a failure. We've probably raced with
         # buildd-manager due to a new and assuredly extra-special bug.
         build, leaf_name = self.processUploadWithBuildStatus(
-            BuildStatus.NEEDSBUILD)
+            BuildStatus.NEEDSBUILD
+        )
         # The build status is not changed, but the upload has moved.
         self.assertFalse(
-            os.path.exists(os.path.join(self.incoming_folder, leaf_name)))
+            os.path.exists(os.path.join(self.incoming_folder, leaf_name))
+        )
         self.assertTrue(
-            os.path.exists(os.path.join(self.failed_folder, leaf_name)))
+            os.path.exists(os.path.join(self.failed_folder, leaf_name))
+        )
         self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
         self.assertLogContains(
             "Expected build status to be UPLOADING or BUILDING, was "
-            "NEEDSBUILD.")
+            "NEEDSBUILD."
+        )
 
     def testOrderFilenames(self):
         """orderFilenames sorts _source.changes ahead of other files."""
-        self.assertEqual(["d_source.changes", "a", "b", "c"],
-            UploadHandler.orderFilenames(["b", "a", "d_source.changes", "c"]))
+        self.assertEqual(
+            ["d_source.changes", "a", "b", "c"],
+            UploadHandler.orderFilenames(["b", "a", "d_source.changes", "c"]),
+        )
 
     def testLocateChangesFiles(self):
         """locateChangesFiles should return the .changes files in a folder.
@@ -2577,10 +2908,9 @@ class TestUploadHandler(TestUploadProcessorBase):
             open("%s/3.not_changes" % testdir, "w").close()
 
             up = self.getUploadProcessor(None)
-            handler = UploadHandler(up, '.', testdir)
+            handler = UploadHandler(up, ".", testdir)
             located_files = handler.locateChangesFiles()
-            self.assertEqual(
-                located_files, ["2_source.changes", "1.changes"])
+            self.assertEqual(located_files, ["2_source.changes", "1.changes"])
         finally:
             shutil.rmtree(testdir)
 
@@ -2590,10 +2920,13 @@ class ParseBuildUploadLeafNameTests(TestCase):
 
     def test_valid(self):
         self.assertEqual(
-            ('PACKAGEBUILD', 60),
-            parse_build_upload_leaf_name("20100812-PACKAGEBUILD-60"))
+            ("PACKAGEBUILD", 60),
+            parse_build_upload_leaf_name("20100812-PACKAGEBUILD-60"),
+        )
 
     def test_invalid_jobid(self):
         self.assertRaises(
-            ValueError, parse_build_upload_leaf_name,
-            "aaba-a42-PACKAGEBUILD-abc")
+            ValueError,
+            parse_build_upload_leaf_name,
+            "aaba-a42-PACKAGEBUILD-abc",
+        )
diff --git a/lib/lp/archiveuploader/tests/test_utils.py b/lib/lp/archiveuploader/tests/test_utils.py
index e65a6b5..6829fb7 100755
--- a/lib/lp/archiveuploader/tests/test_utils.py
+++ b/lib/lp/archiveuploader/tests/test_utils.py
@@ -7,139 +7,163 @@ import os
 
 from lp.archiveuploader.tests import datadir
 from lp.archiveuploader.utils import (
+    DpkgSourceError,
+    ParseMaintError,
     determine_binary_file_type,
     determine_source_file_type,
-    DpkgSourceError,
     extract_dpkg_source,
-    ParseMaintError,
     re_isadeb,
     re_issource,
-    )
+)
 from lp.registry.interfaces.sourcepackage import SourcePackageFileType
 from lp.soyuz.enums import BinaryPackageFileType
 from lp.testing import TestCase
 
 
 class TestUtilities(TestCase):
-
     def test_determine_source_file_type(self):
         """lp.archiveuploader.utils.determine_source_file_type should work."""
 
         # .dsc -> DSC
         self.assertEqual(
-            determine_source_file_type('foo_1.0-1.dsc'),
-            SourcePackageFileType.DSC)
+            determine_source_file_type("foo_1.0-1.dsc"),
+            SourcePackageFileType.DSC,
+        )
 
         # .diff.gz -> DIFF
         self.assertEqual(
-            determine_source_file_type('foo_1.0-1.diff.gz'),
-            SourcePackageFileType.DIFF)
+            determine_source_file_type("foo_1.0-1.diff.gz"),
+            SourcePackageFileType.DIFF,
+        )
 
         # DIFFs can only be gzipped.
-        self.assertEqual(
-            determine_source_file_type('foo_1.0.diff.bz2'), None)
+        self.assertEqual(determine_source_file_type("foo_1.0.diff.bz2"), None)
 
         # Plain original tarballs can be gzipped or bzip2ed.
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.gz'),
-            SourcePackageFileType.ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig.tar.gz"),
+            SourcePackageFileType.ORIG_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.bz2'),
-            SourcePackageFileType.ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig.tar.bz2"),
+            SourcePackageFileType.ORIG_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.xz'),
-            SourcePackageFileType.ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig.tar.xz"),
+            SourcePackageFileType.ORIG_TARBALL,
+        )
 
         # Component original tarballs too.
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-foo.tar.gz'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig-foo.tar.gz"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-bar.tar.bz2'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig-bar.tar.bz2"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-bar.tar.xz'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL)
+            determine_source_file_type("foo_1.0.orig-bar.tar.xz"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL,
+        )
 
         # And Debian tarballs...
         self.assertEqual(
-            determine_source_file_type('foo_1.0-1.debian.tar.gz'),
-            SourcePackageFileType.DEBIAN_TARBALL)
+            determine_source_file_type("foo_1.0-1.debian.tar.gz"),
+            SourcePackageFileType.DEBIAN_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0-2.debian.tar.bz2'),
-            SourcePackageFileType.DEBIAN_TARBALL)
+            determine_source_file_type("foo_1.0-2.debian.tar.bz2"),
+            SourcePackageFileType.DEBIAN_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0-2.debian.tar.xz'),
-            SourcePackageFileType.DEBIAN_TARBALL)
+            determine_source_file_type("foo_1.0-2.debian.tar.xz"),
+            SourcePackageFileType.DEBIAN_TARBALL,
+        )
 
         # And even native tarballs!
         self.assertEqual(
-            determine_source_file_type('foo_1.0.tar.gz'),
-            SourcePackageFileType.NATIVE_TARBALL)
+            determine_source_file_type("foo_1.0.tar.gz"),
+            SourcePackageFileType.NATIVE_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.tar.bz2'),
-            SourcePackageFileType.NATIVE_TARBALL)
+            determine_source_file_type("foo_1.0.tar.bz2"),
+            SourcePackageFileType.NATIVE_TARBALL,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.tar.xz'),
-            SourcePackageFileType.NATIVE_TARBALL)
+            determine_source_file_type("foo_1.0.tar.xz"),
+            SourcePackageFileType.NATIVE_TARBALL,
+        )
 
         # (Component) original tarball signatures are detected for any
         # supported compression method.
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.gz.asc'),
-            SourcePackageFileType.ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig.tar.gz.asc"),
+            SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.bz2.asc'),
-            SourcePackageFileType.ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig.tar.bz2.asc"),
+            SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig.tar.xz.asc'),
-            SourcePackageFileType.ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig.tar.xz.asc"),
+            SourcePackageFileType.ORIG_TARBALL_SIGNATURE,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-foo.tar.gz.asc'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig-foo.tar.gz.asc"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-bar.tar.bz2.asc'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig-bar.tar.bz2.asc"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
+        )
         self.assertEqual(
-            determine_source_file_type('foo_1.0.orig-bar.tar.xz.asc'),
-            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE)
+            determine_source_file_type("foo_1.0.orig-bar.tar.xz.asc"),
+            SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE,
+        )
 
-        self.assertIsNone(determine_source_file_type('foo_1.0'))
-        self.assertIsNone(determine_source_file_type('foo_1.0.blah.gz'))
+        self.assertIsNone(determine_source_file_type("foo_1.0"))
+        self.assertIsNone(determine_source_file_type("foo_1.0.blah.gz"))
 
     def test_determine_binary_file_type(self):
         """lp.archiveuploader.utils.determine_binary_file_type should work."""
         # .deb -> DEB
         self.assertEqual(
-            determine_binary_file_type('foo_1.0-1_all.deb'),
-            BinaryPackageFileType.DEB)
+            determine_binary_file_type("foo_1.0-1_all.deb"),
+            BinaryPackageFileType.DEB,
+        )
 
         # .ddeb -> DDEB
         self.assertEqual(
-            determine_binary_file_type('foo_1.0-1_all.ddeb'),
-            BinaryPackageFileType.DDEB)
+            determine_binary_file_type("foo_1.0-1_all.ddeb"),
+            BinaryPackageFileType.DDEB,
+        )
 
         # .udeb -> UDEB
         self.assertEqual(
-            determine_binary_file_type('foo_1.0-1_all.udeb'),
-            BinaryPackageFileType.UDEB)
+            determine_binary_file_type("foo_1.0-1_all.udeb"),
+            BinaryPackageFileType.UDEB,
+        )
 
-        self.assertEqual(determine_binary_file_type('foo_1.0'), None)
-        self.assertEqual(determine_binary_file_type('foo_1.0.notdeb'), None)
+        self.assertEqual(determine_binary_file_type("foo_1.0"), None)
+        self.assertEqual(determine_binary_file_type("foo_1.0.notdeb"), None)
 
     def testPrefixMultilineString(self):
         """lp.archiveuploader.utils.prefix_multi_line_string should work"""
         from lp.archiveuploader.utils import prefix_multi_line_string
-        self.assertEqual("A:foo\nA:bar",
-                         prefix_multi_line_string("foo\nbar", "A:"))
-        self.assertEqual("A:foo\nA:bar",
-                         prefix_multi_line_string("foo\n\nbar", "A:"))
-        self.assertEqual("A:foo\nA:\nA:bar",
-                         prefix_multi_line_string("foo\n\nbar", "A:", 1))
+
+        self.assertEqual(
+            "A:foo\nA:bar", prefix_multi_line_string("foo\nbar", "A:")
+        )
+        self.assertEqual(
+            "A:foo\nA:bar", prefix_multi_line_string("foo\n\nbar", "A:")
+        )
+        self.assertEqual(
+            "A:foo\nA:\nA:bar", prefix_multi_line_string("foo\n\nbar", "A:", 1)
+        )
 
     def testExtractComponent(self):
-        """lp.archiveuploader.utils.extract_component_from_section should work
-        """
+        """lp.archiveuploader.utils.extract_component_from_section works."""
         from lp.archiveuploader.utils import extract_component_from_section
 
         (sect, comp) = extract_component_from_section("libs")
@@ -154,151 +178,182 @@ class TestUtilities(TestCase):
         self.assertEqual(sect, "libs")
         self.assertEqual(comp, "multiverse")
 
-        (sect, comp) = extract_component_from_section("restricted/libs",
-                                                      "multiverse")
+        (sect, comp) = extract_component_from_section(
+            "restricted/libs", "multiverse"
+        )
         self.assertEqual(sect, "libs")
         self.assertEqual(comp, "restricted")
 
     def testParseMaintainerOkay(self):
-        """lp.archiveuploader.utils.parse_maintainer should parse correctly
-        """
+        """lp.archiveuploader.utils.parse_maintainer should parse correctly"""
         from lp.archiveuploader.utils import (
             parse_maintainer_bytes,
             rfc822_encode_address,
-            )
+        )
+
         cases = (
-            (b"No\xc3\xa8l K\xc3\xb6the <noel@xxxxxxxxxx>",
-             "No\xe8l K\xf6the <noel@xxxxxxxxxx>",
-             "No\xe8l K\xf6the",
-             "noel@xxxxxxxxxx"),
-
-            (b"No\xe8l K\xf6the <noel@xxxxxxxxxx>",
-             "No\xe8l K\xf6the <noel@xxxxxxxxxx>",
-             "No\xe8l K\xf6the",
-             "noel@xxxxxxxxxx"),
-
-            ("James Troup <james@xxxxxxxxxx>",
-             "James Troup <james@xxxxxxxxxx>",
-             "James Troup",
-             "james@xxxxxxxxxx"),
-
-            ("James J. Troup <james@xxxxxxxxxx>",
-             "james@xxxxxxxxxx (James J. Troup)",
-             "James J. Troup",
-             "james@xxxxxxxxxx"),
-
-            ("James J, Troup <james@xxxxxxxxxx>",
-             "james@xxxxxxxxxx (James J, Troup)",
-             "James J, Troup",
-             "james@xxxxxxxxxx"),
-
-            ("james@xxxxxxxxxx",
-             " <james@xxxxxxxxxx>",
-             "",
-             "james@xxxxxxxxxx"),
-
-            ("<james@xxxxxxxxxx>",
-             " <james@xxxxxxxxxx>",
-             "",
-             "james@xxxxxxxxxx"),
-
-            ("Cris van Pelt <\"Cris van Pelt\"@tribe.eu.org>",
-             "Cris van Pelt <\"Cris van Pelt\"@tribe.eu.org>",
-             "Cris van Pelt",
-             "\"Cris van Pelt\"@tribe.eu.org"),
-
-            ("Zak B. Elep <zakame@xxxxxxxxxx>",
-             "zakame@xxxxxxxxxx (Zak B. Elep)",
-             "Zak B. Elep",
-             "zakame@xxxxxxxxxx"),
-
-            ("zakame@xxxxxxxxxx (Zak B. Elep)",
-             " <zakame@xxxxxxxxxx (Zak B. Elep)>",
-             "",
-             "zakame@xxxxxxxxxx (Zak B. Elep)"),
-             )
+            (
+                b"No\xc3\xa8l K\xc3\xb6the <noel@xxxxxxxxxx>",
+                "No\xe8l K\xf6the <noel@xxxxxxxxxx>",
+                "No\xe8l K\xf6the",
+                "noel@xxxxxxxxxx",
+            ),
+            (
+                b"No\xe8l K\xf6the <noel@xxxxxxxxxx>",
+                "No\xe8l K\xf6the <noel@xxxxxxxxxx>",
+                "No\xe8l K\xf6the",
+                "noel@xxxxxxxxxx",
+            ),
+            (
+                "James Troup <james@xxxxxxxxxx>",
+                "James Troup <james@xxxxxxxxxx>",
+                "James Troup",
+                "james@xxxxxxxxxx",
+            ),
+            (
+                "James J. Troup <james@xxxxxxxxxx>",
+                "james@xxxxxxxxxx (James J. Troup)",
+                "James J. Troup",
+                "james@xxxxxxxxxx",
+            ),
+            (
+                "James J, Troup <james@xxxxxxxxxx>",
+                "james@xxxxxxxxxx (James J, Troup)",
+                "James J, Troup",
+                "james@xxxxxxxxxx",
+            ),
+            (
+                "james@xxxxxxxxxx",
+                " <james@xxxxxxxxxx>",
+                "",
+                "james@xxxxxxxxxx",
+            ),
+            (
+                "<james@xxxxxxxxxx>",
+                " <james@xxxxxxxxxx>",
+                "",
+                "james@xxxxxxxxxx",
+            ),
+            (
+                'Cris van Pelt <"Cris van Pelt"@tribe.eu.org>',
+                'Cris van Pelt <"Cris van Pelt"@tribe.eu.org>',
+                "Cris van Pelt",
+                '"Cris van Pelt"@tribe.eu.org',
+            ),
+            (
+                "Zak B. Elep <zakame@xxxxxxxxxx>",
+                "zakame@xxxxxxxxxx (Zak B. Elep)",
+                "Zak B. Elep",
+                "zakame@xxxxxxxxxx",
+            ),
+            (
+                "zakame@xxxxxxxxxx (Zak B. Elep)",
+                " <zakame@xxxxxxxxxx (Zak B. Elep)>",
+                "",
+                "zakame@xxxxxxxxxx (Zak B. Elep)",
+            ),
+        )
 
         for case in cases:
-            (name, email) = parse_maintainer_bytes(case[0], 'Maintainer')
+            (name, email) = parse_maintainer_bytes(case[0], "Maintainer")
             self.assertEqual(case[2], name)
             self.assertEqual(case[3], email)
             self.assertEqual(case[1], rfc822_encode_address(name, email))
 
     def testParseMaintainerRaises(self):
         """lp.archiveuploader.utils.parse_maintainer should raise on incorrect
-           values
+        values
         """
         from lp.archiveuploader.utils import parse_maintainer_bytes
 
         cases = (
-            ("James Troup",
-             'James Troup: no @ found in email address part.'),
-
-            ("James Troup <james>",
-             'James Troup <james>: no @ found in email address part.'),
-
-            ("James Troup <james@xxxxxxxxxx",
-             ("James Troup <james@xxxxxxxxxx: "
-              "doesn't parse as a valid Maintainer field.")),
-
-            (b"No\xc3\xa8l K\xc3\xb6the",
-             (b'No\xc3\xa8l K\xc3\xb6the: '
-              b'no @ found in email address '
-              b'part.').decode('utf-8')),
+            ("James Troup", "James Troup: no @ found in email address part."),
+            (
+                "James Troup <james>",
+                "James Troup <james>: no @ found in email address part.",
+            ),
+            (
+                "James Troup <james@xxxxxxxxxx",
+                (
+                    "James Troup <james@xxxxxxxxxx: "
+                    "doesn't parse as a valid Maintainer field."
+                ),
+            ),
+            (
+                b"No\xc3\xa8l K\xc3\xb6the",
+                (
+                    b"No\xc3\xa8l K\xc3\xb6the: "
+                    b"no @ found in email address "
+                    b"part."
+                ).decode("utf-8"),
+            ),
         )
 
         for case in cases:
             try:
-                parse_maintainer_bytes(case[0], 'Maintainer')
+                parse_maintainer_bytes(case[0], "Maintainer")
             except ParseMaintError as e:
                 self.assertEqual(case[1], str(e))
             else:
-                self.fail('ParseMaintError not raised')
+                self.fail("ParseMaintError not raised")
 
 
 class TestFilenameRegularExpressions(TestCase):
-
     def test_re_isadeb(self):
         # Verify that the three binary extensions match the regexp.
-        for extension in ('deb', 'ddeb', 'udeb'):
+        for extension in ("deb", "ddeb", "udeb"):
             self.assertEqual(
-                ('foo-bar', '1.0', 'i386', extension),
-                re_isadeb.match('foo-bar_1.0_i386.%s' % extension).groups())
+                ("foo-bar", "1.0", "i386", extension),
+                re_isadeb.match("foo-bar_1.0_i386.%s" % extension).groups(),
+            )
 
         # Some other extension doesn't match.
-        self.assertIs(None, re_isadeb.match('foo-bar_1.0_i386.notdeb'))
+        self.assertIs(None, re_isadeb.match("foo-bar_1.0_i386.notdeb"))
 
         # A missing architecture also doesn't match.
-        self.assertIs(None, re_isadeb.match('foo-bar_1.0.deb'))
+        self.assertIs(None, re_isadeb.match("foo-bar_1.0.deb"))
 
     def test_re_issource(self):
         # Verify that various source extensions match the regexp.
         extensions = (
-            'dsc', 'tar.gz', 'tar.bz2', 'tar.xz', 'diff.gz',
-            'orig.tar.gz', 'orig.tar.bz2', 'orig.tar.xz',
-            'orig-bar.tar.gz', 'orig-bar.tar.bz2', 'orig-bar.tar.xz',
-            'orig-foo_bar.tar.gz',
-            'debian.tar.gz', 'debian.tar.bz2', 'debian.tar.xz')
+            "dsc",
+            "tar.gz",
+            "tar.bz2",
+            "tar.xz",
+            "diff.gz",
+            "orig.tar.gz",
+            "orig.tar.bz2",
+            "orig.tar.xz",
+            "orig-bar.tar.gz",
+            "orig-bar.tar.bz2",
+            "orig-bar.tar.xz",
+            "orig-foo_bar.tar.gz",
+            "debian.tar.gz",
+            "debian.tar.bz2",
+            "debian.tar.xz",
+        )
         for extension in extensions:
             self.assertEqual(
-                ('foo-bar', '1.0', extension),
-                re_issource.match('foo-bar_1.0.%s' % extension).groups())
+                ("foo-bar", "1.0", extension),
+                re_issource.match("foo-bar_1.0.%s" % extension).groups(),
+            )
 
         # While orig-*.tar.gz is all interpreted as extension, *orig-*.tar.gz
         # is taken to have an extension of just 'tar.gz'.
         self.assertEqual(
-            ('foo-bar', '1.0.porig-bar', 'tar.gz'),
-            re_issource.match('foo-bar_1.0.porig-bar.tar.gz').groups())
+            ("foo-bar", "1.0.porig-bar", "tar.gz"),
+            re_issource.match("foo-bar_1.0.porig-bar.tar.gz").groups(),
+        )
 
         # Some other extension doesn't match.
-        self.assertIs(None, re_issource.match('foo-bar_1.0.notdsc'))
+        self.assertIs(None, re_issource.match("foo-bar_1.0.notdsc"))
 
         # A badly formatted name also doesn't match.
-        self.assertIs(None, re_issource.match('foo-bar.dsc'))
+        self.assertIs(None, re_issource.match("foo-bar.dsc"))
 
         # bzip2/xz compression for files which must be gzipped is invalid.
-        self.assertIs(None, re_issource.match('foo-bar_1.0.diff.bz2'))
-        self.assertIs(None, re_issource.match('foo-bar_1.0.diff.xz'))
+        self.assertIs(None, re_issource.match("foo-bar_1.0.diff.bz2"))
+        self.assertIs(None, re_issource.match("foo-bar_1.0.diff.xz"))
 
 
 class TestExtractDpkgSource(TestCase):
@@ -309,17 +364,22 @@ class TestExtractDpkgSource(TestCase):
         # path.
         temp_dir = self.makeTemporaryDirectory()
         extract_dpkg_source(
-            datadir(os.path.join('suite', 'bar_1.0-1', 'bar_1.0-1.dsc')),
-            temp_dir)
+            datadir(os.path.join("suite", "bar_1.0-1", "bar_1.0-1.dsc")),
+            temp_dir,
+        )
         self.assertEqual(["bar-1.0"], os.listdir(temp_dir))
         self.assertContentEqual(
             ["THIS_IS_BAR", "debian"],
-            os.listdir(os.path.join(temp_dir, "bar-1.0")))
+            os.listdir(os.path.join(temp_dir, "bar-1.0")),
+        )
 
     def test_nonexistent(self):
         temp_dir = self.makeTemporaryDirectory()
         err = self.assertRaises(
-            DpkgSourceError, extract_dpkg_source,
-            "thispathdoesntexist", temp_dir)
+            DpkgSourceError,
+            extract_dpkg_source,
+            "thispathdoesntexist",
+            temp_dir,
+        )
         self.assertNotEqual(0, err.result)
         self.assertEqual("", err.output)
diff --git a/lib/lp/archiveuploader/uploadpolicy.py b/lib/lp/archiveuploader/uploadpolicy.py
index f27bbdf..1448e31 100644
--- a/lib/lp/archiveuploader/uploadpolicy.py
+++ b/lib/lp/archive