[Merge] ~cjwatson/launchpad:black-soyuz into launchpad:master

Colin Watson has proposed merging ~cjwatson/launchpad:black-soyuz into launchpad:master.

Commit message:
lp.soyuz: Apply black

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/427279
-- 
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-soyuz into launchpad:master.
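
For reviewers tracing history after this lands: the new entry in .git-blame-ignore-revs lets "git blame" skip this reformatting commit. A minimal sketch, assuming git 2.23 or later; the target file below is just an arbitrary example taken from this diff:

    # One-off: skip the listed formatting commits when blaming a file
    git blame --ignore-revs-file .git-blame-ignore-revs lib/lp/soyuz/adapters/overrides.py

    # Or configure it persistently for this clone
    git config blame.ignoreRevsFile .git-blame-ignore-revs
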
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index a4e636a..4961658 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -86,3 +86,5 @@ ed7d7b97b8fb4ebe92799f922b0fa9c4bd1714e8
 4719b7aa672a2674c7fdbbde58772871b77c3301
 # apply black to lp.snappy
 cf7c6a08bd010dd260bff4690d64479fadf37e67
+# apply black to lp.soyuz
+5a98ef6df022b52adc06787b56f2482bc4a28a3e
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a665559..b0c79ac 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -57,6 +57,7 @@ repos:
             |scripts
             |services
             |snappy
+            |soyuz
           )/
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
@@ -90,6 +91,7 @@ repos:
             |scripts
             |services
             |snappy
+            |soyuz
           )/
     -   id: isort
         alias: isort-black
@@ -113,6 +115,7 @@ repos:
             |scripts
             |services
             |snappy
+            |soyuz
           )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
diff --git a/lib/lp/soyuz/adapters/archivedependencies.py b/lib/lp/soyuz/adapters/archivedependencies.py
index 48626f9..ceb4ae4 100644
--- a/lib/lp/soyuz/adapters/archivedependencies.py
+++ b/lib/lp/soyuz/adapters/archivedependencies.py
@@ -25,14 +25,14 @@ Auxiliary functions exposed for testing purposes:
 """
 
 __all__ = [
-    'default_component_dependency_name',
-    'default_pocket_dependency',
-    'expand_dependencies',
-    'get_components_for_context',
-    'get_primary_current_component',
-    'get_sources_list_for_building',
-    'pocket_dependencies',
-    ]
+    "default_component_dependency_name",
+    "default_pocket_dependency",
+    "expand_dependencies",
+    "get_components_for_context",
+    "get_primary_current_component",
+    "get_sources_list_for_building",
+    "pocket_dependencies",
+]
 
 import base64
 import logging
@@ -46,69 +46,57 @@ from zope.security.proxy import removeSecurityProxy
 
 from lp.app.errors import NotFoundError
 from lp.registry.interfaces.distroseriesparent import IDistroSeriesParentSet
-from lp.registry.interfaces.pocket import (
-    PackagePublishingPocket,
-    pocketsuffix,
-    )
-from lp.services.gpg.interfaces import (
-    GPGKeyNotFoundError,
-    IGPGHandler,
-    )
+from lp.registry.interfaces.pocket import PackagePublishingPocket, pocketsuffix
+from lp.services.gpg.interfaces import GPGKeyNotFoundError, IGPGHandler
 from lp.services.timeout import default_timeout
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import ArchivePurpose, PackagePublishingStatus
 from lp.soyuz.interfaces.archive import ALLOW_RELEASE_BUILDS
 from lp.soyuz.interfaces.component import IComponentSet
 
-
 component_dependencies = {
-    'main': ['main'],
-    'restricted': ['main', 'restricted'],
-    'universe': ['main', 'universe'],
-    'multiverse': ['main', 'restricted', 'universe', 'multiverse'],
-    'partner': ['partner'],
-    }
+    "main": ["main"],
+    "restricted": ["main", "restricted"],
+    "universe": ["main", "universe"],
+    "multiverse": ["main", "restricted", "universe", "multiverse"],
+    "partner": ["partner"],
+}
 
 # If strict_supported_component_dependencies is disabled, treat the
 # left-hand components like the right-hand components for the purposes of
 # finding component dependencies.
 lax_component_map = {
-    'main': 'universe',
-    'restricted': 'multiverse',
-    }
+    "main": "universe",
+    "restricted": "multiverse",
+}
 
 pocket_dependencies = {
-    PackagePublishingPocket.RELEASE: (
-        PackagePublishingPocket.RELEASE,
-        ),
+    PackagePublishingPocket.RELEASE: (PackagePublishingPocket.RELEASE,),
     PackagePublishingPocket.SECURITY: (
         PackagePublishingPocket.RELEASE,
         PackagePublishingPocket.SECURITY,
-        ),
+    ),
     PackagePublishingPocket.UPDATES: (
         PackagePublishingPocket.RELEASE,
         PackagePublishingPocket.SECURITY,
         PackagePublishingPocket.UPDATES,
-        ),
+    ),
     PackagePublishingPocket.BACKPORTS: (
         PackagePublishingPocket.RELEASE,
         PackagePublishingPocket.SECURITY,
         PackagePublishingPocket.UPDATES,
         PackagePublishingPocket.BACKPORTS,
-        ),
+    ),
     PackagePublishingPocket.PROPOSED: (
         PackagePublishingPocket.RELEASE,
         PackagePublishingPocket.SECURITY,
         PackagePublishingPocket.UPDATES,
         PackagePublishingPocket.PROPOSED,
-        ),
-    }
+    ),
+}
 
 default_pocket_dependency = PackagePublishingPocket.UPDATES
 
-default_component_dependency_name = 'multiverse'
+default_component_dependency_name = "multiverse"
 
 
 def get_components_for_context(component, distroseries, pocket):
@@ -123,7 +111,7 @@ def get_components_for_context(component, distroseries, pocket):
     # component in order to cope with component changes occurring
     # across distroseries. See bug #198936 for further information.
     if pocket == PackagePublishingPocket.BACKPORTS:
-        return component_dependencies['multiverse']
+        return component_dependencies["multiverse"]
 
     component_name = component.name
     if not distroseries.strict_supported_component_dependencies:
@@ -141,19 +129,26 @@ def get_primary_current_component(archive, distroseries, sourcepackagename):
         ancestry = None
     else:
         ancestry = primary_archive.getPublishedSources(
-            name=sourcepackagename,
-            distroseries=distroseries, exact_match=True).first()
+            name=sourcepackagename, distroseries=distroseries, exact_match=True
+        ).first()
 
     if ancestry is not None:
         return ancestry.component
     else:
-        return getUtility(IComponentSet)['universe']
-
-
-def expand_dependencies(archive, distro_arch_series, pocket, component,
-                        source_package_name, archive_dependencies,
-                        tools_source=None, tools_fingerprint=None,
-                        logger=None):
+        return getUtility(IComponentSet)["universe"]
+
+
+def expand_dependencies(
+    archive,
+    distro_arch_series,
+    pocket,
+    component,
+    source_package_name,
+    archive_dependencies,
+    tools_source=None,
+    tools_fingerprint=None,
+    logger=None,
+):
     """Return the set of dependency archives, pockets and components.
 
     :param archive: the context `IArchive`.
@@ -179,12 +174,19 @@ def expand_dependencies(archive, distro_arch_series, pocket, component,
     if archive.purpose in ALLOW_RELEASE_BUILDS:
         for expanded_pocket in pocket_dependencies[pocket]:
             deps.append(
-                (archive, distro_arch_series, expanded_pocket,
-                 get_components_for_context(
-                     component, distro_series, expanded_pocket)))
+                (
+                    archive,
+                    distro_arch_series,
+                    expanded_pocket,
+                    get_components_for_context(
+                        component, distro_series, expanded_pocket
+                    ),
+                )
+            )
 
     primary_component = get_primary_current_component(
-        archive, distro_series, source_package_name)
+        archive, distro_series, source_package_name
+    )
     # Consider user-selected archive dependencies.
     for archive_dependency in archive_dependencies:
         # When the dependency component is undefined, we should use
@@ -195,33 +197,45 @@ def expand_dependencies(archive, distro_arch_series, pocket, component,
         else:
             archive_component = archive_dependency.component
         components = get_components_for_context(
-            archive_component, distro_series, archive_dependency.pocket)
+            archive_component, distro_series, archive_dependency.pocket
+        )
         # Follow pocket dependencies.
         for expanded_pocket in pocket_dependencies[archive_dependency.pocket]:
             deps.append(
-                (archive_dependency.dependency, distro_arch_series,
-                 expanded_pocket, components))
+                (
+                    archive_dependency.dependency,
+                    distro_arch_series,
+                    expanded_pocket,
+                    components,
+                )
+            )
 
     # Consider build tools archive dependencies.
     if tools_source is not None:
         try:
             deps.append(
-                (tools_source % {'series': distro_series.name},
-                 tools_fingerprint))
+                (
+                    tools_source % {"series": distro_series.name},
+                    tools_fingerprint,
+                )
+            )
         except Exception:
             # Someone messed up the configuration; don't add it.
             if logger is not None:
                 logger.error(
                     "Exception processing build tools sources.list entry:\n%s"
-                    % traceback.format_exc())
+                    % traceback.format_exc()
+                )
 
     # Consider primary archive dependency override. Add the default
     # primary archive dependencies if it's not present.
     if not any(
-            archive_dependency.dependency == archive.distribution.main_archive
-            for archive_dependency in archive_dependencies):
+        archive_dependency.dependency == archive.distribution.main_archive
+        for archive_dependency in archive_dependencies
+    ):
         primary_dependencies = _get_default_primary_dependencies(
-            archive, distro_arch_series, component, pocket)
+            archive, distro_arch_series, component, pocket
+        )
         deps.extend(primary_dependencies)
 
     # Add dependencies for overlay archives defined in DistroSeriesParent.
@@ -232,15 +246,17 @@ def expand_dependencies(archive, distro_arch_series, pocket, component,
     for dsp in dsp_set.getFlattenedOverlayTree(distro_series):
         try:
             dep_arch_series = dsp.parent_series.getDistroArchSeries(
-                distro_arch_series.architecturetag)
+                distro_arch_series.architecturetag
+            )
             dep_archive = dsp.parent_series.distribution.main_archive
             components = get_components_for_context(
-                dsp.component, dep_arch_series.distroseries, dsp.pocket)
+                dsp.component, dep_arch_series.distroseries, dsp.pocket
+            )
             # Follow pocket dependencies.
             for expanded_pocket in pocket_dependencies[dsp.pocket]:
                 deps.append(
-                    (dep_archive, dep_arch_series, expanded_pocket,
-                     components))
+                    (dep_archive, dep_arch_series, expanded_pocket, components)
+                )
         except NotFoundError:
             pass
 
@@ -248,10 +264,15 @@ def expand_dependencies(archive, distro_arch_series, pocket, component,
 
 
 @defer.inlineCallbacks
-def get_sources_list_for_building(behaviour, distroarchseries,
-                                  sourcepackagename, archive_dependencies=None,
-                                  tools_source=None, tools_fingerprint=None,
-                                  logger=None):
+def get_sources_list_for_building(
+    behaviour,
+    distroarchseries,
+    sourcepackagename,
+    archive_dependencies=None,
+    tools_source=None,
+    tools_fingerprint=None,
+    logger=None,
+):
     """Return sources.list entries and keys required to build the given item.
 
     The sources.list entries are returned in the order that is most useful:
@@ -283,13 +304,22 @@ def get_sources_list_for_building(behaviour, distroarchseries,
     if archive_dependencies is None:
         archive_dependencies = build.archive.dependencies
     deps = expand_dependencies(
-        build.archive, distroarchseries, build.pocket,
-        build.current_component, sourcepackagename, archive_dependencies,
-        tools_source=tools_source, tools_fingerprint=tools_fingerprint,
-        logger=logger)
-    sources_list_lines, trusted_keys = (
-        yield _get_sources_list_for_dependencies(
-            behaviour, deps, logger=logger))
+        build.archive,
+        distroarchseries,
+        build.pocket,
+        build.current_component,
+        sourcepackagename,
+        archive_dependencies,
+        tools_source=tools_source,
+        tools_fingerprint=tools_fingerprint,
+        logger=logger,
+    )
+    (
+        sources_list_lines,
+        trusted_keys,
+    ) = yield _get_sources_list_for_dependencies(
+        behaviour, deps, logger=logger
+    )
 
     external_dep_lines = []
     # Append external sources.list lines for this build if specified.  No
@@ -306,7 +336,8 @@ def get_sources_list_for_building(behaviour, distroarchseries,
         if dependencies is not None:
             for archive_dep in dependencies.splitlines():
                 line = archive_dep % (
-                    {'series': distroarchseries.distroseries.name})
+                    {"series": distroarchseries.distroseries.name}
+                )
                 external_dep_lines.append(line)
     except Exception:
         # Malformed external dependencies can incapacitate the build farm
@@ -314,8 +345,9 @@ def get_sources_list_for_building(behaviour, distroarchseries,
         # Log the error, and disable the PPA.
         logger = logging.getLogger()
         logger.error(
-            'Exception during external dependency processing:\n%s'
-            % traceback.format_exc())
+            "Exception during external dependency processing:\n%s"
+            % traceback.format_exc()
+        )
         # Disable the PPA if needed. This will suspend all the pending binary
         # builds associated with the problematic PPA.
         if build.archive.enabled == True:
@@ -327,7 +359,8 @@ def get_sources_list_for_building(behaviour, distroarchseries,
     # after the archive itself, but before any other dependencies.
     return (
         [sources_list_lines[0]] + external_dep_lines + sources_list_lines[1:],
-        trusted_keys)
+        trusted_keys,
+    )
 
 
 def _has_published_binaries(archive, distroarchseries, pocket):
@@ -337,14 +370,17 @@ def _has_published_binaries(archive, distroarchseries, pocket):
         return True
 
     published_binaries = archive.getAllPublishedBinaries(
-        distroarchseries=distroarchseries, pocket=pocket,
-        status=PackagePublishingStatus.PUBLISHED)
+        distroarchseries=distroarchseries,
+        pocket=pocket,
+        status=PackagePublishingStatus.PUBLISHED,
+    )
     return not published_binaries.is_empty()
 
 
 @defer.inlineCallbacks
-def _get_binary_sources_list_line(behaviour, archive, distroarchseries, pocket,
-                                  components):
+def _get_binary_sources_list_line(
+    behaviour, archive, distroarchseries, pocket, components
+):
     """Return the corresponding binary sources_list line."""
     # Encode the private PPA repository password in the
     # sources_list line. Note that the buildlog will be
@@ -358,7 +394,7 @@ def _get_binary_sources_list_line(behaviour, archive, distroarchseries, pocket,
         url = archive.archive_url
 
     suite = distroarchseries.distroseries.name + pocketsuffix[pocket]
-    return 'deb %s %s %s' % (url, suite, ' '.join(components))
+    return "deb %s %s %s" % (url, suite, " ".join(components))
 
 
 @defer.inlineCallbacks
@@ -390,21 +426,28 @@ def _get_sources_list_for_dependencies(behaviour, dependencies, logger=None):
         else:
             archive, distro_arch_series, pocket, components = dep
             has_published_binaries = _has_published_binaries(
-                archive, distro_arch_series, pocket)
+                archive, distro_arch_series, pocket
+            )
             if not has_published_binaries:
                 continue
             archive_components = {
                 component.name
                 for component in archive.getComponentsForSeries(
-                    distro_arch_series.distroseries)}
+                    distro_arch_series.distroseries
+                )
+            }
             components = [
-                component for component in components
-                if component in archive_components]
+                component
+                for component in components
+                if component in archive_components
+            ]
             sources_list_line = yield _get_binary_sources_list_line(
-                behaviour, archive, distro_arch_series, pocket, components)
+                behaviour, archive, distro_arch_series, pocket, components
+            )
             sources_list_lines.append(sources_list_line)
             fingerprint = archive.signing_key_fingerprint
         if fingerprint is not None and fingerprint not in trusted_keys:
+
             def get_key():
                 with default_timeout(15.0):
                     try:
@@ -420,14 +463,16 @@ def _get_sources_list_for_dependencies(behaviour, dependencies, logger=None):
 
             key = yield deferToThread(get_key)
             if key is not None:
-                trusted_keys[fingerprint] = (
-                    base64.b64encode(key.export()).decode("ASCII"))
+                trusted_keys[fingerprint] = base64.b64encode(
+                    key.export()
+                ).decode("ASCII")
 
     return (sources_list_lines, [v for k, v in sorted(trusted_keys.items())])
 
 
-def _get_default_primary_dependencies(archive, distro_arch_series, component,
-                                      pocket):
+def _get_default_primary_dependencies(
+    archive, distro_arch_series, component, pocket
+):
     """Return the default primary dependencies for a given context.
 
     :param archive: the context `IArchive`.
@@ -440,16 +485,23 @@ def _get_default_primary_dependencies(archive, distro_arch_series, component,
     """
     if archive.purpose in ALLOW_RELEASE_BUILDS:
         component = getUtility(IComponentSet)[
-            default_component_dependency_name]
+            default_component_dependency_name
+        ]
         pocket = default_pocket_dependency
     primary_components = get_components_for_context(
-        component, distro_arch_series.distroseries, pocket)
+        component, distro_arch_series.distroseries, pocket
+    )
     primary_pockets = pocket_dependencies[pocket]
 
     primary_dependencies = []
     for pocket in primary_pockets:
         primary_dependencies.append(
-            (archive.distribution.main_archive, distro_arch_series, pocket,
-             primary_components))
+            (
+                archive.distribution.main_archive,
+                distro_arch_series,
+                pocket,
+                primary_components,
+            )
+        )
 
     return primary_dependencies
diff --git a/lib/lp/soyuz/adapters/archivesourcepublication.py b/lib/lp/soyuz/adapters/archivesourcepublication.py
index ee5ee6b..d9d106c 100644
--- a/lib/lp/soyuz/adapters/archivesourcepublication.py
+++ b/lib/lp/soyuz/adapters/archivesourcepublication.py
@@ -9,8 +9,8 @@ references needed to present them properly in the PPA pages.
 """
 
 __all__ = [
-    'ArchiveSourcePublications',
-    ]
+    "ArchiveSourcePublications",
+]
 
 from collections import defaultdict
 
@@ -24,7 +24,7 @@ from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.soyuz.interfaces.publishing import (
     IPublishingSet,
     ISourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.interfaces.sourcepackagerelease import ISourcePackageRelease
 
 
@@ -63,11 +63,13 @@ class ArchiveSourcePublication:
     def sourcepackagerelease(self):
         if self._changesfile is not None:
             changesfile = ProxiedLibraryFileAlias(
-                self._changesfile, self.context.archive)
+                self._changesfile, self.context.archive
+            )
         else:
             changesfile = None
         return ArchiveSourcePackageRelease(
-            self.context.sourcepackagerelease, changesfile)
+            self.context.sourcepackagerelease, changesfile
+        )
 
     def getStatusSummaryForBuilds(self):
         """See `ISourcePackagePublishingHistory`."""
@@ -90,7 +92,8 @@ class ArchiveSourcePublications:
         """Map changesfiles by their corresponding source publications."""
         publishing_set = getUtility(IPublishingSet)
         changesfile_set = publishing_set.getChangesFilesForSources(
-            self._source_publications)
+            self._source_publications
+        )
         changesfile_mapping = {}
         for entry in changesfile_set:
             source, queue_record, source_release, changesfile, content = entry
@@ -116,8 +119,11 @@ class ArchiveSourcePublications:
         spn_ids = set()
         for spph in self._source_publications:
             spn_ids.add(spph.sourcepackagerelease.sourcepackagenameID)
-        list(IStore(SourcePackageName).find(SourcePackageName,
-            SourcePackageName.id.is_in(spn_ids)))
+        list(
+            IStore(SourcePackageName).find(
+                SourcePackageName, SourcePackageName.id.is_in(spn_ids)
+            )
+        )
         DistroSeries.setNewerDistroSeriesVersions(self._source_publications)
         # Load all the build status summaries at once.
         publishing_set = getUtility(IPublishingSet)
@@ -128,14 +134,17 @@ class ArchiveSourcePublications:
         for archive, pub_ids in archive_pub_ids.items():
             status_summaries.update(
                 publishing_set.getBuildStatusSummariesForSourceIdsAndArchive(
-                    pub_ids, archive))
+                    pub_ids, archive
+                )
+            )
 
         # Build the decorated object with the information we have.
         for pub in self._source_publications:
             changesfile = changesfiles_by_source.get(pub, None)
             status_summary = status_summaries[pub.id]
             complete_pub = ArchiveSourcePublication(
-                pub, changesfile=changesfile, status_summary=status_summary)
+                pub, changesfile=changesfile, status_summary=status_summary
+            )
             results.append(complete_pub)
 
         return iter(results)
diff --git a/lib/lp/soyuz/adapters/buildarch.py b/lib/lp/soyuz/adapters/buildarch.py
index 982305a..95345e3 100644
--- a/lib/lp/soyuz/adapters/buildarch.py
+++ b/lib/lp/soyuz/adapters/buildarch.py
@@ -2,8 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'determine_architectures_to_build',
-    ]
+    "determine_architectures_to_build",
+]
 
 
 import os
@@ -29,9 +29,10 @@ class DpkgArchitectureCache:
             action = timeline.start(
                 "dpkg-architecture",
                 "-i%s DEB_HOST_ARCH=%s" % (wildcard, arch),
-                allow_nested=True)
+                allow_nested=True,
+            )
             try:
-                ret = (subprocess.call(command, env=env) == 0)
+                ret = subprocess.call(command, env=env) == 0
             finally:
                 action.finish()
             self._matches[(arch, wildcard)] = ret
@@ -56,11 +57,18 @@ def resolve_arch_spec(hintlist, valid_archs):
     if hint_archs == {"all"}:
         return set(), True
     return (
-        set(dpkg_architecture.findAllMatches(valid_archs, hint_archs)), False)
-
-
-def determine_architectures_to_build(hint_list, indep_hint_list, need_archs,
-                                     nominated_arch_indep, need_arch_indep):
+        set(dpkg_architecture.findAllMatches(valid_archs, hint_archs)),
+        False,
+    )
+
+
+def determine_architectures_to_build(
+    hint_list,
+    indep_hint_list,
+    need_archs,
+    nominated_arch_indep,
+    need_arch_indep,
+):
     """Return a set of architectures to build.
 
     :param hint_list: a string of the architectures this source package
diff --git a/lib/lp/soyuz/adapters/copypolicy.py b/lib/lp/soyuz/adapters/copypolicy.py
index 3b11921..0e78ced 100644
--- a/lib/lp/soyuz/adapters/copypolicy.py
+++ b/lib/lp/soyuz/adapters/copypolicy.py
@@ -9,8 +9,7 @@ decided at runtime, such as whether to auto-accept a package or not.
 
 # All of this module's functionality can be reached through the
 # ICopyPolicy adapter.
-__all__ = [
-    ]
+__all__ = []
 
 
 from zope.interface import implementer
@@ -47,10 +46,12 @@ class BasicCopyPolicy:
         auto_approve_pockets = (
             PackagePublishingPocket.RELEASE,
             PackagePublishingPocket.PROPOSED,
-            )
-        if (pocket in auto_approve_pockets and
-            distroseries.isUnstable() and
-            distroseries.status != SeriesStatus.FROZEN):
+        )
+        if (
+            pocket in auto_approve_pockets
+            and distroseries.isUnstable()
+            and distroseries.status != SeriesStatus.FROZEN
+        ):
             return True
 
         return False
@@ -82,7 +83,7 @@ class MassSyncCopyPolicy(BasicCopyPolicy):
 policies = [
     InsecureCopyPolicy,
     MassSyncCopyPolicy,
-    ]
+]
 
 
 enum_to_policy = {policy.enum_value: policy() for policy in policies}
diff --git a/lib/lp/soyuz/adapters/gomodparser.py b/lib/lp/soyuz/adapters/gomodparser.py
index 715d7c1..fc8bd15 100644
--- a/lib/lp/soyuz/adapters/gomodparser.py
+++ b/lib/lp/soyuz/adapters/gomodparser.py
@@ -11,8 +11,8 @@ __all__ = [
     "parse_go_mod",
 ]
 
-from contextlib import contextmanager
 import string
+from contextlib import contextmanager
 from typing import Iterator
 
 import pyparsing as pp
diff --git a/lib/lp/soyuz/adapters/overrides.py b/lib/lp/soyuz/adapters/overrides.py
index f78c769..31153d0 100644
--- a/lib/lp/soyuz/adapters/overrides.py
+++ b/lib/lp/soyuz/adapters/overrides.py
@@ -4,29 +4,21 @@
 """Generic Override Policy classes."""
 
 __all__ = [
-    'BinaryOverride',
-    'ConstantOverridePolicy',
-    'FallbackOverridePolicy',
-    'FromExistingOverridePolicy',
-    'FromSourceOverridePolicy',
-    'IBinaryOverride',
-    'ISourceOverride',
-    'SourceOverride',
-    'UnknownOverridePolicy',
-    ]
-
-
-from storm.expr import (
-    And,
-    Desc,
-    Or,
-    )
+    "BinaryOverride",
+    "ConstantOverridePolicy",
+    "FallbackOverridePolicy",
+    "FromExistingOverridePolicy",
+    "FromSourceOverridePolicy",
+    "IBinaryOverride",
+    "ISourceOverride",
+    "SourceOverride",
+    "UnknownOverridePolicy",
+]
+
+
+from storm.expr import And, Desc, Or
 from zope.component import getUtility
-from zope.interface import (
-    Attribute,
-    implementer,
-    Interface,
-    )
+from zope.interface import Attribute, Interface, implementer
 from zope.security.proxy import isinstance as zope_isinstance
 
 from lp.registry.model.sourcepackagename import SourcePackageName
@@ -43,7 +35,7 @@ from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
-    )
+)
 from lp.soyuz.model.section import Section
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
@@ -70,16 +62,19 @@ class IBinaryOverride(IOverride):
     """Binary-specific overrides on a publication."""
 
     binary_package_name = Attribute(
-        "The IBinaryPackageName that's being overridden")
-    architecture_tag = Attribute(
-        "The architecture tag for the publication")
+        "The IBinaryPackageName that's being overridden"
+    )
+    architecture_tag = Attribute("The architecture tag for the publication")
     priority = Attribute(
-        "The PackagePublishingPriority that's being overridden")
+        "The PackagePublishingPriority that's being overridden"
+    )
     phased_update_percentage = Attribute(
-        "The phased update percentage that's being overridden")
+        "The phased update percentage that's being overridden"
+    )
 
     source_override = Attribute(
-        "A source override from which to determine defaults.")
+        "A source override from which to determine defaults."
+    )
 
 
 class Override:
@@ -98,7 +93,8 @@ class Override:
         # Prevent people getting very confused with these new classes,
         # should their instances ever be put in a dict or set.
         raise NotImplementedError(
-            "%s objects are not hashable." % self.__class__.__name__)
+            "%s objects are not hashable." % self.__class__.__name__
+        )
 
 
 @implementer(ISourceOverride)
@@ -107,51 +103,74 @@ class SourceOverride(Override):
 
     def __eq__(self, other):
         return (
-            self.__class__ == other.__class__ and
-            self.component == other.component and
-            self.section == other.section and
-            self.version == other.version and
-            self.new == other.new)
+            self.__class__ == other.__class__
+            and self.component == other.component
+            and self.section == other.section
+            and self.version == other.version
+            and self.new == other.new
+        )
 
     def __repr__(self):
-        return (
-            "<%s at %x component=%r section=%r version=%r new=%r>" %
-            (self.__class__.__name__, id(self), self.component, self.section,
-             self.version, self.new))
+        return "<%s at %x component=%r section=%r version=%r new=%r>" % (
+            self.__class__.__name__,
+            id(self),
+            self.component,
+            self.section,
+            self.version,
+            self.new,
+        )
 
 
 @implementer(IBinaryOverride)
 class BinaryOverride(Override):
     """See `IBinaryOverride`."""
 
-    def __init__(self, component=None, section=None, priority=None,
-                 phased_update_percentage=None, version=None, new=None,
-                 source_override=None):
+    def __init__(
+        self,
+        component=None,
+        section=None,
+        priority=None,
+        phased_update_percentage=None,
+        version=None,
+        new=None,
+        source_override=None,
+    ):
         super().__init__(
-            component=component, section=section, version=version, new=new)
+            component=component, section=section, version=version, new=new
+        )
         self.priority = priority
         self.phased_update_percentage = phased_update_percentage
         self.source_override = source_override
 
     def __eq__(self, other):
         return (
-            self.__class__ == other.__class__ and
-            self.component == other.component and
-            self.section == other.section and
-            self.priority == other.priority and
-            self.phased_update_percentage == other.phased_update_percentage and
-            self.version == other.version and
-            self.new == other.new and
-            self.source_override == other.source_override)
+            self.__class__ == other.__class__
+            and self.component == other.component
+            and self.section == other.section
+            and self.priority == other.priority
+            and self.phased_update_percentage == other.phased_update_percentage
+            and self.version == other.version
+            and self.new == other.new
+            and self.source_override == other.source_override
+        )
 
     def __repr__(self):
         return (
             "<%s at %x component=%r section=%r priority=%r "
             "phased_update_percentage=%r version=%r new=%r "
-            "source_override=%r>" %
-            (self.__class__.__name__, id(self), self.component, self.section,
-             self.priority, self.phased_update_percentage, self.version,
-             self.new, self.source_override))
+            "source_override=%r>"
+            % (
+                self.__class__.__name__,
+                id(self),
+                self.component,
+                self.section,
+                self.priority,
+                self.phased_update_percentage,
+                self.version,
+                self.new,
+                self.source_override,
+            )
+        )
 
 
 class IOverridePolicy(Interface):
@@ -165,7 +184,8 @@ class IOverridePolicy(Interface):
     """
 
     phased_update_percentage = Attribute(
-        "The phased update percentage to apply to binary publications.")
+        "The phased update percentage to apply to binary publications."
+    )
 
     def calculateSourceOverrides(archive, distroseries, pocket, sources):
         """Calculate source overrides.
@@ -198,9 +218,9 @@ class IOverridePolicy(Interface):
 
 @implementer(IOverridePolicy)
 class BaseOverridePolicy:
-
-    def __init__(self, archive, distroseries, pocket,
-                 phased_update_percentage=None):
+    def __init__(
+        self, archive, distroseries, pocket, phased_update_percentage=None
+    ):
         super().__init__()
         self.archive = archive
         self.distroseries = distroseries
@@ -224,8 +244,8 @@ class FromExistingOverridePolicy(BaseOverridePolicy):
     """
 
     def __init__(self, *args, **kwargs):
-        self.any_arch = kwargs.pop('any_arch', False)
-        self.include_deleted = kwargs.pop('include_deleted', False)
+        self.any_arch = kwargs.pop("any_arch", False)
+        self.include_deleted = kwargs.pop("include_deleted", False)
         super().__init__(*args, **kwargs)
 
     def getExistingPublishingStatuses(self, include_deleted):
@@ -244,38 +264,56 @@ class FromExistingOverridePolicy(BaseOverridePolicy):
         other_conditions = []
         if self.pocket is not None:
             other_conditions.append(
-                SourcePackagePublishingHistory.pocket == self.pocket)
+                SourcePackagePublishingHistory.pocket == self.pocket
+            )
         already_published = DecoratedResultSet(
             store.find(
-                (SourcePackagePublishingHistory.sourcepackagenameID,
-                 SourcePackagePublishingHistory.componentID,
-                 SourcePackagePublishingHistory.sectionID,
-                 SourcePackagePublishingHistory.status,
-                 SourcePackageRelease.version),
-                SourcePackageRelease.id ==
-                    SourcePackagePublishingHistory.sourcepackagereleaseID,
+                (
+                    SourcePackagePublishingHistory.sourcepackagenameID,
+                    SourcePackagePublishingHistory.componentID,
+                    SourcePackagePublishingHistory.sectionID,
+                    SourcePackagePublishingHistory.status,
+                    SourcePackageRelease.version,
+                ),
+                SourcePackageRelease.id
+                == SourcePackagePublishingHistory.sourcepackagereleaseID,
                 SourcePackagePublishingHistory.archiveID == self.archive.id,
-                SourcePackagePublishingHistory.distroseriesID ==
-                    self.distroseries.id,
+                SourcePackagePublishingHistory.distroseriesID
+                == self.distroseries.id,
                 SourcePackagePublishingHistory.status.is_in(
-                    self.getExistingPublishingStatuses(self.include_deleted)),
+                    self.getExistingPublishingStatuses(self.include_deleted)
+                ),
                 SourcePackagePublishingHistory.sourcepackagenameID.is_in(
-                    spn.id for spn in spns),
-                *other_conditions).order_by(
-                        SourcePackagePublishingHistory.sourcepackagenameID,
-                        Desc(SourcePackagePublishingHistory.datecreated),
-                        Desc(SourcePackagePublishingHistory.id),
-                ).config(
-                    distinct=(
-                        SourcePackagePublishingHistory.sourcepackagenameID,)),
+                    spn.id for spn in spns
+                ),
+                *other_conditions,
+            )
+            .order_by(
+                SourcePackagePublishingHistory.sourcepackagenameID,
+                Desc(SourcePackagePublishingHistory.datecreated),
+                Desc(SourcePackagePublishingHistory.id),
+            )
+            .config(
+                distinct=(SourcePackagePublishingHistory.sourcepackagenameID,)
+            ),
             id_resolver((SourcePackageName, Component, Section, None, None)),
-            pre_iter_hook=eager_load)
+            pre_iter_hook=eager_load,
+        )
         return {
             name: SourceOverride(
-                component=component, section=section, version=version,
-                new=(status == PackagePublishingStatus.DELETED))
-            for (name, component, section, status, version)
-            in already_published}
+                component=component,
+                section=section,
+                version=version,
+                new=(status == PackagePublishingStatus.DELETED),
+            )
+            for (
+                name,
+                component,
+                section,
+                status,
+                version,
+            ) in already_published
+        }
 
     def calculateBinaryOverrides(self, binaries):
         def eager_load(rows):
@@ -288,56 +326,79 @@ class FromExistingOverridePolicy(BaseOverridePolicy):
             expanded = calculate_target_das(self.distroseries, binaries.keys())
             candidates = [
                 make_package_condition(self.archive, das, bpn)
-                for bpn, das in expanded if das is not None]
+                for bpn, das in expanded
+                if das is not None
+            ]
         else:
             candidates = []
             archtags = set()
             for bpn, archtag in binaries.keys():
                 candidates.append(
-                    BinaryPackagePublishingHistory.binarypackagenameID ==
-                        bpn.id)
+                    BinaryPackagePublishingHistory.binarypackagenameID
+                    == bpn.id
+                )
                 archtags.add(archtag)
-            other_conditions.extend([
-                BinaryPackagePublishingHistory.archiveID == self.archive.id,
-                DistroArchSeries.distroseriesID == self.distroseries.id,
-                BinaryPackagePublishingHistory.distroarchseriesID ==
-                    DistroArchSeries.id,
-                ])
+            other_conditions.extend(
+                [
+                    BinaryPackagePublishingHistory.archiveID
+                    == self.archive.id,
+                    DistroArchSeries.distroseriesID == self.distroseries.id,
+                    BinaryPackagePublishingHistory.distroarchseriesID
+                    == DistroArchSeries.id,
+                ]
+            )
         if len(candidates) == 0:
             return {}
         if self.pocket is not None:
             other_conditions.append(
-                BinaryPackagePublishingHistory.pocket == self.pocket)
+                BinaryPackagePublishingHistory.pocket == self.pocket
+            )
         # Do not copy phased_update_percentage from existing publications;
         # it is too context-dependent to copy.
         already_published = DecoratedResultSet(
             store.find(
-                (BinaryPackagePublishingHistory.binarypackagenameID,
-                 BinaryPackagePublishingHistory.distroarchseriesID,
-                 BinaryPackagePublishingHistory.componentID,
-                 BinaryPackagePublishingHistory.sectionID,
-                 BinaryPackagePublishingHistory.priority,
-                 BinaryPackagePublishingHistory.status,
-                 BinaryPackageRelease.version),
-                BinaryPackageRelease.id ==
-                    BinaryPackagePublishingHistory.binarypackagereleaseID,
+                (
+                    BinaryPackagePublishingHistory.binarypackagenameID,
+                    BinaryPackagePublishingHistory.distroarchseriesID,
+                    BinaryPackagePublishingHistory.componentID,
+                    BinaryPackagePublishingHistory.sectionID,
+                    BinaryPackagePublishingHistory.priority,
+                    BinaryPackagePublishingHistory.status,
+                    BinaryPackageRelease.version,
+                ),
+                BinaryPackageRelease.id
+                == BinaryPackagePublishingHistory.binarypackagereleaseID,
                 BinaryPackagePublishingHistory.status.is_in(
-                    self.getExistingPublishingStatuses(self.include_deleted)),
+                    self.getExistingPublishingStatuses(self.include_deleted)
+                ),
                 Or(*candidates),
-                *other_conditions).order_by(
+                *other_conditions,
+            )
+            .order_by(
+                BinaryPackagePublishingHistory.distroarchseriesID,
+                BinaryPackagePublishingHistory.binarypackagenameID,
+                Desc(BinaryPackagePublishingHistory.datecreated),
+                Desc(BinaryPackagePublishingHistory.id),
+            )
+            .config(
+                distinct=(
                     BinaryPackagePublishingHistory.distroarchseriesID,
                     BinaryPackagePublishingHistory.binarypackagenameID,
-                    Desc(BinaryPackagePublishingHistory.datecreated),
-                    Desc(BinaryPackagePublishingHistory.id),
-                ).config(distinct=(
-                    BinaryPackagePublishingHistory.distroarchseriesID,
-                    BinaryPackagePublishingHistory.binarypackagenameID,
-                    )
-                ),
+                )
+            ),
             id_resolver(
-                (BinaryPackageName, DistroArchSeries, Component, Section,
-                None, None, None)),
-            pre_iter_hook=eager_load)
+                (
+                    BinaryPackageName,
+                    DistroArchSeries,
+                    Component,
+                    Section,
+                    None,
+                    None,
+                    None,
+                )
+            ),
+            pre_iter_hook=eager_load,
+        )
         overrides = {}
         for (name, das, comp, sect, prio, status, ver) in already_published:
             # These details can always fulfill their own archtag, and may
@@ -348,15 +409,19 @@ class FromExistingOverridePolicy(BaseOverridePolicy):
                     matching_keys.append((name, None))
             else:
                 matching_keys = [
-                    (name, archtag) for archtag in archtags | {None}]
+                    (name, archtag) for archtag in archtags | {None}
+                ]
             for key in matching_keys:
                 if key not in binaries:
                     continue
                 overrides[key] = BinaryOverride(
-                    component=comp, section=sect, priority=prio,
+                    component=comp,
+                    section=sect,
+                    priority=prio,
                     phased_update_percentage=self.phased_update_percentage,
                     version=ver,
-                    new=(status == PackagePublishingStatus.DELETED))
+                    new=(status == PackagePublishingStatus.DELETED),
+                )
         return overrides
 
 
@@ -372,10 +437,13 @@ class FromSourceOverridePolicy(BaseOverridePolicy):
     def calculateBinaryOverrides(self, binaries):
         overrides = {}
         for key, override_in in binaries.items():
-            if (override_in.source_override is not None
-                    and override_in.source_override.component is not None):
+            if (
+                override_in.source_override is not None
+                and override_in.source_override.component is not None
+            ):
                 overrides[key] = BinaryOverride(
-                    component=override_in.source_override.component, new=True)
+                    component=override_in.source_override.component, new=True
+                )
         return overrides
 
 
@@ -397,11 +465,11 @@ class UnknownOverridePolicy(BaseOverridePolicy):
     """
 
     DEBIAN_COMPONENT_OVERRIDE_MAP = {
-        'contrib': 'multiverse',
-        'non-free': 'multiverse',
-        }
+        "contrib": "multiverse",
+        "non-free": "multiverse",
+    }
 
-    DEFAULT_OVERRIDE_COMPONENT = 'universe'
+    DEFAULT_OVERRIDE_COMPONENT = "universe"
 
     @classmethod
     def getComponentOverride(cls, component=None, return_component=False):
@@ -409,7 +477,8 @@ class UnknownOverridePolicy(BaseOverridePolicy):
         if zope_isinstance(component, Component):
             component = component.name
         override_component_name = cls.DEBIAN_COMPONENT_OVERRIDE_MAP.get(
-            component, cls.DEFAULT_OVERRIDE_COMPONENT)
+            component, cls.DEFAULT_OVERRIDE_COMPONENT
+        )
         if return_component:
             return getUtility(IComponentSet)[override_component_name]
         else:
@@ -419,24 +488,36 @@ class UnknownOverridePolicy(BaseOverridePolicy):
         return {
             spn: SourceOverride(
                 component=UnknownOverridePolicy.getComponentOverride(
-                    override.component, return_component=True),
-                new=True)
-            for spn, override in sources.items()}
+                    override.component, return_component=True
+                ),
+                new=True,
+            )
+            for spn, override in sources.items()
+        }
 
     def calculateBinaryOverrides(self, binaries):
-        default_component = getUtility(IComponentSet)['universe']
+        default_component = getUtility(IComponentSet)["universe"]
         return {
             (binary_package_name, architecture_tag): BinaryOverride(
-                component=default_component, new=True,
-                phased_update_percentage=self.phased_update_percentage)
-            for binary_package_name, architecture_tag in binaries.keys()}
+                component=default_component,
+                new=True,
+                phased_update_percentage=self.phased_update_percentage,
+            )
+            for binary_package_name, architecture_tag in binaries.keys()
+        }
 
 
 class ConstantOverridePolicy(BaseOverridePolicy):
     """Override policy that returns constant values."""
 
-    def __init__(self, component=None, section=None, priority=None,
-                 phased_update_percentage=None, new=None):
+    def __init__(
+        self,
+        component=None,
+        section=None,
+        priority=None,
+        phased_update_percentage=None,
+        new=None,
+    ):
         self.component = component
         self.section = section
         self.priority = priority
@@ -446,16 +527,22 @@ class ConstantOverridePolicy(BaseOverridePolicy):
     def calculateSourceOverrides(self, sources):
         return {
             key: SourceOverride(
-                component=self.component, section=self.section,
-                new=self.new) for key in sources.keys()}
+                component=self.component, section=self.section, new=self.new
+            )
+            for key in sources.keys()
+        }
 
     def calculateBinaryOverrides(self, binaries):
         return {
             key: BinaryOverride(
-                component=self.component, section=self.section,
+                component=self.component,
+                section=self.section,
                 priority=self.priority,
                 phased_update_percentage=self.phased_update_percentage,
-                new=self.new) for key in binaries.keys()}
+                new=self.new,
+            )
+            for key in binaries.keys()
+        }
 
 
 class FallbackOverridePolicy(BaseOverridePolicy):
@@ -471,7 +558,8 @@ class FallbackOverridePolicy(BaseOverridePolicy):
             if not missing:
                 break
             these_overrides = policy.calculateSourceOverrides(
-                {spn: sources[spn] for spn in missing})
+                {spn: sources[spn] for spn in missing}
+            )
             overrides.update(these_overrides)
             missing -= set(these_overrides.keys())
         return overrides
@@ -483,7 +571,8 @@ class FallbackOverridePolicy(BaseOverridePolicy):
             if not missing:
                 break
             these_overrides = policy.calculateBinaryOverrides(
-                {key: binaries[key] for key in missing})
+                {key: binaries[key] for key in missing}
+            )
             overrides.update(these_overrides)
             missing -= set(these_overrides.keys())
         return overrides
@@ -492,7 +581,8 @@ class FallbackOverridePolicy(BaseOverridePolicy):
 def calculate_target_das(distroseries, binaries):
     arch_map = {
         arch.architecturetag: arch
-        for arch in distroseries.enabled_architectures}
+        for arch in distroseries.enabled_architectures
+    }
 
     with_das = []
     for bpn, archtag in binaries:
@@ -507,7 +597,8 @@ def make_package_condition(archive, das, bpn):
     return And(
         BinaryPackagePublishingHistory.archiveID == archive.id,
         BinaryPackagePublishingHistory.distroarchseriesID == das.id,
-        BinaryPackagePublishingHistory.binarypackagenameID == bpn.id)
+        BinaryPackagePublishingHistory.binarypackagenameID == bpn.id,
+    )
 
 
 def id_resolver(lookups):
@@ -515,6 +606,7 @@ def id_resolver(lookups):
         store = IStore(SourcePackagePublishingHistory)
         return tuple(
             (value if cls is None else store.get(cls, value))
-            for value, cls in zip(row, lookups))
+            for value, cls in zip(row, lookups)
+        )
 
     return _resolve
diff --git a/lib/lp/soyuz/adapters/packagelocation.py b/lib/lp/soyuz/adapters/packagelocation.py
index 2cf1cb4..bfc1493 100644
--- a/lib/lp/soyuz/adapters/packagelocation.py
+++ b/lib/lp/soyuz/adapters/packagelocation.py
@@ -4,10 +4,10 @@
 """Logic for bulk copying of source/binary publishing history data."""
 
 __all__ = [
-    'build_package_location',
-    'PackageLocation',
-    'PackageLocationError',
-    ]
+    "build_package_location",
+    "PackageLocation",
+    "PackageLocationError",
+]
 
 
 from zope.component import getUtility
@@ -23,6 +23,7 @@ class PackageLocation:
     It groups distribution, distroseries and pocket in a way they
     can be easily manipulated and compared.
     """
+
     archive = None
     distribution = None
     distroseries = None
@@ -31,8 +32,16 @@ class PackageLocation:
     packagesets = None
     channel = None
 
-    def __init__(self, archive, distribution, distroseries, pocket,
-                 component=None, packagesets=None, channel=None):
+    def __init__(
+        self,
+        archive,
+        distribution,
+        distroseries,
+        pocket,
+        component=None,
+        packagesets=None,
+        channel=None,
+    ):
         """Initialize the PackageLocation from the given parameters."""
         self.archive = archive
         self.distribution = distribution
@@ -43,40 +52,48 @@ class PackageLocation:
         self.channel = channel
 
     def __eq__(self, other):
-        if (self.distribution == other.distribution and
-            self.archive == other.archive and
-            self.distroseries == other.distroseries and
-            self.component == other.component and
-            self.pocket == other.pocket and
-            self.packagesets == other.packagesets and
-            self.channel == other.channel):
+        if (
+            self.distribution == other.distribution
+            and self.archive == other.archive
+            and self.distroseries == other.distroseries
+            and self.component == other.component
+            and self.pocket == other.pocket
+            and self.packagesets == other.packagesets
+            and self.channel == other.channel
+        ):
             return True
         return False
 
     def __hash__(self):
-        return hash((
-            self.archive,
-            self.distribution,
-            self.distroseries,
-            self.pocket,
-            self.component,
-            None if self.packagesets is None else tuple(self.packagesets),
-            self.channel,
-            ))
+        return hash(
+            (
+                self.archive,
+                self.distribution,
+                self.distroseries,
+                self.pocket,
+                self.component,
+                None if self.packagesets is None else tuple(self.packagesets),
+                self.channel,
+            )
+        )
 
     def __str__(self):
-        result = '%s: %s-%s' % (
-            self.archive.reference, self.distroseries.name, self.pocket.name)
+        result = "%s: %s-%s" % (
+            self.archive.reference,
+            self.distroseries.name,
+            self.pocket.name,
+        )
 
         if self.component is not None:
-            result += ' (%s)' % self.component.name
+            result += " (%s)" % self.component.name
 
         if len(self.packagesets) > 0:
-            result += ' [%s]' % (
-                ", ".join([str(p.name) for p in self.packagesets]),)
+            result += " [%s]" % (
+                ", ".join([str(p.name) for p in self.packagesets]),
+            )
 
         if self.channel is not None:
-            result += ' {%s}' % self.channel
+            result += " {%s}" % self.channel
 
         return result
 
@@ -85,9 +102,15 @@ class PackageLocationError(Exception):
     """Raised when something went wrong when building PackageLocation."""
 
 
-def build_package_location(distribution_name, suite=None, purpose=None,
-                           person_name=None, archive_name=None,
-                           packageset_names=None, channel=None):
+def build_package_location(
+    distribution_name,
+    suite=None,
+    purpose=None,
+    person_name=None,
+    archive_name=None,
+    packageset_names=None,
+    channel=None,
+):
     """Convenience function to build PackageLocation objects."""
 
     # XXX kiko 2007-10-24:
@@ -108,61 +131,70 @@ def build_package_location(distribution_name, suite=None, purpose=None,
     try:
         distribution = getUtility(IDistributionSet)[distribution_name]
     except NotFoundError as err:
-        raise PackageLocationError(
-            "Could not find distribution %s" % err)
+        raise PackageLocationError("Could not find distribution %s" % err)
 
     if purpose == ArchivePurpose.PPA:
-        assert person_name is not None and archive_name is not None, (
-            "person_name and archive_name should be passed for PPA archives.")
+        assert (
+            person_name is not None and archive_name is not None
+        ), "person_name and archive_name should be passed for PPA archives."
         archive = getUtility(IArchiveSet).getPPAByDistributionAndOwnerName(
-            distribution, person_name, archive_name)
+            distribution, person_name, archive_name
+        )
         if archive is None:
             raise PackageLocationError(
                 "Could not find a PPA for %s named %s"
-                % (person_name, archive_name))
+                % (person_name, archive_name)
+            )
         if distribution != archive.distribution:
             raise PackageLocationError(
                 "The specified archive is not for distribution %s"
-                % distribution_name)
+                % distribution_name
+            )
     elif purpose == ArchivePurpose.PARTNER:
         assert person_name is None and archive_name is None, (
             "person_name and archive_name shoudn't be passed for "
-            "PARTNER archive.")
+            "PARTNER archive."
+        )
         archive = getUtility(IArchiveSet).getByDistroPurpose(
-            distribution, purpose)
+            distribution, purpose
+        )
         if archive is None:
             raise PackageLocationError(
-                "Could not find %s archive for %s" % (
-                purpose.title, distribution_name))
+                "Could not find %s archive for %s"
+                % (purpose.title, distribution_name)
+            )
     elif purpose == ArchivePurpose.COPY:
-        assert archive_name is not None, (
-            "archive_name should be passed for COPY archives")
+        assert (
+            archive_name is not None
+        ), "archive_name should be passed for COPY archives"
         archive = getUtility(IArchiveSet).getByDistroPurpose(
-            distribution, purpose, name=archive_name)
+            distribution, purpose, name=archive_name
+        )
         if archive is None:
             raise PackageLocationError(
-                "Could not find %s archive with the name '%s' for %s" % (
-                    purpose.title, archive_name, distribution.name))
+                "Could not find %s archive with the name '%s' for %s"
+                % (purpose.title, archive_name, distribution.name)
+            )
     else:
         assert person_name is None and archive_name is None, (
             "person_name and archive_name shoudn't be passed when purpose "
-            "is omitted.")
+            "is omitted."
+        )
         archive = distribution.main_archive
 
     if suite is not None:
         try:
-            distroseries, pocket = distribution.getDistroSeriesAndPocket(
-                suite)
+            distroseries, pocket = distribution.getDistroSeriesAndPocket(suite)
         except NotFoundError as err:
-            raise PackageLocationError(
-                "Could not find suite %s" % err)
+            raise PackageLocationError("Could not find suite %s" % err)
     else:
         distroseries = distribution.currentseries
         pocket = PackagePublishingPocket.RELEASE
 
     if pocket != PackagePublishingPocket.RELEASE and channel is not None:
         raise PackageLocationError(
-            "Channels may only be used with the RELEASE pocket.")
+            "Channels may only be used with the RELEASE pocket."
+        )
 
     packagesets = []
     if packageset_names:
@@ -170,12 +202,19 @@ def build_package_location(distribution_name, suite=None, purpose=None,
         for packageset_name in packageset_names:
             try:
                 packageset = packageset_set.getByName(
-                    distroseries, packageset_name)
+                    distroseries, packageset_name
+                )
             except NotFoundError as err:
                 raise PackageLocationError(
-                    "Could not find packageset %s" % err)
+                    "Could not find packageset %s" % err
+                )
             packagesets.append(packageset)
 
     return PackageLocation(
-        archive, distribution, distroseries, pocket,
-        packagesets=packagesets, channel=channel)
+        archive,
+        distribution,
+        distroseries,
+        pocket,
+        packagesets=packagesets,
+        channel=channel,
+    )
diff --git a/lib/lp/soyuz/adapters/proxiedsourcefiles.py b/lib/lp/soyuz/adapters/proxiedsourcefiles.py
index 613e45e..cd52fce 100644
--- a/lib/lp/soyuz/adapters/proxiedsourcefiles.py
+++ b/lib/lp/soyuz/adapters/proxiedsourcefiles.py
@@ -4,8 +4,8 @@
 """Proxied source files."""
 
 __all__ = [
-    'ProxiedSourceLibraryFileAlias',
-    ]
+    "ProxiedSourceLibraryFileAlias",
+]
 
 from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.services.librarian.client import url_path_quote
@@ -27,7 +27,14 @@ class ProxiedSourceLibraryFileAlias(ProxiedLibraryFileAlias):
             return None
 
         url = canonical_url(self.parent.archive, request=self.request)
-        return urlappend(url, '/'.join([
-            '+sourcefiles', self.parent.source_package_name,
-            self.parent.source_package_version,
-            url_path_quote(self.context.filename.encode('utf-8'))]))
+        return urlappend(
+            url,
+            "/".join(
+                [
+                    "+sourcefiles",
+                    self.parent.source_package_name,
+                    self.parent.source_package_version,
+                    url_path_quote(self.context.filename.encode("utf-8")),
+                ]
+            ),
+        )
diff --git a/lib/lp/soyuz/adapters/tests/test_archivedependencies.py b/lib/lp/soyuz/adapters/tests/test_archivedependencies.py
index 5f49b53..f415d0e 100644
--- a/lib/lp/soyuz/adapters/tests/test_archivedependencies.py
+++ b/lib/lp/soyuz/adapters/tests/test_archivedependencies.py
@@ -5,12 +5,9 @@
 
 import os.path
 
-from testtools.matchers import (
-    MatchesSetwise,
-    StartsWith,
-    )
-from testtools.twistedsupport import AsynchronousDeferredRunTest
 import transaction
+from testtools.matchers import MatchesSetwise, StartsWith
+from testtools.twistedsupport import AsynchronousDeferredRunTest
 from twisted.internet import defer
 from twisted.internet.threads import deferToThread
 from zope.component import getUtility
@@ -18,10 +15,10 @@ from zope.security.proxy import removeSecurityProxy
 
 from lp.archivepublisher.interfaces.archivegpgsigningkey import (
     IArchiveGPGSigningKey,
-    )
+)
 from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
     IBuildFarmJobBehaviour,
-    )
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.config import config
@@ -34,7 +31,7 @@ from lp.soyuz.adapters.archivedependencies import (
     get_primary_current_component,
     get_sources_list_for_building,
     pocket_dependencies,
-    )
+)
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.interfaces.archive import IArchive
 from lp.soyuz.interfaces.component import IComponentSet
@@ -43,10 +40,7 @@ from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
 from lp.testing import TestCaseWithFactory
 from lp.testing.gpgkeys import gpgkeysdir
 from lp.testing.keyserver import InProcessKeyServerFixture
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
 class TestOgreModel(TestCaseWithFactory):
@@ -68,7 +62,8 @@ class TestOgreModel(TestCaseWithFactory):
             component = getUtility(IComponentSet)[component_name]
             self.assertEqual(
                 expected_components,
-                get_components_for_context(component, distroseries, pocket))
+                get_components_for_context(component, distroseries, pocket),
+            )
 
     def test_strict_supported_component_dependencies(self):
         # In strict-supported-component-dependencies mode, a source
@@ -82,10 +77,11 @@ class TestOgreModel(TestCaseWithFactory):
             "universe": ["main", "universe"],
             "multiverse": ["main", "restricted", "universe", "multiverse"],
             "partner": ["partner"],
-            }
+        }
         self.setUpComponents(distroseries, expected.keys())
         self.assertComponentMap(
-            expected, distroseries, PackagePublishingPocket.RELEASE)
+            expected, distroseries, PackagePublishingPocket.RELEASE
+        )
 
     def test_lax_supported_component_dependencies(self):
         # In lax-supported-component-dependencies mode, source packages in
@@ -100,10 +96,11 @@ class TestOgreModel(TestCaseWithFactory):
             "universe": ["main", "universe"],
             "multiverse": ["main", "restricted", "universe", "multiverse"],
             "partner": ["partner"],
-            }
+        }
         self.setUpComponents(distroseries, expected.keys())
         self.assertComponentMap(
-            expected, distroseries, PackagePublishingPocket.RELEASE)
+            expected, distroseries, PackagePublishingPocket.RELEASE
+        )
 
     def test_backports(self):
         # Source packages in the BACKPORTS pocket are allowed to
@@ -117,10 +114,11 @@ class TestOgreModel(TestCaseWithFactory):
             "universe": ["main", "restricted", "universe", "multiverse"],
             "multiverse": ["main", "restricted", "universe", "multiverse"],
             "partner": ["main", "restricted", "universe", "multiverse"],
-            }
+        }
         self.setUpComponents(distroseries, expected.keys())
         self.assertComponentMap(
-            expected, distroseries, PackagePublishingPocket.BACKPORTS)
+            expected, distroseries, PackagePublishingPocket.BACKPORTS
+        )
 
 
 class TestSourcesList(TestCaseWithFactory):
@@ -130,13 +128,19 @@ class TestSourcesList(TestCaseWithFactory):
     run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30)
 
     ubuntu_components = [
-        "main", "restricted", "universe", "multiverse", "partner"]
+        "main",
+        "restricted",
+        "universe",
+        "multiverse",
+        "partner",
+    ]
 
     fingerprints = {
         "ppa-sample@xxxxxxxxxxxxx": "0D57E99656BEFB0897606EE9A022DD1F5001B46D",
         "ppa-sample-4096@xxxxxxxxxxxxx": (
-            "B7B1966662BA8D3F5A6ED89BD640F4A593B2CF67"),
-        }
+            "B7B1966662BA8D3F5A6ED89BD640F4A593B2CF67"
+        ),
+    }
 
     def setUp(self):
         super().setUp()
@@ -155,17 +159,25 @@ class TestSourcesList(TestCaseWithFactory):
         # Updates pockets from the primary archive, and all its available
         # components.
         self.assertEqual(
-            PackagePublishingPocket.UPDATES, default_pocket_dependency)
+            PackagePublishingPocket.UPDATES, default_pocket_dependency
+        )
         self.assertEqual("multiverse", default_component_dependency_name)
         self.assertEqual(
-            (PackagePublishingPocket.RELEASE,
-             PackagePublishingPocket.SECURITY,
-             PackagePublishingPocket.UPDATES),
-            pocket_dependencies[default_pocket_dependency])
+            (
+                PackagePublishingPocket.RELEASE,
+                PackagePublishingPocket.SECURITY,
+                PackagePublishingPocket.UPDATES,
+            ),
+            pocket_dependencies[default_pocket_dependency],
+        )
 
     @defer.inlineCallbacks
-    def makeArchive(self, signing_key_name="ppa-sample@xxxxxxxxxxxxx",
-                    publish_binary=False, **kwargs):
+    def makeArchive(
+        self,
+        signing_key_name="ppa-sample@xxxxxxxxxxxxx",
+        publish_binary=False,
+        **kwargs
+    ):
         try:
             getattr(config, "in-process-key-server-fixture")
         except AttributeError:
@@ -174,10 +186,12 @@ class TestSourcesList(TestCaseWithFactory):
         if signing_key_name is not None:
             key_path = os.path.join(gpgkeysdir, "%s.sec" % signing_key_name)
             yield IArchiveGPGSigningKey(archive).setSigningKey(
-                key_path, async_keyserver=True)
+                key_path, async_keyserver=True
+            )
         if publish_binary:
             self.publisher.getPubBinaries(
-                archive=archive, status=PackagePublishingStatus.PUBLISHED)
+                archive=archive, status=PackagePublishingStatus.PUBLISHED
+            )
         return archive
 
     def makeBuild(self, **kwargs):
@@ -189,12 +203,16 @@ class TestSourcesList(TestCaseWithFactory):
         self.assertEqual(
             expected,
             get_primary_current_component(
-                build.archive, build.distro_series,
-                build.source_package_release.name).name)
+                build.archive,
+                build.distro_series,
+                build.source_package_release.name,
+            ).name,
+        )
 
     @defer.inlineCallbacks
-    def assertSourcesListAndKeys(self, expected_sources_list,
-                                 expected_key_names, build, **kwargs):
+    def assertSourcesListAndKeys(
+        self, expected_sources_list, expected_key_names, build, **kwargs
+    ):
         expected_lines = []
         for archive_or_prefix, suffixes in expected_sources_list:
             if IArchive.providedBy(archive_or_prefix):
@@ -204,12 +222,16 @@ class TestSourcesList(TestCaseWithFactory):
             expected_lines.extend([prefix + suffix for suffix in suffixes])
         behaviour = IBuildFarmJobBehaviour(build)
         sources_list, trusted_keys = yield get_sources_list_for_building(
-            behaviour, build.distro_arch_series,
-            build.source_package_release.name, **kwargs)
+            behaviour,
+            build.distro_arch_series,
+            build.source_package_release.name,
+            **kwargs,
+        )
         self.assertEqual(expected_lines, sources_list)
         key_matchers = [
             Base64KeyMatches(self.fingerprints[key_name])
-            for key_name in expected_key_names]
+            for key_name in expected_key_names
+        ]
         self.assertThat(trusted_keys, MatchesSetwise(*key_matchers))
 
     @defer.inlineCallbacks
@@ -219,16 +241,26 @@ class TestSourcesList(TestCaseWithFactory):
         ppa = yield self.makeArchive()
         build = self.makeBuild(archive=ppa)
         self.assertEqual(
-            0, ppa.getAllPublishedBinaries(
+            0,
+            ppa.getAllPublishedBinaries(
                 distroarchseries=build.distro_arch_series,
-                status=PackagePublishingStatus.PUBLISHED).count())
+                status=PackagePublishingStatus.PUBLISHED,
+            ).count(),
+        )
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_ppa_with_binaries(self):
@@ -237,56 +269,81 @@ class TestSourcesList(TestCaseWithFactory):
         ppa = yield self.makeArchive(publish_binary=True)
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], ["ppa-sample@xxxxxxxxxxxxx"], build)
+            [
+                (ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx"],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_dependent_ppa_with_no_binaries(self):
         # A depended-upon PPA is not considered if it has no published
         # binaries.
         lower_ppa = yield self.makeArchive(
-            signing_key_name="ppa-sample-4096@xxxxxxxxxxxxx")
+            signing_key_name="ppa-sample-4096@xxxxxxxxxxxxx"
+        )
         upper_ppa = yield self.makeArchive(publish_binary=True)
         upper_ppa.addArchiveDependency(
-            lower_ppa, PackagePublishingPocket.RELEASE,
-            getUtility(IComponentSet)["main"])
+            lower_ppa,
+            PackagePublishingPocket.RELEASE,
+            getUtility(IComponentSet)["main"],
+        )
         build = self.makeBuild(archive=upper_ppa)
         yield self.assertSourcesListAndKeys(
-            [(upper_ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], ["ppa-sample@xxxxxxxxxxxxx"], build)
+            [
+                (upper_ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx"],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_dependent_ppa_with_binaries(self):
         # A depended-upon PPA is considered if it has published binaries.
         lower_ppa = yield self.makeArchive(
             signing_key_name="ppa-sample-4096@xxxxxxxxxxxxx",
-            publish_binary=True)
+            publish_binary=True,
+        )
         upper_ppa = yield self.makeArchive(publish_binary=True)
         upper_ppa.addArchiveDependency(
-            lower_ppa, PackagePublishingPocket.RELEASE,
-            getUtility(IComponentSet)["main"])
+            lower_ppa,
+            PackagePublishingPocket.RELEASE,
+            getUtility(IComponentSet)["main"],
+        )
         build = self.makeBuild(archive=upper_ppa)
         yield self.assertSourcesListAndKeys(
-            [(upper_ppa, ["hoary main"]),
-             (lower_ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ],
+            [
+                (upper_ppa, ["hoary main"]),
+                (lower_ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
             ["ppa-sample@xxxxxxxxxxxxx", "ppa-sample-4096@xxxxxxxxxxxxx"],
-            build)
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_lax_supported_component_dependencies(self):
@@ -295,39 +352,54 @@ class TestSourcesList(TestCaseWithFactory):
         # PPAs only have the "main" component.
         lower_ppa = yield self.makeArchive(
             signing_key_name="ppa-sample-4096@xxxxxxxxxxxxx",
-            publish_binary=True)
+            publish_binary=True,
+        )
         upper_ppa = yield self.makeArchive(publish_binary=True)
         upper_ppa.addArchiveDependency(
-            lower_ppa, PackagePublishingPocket.RELEASE,
-            getUtility(IComponentSet)["main"])
+            lower_ppa,
+            PackagePublishingPocket.RELEASE,
+            getUtility(IComponentSet)["main"],
+        )
         upper_ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
-            getUtility(IComponentSet)["restricted"])
+            self.ubuntu.main_archive,
+            PackagePublishingPocket.UPDATES,
+            getUtility(IComponentSet)["restricted"],
+        )
         build = self.makeBuild(archive=upper_ppa)
         yield self.assertSourcesListAndKeys(
-            [(upper_ppa, ["hoary main"]),
-             (lower_ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted",
-                 "hoary-security main restricted",
-                 "hoary-updates main restricted",
-                 ]),
-             ],
+            [
+                (upper_ppa, ["hoary main"]),
+                (lower_ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted",
+                        "hoary-security main restricted",
+                        "hoary-updates main restricted",
+                    ],
+                ),
+            ],
             ["ppa-sample@xxxxxxxxxxxxx", "ppa-sample-4096@xxxxxxxxxxxxx"],
-            build)
+            build,
+        )
         self.hoary.strict_supported_component_dependencies = False
         transaction.commit()
         yield self.assertSourcesListAndKeys(
-            [(upper_ppa, ["hoary main"]),
-             (lower_ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ],
+            [
+                (upper_ppa, ["hoary main"]),
+                (lower_ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
             ["ppa-sample@xxxxxxxxxxxxx", "ppa-sample-4096@xxxxxxxxxxxxx"],
-            build)
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_no_op_primary_archive_dependency(self):
@@ -335,16 +407,25 @@ class TestSourcesList(TestCaseWithFactory):
         # the same values has no effect.
         ppa = yield self.makeArchive()
         ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
-            getUtility(IComponentSet)["multiverse"])
+            self.ubuntu.main_archive,
+            PackagePublishingPocket.UPDATES,
+            getUtility(IComponentSet)["multiverse"],
+        )
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_primary_archive_dependency_security(self):
@@ -355,28 +436,45 @@ class TestSourcesList(TestCaseWithFactory):
         # the source was last published in the primary archive.
         ppa = yield self.makeArchive()
         ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.SECURITY)
+            self.ubuntu.main_archive, PackagePublishingPocket.SECURITY
+        )
         build = self.makeBuild(archive=ppa)
         self.assertPrimaryCurrentComponent("universe", build)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main universe",
-                 "hoary-security main universe",
-                 ]),
-             ], [], build)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main universe",
+                        "hoary-security main universe",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
         self.publisher.getPubSource(
-            sourcename="with-ancestry", version="1.0",
-            archive=self.ubuntu.main_archive)
+            sourcename="with-ancestry",
+            version="1.0",
+            archive=self.ubuntu.main_archive,
+        )
         [build_with_ancestry] = self.publisher.getPubSource(
-            sourcename="with-ancestry", version="1.1",
-            archive=ppa).createMissingBuilds()
+            sourcename="with-ancestry", version="1.1", archive=ppa
+        ).createMissingBuilds()
         self.assertPrimaryCurrentComponent("main", build_with_ancestry)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main",
-                 "hoary-security main",
-                 ]),
-             ], [], build_with_ancestry)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main",
+                        "hoary-security main",
+                    ],
+                ),
+            ],
+            [],
+            build_with_ancestry,
+        )
 
     @defer.inlineCallbacks
     def test_primary_archive_dependency_release(self):
@@ -385,11 +483,14 @@ class TestSourcesList(TestCaseWithFactory):
         # original release of the corresponding series.
         ppa = yield self.makeArchive()
         ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.RELEASE,
-            getUtility(IComponentSet)["restricted"])
+            self.ubuntu.main_archive,
+            PackagePublishingPocket.RELEASE,
+            getUtility(IComponentSet)["restricted"],
+        )
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, ["hoary main restricted"])], [], build)
+            [(self.ubuntu.main_archive, ["hoary main restricted"])], [], build
+        )
 
     @defer.inlineCallbacks
     def test_primary_archive_dependency_proposed(self):
@@ -397,17 +498,26 @@ class TestSourcesList(TestCaseWithFactory):
         # environment for PROPOSED.
         ppa = yield self.makeArchive()
         ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.PROPOSED,
-            getUtility(IComponentSet)["multiverse"])
+            self.ubuntu.main_archive,
+            PackagePublishingPocket.PROPOSED,
+            getUtility(IComponentSet)["multiverse"],
+        )
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 "hoary-proposed main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                        "hoary-proposed main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_primary_archive_dependency_backports(self):
@@ -415,17 +525,26 @@ class TestSourcesList(TestCaseWithFactory):
         # environment for PROPOSED.
         ppa = yield self.makeArchive()
         ppa.addArchiveDependency(
-            self.ubuntu.main_archive, PackagePublishingPocket.BACKPORTS,
-            getUtility(IComponentSet)["multiverse"])
+            self.ubuntu.main_archive,
+            PackagePublishingPocket.BACKPORTS,
+            getUtility(IComponentSet)["multiverse"],
+        )
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 "hoary-backports main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                        "hoary-backports main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_partner(self):
@@ -436,17 +555,26 @@ class TestSourcesList(TestCaseWithFactory):
         # partner applications.
         primary, partner = self.ubuntu.all_distro_archives
         self.publisher.getPubBinaries(
-            archive=partner, component="partner",
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=partner,
+            component="partner",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         build = self.makeBuild(archive=partner, component="partner")
         yield self.assertSourcesListAndKeys(
-            [(partner, ["hoary partner"]),
-             (primary, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (partner, ["hoary partner"]),
+                (
+                    primary,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_partner_proposed(self):
@@ -455,26 +583,42 @@ class TestSourcesList(TestCaseWithFactory):
         # unless overridden by ArchiveDependency.
         primary, partner = self.ubuntu.all_distro_archives
         self.publisher.getPubBinaries(
-            archive=partner, component="partner",
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=partner,
+            component="partner",
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         self.publisher.getPubBinaries(
-            archive=partner, component="partner",
+            archive=partner,
+            component="partner",
             status=PackagePublishingStatus.PUBLISHED,
-            pocket=PackagePublishingPocket.PROPOSED)
+            pocket=PackagePublishingPocket.PROPOSED,
+        )
         build = self.makeBuild(
-            archive=partner, component="partner",
-            pocket=PackagePublishingPocket.PROPOSED)
+            archive=partner,
+            component="partner",
+            pocket=PackagePublishingPocket.PROPOSED,
+        )
         yield self.assertSourcesListAndKeys(
-            [(partner, [
-                 "hoary partner",
-                 "hoary-proposed partner",
-                 ]),
-             (primary, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], [], build)
+            [
+                (
+                    partner,
+                    [
+                        "hoary partner",
+                        "hoary-proposed partner",
+                    ],
+                ),
+                (
+                    primary,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_archive_external_dependencies(self):
@@ -485,21 +629,32 @@ class TestSourcesList(TestCaseWithFactory):
         ppa.external_dependencies = (
             "deb http://user:pass@repository zoing everything\n"
             "deb http://user:pass@repository %(series)s public private\n"
-            "deb http://user:pass@repository %(series)s-extra public")
+            "deb http://user:pass@repository %(series)s-extra public"
+        )
         build = self.makeBuild(archive=ppa)
         yield self.assertSourcesListAndKeys(
-            [(ppa, ["hoary main"]),
-             ("deb http://user:pass@repository";, [
-                 "zoing everything",
-                 "hoary public private",
-                 "hoary-extra public",
-                 ]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], ["ppa-sample@xxxxxxxxxxxxx"], build)
+            [
+                (ppa, ["hoary main"]),
+                (
+                    "deb http://user:pass@repository";,
+                    [
+                        "zoing everything",
+                        "hoary public private",
+                        "hoary-extra public",
+                    ],
+                ),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx"],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_build_external_dependencies(self):
@@ -508,16 +663,24 @@ class TestSourcesList(TestCaseWithFactory):
         ppa = yield self.makeArchive(publish_binary=True)
         build = self.makeBuild(archive=ppa)
         build.api_external_dependencies = (
-            "deb http://user:pass@repository foo bar")
+            "deb http://user:pass@repository foo bar"
+        )
         yield self.assertSourcesListAndKeys(
-            [(ppa, ["hoary main"]),
-             ("deb http://user:pass@repository";, ["foo bar"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ], ["ppa-sample@xxxxxxxxxxxxx"], build)
+            [
+                (ppa, ["hoary main"]),
+                ("deb http://user:pass@repository";, ["foo bar"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx"],
+            build,
+        )
 
     @defer.inlineCallbacks
     def test_build_tools(self):
@@ -535,20 +698,27 @@ class TestSourcesList(TestCaseWithFactory):
         gpghandler = removeSecurityProxy(getUtility(IGPGHandler))
         gpghandler.importSecretKey(secret_key_export)
         yield deferToThread(
-            gpghandler.uploadPublicKey, self.fingerprints[tools_key_name])
+            gpghandler.uploadPublicKey, self.fingerprints[tools_key_name]
+        )
 
         yield self.assertSourcesListAndKeys(
-            [(ppa, ["hoary main"]),
-             ("deb http://example.org";, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ],
-            ["ppa-sample@xxxxxxxxxxxxx", tools_key_name], build,
+            [
+                (ppa, ["hoary main"]),
+                ("deb http://example.org";, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx", tools_key_name],
+            build,
             tools_source="deb http://example.org %(series)s main",
-            tools_fingerprint=self.fingerprints[tools_key_name])
+            tools_fingerprint=self.fingerprints[tools_key_name],
+        )
 
     @defer.inlineCallbacks
     def test_build_tools_bad_formatting(self):
@@ -558,18 +728,28 @@ class TestSourcesList(TestCaseWithFactory):
         build = self.makeBuild(archive=ppa)
         logger = BufferLogger()
         yield self.assertSourcesListAndKeys(
-            [(ppa, ["hoary main"]),
-             (self.ubuntu.main_archive, [
-                 "hoary main restricted universe multiverse",
-                 "hoary-security main restricted universe multiverse",
-                 "hoary-updates main restricted universe multiverse",
-                 ]),
-             ],
-            ["ppa-sample@xxxxxxxxxxxxx"], build,
+            [
+                (ppa, ["hoary main"]),
+                (
+                    self.ubuntu.main_archive,
+                    [
+                        "hoary main restricted universe multiverse",
+                        "hoary-security main restricted universe multiverse",
+                        "hoary-updates main restricted universe multiverse",
+                    ],
+                ),
+            ],
+            ["ppa-sample@xxxxxxxxxxxxx"],
+            build,
             tools_source="deb http://example.org %(series) main",
-            logger=logger)
-        self.assertThat(logger.getLogBuffer(), StartsWith(
-            "ERROR Exception processing build tools sources.list entry:\n"))
+            logger=logger,
+        )
+        self.assertThat(
+            logger.getLogBuffer(),
+            StartsWith(
+                "ERROR Exception processing build tools sources.list entry:\n"
+            ),
+        )
 
     @defer.inlineCallbacks
     def test_overlay(self):
@@ -577,25 +757,38 @@ class TestSourcesList(TestCaseWithFactory):
         # a PPA.  This means that the parent's details gets added to the
         # sources.list passed to the builders.
         depdistro = self.factory.makeDistribution(
-            "depdistro", publish_base_url="http://archive.launchpad.test/";)
+            "depdistro", publish_base_url="http://archive.launchpad.test/";
+        )
         depseries = self.factory.makeDistroSeries(
-            distribution=depdistro, name="depseries")
+            distribution=depdistro, name="depseries"
+        )
         self.factory.makeDistroArchSeries(
-            distroseries=depseries, architecturetag="i386")
+            distroseries=depseries, architecturetag="i386"
+        )
         self.publisher.addFakeChroots(depseries)
         for component_name in self.ubuntu_components:
             component = getUtility(IComponentSet)[component_name]
             self.factory.makeComponentSelection(depseries, component)
         self.factory.makeDistroSeriesParent(
-            derived_series=self.hoary, parent_series=depseries,
-            initialized=True, is_overlay=True,
+            derived_series=self.hoary,
+            parent_series=depseries,
+            initialized=True,
+            is_overlay=True,
             pocket=PackagePublishingPocket.SECURITY,
-            component=getUtility(IComponentSet)["universe"])
+            component=getUtility(IComponentSet)["universe"],
+        )
         build = self.makeBuild()
         yield self.assertSourcesListAndKeys(
-            [(self.ubuntu.main_archive, ["hoary main"]),
-             (depdistro.main_archive, [
-                 "depseries main universe",
-                 "depseries-security main universe",
-                 ]),
-             ], [], build)
+            [
+                (self.ubuntu.main_archive, ["hoary main"]),
+                (
+                    depdistro.main_archive,
+                    [
+                        "depseries main universe",
+                        "depseries-security main universe",
+                    ],
+                ),
+            ],
+            [],
+            build,
+        )
diff --git a/lib/lp/soyuz/adapters/tests/test_buildarch.py b/lib/lp/soyuz/adapters/tests/test_buildarch.py
index 50d7bc3..1b76ea7 100644
--- a/lib/lp/soyuz/adapters/tests/test_buildarch.py
+++ b/lib/lp/soyuz/adapters/tests/test_buildarch.py
@@ -1,21 +1,17 @@
 # Copyright 2009-2017 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from testtools.matchers import (
-    MatchesListwise,
-    MatchesStructure,
-    )
+from testtools.matchers import MatchesListwise, MatchesStructure
 
 from lp.soyuz.adapters.buildarch import (
-    determine_architectures_to_build,
     DpkgArchitectureCache,
-    )
+    determine_architectures_to_build,
+)
 from lp.testing import TestCase
 from lp.testing.fixture import CaptureTimeline
 
 
 class TestDpkgArchitectureCache(TestCase):
-
     def setUp(self):
         super().setUp()
         self.timeline = self.useFixture(CaptureTimeline()).timeline
@@ -23,207 +19,257 @@ class TestDpkgArchitectureCache(TestCase):
     def assertTimeline(self, expected_details):
         matchers = []
         for expected_detail in expected_details:
-            matchers.append(MatchesStructure.byEquality(
-                category='dpkg-architecture-start', detail=expected_detail))
-            matchers.append(MatchesStructure.byEquality(
-                category='dpkg-architecture-stop', detail=expected_detail))
+            matchers.append(
+                MatchesStructure.byEquality(
+                    category="dpkg-architecture-start", detail=expected_detail
+                )
+            )
+            matchers.append(
+                MatchesStructure.byEquality(
+                    category="dpkg-architecture-stop", detail=expected_detail
+                )
+            )
         self.assertThat(self.timeline.actions, MatchesListwise(matchers))
 
     def test_multiple(self):
         self.assertContentEqual(
-            ['amd64', 'armhf'],
+            ["amd64", "armhf"],
             DpkgArchitectureCache().findAllMatches(
-                ['amd64', 'i386', 'armhf'], ['amd64', 'armhf']))
-        self.assertTimeline([
-            '-iamd64 DEB_HOST_ARCH=amd64',
-            '-iamd64 DEB_HOST_ARCH=i386',
-            '-iarmhf DEB_HOST_ARCH=i386',
-            '-iamd64 DEB_HOST_ARCH=armhf',
-            '-iarmhf DEB_HOST_ARCH=armhf',
-            ])
+                ["amd64", "i386", "armhf"], ["amd64", "armhf"]
+            ),
+        )
+        self.assertTimeline(
+            [
+                "-iamd64 DEB_HOST_ARCH=amd64",
+                "-iamd64 DEB_HOST_ARCH=i386",
+                "-iarmhf DEB_HOST_ARCH=i386",
+                "-iamd64 DEB_HOST_ARCH=armhf",
+                "-iarmhf DEB_HOST_ARCH=armhf",
+            ]
+        )
 
     def test_any(self):
         self.assertContentEqual(
-            ['amd64', 'i386', 'kfreebsd-amd64'],
+            ["amd64", "i386", "kfreebsd-amd64"],
             DpkgArchitectureCache().findAllMatches(
-                ['amd64', 'i386', 'kfreebsd-amd64'], ['any']))
-        self.assertTimeline([
-            '-iany DEB_HOST_ARCH=amd64',
-            '-iany DEB_HOST_ARCH=i386',
-            '-iany DEB_HOST_ARCH=kfreebsd-amd64',
-            ])
+                ["amd64", "i386", "kfreebsd-amd64"], ["any"]
+            ),
+        )
+        self.assertTimeline(
+            [
+                "-iany DEB_HOST_ARCH=amd64",
+                "-iany DEB_HOST_ARCH=i386",
+                "-iany DEB_HOST_ARCH=kfreebsd-amd64",
+            ]
+        )
 
     def test_all(self):
         self.assertContentEqual(
             [],
             DpkgArchitectureCache().findAllMatches(
-                ['amd64', 'i386', 'kfreebsd-amd64'], ['all']))
-        self.assertTimeline([
-            '-iall DEB_HOST_ARCH=amd64',
-            '-iall DEB_HOST_ARCH=i386',
-            '-iall DEB_HOST_ARCH=kfreebsd-amd64',
-            ])
+                ["amd64", "i386", "kfreebsd-amd64"], ["all"]
+            ),
+        )
+        self.assertTimeline(
+            [
+                "-iall DEB_HOST_ARCH=amd64",
+                "-iall DEB_HOST_ARCH=i386",
+                "-iall DEB_HOST_ARCH=kfreebsd-amd64",
+            ]
+        )
 
     def test_partial_wildcards(self):
         self.assertContentEqual(
-            ['amd64', 'i386', 'kfreebsd-amd64'],
+            ["amd64", "i386", "kfreebsd-amd64"],
             DpkgArchitectureCache().findAllMatches(
-                ['amd64', 'i386', 'kfreebsd-amd64', 'kfreebsd-i386'],
-                ['linux-any', 'any-amd64']))
-        self.assertTimeline([
-            '-ilinux-any DEB_HOST_ARCH=amd64',
-            '-ilinux-any DEB_HOST_ARCH=i386',
-            '-ilinux-any DEB_HOST_ARCH=kfreebsd-amd64',
-            '-iany-amd64 DEB_HOST_ARCH=kfreebsd-amd64',
-            '-ilinux-any DEB_HOST_ARCH=kfreebsd-i386',
-            '-iany-amd64 DEB_HOST_ARCH=kfreebsd-i386',
-            ])
+                ["amd64", "i386", "kfreebsd-amd64", "kfreebsd-i386"],
+                ["linux-any", "any-amd64"],
+            ),
+        )
+        self.assertTimeline(
+            [
+                "-ilinux-any DEB_HOST_ARCH=amd64",
+                "-ilinux-any DEB_HOST_ARCH=i386",
+                "-ilinux-any DEB_HOST_ARCH=kfreebsd-amd64",
+                "-iany-amd64 DEB_HOST_ARCH=kfreebsd-amd64",
+                "-ilinux-any DEB_HOST_ARCH=kfreebsd-i386",
+                "-iany-amd64 DEB_HOST_ARCH=kfreebsd-i386",
+            ]
+        )
 
 
 class TestDetermineArchitecturesToBuild(TestCase):
-    """Test that determine_architectures_to_build correctly interprets hints.
-    """
-
-    def assertArchsForHint(self, hint_string, expected_arch_tags,
-                           allowed_arch_tags=None, indep_hint_list=None,
-                           need_arch_indep=True):
+    """determine_architectures_to_build correctly interprets hints."""
+
+    def assertArchsForHint(
+        self,
+        hint_string,
+        expected_arch_tags,
+        allowed_arch_tags=None,
+        indep_hint_list=None,
+        need_arch_indep=True,
+    ):
         if allowed_arch_tags is None:
-            allowed_arch_tags = ['armel', 'hppa', 'i386']
+            allowed_arch_tags = ["armel", "hppa", "i386"]
         arch_tags = determine_architectures_to_build(
-            hint_string, indep_hint_list, allowed_arch_tags, 'i386',
-            need_arch_indep)
+            hint_string,
+            indep_hint_list,
+            allowed_arch_tags,
+            "i386",
+            need_arch_indep,
+        )
         self.assertContentEqual(expected_arch_tags.items(), arch_tags.items())
 
     def test_single_architecture(self):
         # A hint string with a single arch resolves to just that arch.
-        self.assertArchsForHint('hppa', {'hppa': True})
+        self.assertArchsForHint("hppa", {"hppa": True})
 
     def test_three_architectures(self):
         # A hint string with multiple archs resolves to just those
         # archs.
         self.assertArchsForHint(
-            'amd64 i386 hppa', {'hppa': False, 'i386': True})
+            "amd64 i386 hppa", {"hppa": False, "i386": True}
+        )
 
     def test_independent(self):
         # 'all' is special, meaning just a single build. The
         # nominatedarchindep architecture is used -- in this case i386.
-        self.assertArchsForHint('all', {'i386': True})
+        self.assertArchsForHint("all", {"i386": True})
 
     def test_one_and_independent(self):
         # 'all' is redundant if we have another build anyway.
-        self.assertArchsForHint('hppa all', {'hppa': True})
+        self.assertArchsForHint("hppa all", {"hppa": True})
 
     def test_fictional_and_independent(self):
         # 'all' doesn't make an unbuildable string buildable.
-        self.assertArchsForHint('fiction all', {})
+        self.assertArchsForHint("fiction all", {})
 
     def test_wildcard(self):
         # 'any' is a wildcard that matches all available archs.
         self.assertArchsForHint(
-            'any', {'armel': False, 'hppa': False, 'i386': True})
+            "any", {"armel": False, "hppa": False, "i386": True}
+        )
 
     def test_kernel_specific_architecture(self):
         # Since we only support Linux-based architectures, 'linux-foo'
         # is treated the same as 'foo'.
-        self.assertArchsForHint('linux-hppa', {'hppa': True})
+        self.assertArchsForHint("linux-hppa", {"hppa": True})
 
     def test_unknown_kernel_specific_architecture(self):
         # Non-Linux architectures aren't supported.
-        self.assertArchsForHint('kfreebsd-hppa', {})
+        self.assertArchsForHint("kfreebsd-hppa", {})
 
     def test_kernel_wildcard_architecture(self):
         # Wildcards work for kernels: 'any-foo' is treated like 'foo'.
-        self.assertArchsForHint('any-hppa', {'hppa': True})
+        self.assertArchsForHint("any-hppa", {"hppa": True})
 
     def test_kernel_wildcard_architecture_arm(self):
         # The second part of a wildcard matches the canonical CPU name, not
         # on the Debian architecture, so 'any-arm' matches 'armel'.
-        self.assertArchsForHint('any-arm', {'armel': True})
+        self.assertArchsForHint("any-arm", {"armel": True})
 
     def test_kernel_specific_architecture_wildcard(self):
         # Wildcards work for archs too: 'linux-any' is treated like 'any'.
         self.assertArchsForHint(
-            'linux-any', {'armel': False, 'hppa': False, 'i386': True})
+            "linux-any", {"armel": False, "hppa": False, "i386": True}
+        )
 
     def test_unknown_kernel_specific_architecture_wildcard(self):
         # But unknown kernels continue to result in nothing.
-        self.assertArchsForHint('kfreebsd-any', {})
+        self.assertArchsForHint("kfreebsd-any", {})
 
     def test_wildcard_and_independent(self):
         # 'all' continues to be ignored alongside a valid wildcard.
         self.assertArchsForHint(
-            'all linux-any', {'armel': False, 'hppa': False, 'i386': True})
+            "all linux-any", {"armel": False, "hppa": False, "i386": True}
+        )
 
     def test_kernel_independent_is_invalid(self):
         # 'linux-all' isn't supported.
-        self.assertArchsForHint('linux-all', {})
+        self.assertArchsForHint("linux-all", {})
 
     def test_double_wildcard_is_same_as_single(self):
         # 'any-any' is redundant with 'any', but dpkg-architecture supports
         # it anyway.
         self.assertArchsForHint(
-            'any-any', {'armel': False, 'hppa': False, 'i386': True})
+            "any-any", {"armel": False, "hppa": False, "i386": True}
+        )
 
     def test_disallowed_nominatedarchindep_falls_back(self):
         # Some archives don't allow nominatedarchindep builds. In that
         # case, one of the other architectures is chosen.
         self.assertArchsForHint(
-            'any all', {'hppa': True, 'armel': False},
-            allowed_arch_tags=['hppa', 'armel'])
+            "any all",
+            {"hppa": True, "armel": False},
+            allowed_arch_tags=["hppa", "armel"],
+        )
         self.assertArchsForHint(
-            'all', {'hppa': True}, allowed_arch_tags=['hppa', 'armel'])
+            "all", {"hppa": True}, allowed_arch_tags=["hppa", "armel"]
+        )
 
     def test_indep_hint_only(self):
         # Some packages need to build arch-indep builds on a specific
         # architecture, declared using XS-Build-Indep-Architecture.
-        self.assertArchsForHint('all', {'hppa': True}, indep_hint_list='hppa')
+        self.assertArchsForHint("all", {"hppa": True}, indep_hint_list="hppa")
 
     def test_indep_hint_only_multiple(self):
         # The earliest available architecture in the available list (not
         # the hint list) is chosen.
         self.assertArchsForHint(
-            'all', {'armel': True}, indep_hint_list='armel hppa')
+            "all", {"armel": True}, indep_hint_list="armel hppa"
+        )
         self.assertArchsForHint(
-            'all', {'hppa': True}, indep_hint_list='armel hppa',
-            allowed_arch_tags=['hppa', 'armel', 'i386'])
+            "all",
+            {"hppa": True},
+            indep_hint_list="armel hppa",
+            allowed_arch_tags=["hppa", "armel", "i386"],
+        )
 
     def test_indep_hint_only_unsatisfiable(self):
         # An indep hint list that matches nothing results in no builds
-        self.assertArchsForHint('all', {}, indep_hint_list='fiction')
+        self.assertArchsForHint("all", {}, indep_hint_list="fiction")
 
     def test_indep_hint(self):
         # Unlike nominatedarchindep, a hinted indep will cause an
         # additional build to be created if necessary.
         self.assertArchsForHint(
-            'armel all', {'armel': False, 'hppa': True},
-            indep_hint_list='hppa')
+            "armel all", {"armel": False, "hppa": True}, indep_hint_list="hppa"
+        )
 
     def test_indep_hint_wildcard(self):
         # An indep hint list can include wildcards.
         self.assertArchsForHint(
-            'armel all', {'armel': False, 'hppa': True},
-            indep_hint_list='any-hppa')
+            "armel all",
+            {"armel": False, "hppa": True},
+            indep_hint_list="any-hppa",
+        )
 
     def test_indep_hint_coalesces(self):
         # An indep hint list that matches an existing build will avoid
         # creating another.
         self.assertArchsForHint(
-            'hppa all', {'hppa': True}, indep_hint_list='linux-any')
+            "hppa all", {"hppa": True}, indep_hint_list="linux-any"
+        )
 
     def test_indep_hint_unsatisfiable(self):
         # An indep hint list that matches nothing results in no
         # additional builds
         self.assertArchsForHint(
-            'armel all', {'armel': False}, indep_hint_list='fiction')
+            "armel all", {"armel": False}, indep_hint_list="fiction"
+        )
 
     def test_no_need_arch_indep(self):
         self.assertArchsForHint(
-            'armel all', {'armel': False}, need_arch_indep=False)
+            "armel all", {"armel": False}, need_arch_indep=False
+        )
 
     def test_no_need_arch_indep_hint(self):
         self.assertArchsForHint(
-            'armel all', {'armel': False}, indep_hint_list='hppa',
-            need_arch_indep=False)
+            "armel all",
+            {"armel": False},
+            indep_hint_list="hppa",
+            need_arch_indep=False,
+        )
 
     def test_no_need_arch_indep_only(self):
-        self.assertArchsForHint('all', {}, need_arch_indep=False)
+        self.assertArchsForHint("all", {}, need_arch_indep=False)
diff --git a/lib/lp/soyuz/adapters/tests/test_copypolicy.py b/lib/lp/soyuz/adapters/tests/test_copypolicy.py
index 82db12f..32a41df 100644
--- a/lib/lp/soyuz/adapters/tests/test_copypolicy.py
+++ b/lib/lp/soyuz/adapters/tests/test_copypolicy.py
@@ -4,15 +4,9 @@
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.series import SeriesStatus
 from lp.soyuz.adapters.copypolicy import InsecureCopyPolicy
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackageCopyPolicy,
-    )
+from lp.soyuz.enums import ArchivePurpose, PackageCopyPolicy
 from lp.soyuz.interfaces.copypolicy import ICopyPolicy
-from lp.testing import (
-    TestCaseWithFactory,
-    verifyObject,
-    )
+from lp.testing import TestCaseWithFactory, verifyObject
 from lp.testing.layers import ZopelessDatabaseLayer
 
 
@@ -29,16 +23,20 @@ class TestCopyPolicy(TestCaseWithFactory):
             pocket = self.factory.getAnyPocket()
         return archive, distroseries, pocket
 
-    def assertApproved(self, archive_purpose, method,
-                       status=None, pocket=None):
+    def assertApproved(
+        self, archive_purpose, method, status=None, pocket=None
+    ):
         archive, distroseries, pocket = self._getUploadCriteria(
-            archive_purpose, status=status, pocket=pocket)
+            archive_purpose, status=status, pocket=pocket
+        )
         self.assertTrue(method(archive, distroseries, pocket))
 
-    def assertUnapproved(self, archive_purpose, method,
-                         status=None, pocket=None):
+    def assertUnapproved(
+        self, archive_purpose, method, status=None, pocket=None
+    ):
         archive, distroseries, pocket = self._getUploadCriteria(
-            archive_purpose, status=status, pocket=pocket)
+            archive_purpose, status=status, pocket=pocket
+        )
         self.assertFalse(method(archive, distroseries, pocket))
 
     def test_insecure_holds_new_distro_package(self):
@@ -52,39 +50,54 @@ class TestCopyPolicy(TestCaseWithFactory):
     def test_insecure_approves_known_distro_package_to_unfrozen_release(self):
         cp = InsecureCopyPolicy()
         self.assertApproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove,
-            pocket=PackagePublishingPocket.RELEASE)
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
+            pocket=PackagePublishingPocket.RELEASE,
+        )
 
     def test_insecure_holds_copy_to_updates_pocket_in_frozen_series(self):
         cp = InsecureCopyPolicy()
         self.assertUnapproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove, status=SeriesStatus.FROZEN,
-            pocket=PackagePublishingPocket.UPDATES)
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
+            status=SeriesStatus.FROZEN,
+            pocket=PackagePublishingPocket.UPDATES,
+        )
 
     def test_insecure_holds_copy_to_release_pocket_in_frozen_series(self):
         cp = InsecureCopyPolicy()
         self.assertUnapproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove, status=SeriesStatus.FROZEN,
-            pocket=PackagePublishingPocket.RELEASE)
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
+            status=SeriesStatus.FROZEN,
+            pocket=PackagePublishingPocket.RELEASE,
+        )
 
     def test_insecure_approves_copy_to_proposed_in_unfrozen_series(self):
         cp = InsecureCopyPolicy()
         self.assertApproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove,
-            pocket=PackagePublishingPocket.PROPOSED)
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
+            pocket=PackagePublishingPocket.PROPOSED,
+        )
 
     def test_insecure_holds_copy_to_proposed_in_frozen_series(self):
         cp = InsecureCopyPolicy()
         self.assertUnapproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove, status=SeriesStatus.FROZEN,
-            pocket=PackagePublishingPocket.PROPOSED)
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
+            status=SeriesStatus.FROZEN,
+            pocket=PackagePublishingPocket.PROPOSED,
+        )
 
     def test_insecure_holds_copy_to_proposed_in_current_series(self):
         cp = InsecureCopyPolicy()
         self.assertUnapproved(
-            ArchivePurpose.PRIMARY, cp.autoApprove,
+            ArchivePurpose.PRIMARY,
+            cp.autoApprove,
             status=SeriesStatus.CURRENT,
-            pocket=PackagePublishingPocket.PROPOSED)
+            pocket=PackagePublishingPocket.PROPOSED,
+        )
 
     def test_insecure_approves_existing_ppa_package(self):
         cp = InsecureCopyPolicy()
diff --git a/lib/lp/soyuz/adapters/tests/test_gomodparser.py b/lib/lp/soyuz/adapters/tests/test_gomodparser.py
index 0efdbc4..014f2aa 100644
--- a/lib/lp/soyuz/adapters/tests/test_gomodparser.py
+++ b/lib/lp/soyuz/adapters/tests/test_gomodparser.py
@@ -3,10 +3,7 @@
 
 """go.mod parser tests."""
 
-from lp.soyuz.adapters.gomodparser import (
-    GoModParserException,
-    parse_go_mod,
-    )
+from lp.soyuz.adapters.gomodparser import GoModParserException, parse_go_mod
 from lp.testing import TestCase
 from lp.testing.layers import BaseLayer
 
diff --git a/lib/lp/soyuz/adapters/tests/test_overrides.py b/lib/lp/soyuz/adapters/tests/test_overrides.py
index 772e767..63247ad 100644
--- a/lib/lp/soyuz/adapters/tests/test_overrides.py
+++ b/lib/lp/soyuz/adapters/tests/test_overrides.py
@@ -17,16 +17,10 @@ from lp.soyuz.adapters.overrides import (
     FromSourceOverridePolicy,
     SourceOverride,
     UnknownOverridePolicy,
-    )
-from lp.soyuz.enums import (
-    PackagePublishingPriority,
-    PackagePublishingStatus,
-    )
+)
+from lp.soyuz.enums import PackagePublishingPriority, PackagePublishingStatus
 from lp.soyuz.interfaces.component import IComponentSet
-from lp.testing import (
-    StormStatementRecorder,
-    TestCaseWithFactory,
-    )
+from lp.testing import StormStatementRecorder, TestCaseWithFactory
 from lp.testing.layers import ZopelessDatabaseLayer
 from lp.testing.matchers import HasQueryCount
 
@@ -40,13 +34,19 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         # overrides for that archive/distroseries are returned.
         spph = self.factory.makeSourcePackagePublishingHistory()
         policy = FromExistingOverridePolicy(
-            spph.distroseries.main_archive, spph.distroseries, spph.pocket)
+            spph.distroseries.main_archive, spph.distroseries, spph.pocket
+        )
         overrides = policy.calculateSourceOverrides(
-            {spph.sourcepackagerelease.sourcepackagename: SourceOverride()})
+            {spph.sourcepackagerelease.sourcepackagename: SourceOverride()}
+        )
         expected = {
             spph.sourcepackagerelease.sourcepackagename: SourceOverride(
-                component=spph.component, section=spph.section,
-                version=spph.sourcepackagerelease.version, new=False)}
+                component=spph.component,
+                section=spph.section,
+                version=spph.sourcepackagerelease.version,
+                new=False,
+            )
+        }
         self.assertEqual(expected, overrides)
 
     def test_source_overrides_pocket(self):
@@ -55,22 +55,26 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         spn = self.factory.makeSourcePackageName()
         distroseries = self.factory.makeDistroSeries()
         self.factory.makeSourcePackagePublishingHistory(
-            archive=distroseries.main_archive, distroseries=distroseries,
-            pocket=PackagePublishingPocket.RELEASE, sourcepackagename=spn)
+            archive=distroseries.main_archive,
+            distroseries=distroseries,
+            pocket=PackagePublishingPocket.RELEASE,
+            sourcepackagename=spn,
+        )
         overrides = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            PackagePublishingPocket.PROPOSED).calculateSourceOverrides(
-            {spn: SourceOverride()})
+            distroseries.main_archive,
+            distroseries,
+            PackagePublishingPocket.PROPOSED,
+        ).calculateSourceOverrides({spn: SourceOverride()})
         self.assertEqual(0, len(overrides))
         overrides = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            PackagePublishingPocket.RELEASE).calculateSourceOverrides(
-            {spn: SourceOverride()})
+            distroseries.main_archive,
+            distroseries,
+            PackagePublishingPocket.RELEASE,
+        ).calculateSourceOverrides({spn: SourceOverride()})
         self.assertEqual(1, len(overrides))
         overrides = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            None).calculateSourceOverrides(
-            {spn: SourceOverride()})
+            distroseries.main_archive, distroseries, None
+        ).calculateSourceOverrides({spn: SourceOverride()})
         self.assertEqual(1, len(overrides))
 
     def test_source_overrides_latest_only_is_returned(self):
@@ -80,23 +84,31 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         spn = self.factory.makeSourcePackageName()
         distroseries = self.factory.makeDistroSeries()
         published_spr = self.factory.makeSourcePackageRelease(
-            sourcepackagename=spn)
+            sourcepackagename=spn
+        )
         self.factory.makeSourcePackagePublishingHistory(
-            sourcepackagerelease=published_spr, distroseries=distroseries,
-            status=PackagePublishingStatus.PUBLISHED)
-        spr = self.factory.makeSourcePackageRelease(
-            sourcepackagename=spn)
+            sourcepackagerelease=published_spr,
+            distroseries=distroseries,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
+        spr = self.factory.makeSourcePackageRelease(sourcepackagename=spn)
         spph = self.factory.makeSourcePackagePublishingHistory(
-            sourcepackagerelease=spr, distroseries=distroseries)
+            sourcepackagerelease=spr, distroseries=distroseries
+        )
         overrides = FromExistingOverridePolicy(
-            spph.distroseries.main_archive, spph.distroseries,
-            spph.pocket).calculateSourceOverrides(
-            {spn: SourceOverride()})
+            spph.distroseries.main_archive, spph.distroseries, spph.pocket
+        ).calculateSourceOverrides({spn: SourceOverride()})
         self.assertEqual(
-            {spn: SourceOverride(
-                component=spph.component, section=spph.section,
-                version=spph.sourcepackagerelease.version, new=False)},
-            overrides)
+            {
+                spn: SourceOverride(
+                    component=spph.component,
+                    section=spph.section,
+                    version=spph.sourcepackagerelease.version,
+                    new=False,
+                )
+            },
+            overrides,
+        )
 
     def test_source_overrides_can_include_deleted(self):
         # include_deleted=True causes Deleted publications to be
@@ -105,38 +117,58 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         distroseries = self.factory.makeDistroSeries()
         spr = self.factory.makeSourcePackageRelease(sourcepackagename=spn)
         spph = self.factory.makeSourcePackagePublishingHistory(
-            archive=distroseries.main_archive, distroseries=distroseries,
-            sourcepackagerelease=spr, status=PackagePublishingStatus.PUBLISHED)
+            archive=distroseries.main_archive,
+            distroseries=distroseries,
+            sourcepackagerelease=spr,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         deleted_spr = self.factory.makeSourcePackageRelease(
-            sourcepackagename=spn)
+            sourcepackagename=spn
+        )
         deleted_spph = self.factory.makeSourcePackagePublishingHistory(
-            archive=distroseries.main_archive, distroseries=distroseries,
+            archive=distroseries.main_archive,
+            distroseries=distroseries,
             sourcepackagerelease=deleted_spr,
-            status=PackagePublishingStatus.DELETED, pocket=spph.pocket)
+            status=PackagePublishingStatus.DELETED,
+            pocket=spph.pocket,
+        )
 
         # With include_deleted=False only the Published ancestry is
         # found.
         overrides = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            spph.pocket).calculateSourceOverrides(
-            {spn: SourceOverride(spn)})
+            distroseries.main_archive, distroseries, spph.pocket
+        ).calculateSourceOverrides({spn: SourceOverride(spn)})
         self.assertEqual(
-            {spn: SourceOverride(
-                component=spph.component, section=spph.section,
-                version=spph.sourcepackagerelease.version, new=False)},
-            overrides)
+            {
+                spn: SourceOverride(
+                    component=spph.component,
+                    section=spph.section,
+                    version=spph.sourcepackagerelease.version,
+                    new=False,
+                )
+            },
+            overrides,
+        )
 
         # But with include_deleted=True the newer Deleted publication is
         # used.
         overrides = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            spph.pocket, include_deleted=True).calculateSourceOverrides(
-            {spn: SourceOverride(spn)})
+            distroseries.main_archive,
+            distroseries,
+            spph.pocket,
+            include_deleted=True,
+        ).calculateSourceOverrides({spn: SourceOverride(spn)})
         self.assertEqual(
-            {spn: SourceOverride(
-                component=deleted_spph.component, section=deleted_spph.section,
-                version=deleted_spph.sourcepackagerelease.version, new=True)},
-            overrides)
+            {
+                spn: SourceOverride(
+                    component=deleted_spph.component,
+                    section=deleted_spph.section,
+                    version=deleted_spph.sourcepackagerelease.version,
+                    new=True,
+                )
+            },
+            overrides,
+        )
 
     def test_source_overrides_constant_query_count(self):
         # The query count is constant, no matter how many sources are
@@ -146,17 +178,21 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         pocket = self.factory.getAnyPocket()
         for i in range(10):
             spph = self.factory.makeSourcePackagePublishingHistory(
-                distroseries=distroseries, archive=distroseries.main_archive,
-                pocket=pocket)
+                distroseries=distroseries,
+                archive=distroseries.main_archive,
+                pocket=pocket,
+            )
             spns.append(spph.sourcepackagerelease.sourcepackagename)
         flush_database_caches()
         distroseries.main_archive
         bulk.reload(spns)
         policy = FromExistingOverridePolicy(
-            spph.distroseries.main_archive, spph.distroseries, spph.pocket)
+            spph.distroseries.main_archive, spph.distroseries, spph.pocket
+        )
         with StormStatementRecorder() as recorder:
             policy.calculateSourceOverrides(
-                {spn: SourceOverride() for spn in spns})
+                {spn: SourceOverride() for spn in spns}
+            )
         self.assertThat(recorder, HasQueryCount(Equals(3)))
 
     def test_no_binary_overrides(self):
@@ -168,9 +204,11 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         bpn = self.factory.makeBinaryPackageName()
         pocket = self.factory.getAnyPocket()
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket)
+            distroseries.main_archive, distroseries, pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, None): BinaryOverride()})
+            {(bpn, None): BinaryOverride()}
+        )
         self.assertEqual({}, overrides)
 
     def test_binary_overrides(self):
@@ -180,40 +218,65 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         distroseries = self.factory.makeDistroSeries()
         bpph1 = self.factory.makeBinaryPackagePublishingHistory(
             archive=distroseries.main_archive,
-            distroarchseries=self.factory.makeDistroArchSeries(distroseries))
+            distroarchseries=self.factory.makeDistroArchSeries(distroseries),
+        )
         bpph2 = self.factory.makeBinaryPackagePublishingHistory(
-            archive=distroseries.main_archive, pocket=bpph1.pocket,
-            distroarchseries=self.factory.makeDistroArchSeries(distroseries))
+            archive=distroseries.main_archive,
+            pocket=bpph1.pocket,
+            distroarchseries=self.factory.makeDistroArchSeries(distroseries),
+        )
         distroseries.nominatedarchindep = bpph1.distroarchseries
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, bpph1.pocket)
+            distroseries.main_archive, distroseries, bpph1.pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpph1.binarypackagerelease.binarypackagename,
-              bpph1.distroarchseries.architecturetag): BinaryOverride(),
-             (bpph2.binarypackagerelease.binarypackagename,
-              bpph2.distroarchseries.architecturetag): BinaryOverride(),
-             (bpph2.binarypackagerelease.binarypackagename, None):
-                BinaryOverride(),
-             })
-        expected = {
-            (bpph1.binarypackagerelease.binarypackagename,
-             bpph1.distroarchseries.architecturetag):
-                BinaryOverride(
-                    component=bpph1.component, section=bpph1.section,
-                    priority=bpph1.priority, new=False,
-                    version=bpph1.binarypackagerelease.version),
-            (bpph2.binarypackagerelease.binarypackagename,
-             bpph2.distroarchseries.architecturetag):
-                BinaryOverride(
-                    component=bpph2.component, section=bpph2.section,
-                    priority=bpph2.priority, new=False,
-                    version=bpph2.binarypackagerelease.version),
-            (bpph2.binarypackagerelease.binarypackagename, None):
-                BinaryOverride(
-                    component=bpph2.component, section=bpph2.section,
-                    priority=bpph2.priority, new=False,
-                    version=bpph2.binarypackagerelease.version),
+            {
+                (
+                    bpph1.binarypackagerelease.binarypackagename,
+                    bpph1.distroarchseries.architecturetag,
+                ): BinaryOverride(),
+                (
+                    bpph2.binarypackagerelease.binarypackagename,
+                    bpph2.distroarchseries.architecturetag,
+                ): BinaryOverride(),
+                (
+                    bpph2.binarypackagerelease.binarypackagename,
+                    None,
+                ): BinaryOverride(),
             }
+        )
+        expected = {
+            (
+                bpph1.binarypackagerelease.binarypackagename,
+                bpph1.distroarchseries.architecturetag,
+            ): BinaryOverride(
+                component=bpph1.component,
+                section=bpph1.section,
+                priority=bpph1.priority,
+                new=False,
+                version=bpph1.binarypackagerelease.version,
+            ),
+            (
+                bpph2.binarypackagerelease.binarypackagename,
+                bpph2.distroarchseries.architecturetag,
+            ): BinaryOverride(
+                component=bpph2.component,
+                section=bpph2.section,
+                priority=bpph2.priority,
+                new=False,
+                version=bpph2.binarypackagerelease.version,
+            ),
+            (
+                bpph2.binarypackagerelease.binarypackagename,
+                None,
+            ): BinaryOverride(
+                component=bpph2.component,
+                section=bpph2.section,
+                priority=bpph2.priority,
+                new=False,
+                version=bpph2.binarypackagerelease.version,
+            ),
+        }
         self.assertEqual(expected, overrides)
 
     def test_binary_overrides_pocket(self):
@@ -223,24 +286,35 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         das = self.factory.makeDistroArchSeries(distroseries=distroseries)
         bpn = self.factory.makeBinaryPackageName()
         self.factory.makeBinaryPackagePublishingHistory(
-            archive=distroseries.main_archive, distroarchseries=das,
-            pocket=PackagePublishingPocket.RELEASE, binarypackagename=bpn)
+            archive=distroseries.main_archive,
+            distroarchseries=das,
+            pocket=PackagePublishingPocket.RELEASE,
+            binarypackagename=bpn,
+        )
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            PackagePublishingPocket.PROPOSED)
+            distroseries.main_archive,
+            distroseries,
+            PackagePublishingPocket.PROPOSED,
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, das.architecturetag): BinaryOverride()})
+            {(bpn, das.architecturetag): BinaryOverride()}
+        )
         self.assertEqual(0, len(overrides))
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries,
-            PackagePublishingPocket.RELEASE)
+            distroseries.main_archive,
+            distroseries,
+            PackagePublishingPocket.RELEASE,
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, das.architecturetag): BinaryOverride()})
+            {(bpn, das.architecturetag): BinaryOverride()}
+        )
         self.assertEqual(1, len(overrides))
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, None)
+            distroseries.main_archive, distroseries, None
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, das.architecturetag): BinaryOverride()})
+            {(bpn, das.architecturetag): BinaryOverride()}
+        )
         self.assertEqual(1, len(overrides))
 
     def test_binary_overrides_skips_unknown_arch(self):
@@ -249,15 +323,17 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         # an empty list is returned.
         distroseries = self.factory.makeDistroSeries()
         das = self.factory.makeDistroArchSeries(
-            architecturetag='amd64',
-            distroseries=distroseries)
+            architecturetag="amd64", distroseries=distroseries
+        )
         distroseries.nominatedarchindep = das
         bpn = self.factory.makeBinaryPackageName()
         pocket = self.factory.getAnyPocket()
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket)
+            distroseries.main_archive, distroseries, pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, 'i386'): BinaryOverride()})
+            {(bpn, "i386"): BinaryOverride()}
+        )
         self.assertEqual({}, overrides)
 
     def test_binary_overrides_can_cross_archs(self):
@@ -265,86 +341,130 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         # and look for ancestry in any architecture.
         distroseries = self.factory.makeDistroSeries()
         amd64 = self.factory.makeDistroArchSeries(
-            architecturetag='amd64',
-            distroseries=distroseries)
+            architecturetag="amd64", distroseries=distroseries
+        )
         i386 = self.factory.makeDistroArchSeries(
-            architecturetag='i386',
-            distroseries=distroseries)
+            architecturetag="i386", distroseries=distroseries
+        )
         distroseries.nominatedarchindep = i386
         bpn = self.factory.makeBinaryPackageName()
         pocket = self.factory.getAnyPocket()
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            archive=distroseries.main_archive, distroarchseries=amd64,
-            pocket=pocket, binarypackagename=bpn, architecturespecific=True)
+            archive=distroseries.main_archive,
+            distroarchseries=amd64,
+            pocket=pocket,
+            binarypackagename=bpn,
+            architecturespecific=True,
+        )
         bpph_override = BinaryOverride(
-            component=bpph.component, section=bpph.section,
-            priority=bpph.priority, version=bpph.binarypackagerelease.version,
-            new=False)
+            component=bpph.component,
+            section=bpph.section,
+            priority=bpph.priority,
+            version=bpph.binarypackagerelease.version,
+            new=False,
+        )
 
         # With any_arch=False only amd64 is found.
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket)
+            distroseries.main_archive, distroseries, pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, 'i386'): BinaryOverride(),
-             (bpn, 'amd64'): BinaryOverride(),
-             (bpn, None): BinaryOverride()})
-        self.assertEqual({(bpn, 'amd64'): bpph_override}, overrides)
+            {
+                (bpn, "i386"): BinaryOverride(),
+                (bpn, "amd64"): BinaryOverride(),
+                (bpn, None): BinaryOverride(),
+            }
+        )
+        self.assertEqual({(bpn, "amd64"): bpph_override}, overrides)
 
         # But with any_arch=True we get the amd64 overrides everywhere.
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket, any_arch=True)
+            distroseries.main_archive, distroseries, pocket, any_arch=True
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, 'i386'): BinaryOverride(),
-             (bpn, 'amd64'): BinaryOverride(),
-             (bpn, None): BinaryOverride()})
+            {
+                (bpn, "i386"): BinaryOverride(),
+                (bpn, "amd64"): BinaryOverride(),
+                (bpn, None): BinaryOverride(),
+            }
+        )
         self.assertEqual(
-            {(bpn, 'i386'): bpph_override,
-             (bpn, 'amd64'): bpph_override,
-             (bpn, None): bpph_override},
-            overrides)
+            {
+                (bpn, "i386"): bpph_override,
+                (bpn, "amd64"): bpph_override,
+                (bpn, None): bpph_override,
+            },
+            overrides,
+        )
 
     def test_binary_overrides_can_include_deleted(self):
         # calculateBinaryOverrides can be asked to include Deleted
         # publications.
         distroseries = self.factory.makeDistroSeries()
         das = self.factory.makeDistroArchSeries(
-            architecturetag='amd64',
-            distroseries=distroseries)
+            architecturetag="amd64", distroseries=distroseries
+        )
         bpn = self.factory.makeBinaryPackageName()
         pocket = self.factory.getAnyPocket()
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            archive=distroseries.main_archive, distroarchseries=das,
-            pocket=pocket, binarypackagename=bpn, architecturespecific=True,
-            status=PackagePublishingStatus.PUBLISHED)
+            archive=distroseries.main_archive,
+            distroarchseries=das,
+            pocket=pocket,
+            binarypackagename=bpn,
+            architecturespecific=True,
+            status=PackagePublishingStatus.PUBLISHED,
+        )
         deleted_bpph = self.factory.makeBinaryPackagePublishingHistory(
-            archive=distroseries.main_archive, distroarchseries=das,
-            pocket=pocket, binarypackagename=bpn, architecturespecific=True,
-            status=PackagePublishingStatus.DELETED)
+            archive=distroseries.main_archive,
+            distroarchseries=das,
+            pocket=pocket,
+            binarypackagename=bpn,
+            architecturespecific=True,
+            status=PackagePublishingStatus.DELETED,
+        )
 
         # With include_deleted=False the Published pub is found.
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket)
+            distroseries.main_archive, distroseries, pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, 'amd64'): BinaryOverride()})
+            {(bpn, "amd64"): BinaryOverride()}
+        )
         self.assertEqual(
-            {(bpn, 'amd64'): BinaryOverride(
-                component=bpph.component, section=bpph.section,
-                priority=bpph.priority,
-                version=bpph.binarypackagerelease.version, new=False)},
-            overrides)
+            {
+                (bpn, "amd64"): BinaryOverride(
+                    component=bpph.component,
+                    section=bpph.section,
+                    priority=bpph.priority,
+                    version=bpph.binarypackagerelease.version,
+                    new=False,
+                )
+            },
+            overrides,
+        )
 
         # But with include_deleted=True we get the newer Deleted pub instead.
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket,
-            include_deleted=True)
+            distroseries.main_archive,
+            distroseries,
+            pocket,
+            include_deleted=True,
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, 'amd64'): BinaryOverride()})
+            {(bpn, "amd64"): BinaryOverride()}
+        )
         self.assertEqual(
-            {(bpn, 'amd64'): BinaryOverride(
-                component=deleted_bpph.component, section=deleted_bpph.section,
-                priority=deleted_bpph.priority,
-                version=deleted_bpph.binarypackagerelease.version, new=True)},
-            overrides)
+            {
+                (bpn, "amd64"): BinaryOverride(
+                    component=deleted_bpph.component,
+                    section=deleted_bpph.section,
+                    priority=deleted_bpph.priority,
+                    version=deleted_bpph.binarypackagerelease.version,
+                    new=True,
+                )
+            },
+            overrides,
+        )
 
     def test_binary_overrides_constant_query_count(self):
         # The query count is constant, no matter how many bpn-das pairs are
@@ -357,16 +477,20 @@ class TestFromExistingOverridePolicy(TestCaseWithFactory):
         for i in range(10):
             bpph = self.factory.makeBinaryPackagePublishingHistory(
                 distroarchseries=distroarchseries,
-                archive=distroseries.main_archive, pocket=pocket)
+                archive=distroseries.main_archive,
+                pocket=pocket,
+            )
             bpns.append((bpph.binarypackagerelease.binarypackagename, None))
         flush_database_caches()
         distroseries.main_archive
         bulk.reload(bpn[0] for bpn in bpns)
         policy = FromExistingOverridePolicy(
-            distroseries.main_archive, distroseries, pocket)
+            distroseries.main_archive, distroseries, pocket
+        )
         with StormStatementRecorder() as recorder:
             policy.calculateBinaryOverrides(
-                {(bpn, das): BinaryOverride() for bpn, das in bpns})
+                {(bpn, das): BinaryOverride() for bpn, das in bpns}
+            )
         self.assertThat(recorder, HasQueryCount(Equals(4)))
 
 
@@ -379,7 +503,9 @@ class TestFromSourceOverridePolicy(TestCaseWithFactory):
         self.assertEqual(
             {},
             FromSourceOverridePolicy().calculateSourceOverrides(
-                {self.factory.makeSourcePackageName(): SourceOverride()}))
+                {self.factory.makeSourcePackageName(): SourceOverride()}
+            ),
+        )
 
     def test_binaries(self):
         # Binaries are overridden with the component from their
@@ -391,11 +517,17 @@ class TestFromSourceOverridePolicy(TestCaseWithFactory):
         self.assertEqual(
             {(bpn, None): BinaryOverride(component=component, new=True)},
             FromSourceOverridePolicy().calculateBinaryOverrides(
-                {(bpn, None): BinaryOverride(
-                    component=random_component,
-                    source_override=SourceOverride(component=component)),
-                 (other_bpn, None): BinaryOverride(
-                     component=random_component)}))
+                {
+                    (bpn, None): BinaryOverride(
+                        component=random_component,
+                        source_override=SourceOverride(component=component),
+                    ),
+                    (other_bpn, None): BinaryOverride(
+                        component=random_component
+                    ),
+                }
+            ),
+        )
 
 
 class TestUnknownOverridePolicy(TestCaseWithFactory):
@@ -405,49 +537,59 @@ class TestUnknownOverridePolicy(TestCaseWithFactory):
     def test_getComponentOverride_default_name(self):
         # getComponentOverride returns the default component name when an
         # unknown component name is passed.
-        component_name = UnknownOverridePolicy.getComponentOverride('no-name')
+        component_name = UnknownOverridePolicy.getComponentOverride("no-name")
 
-        self.assertEqual('universe', component_name)
+        self.assertEqual("universe", component_name)
 
     def test_getComponentOverride_default_component(self):
         # getComponentOverride also accepts a component object (as
         # opposed to a component's name).
-        component = getUtility(IComponentSet)['universe']
+        component = getUtility(IComponentSet)["universe"]
         component_name = UnknownOverridePolicy.getComponentOverride(component)
 
-        self.assertEqual('universe', component_name)
+        self.assertEqual("universe", component_name)
 
     def test_getComponentOverride_return_component(self):
         # Passing return_component=True to getComponentOverride makes it
         # return the Component object (as opposed to the component's
         # name).
-        universe_component = getUtility(IComponentSet)['universe']
+        universe_component = getUtility(IComponentSet)["universe"]
         component = UnknownOverridePolicy.getComponentOverride(
-            universe_component, return_component=True)
+            universe_component, return_component=True
+        )
 
         self.assertEqual(universe_component, component)
 
     def test_unknown_sources(self):
         # The unknown policy uses a default component based on the
         # pre-override component.
-        for component in ('contrib', 'non-free'):
+        for component in ("contrib", "non-free"):
             self.factory.makeComponent(component)
         distroseries = self.factory.makeDistroSeries()
         spns = [self.factory.makeSourcePackageName() for i in range(3)]
         policy = UnknownOverridePolicy(
-            distroseries.main_archive, distroseries,
-            PackagePublishingPocket.RELEASE)
+            distroseries.main_archive,
+            distroseries,
+            PackagePublishingPocket.RELEASE,
+        )
         overrides = policy.calculateSourceOverrides(
             {
                 spn: SourceOverride(
-                    component=getUtility(IComponentSet)[component])
-                for spn, component in
-                zip(spns, ('main', 'contrib', 'non-free'))})
+                    component=getUtility(IComponentSet)[component]
+                )
+                for spn, component in zip(
+                    spns, ("main", "contrib", "non-free")
+                )
+            }
+        )
         expected = {
             spn: SourceOverride(
-                component=getUtility(IComponentSet)[component], new=True)
-            for spn, component in
-            zip(spns, ('universe', 'multiverse', 'multiverse'))}
+                component=getUtility(IComponentSet)[component], new=True
+            )
+            for spn, component in zip(
+                spns, ("universe", "multiverse", "multiverse")
+            )
+        }
         self.assertEqual(expected, overrides)
 
     def test_unknown_binaries(self):
@@ -457,14 +599,23 @@ class TestUnknownOverridePolicy(TestCaseWithFactory):
         distroseries = bpph.distroarchseries.distroseries
         distroseries.nominatedarchindep = bpph.distroarchseries
         policy = UnknownOverridePolicy(
-            distroseries.main_archive, distroseries, bpph.pocket)
+            distroseries.main_archive, distroseries, bpph.pocket
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpph.binarypackagerelease.binarypackagename, None):
-                BinaryOverride()})
-        universe = getUtility(IComponentSet)['universe']
+            {
+                (
+                    bpph.binarypackagerelease.binarypackagename,
+                    None,
+                ): BinaryOverride()
+            }
+        )
+        universe = getUtility(IComponentSet)["universe"]
         expected = {
-            (bpph.binarypackagerelease.binarypackagename, None):
-                BinaryOverride(component=universe, new=True)}
+            (
+                bpph.binarypackagerelease.binarypackagename,
+                None,
+            ): BinaryOverride(component=universe, new=True)
+        }
         self.assertEqual(expected, overrides)
 
 
@@ -476,35 +627,60 @@ class TestConstantOverridePolicy(TestCaseWithFactory):
         policy = ConstantOverridePolicy(
             component=self.factory.makeComponent(),
             section=self.factory.makeSection(),
-            phased_update_percentage=50, new=True)
+            phased_update_percentage=50,
+            new=True,
+        )
         spn = self.factory.makeSourcePackageName()
         self.assertEqual(
-            {spn: SourceOverride(
-                component=policy.component, section=policy.section,
-                new=True)},
+            {
+                spn: SourceOverride(
+                    component=policy.component,
+                    section=policy.section,
+                    new=True,
+                )
+            },
             policy.calculateSourceOverrides(
-                {spn: SourceOverride(
-                    component=self.factory.makeComponent(),
-                    section=self.factory.makeSection(), new=False)}))
+                {
+                    spn: SourceOverride(
+                        component=self.factory.makeComponent(),
+                        section=self.factory.makeSection(),
+                        new=False,
+                    )
+                }
+            ),
+        )
 
     def test_binary(self):
         policy = ConstantOverridePolicy(
             component=self.factory.makeComponent(),
             section=self.factory.makeSection(),
             priority=PackagePublishingPriority.EXTRA,
-            phased_update_percentage=50, new=True)
+            phased_update_percentage=50,
+            new=True,
+        )
         bpn = self.factory.makeBinaryPackageName()
         self.assertEqual(
-            {(bpn, None): BinaryOverride(
-                component=policy.component, section=policy.section,
-                priority=policy.priority, phased_update_percentage=50,
-                new=True)},
+            {
+                (bpn, None): BinaryOverride(
+                    component=policy.component,
+                    section=policy.section,
+                    priority=policy.priority,
+                    phased_update_percentage=50,
+                    new=True,
+                )
+            },
             policy.calculateBinaryOverrides(
-                {(bpn, None): BinaryOverride(
-                    component=self.factory.makeComponent(),
-                    section=self.factory.makeSection(),
-                    priority=PackagePublishingPriority.REQUIRED,
-                    phased_update_percentage=90, new=False)}))
+                {
+                    (bpn, None): BinaryOverride(
+                        component=self.factory.makeComponent(),
+                        section=self.factory.makeSection(),
+                        priority=PackagePublishingPriority.REQUIRED,
+                        phased_update_percentage=90,
+                        new=False,
+                    )
+                }
+            ),
+        )
 
 
 class TestFallbackOverridePolicy(TestCaseWithFactory):
@@ -514,36 +690,48 @@ class TestFallbackOverridePolicy(TestCaseWithFactory):
     def test_fallback_sources(self):
         # The fallback policy runs through a sequence of policies until
         # all overrides are fulfilled.
-        universe = getUtility(IComponentSet)['universe']
+        universe = getUtility(IComponentSet)["universe"]
         spns = [self.factory.makeSourcePackageName()]
         expected = {spns[0]: SourceOverride(component=universe, new=True)}
         distroseries = self.factory.makeDistroSeries()
         pocket = self.factory.getAnyPocket()
         for i in range(8):
             spph = self.factory.makeSourcePackagePublishingHistory(
-                distroseries=distroseries, archive=distroseries.main_archive,
-                pocket=pocket)
+                distroseries=distroseries,
+                archive=distroseries.main_archive,
+                pocket=pocket,
+            )
             spns.append(spph.sourcepackagerelease.sourcepackagename)
-            expected[spph.sourcepackagerelease.sourcepackagename] = (
-                SourceOverride(
-                    component=spph.component, section=spph.section,
-                    version=spph.sourcepackagerelease.version, new=False))
+            expected[
+                spph.sourcepackagerelease.sourcepackagename
+            ] = SourceOverride(
+                component=spph.component,
+                section=spph.section,
+                version=spph.sourcepackagerelease.version,
+                new=False,
+            )
         spns.append(self.factory.makeSourcePackageName())
         expected[spns[-1]] = SourceOverride(component=universe, new=True)
-        policy = FallbackOverridePolicy([
-            FromExistingOverridePolicy(
-                distroseries.main_archive, distroseries, pocket),
-            UnknownOverridePolicy(
-                distroseries.main_archive, distroseries, pocket)])
+        policy = FallbackOverridePolicy(
+            [
+                FromExistingOverridePolicy(
+                    distroseries.main_archive, distroseries, pocket
+                ),
+                UnknownOverridePolicy(
+                    distroseries.main_archive, distroseries, pocket
+                ),
+            ]
+        )
         overrides = policy.calculateSourceOverrides(
-            {spn: SourceOverride() for spn in spns})
+            {spn: SourceOverride() for spn in spns}
+        )
         self.assertEqual(10, len(overrides))
         self.assertEqual(expected, overrides)
 
     def test_ubuntu_override_policy_binaries(self):
         # The Ubuntu policy incorporates both the existing and the unknown
         # policy.
-        universe = getUtility(IComponentSet)['universe']
+        universe = getUtility(IComponentSet)["universe"]
         distroseries = self.factory.makeDistroSeries()
         pocket = self.factory.getAnyPocket()
         bpn = self.factory.makeBinaryPackageName()
@@ -551,75 +739,112 @@ class TestFallbackOverridePolicy(TestCaseWithFactory):
         expected = {}
         for i in range(3):
             distroarchseries = self.factory.makeDistroArchSeries(
-                distroseries=distroseries)
+                distroseries=distroseries
+            )
             bpb = self.factory.makeBinaryPackageBuild(
-                distroarchseries=distroarchseries)
+                distroarchseries=distroarchseries
+            )
             bpr = self.factory.makeBinaryPackageRelease(
-                build=bpb, binarypackagename=bpn,
-                architecturespecific=True)
+                build=bpb, binarypackagename=bpn, architecturespecific=True
+            )
             bpph = self.factory.makeBinaryPackagePublishingHistory(
-                binarypackagerelease=bpr, distroarchseries=distroarchseries,
-                archive=distroseries.main_archive, pocket=pocket)
+                binarypackagerelease=bpr,
+                distroarchseries=distroarchseries,
+                archive=distroseries.main_archive,
+                pocket=pocket,
+            )
             bpns.append((bpn, distroarchseries.architecturetag))
-            expected[(bpn, distroarchseries.architecturetag)] = (
-                BinaryOverride(
-                    component=bpph.component, section=bpph.section,
-                    priority=bpph.priority, new=False,
-                    version=bpph.binarypackagerelease.version))
+            expected[(bpn, distroarchseries.architecturetag)] = BinaryOverride(
+                component=bpph.component,
+                section=bpph.section,
+                priority=bpph.priority,
+                new=False,
+                version=bpph.binarypackagerelease.version,
+            )
         for i in range(2):
             distroarchseries = self.factory.makeDistroArchSeries(
-                distroseries=distroseries)
+                distroseries=distroseries
+            )
             bpns.append((bpn, distroarchseries.architecturetag))
             expected[bpn, distroarchseries.architecturetag] = BinaryOverride(
-                component=universe, new=True)
+                component=universe, new=True
+            )
         distroseries.nominatedarchindep = distroarchseries
-        policy = FallbackOverridePolicy([
-            FromExistingOverridePolicy(
-                distroseries.main_archive, distroseries, pocket),
-            UnknownOverridePolicy(
-                distroseries.main_archive, distroseries, pocket)])
+        policy = FallbackOverridePolicy(
+            [
+                FromExistingOverridePolicy(
+                    distroseries.main_archive, distroseries, pocket
+                ),
+                UnknownOverridePolicy(
+                    distroseries.main_archive, distroseries, pocket
+                ),
+            ]
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, das): BinaryOverride() for bpn, das in bpns})
+            {(bpn, das): BinaryOverride() for bpn, das in bpns}
+        )
         self.assertEqual(5, len(overrides))
         self.assertEqual(expected, overrides)
 
     def test_phased_update_percentage(self):
         # A policy with a phased_update_percentage applies it to new binary
         # overrides.
-        universe = getUtility(IComponentSet)['universe']
+        universe = getUtility(IComponentSet)["universe"]
         distroseries = self.factory.makeDistroSeries()
         pocket = self.factory.getAnyPocket()
         bpn = self.factory.makeBinaryPackageName()
         bpns = []
         expected = {}
         distroarchseries = self.factory.makeDistroArchSeries(
-            distroseries=distroseries)
+            distroseries=distroseries
+        )
         bpb = self.factory.makeBinaryPackageBuild(
-            distroarchseries=distroarchseries)
+            distroarchseries=distroarchseries
+        )
         bpr = self.factory.makeBinaryPackageRelease(
-            build=bpb, binarypackagename=bpn, architecturespecific=True)
+            build=bpb, binarypackagename=bpn, architecturespecific=True
+        )
         bpph = self.factory.makeBinaryPackagePublishingHistory(
-            binarypackagerelease=bpr, distroarchseries=distroarchseries,
-            archive=distroseries.main_archive, pocket=pocket)
+            binarypackagerelease=bpr,
+            distroarchseries=distroarchseries,
+            archive=distroseries.main_archive,
+            pocket=pocket,
+        )
         bpns.append((bpn, distroarchseries.architecturetag))
         expected[(bpn, distroarchseries.architecturetag)] = BinaryOverride(
-            component=bpph.component, section=bpph.section,
-            priority=bpph.priority, phased_update_percentage=50,
-            version=bpph.binarypackagerelease.version, new=False)
+            component=bpph.component,
+            section=bpph.section,
+            priority=bpph.priority,
+            phased_update_percentage=50,
+            version=bpph.binarypackagerelease.version,
+            new=False,
+        )
         distroarchseries = self.factory.makeDistroArchSeries(
-            distroseries=distroseries)
+            distroseries=distroseries
+        )
         bpns.append((bpn, distroarchseries.architecturetag))
         expected[(bpn, distroarchseries.architecturetag)] = BinaryOverride(
-            component=universe, phased_update_percentage=50, new=True)
+            component=universe, phased_update_percentage=50, new=True
+        )
         distroseries.nominatedarchindep = distroarchseries
-        policy = FallbackOverridePolicy([
-            FromExistingOverridePolicy(
-                distroseries.main_archive, distroseries, pocket,
-                phased_update_percentage=50),
-            UnknownOverridePolicy(
-                distroseries.main_archive, distroseries, pocket,
-                phased_update_percentage=50)])
+        policy = FallbackOverridePolicy(
+            [
+                FromExistingOverridePolicy(
+                    distroseries.main_archive,
+                    distroseries,
+                    pocket,
+                    phased_update_percentage=50,
+                ),
+                UnknownOverridePolicy(
+                    distroseries.main_archive,
+                    distroseries,
+                    pocket,
+                    phased_update_percentage=50,
+                ),
+            ]
+        )
         overrides = policy.calculateBinaryOverrides(
-            {(bpn, das): BinaryOverride() for bpn, das in bpns})
+            {(bpn, das): BinaryOverride() for bpn, das in bpns}
+        )
         self.assertEqual(2, len(overrides))
         self.assertEqual(expected, overrides)
diff --git a/lib/lp/soyuz/adapters/tests/test_packagelocation.py b/lib/lp/soyuz/adapters/tests/test_packagelocation.py
index 9bbef24..62e61db 100644
--- a/lib/lp/soyuz/adapters/tests/test_packagelocation.py
+++ b/lib/lp/soyuz/adapters/tests/test_packagelocation.py
@@ -6,9 +6,9 @@
 from zope.component import getUtility
 
 from lp.soyuz.adapters.packagelocation import (
-    build_package_location,
     PackageLocationError,
-    )
+    build_package_location,
+)
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.testing import TestCaseWithFactory
@@ -18,16 +18,29 @@ from lp.testing.layers import LaunchpadZopelessLayer
 
 class TestPackageLocation(TestCaseWithFactory):
     """Test the `PackageLocation` class."""
+
     layer = LaunchpadZopelessLayer
 
-    def getPackageLocation(self, distribution_name='ubuntu', suite=None,
-                           purpose=None, person_name=None,
-                           archive_name=None, packageset_names=None,
-                           channel=None):
+    def getPackageLocation(
+        self,
+        distribution_name="ubuntu",
+        suite=None,
+        purpose=None,
+        person_name=None,
+        archive_name=None,
+        packageset_names=None,
+        channel=None,
+    ):
         """Use a helper method to setup a `PackageLocation` object."""
         return build_package_location(
-            distribution_name, suite, purpose, person_name, archive_name,
-            packageset_names=packageset_names, channel=channel)
+            distribution_name,
+            suite,
+            purpose,
+            person_name,
+            archive_name,
+            packageset_names=packageset_names,
+            channel=channel,
+        )
 
     def testSetupLocationForCOPY(self):
         """`PackageLocation` for COPY archives."""
@@ -35,53 +48,62 @@ class TestPackageLocation(TestCaseWithFactory):
         ubuntu = self.getPackageLocation().distribution
 
         returned_location = self.factory.makeCopyArchiveLocation(
-            distribution=ubuntu, name='now-comes-the-mystery',
-            owner=self.factory.makePerson(name='mysteryman'))
+            distribution=ubuntu,
+            name="now-comes-the-mystery",
+            owner=self.factory.makePerson(name="mysteryman"),
+        )
         copy_archive = remove_security_proxy_and_shout_at_engineer(
-            returned_location).archive
+            returned_location
+        ).archive
 
         # Now use the created copy archive to test the build_package_location
         # helper (called via getPackageLocation):
-        location = self.getPackageLocation(purpose=ArchivePurpose.COPY,
-                                           archive_name=copy_archive.name)
-
-        self.assertEqual(location.distribution.name, 'ubuntu')
-        self.assertEqual(location.distroseries.name, 'hoary')
-        self.assertEqual(location.pocket.name, 'RELEASE')
-        self.assertEqual(location.archive.displayname,
-                         'Copy archive now-comes-the-mystery for Mysteryman')
+        location = self.getPackageLocation(
+            purpose=ArchivePurpose.COPY, archive_name=copy_archive.name
+        )
+
+        self.assertEqual(location.distribution.name, "ubuntu")
+        self.assertEqual(location.distroseries.name, "hoary")
+        self.assertEqual(location.pocket.name, "RELEASE")
+        self.assertEqual(
+            location.archive.displayname,
+            "Copy archive now-comes-the-mystery for Mysteryman",
+        )
         self.assertEqual([], location.packagesets)
 
     def testSetupLocationForPRIMARY(self):
         """`PackageLocation` for PRIMARY archives."""
         location = self.getPackageLocation()
-        self.assertEqual(location.distribution.name, 'ubuntu')
-        self.assertEqual(location.distroseries.name, 'hoary')
-        self.assertEqual(location.pocket.name, 'RELEASE')
-        self.assertEqual(location.archive.displayname,
-                         'Primary Archive for Ubuntu Linux')
+        self.assertEqual(location.distribution.name, "ubuntu")
+        self.assertEqual(location.distroseries.name, "hoary")
+        self.assertEqual(location.pocket.name, "RELEASE")
+        self.assertEqual(
+            location.archive.displayname, "Primary Archive for Ubuntu Linux"
+        )
         self.assertEqual([], location.packagesets)
 
     def testSetupLocationForPPA(self):
         """`PackageLocation` for PPA archives."""
-        location = self.getPackageLocation(purpose=ArchivePurpose.PPA,
-                                           person_name='cprov',
-                                           archive_name="ppa")
-        self.assertEqual(location.distribution.name, 'ubuntu')
-        self.assertEqual(location.distroseries.name, 'hoary')
-        self.assertEqual(location.pocket.name, 'RELEASE')
-        self.assertEqual(location.archive.displayname,
-                         'PPA for Celso Providelo')
+        location = self.getPackageLocation(
+            purpose=ArchivePurpose.PPA, person_name="cprov", archive_name="ppa"
+        )
+        self.assertEqual(location.distribution.name, "ubuntu")
+        self.assertEqual(location.distroseries.name, "hoary")
+        self.assertEqual(location.pocket.name, "RELEASE")
+        self.assertEqual(
+            location.archive.displayname, "PPA for Celso Providelo"
+        )
         self.assertEqual([], location.packagesets)
 
     def testSetupLocationForPARTNER(self):
         """`PackageLocation` for PARTNER archives."""
         location = self.getPackageLocation(purpose=ArchivePurpose.PARTNER)
-        self.assertEqual(location.distribution.name, 'ubuntu')
-        self.assertEqual(location.distroseries.name, 'hoary')
-        self.assertEqual(location.pocket.name, 'RELEASE')
-        self.assertEqual(location.archive.displayname,
-                         'Partner Archive for Ubuntu Linux')
+        self.assertEqual(location.distribution.name, "ubuntu")
+        self.assertEqual(location.distroseries.name, "hoary")
+        self.assertEqual(location.pocket.name, "RELEASE")
+        self.assertEqual(
+            location.archive.displayname, "Partner Archive for Ubuntu Linux"
+        )
         self.assertEqual([], location.packagesets)
 
     def testSetupLocationWithPackagesets(self):
@@ -90,7 +112,8 @@ class TestPackageLocation(TestCaseWithFactory):
         packageset1 = self.factory.makePackageset(name=packageset_name1)
         packageset2 = self.factory.makePackageset(name=packageset_name2)
         location = self.getPackageLocation(
-            packageset_names=[packageset_name1, packageset_name2])
+            packageset_names=[packageset_name1, packageset_name2]
+        )
         self.assertEqual([packageset1, packageset2], location.packagesets)
 
     def testSetupLocationUnknownDistribution(self):
@@ -98,14 +121,14 @@ class TestPackageLocation(TestCaseWithFactory):
         self.assertRaises(
             PackageLocationError,
             self.getPackageLocation,
-            distribution_name='beeblebrox')
+            distribution_name="beeblebrox",
+        )
 
     def testSetupLocationUnknownSuite(self):
         """`PackageLocationError` is raised on unknown suite."""
         self.assertRaises(
-            PackageLocationError,
-            self.getPackageLocation,
-            suite='beeblebrox')
+            PackageLocationError, self.getPackageLocation, suite="beeblebrox"
+        )
 
     def testSetupLocationUnknownPerson(self):
         """`PackageLocationError` is raised on unknown person."""
@@ -113,8 +136,9 @@ class TestPackageLocation(TestCaseWithFactory):
             PackageLocationError,
             self.getPackageLocation,
             purpose=ArchivePurpose.PPA,
-            person_name='beeblebrox',
-            archive_name="ppa")
+            person_name="beeblebrox",
+            archive_name="ppa",
+        )
 
     def testSetupLocationUnknownPPA(self):
         """`PackageLocationError` is raised on unknown PPA."""
@@ -122,24 +146,27 @@ class TestPackageLocation(TestCaseWithFactory):
             PackageLocationError,
             self.getPackageLocation,
             purpose=ArchivePurpose.PPA,
-            person_name='kiko',
-            archive_name="ppa")
+            person_name="kiko",
+            archive_name="ppa",
+        )
 
     def test_build_package_location_when_partner_missing(self):
         """`PackageLocationError` is raised when PARTNER does not exist."""
         self.assertRaises(
             PackageLocationError,
             self.getPackageLocation,
-            distribution_name='debian',
-            purpose=ArchivePurpose.PARTNER)
+            distribution_name="debian",
+            purpose=ArchivePurpose.PARTNER,
+        )
 
     def test_build_package_location_when_packageset_unknown(self):
         """`PackageLocationError` is raised on unknown packageset."""
         self.assertRaises(
             PackageLocationError,
             self.getPackageLocation,
-            distribution_name='debian',
-            packageset_names=["unknown"])
+            distribution_name="debian",
+            packageset_names=["unknown"],
+        )
 
     def test_build_package_location_when_one_packageset_unknown(self):
         """Test that with one of two packagesets unknown."""
@@ -148,8 +175,9 @@ class TestPackageLocation(TestCaseWithFactory):
         self.assertRaises(
             PackageLocationError,
             self.getPackageLocation,
-            distribution_name='debian',
-            packageset_names=[packageset_name, "unknown"])
+            distribution_name="debian",
+            packageset_names=[packageset_name, "unknown"],
+        )
 
     def test_build_package_location_with_channel_outside_release_pocket(self):
         """It doesn't make sense to use non-RELEASE pockets with channels."""
@@ -158,7 +186,8 @@ class TestPackageLocation(TestCaseWithFactory):
             "Channels may only be used with the RELEASE pocket.",
             self.getPackageLocation,
             suite="warty-security",
-            channel="stable")
+            channel="stable",
+        )
 
     def testSetupLocationPPANotMatchingDistribution(self):
         """`PackageLocationError` is raised when PPA does not match the
@@ -166,10 +195,11 @@ class TestPackageLocation(TestCaseWithFactory):
         self.assertRaises(
             PackageLocationError,
             self.getPackageLocation,
-            distribution_name='ubuntutest',
+            distribution_name="ubuntutest",
             purpose=ArchivePurpose.PPA,
-            person_name='cprov',
-            archive_name="ppa")
+            person_name="cprov",
+            archive_name="ppa",
+        )
 
     def testComparison(self):
         """Check if PackageLocation objects can be compared."""
@@ -180,36 +210,41 @@ class TestPackageLocation(TestCaseWithFactory):
         self.assertTrue(location_ubuntu_hoary.component is None)
         self.assertTrue(location_ubuntu_hoary_again.component is None)
 
-        universe = getUtility(IComponentSet)['universe']
-        restricted = getUtility(IComponentSet)['restricted']
+        universe = getUtility(IComponentSet)["universe"]
+        restricted = getUtility(IComponentSet)["restricted"]
 
         location_ubuntu_hoary.component = universe
-        self.assertNotEqual(
-            location_ubuntu_hoary, location_ubuntu_hoary_again)
+        self.assertNotEqual(location_ubuntu_hoary, location_ubuntu_hoary_again)
 
         location_ubuntu_hoary_again.component = universe
         self.assertEqual(location_ubuntu_hoary, location_ubuntu_hoary_again)
 
         location_ubuntu_hoary.component = restricted
-        self.assertNotEqual(
-            location_ubuntu_hoary, location_ubuntu_hoary_again)
+        self.assertNotEqual(location_ubuntu_hoary, location_ubuntu_hoary_again)
 
         location_ubuntu_warty_security = self.getPackageLocation(
-            suite='warty-security')
-        self.assertNotEqual(location_ubuntu_hoary,
-                            location_ubuntu_warty_security)
+            suite="warty-security"
+        )
+        self.assertNotEqual(
+            location_ubuntu_hoary, location_ubuntu_warty_security
+        )
 
         location_ubuntutest = self.getPackageLocation(
-            distribution_name='ubuntutest')
+            distribution_name="ubuntutest"
+        )
         self.assertNotEqual(location_ubuntu_hoary, location_ubuntutest)
 
         location_cprov_ppa = self.getPackageLocation(
-            distribution_name='ubuntu', purpose=ArchivePurpose.PPA,
-            person_name='cprov', archive_name="ppa")
+            distribution_name="ubuntu",
+            purpose=ArchivePurpose.PPA,
+            person_name="cprov",
+            archive_name="ppa",
+        )
         self.assertNotEqual(location_cprov_ppa, location_ubuntutest)
 
         location_ubuntu_partner = self.getPackageLocation(
-            distribution_name='ubuntu', purpose=ArchivePurpose.PARTNER)
+            distribution_name="ubuntu", purpose=ArchivePurpose.PARTNER
+        )
         self.assertNotEqual(location_ubuntu_partner, location_cprov_ppa)
 
     def testComparePackagesets(self):
@@ -217,12 +252,10 @@ class TestPackageLocation(TestCaseWithFactory):
         location_ubuntu_hoary_again = self.getPackageLocation()
         packageset = self.factory.makePackageset()
         location_ubuntu_hoary.packagesets = [packageset]
-        self.assertNotEqual(
-            location_ubuntu_hoary, location_ubuntu_hoary_again)
+        self.assertNotEqual(location_ubuntu_hoary, location_ubuntu_hoary_again)
 
         location_ubuntu_hoary_again.packagesets = [packageset]
-        self.assertEqual(
-            location_ubuntu_hoary, location_ubuntu_hoary_again)
+        self.assertEqual(location_ubuntu_hoary, location_ubuntu_hoary_again)
 
     def testCompareChannels(self):
         location_ubuntu_hoary = self.getPackageLocation(channel="stable")
@@ -235,39 +268,51 @@ class TestPackageLocation(TestCaseWithFactory):
     def testRepresentation(self):
         """Check if PackageLocation is represented correctly."""
         location_ubuntu_hoary = self.getPackageLocation()
-        self.assertEqual(
-            str(location_ubuntu_hoary), 'ubuntu: hoary-RELEASE')
+        self.assertEqual(str(location_ubuntu_hoary), "ubuntu: hoary-RELEASE")
 
-        universe = getUtility(IComponentSet)['universe']
+        universe = getUtility(IComponentSet)["universe"]
         location_ubuntu_hoary.component = universe
 
         self.assertEqual(
-            str(location_ubuntu_hoary), 'ubuntu: hoary-RELEASE (universe)')
+            str(location_ubuntu_hoary), "ubuntu: hoary-RELEASE (universe)"
+        )
 
         location_ubuntu_warty_security = self.getPackageLocation(
-            suite='warty-security')
+            suite="warty-security"
+        )
         self.assertEqual(
-            str(location_ubuntu_warty_security), 'ubuntu: warty-SECURITY')
+            str(location_ubuntu_warty_security), "ubuntu: warty-SECURITY"
+        )
 
         location_ubuntutest = self.getPackageLocation(
-            distribution_name='ubuntutest')
+            distribution_name="ubuntutest"
+        )
         self.assertEqual(
-            str(location_ubuntutest), 'ubuntutest: hoary-test-RELEASE')
+            str(location_ubuntutest), "ubuntutest: hoary-test-RELEASE"
+        )
 
         location_cprov_ppa = self.getPackageLocation(
-            distribution_name='ubuntu', purpose=ArchivePurpose.PPA,
-            person_name='cprov', archive_name="ppa")
+            distribution_name="ubuntu",
+            purpose=ArchivePurpose.PPA,
+            person_name="cprov",
+            archive_name="ppa",
+        )
         self.assertEqual(
-            str(location_cprov_ppa), '~cprov/ubuntu/ppa: hoary-RELEASE')
+            str(location_cprov_ppa), "~cprov/ubuntu/ppa: hoary-RELEASE"
+        )
 
         location_ubuntu_partner = self.getPackageLocation(
-            distribution_name='ubuntu', purpose=ArchivePurpose.PARTNER)
+            distribution_name="ubuntu", purpose=ArchivePurpose.PARTNER
+        )
         self.assertEqual(
-            str(location_ubuntu_partner), 'ubuntu/partner: hoary-RELEASE')
+            str(location_ubuntu_partner), "ubuntu/partner: hoary-RELEASE"
+        )
 
         self.factory.makePackageset(name="foo-packageset")
         location_ubuntu_packageset = self.getPackageLocation(
-            packageset_names=["foo-packageset"])
+            packageset_names=["foo-packageset"]
+        )
         self.assertEqual(
             str(location_ubuntu_packageset),
-            'ubuntu: hoary-RELEASE [foo-packageset]')
+            "ubuntu: hoary-RELEASE [foo-packageset]",
+        )
diff --git a/lib/lp/soyuz/browser/archive.py b/lib/lp/soyuz/browser/archive.py
index 484ffbd..b96650c 100644
--- a/lib/lp/soyuz/browser/archive.py
+++ b/lib/lp/soyuz/browser/archive.py
@@ -4,70 +4,53 @@
 """Browser views for archive."""
 
 __all__ = [
-    'ArchiveAdminView',
-    'ArchiveActivateView',
-    'ArchiveBadges',
-    'ArchiveBuildsView',
-    'ArchiveDeleteView',
-    'ArchiveEditDependenciesView',
-    'ArchiveEditView',
-    'ArchiveIndexActionsMenu',
-    'ArchiveNavigation',
-    'ArchiveNavigationMenu',
-    'ArchivePackageCopyingView',
-    'ArchivePackageDeletionView',
-    'ArchivePackagesActionMenu',
-    'ArchivePackagesView',
-    'ArchiveView',
-    'ArchiveViewBase',
-    'EnableProcessorsMixin',
-    'PackageCopyingMixin',
-    'traverse_named_ppa',
-    ]
-
-
-from datetime import (
-    datetime,
-    timedelta,
-    )
+    "ArchiveAdminView",
+    "ArchiveActivateView",
+    "ArchiveBadges",
+    "ArchiveBuildsView",
+    "ArchiveDeleteView",
+    "ArchiveEditDependenciesView",
+    "ArchiveEditView",
+    "ArchiveIndexActionsMenu",
+    "ArchiveNavigation",
+    "ArchiveNavigationMenu",
+    "ArchivePackageCopyingView",
+    "ArchivePackageDeletionView",
+    "ArchivePackagesActionMenu",
+    "ArchivePackagesView",
+    "ArchiveView",
+    "ArchiveViewBase",
+    "EnableProcessorsMixin",
+    "PackageCopyingMixin",
+    "traverse_named_ppa",
+]
+
+
+from datetime import datetime, timedelta
 from operator import attrgetter
 
-from lazr.restful.utils import smartquote
 import pytz
+from lazr.restful.utils import smartquote
 from storm.expr import Desc
 from zope.component import getUtility
 from zope.formlib import form
 from zope.formlib.widget import CustomWidgetFactory
 from zope.formlib.widgets import TextAreaWidget
-from zope.interface import (
-    implementer,
-    Interface,
-    )
-from zope.schema import (
-    Bool,
-    Choice,
-    List,
-    TextLine,
-    )
+from zope.interface import Interface, implementer
+from zope.schema import Bool, Choice, List, TextLine
 from zope.schema.interfaces import IContextSourceBinder
-from zope.schema.vocabulary import (
-    SimpleTerm,
-    SimpleVocabulary,
-    )
+from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
 from zope.security.interfaces import Unauthorized
 from zope.security.proxy import removeSecurityProxy
 
 from lp import _
 from lp.app.browser.badge import HasBadgeBase
 from lp.app.browser.launchpadform import (
-    action,
     LaunchpadEditFormView,
     LaunchpadFormView,
-    )
-from lp.app.browser.lazrjs import (
-    TextAreaEditorWidget,
-    TextLineEditorWidget,
-    )
+    action,
+)
+from lp.app.browser.lazrjs import TextAreaEditorWidget, TextLineEditorWidget
 from lp.app.errors import NotFoundError
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.app.widgets.itemswidgets import (
@@ -75,12 +58,12 @@ from lp.app.widgets.itemswidgets import (
     LaunchpadDropdownWidget,
     LaunchpadRadioWidget,
     PlainMultiCheckBoxWidget,
-    )
+)
 from lp.app.widgets.textwidgets import StrippedTextWidget
 from lp.buildmaster.enums import BuildStatus
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
-    )
+)
 from lp.registry.enums import PersonVisibility
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.person import IPersonSet
@@ -91,7 +74,7 @@ from lp.registry.model.person import Person
 from lp.services.browser_helpers import (
     get_plural_text,
     get_user_agent_distroseries,
-    )
+)
 from lp.services.database.bulk import load_related
 from lp.services.database.sqlobject import SQLObjectNotFound
 from lp.services.helpers import english_list
@@ -99,40 +82,34 @@ from lp.services.job.model.job import Job
 from lp.services.librarian.browser import (
     DeletedProxiedLibraryFileAlias,
     FileNavigationMixin,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import (
-    canonical_url,
-    enabled_with_permission,
     LaunchpadView,
     Link,
     Navigation,
+    canonical_url,
+    enabled_with_permission,
     stepthrough,
-    )
+)
 from lp.services.webapp.authorization import (
     check_permission,
     precache_permission_for_objects,
-    )
+)
 from lp.services.webapp.batching import BatchNavigator
 from lp.services.webapp.escaping import structured
-from lp.services.webapp.interfaces import (
-    ICanonicalUrlData,
-    IStructuredString,
-    )
+from lp.services.webapp.interfaces import ICanonicalUrlData, IStructuredString
 from lp.services.webapp.menu import NavigationMenu
 from lp.services.webapp.publisher import RedirectionView
 from lp.services.worlddata.interfaces.country import ICountrySet
 from lp.soyuz.adapters.archivedependencies import (
     default_component_dependency_name,
     default_pocket_dependency,
-    )
+)
 from lp.soyuz.adapters.archivesourcepublication import (
     ArchiveSourcePublications,
-    )
-from lp.soyuz.browser.build import (
-    BuildRecordsView,
-    get_build_by_id_str,
-    )
+)
+from lp.soyuz.browser.build import BuildRecordsView, get_build_by_id_str
 from lp.soyuz.browser.sourceslist import SourcesListEntriesWidget
 from lp.soyuz.browser.widgets.archive import PPANameWidget
 from lp.soyuz.enums import (
@@ -140,7 +117,7 @@ from lp.soyuz.enums import (
     ArchiveStatus,
     PackageCopyPolicy,
     PackagePublishingStatus,
-    )
+)
 from lp.soyuz.interfaces.archive import (
     ArchiveDependencyError,
     CannotCopy,
@@ -149,27 +126,24 @@ from lp.soyuz.interfaces.archive import (
     IArchiveSet,
     NoSuchPPA,
     validate_external_dependencies,
-    )
+)
 from lp.soyuz.interfaces.archivepermission import IArchivePermissionSet
 from lp.soyuz.interfaces.archivesubscriber import IArchiveSubscriberSet
 from lp.soyuz.interfaces.binarypackagebuild import (
     BuildSetStatus,
     IBinaryPackageBuildSet,
-    )
+)
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.packagecopyjob import IPlainPackageCopyJobSource
 from lp.soyuz.interfaces.packagecopyrequest import IPackageCopyRequestSet
 from lp.soyuz.interfaces.packageset import IPackagesetSet
 from lp.soyuz.interfaces.publishing import (
+    IPublishingSet,
     active_publishing_status,
     inactive_publishing_status,
-    IPublishingSet,
-    )
-from lp.soyuz.model.archive import (
-    Archive,
-    validate_ppa,
-    )
+)
+from lp.soyuz.model.archive import Archive, validate_ppa
 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
 from lp.soyuz.scripts.packagecopier import check_copy_permissions
 from lp.soyuz.vocabularies import make_archive_vocabulary
@@ -207,6 +181,7 @@ class DistributionArchiveURL:
     IDistribution as /<distro>/+archive/<name>, for example:
     /ubuntu/+archive/partner
     """
+
     rootsite = None
 
     def __init__(self, context):
@@ -224,6 +199,7 @@ class DistributionArchiveURL:
 @implementer(ICanonicalUrlData)
 class PPAURL:
     """Dynamic URL declaration for named PPAs."""
+
     rootsite = None
 
     def __init__(self, context):
@@ -236,7 +212,9 @@ class PPAURL:
     @property
     def path(self):
         return "+archive/%s/%s" % (
-            self.context.distribution.name, self.context.name)
+            self.context.distribution.name,
+            self.context.name,
+        )
 
 
 class ArchiveNavigation(Navigation, FileNavigationMixin):
@@ -244,25 +222,25 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
 
     usedfor = IArchive
 
-    @stepthrough('+build')
+    @stepthrough("+build")
     def traverse_build(self, name):
         build = get_build_by_id_str(IBinaryPackageBuildSet, name)
         if build is None or build.archive != self.context:
             return None
         return build
 
-    @stepthrough('+recipebuild')
+    @stepthrough("+recipebuild")
     def traverse_recipebuild(self, name):
         build = get_build_by_id_str(ISourcePackageRecipeBuildSource, name)
         if build is None or build.archive != self.context:
             return None
         return build
 
-    @stepthrough('+sourcepub')
+    @stepthrough("+sourcepub")
     def traverse_sourcepub(self, name):
         return self._traverse_publication(name, source=True)
 
-    @stepthrough('+binarypub')
+    @stepthrough("+binarypub")
     def traverse_binarypub(self, name):
         return self._traverse_publication(name, source=False)
 
@@ -275,9 +253,10 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
         # The ID is not enough on its own to identify the publication,
         # we need to make sure it matches the context archive as well.
         return getUtility(IPublishingSet).getByIdAndArchive(
-            pub_id, self.context, source)
+            pub_id, self.context, source
+        )
 
-    @stepthrough('+binaryhits')
+    @stepthrough("+binaryhits")
     def traverse_binaryhits(self, name_str):
         """Traverse to an `IBinaryPackageReleaseDownloadCount`.
 
@@ -311,13 +290,13 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
             return None
 
         try:
-            date = datetime.strptime(date_str, '%Y-%m-%d').date()
+            date = datetime.strptime(date_str, "%Y-%m-%d").date()
         except ValueError:
             return None
 
         # 'unknown' should always be safe, since the key is the two letter
         # ISO code, and 'unknown' has more than two letters.
-        if country_str == 'unknown':
+        if country_str == "unknown":
             country = None
         else:
             try:
@@ -327,14 +306,15 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
 
         return self.context.getPackageDownloadCount(bpr, date, country)
 
-    @stepthrough('+subscriptions')
+    @stepthrough("+subscriptions")
     def traverse_subscription(self, person_name):
         person = getUtility(IPersonSet).getByName(person_name)
         if person is None:
             return None
 
         subscriptions = getUtility(IArchiveSubscriberSet).getBySubscriber(
-            person, archive=self.context)
+            person, archive=self.context
+        )
 
         # If a person is subscribed with a direct subscription as well as
         # via a team, subscriptions will contain both, so need to grab
@@ -345,16 +325,17 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
 
         return None
 
-    @stepthrough('+upload')
+    @stepthrough("+upload")
     def traverse_upload_permission(self, name):
         """Traverse the data part of the URL for upload permissions."""
         return self._traverse_permission(name, ArchivePermissionType.UPLOAD)
 
-    @stepthrough('+queue-admin')
+    @stepthrough("+queue-admin")
     def traverse_queue_admin_permission(self, name):
         """Traverse the data part of the URL for queue admin permissions."""
         return self._traverse_permission(
-            name, ArchivePermissionType.QUEUE_ADMIN)
+            name, ArchivePermissionType.QUEUE_ADMIN
+        )
 
     def _traverse_permission(self, name, permission_type):
         """Traversal helper function.
@@ -379,27 +360,27 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
             return None
 
         # Obtain the item type and name from the URL parameters.
-        item_type = get_url_param('type')
-        item = get_url_param('item')
+        item_type = get_url_param("type")
+        item = get_url_param("item")
 
         if item_type is None or item is None:
             return None
 
         the_item = None
         kwargs = {}
-        if item_type == 'component':
+        if item_type == "component":
             # See if "item" is a component name.
             try:
                 the_item = getUtility(IComponentSet)[item]
             except NotFoundError:
                 pass
-        elif item_type == 'packagename':
+        elif item_type == "packagename":
             # See if "item" is a source package name.
             the_item = getUtility(ISourcePackageNameSet).queryByName(item)
-        elif item_type == 'packageset':
+        elif item_type == "packageset":
             the_item = None
             # Was a 'series' URL param passed?
-            series = get_url_param('series')
+            series = get_url_param("series")
             if series is not None:
                 # Get the requested distro series.
                 try:
@@ -408,12 +389,12 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
                     series = None
             if series is not None:
                 the_item = getUtility(IPackagesetSet).getByName(series, item)
-        elif item_type == 'pocket':
+        elif item_type == "pocket":
             # See if "item" is a pocket name.
             try:
                 the_item = PackagePublishingPocket.items[item]
                 # Was a 'series' URL param passed?
-                series = get_url_param('series')
+                series = get_url_param("series")
                 if series is not None:
                     # Get the requested distro series.
                     try:
@@ -428,7 +409,8 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
 
         if the_item is not None:
             result_set = getUtility(IArchivePermissionSet).checkAuthenticated(
-                user, self.context, permission_type, the_item, **kwargs)
+                user, self.context, permission_type, the_item, **kwargs
+            )
             try:
                 return result_set[0]
             except IndexError:
@@ -436,7 +418,7 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
         else:
             return None
 
-    @stepthrough('+dependency')
+    @stepthrough("+dependency")
     def traverse_dependency(self, id):
         """Traverse to an archive dependency by archive ID.
 
@@ -457,7 +439,7 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
 
         return self.context.getArchiveDependency(archive)
 
-    @stepthrough('+sourcefiles')
+    @stepthrough("+sourcefiles")
     def traverse_sourcefiles(self, sourcepackagename):
         """Traverse to a source file in the archive.
 
@@ -480,11 +462,12 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
         version = self.request.stepstogo.consume()
         filename = self.request.stepstogo.consume()
 
-        if not check_permission('launchpad.View', self.context):
+        if not check_permission("launchpad.View", self.context):
             raise Unauthorized()
 
         library_file = self.context.getSourceFileByName(
-            sourcepackagename, version, filename)
+            sourcepackagename, version, filename
+        )
 
         # Deleted library files result in a NotFound-like error.
         if library_file.deleted:
@@ -495,24 +478,24 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
             return None
 
         return RedirectionView(
-            library_file.getURL(include_token=True), self.request)
+            library_file.getURL(include_token=True), self.request
+        )
 
 
 class ArchiveMenuMixin:
-
     def ppa(self):
-        text = 'View PPA'
-        return Link(canonical_url(self.context), text, icon='info')
+        text = "View PPA"
+        return Link(canonical_url(self.context), text, icon="info")
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def admin(self):
-        text = 'Administer archive'
-        return Link('+admin', text, icon='edit')
+        text = "Administer archive"
+        return Link("+admin", text, icon="edit")
 
-    @enabled_with_permission('launchpad.Append')
+    @enabled_with_permission("launchpad.Append")
     def manage_subscribers(self):
-        text = 'Manage access'
-        link = Link('+subscriptions', text, icon='edit')
+        text = "Manage access"
+        link = Link("+subscriptions", text, icon="edit")
 
         # This link should only be available for private archives:
         view = self.context
@@ -521,54 +504,53 @@ class ArchiveMenuMixin:
             link.enabled = False
         return link
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def edit(self):
-        text = 'Change details'
+        text = "Change details"
         view = self.context
-        return Link(
-            '+edit', text, icon='edit', enabled=view.context.is_active)
+        return Link("+edit", text, icon="edit", enabled=view.context.is_active)
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def delete_ppa(self):
-        text = 'Delete PPA'
+        text = "Delete PPA"
         view = self.context
         return Link(
-            '+delete', text, icon='trash-icon',
-            enabled=view.context.is_active)
+            "+delete", text, icon="trash-icon", enabled=view.context.is_active
+        )
 
     def builds(self):
-        text = 'View all builds'
-        return Link('+builds', text, icon='info')
+        text = "View all builds"
+        return Link("+builds", text, icon="info")
 
     def builds_successful(self):
-        text = 'View successful builds'
-        return Link('+builds?build_state=built', text, icon='info')
+        text = "View successful builds"
+        return Link("+builds?build_state=built", text, icon="info")
 
     def builds_pending(self):
-        text = 'View pending builds'
-        return Link('+builds?build_state=pending', text, icon='info')
+        text = "View pending builds"
+        return Link("+builds?build_state=pending", text, icon="info")
 
     def builds_building(self):
-        text = 'View in-progress builds'
-        return Link('+builds?build_state=building', text, icon='info')
+        text = "View in-progress builds"
+        return Link("+builds?build_state=building", text, icon="info")
 
     def packages(self):
-        text = 'View package details'
-        link = Link('+packages', text, icon='info')
+        text = "View package details"
+        link = Link("+packages", text, icon="info")
         # Disable the link for P3As if they don't have upload rights,
         # except if the user is a commercial admin.
         if self.context.private:
-            if not check_permission('launchpad.Append', self.context):
+            if not check_permission("launchpad.Append", self.context):
                 admins = getUtility(ILaunchpadCelebrities).commercial_admin
                 if not self.user.inTeam(admins):
                     link.enabled = False
         return link
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def delete(self):
         """Display a delete menu option for non-copy archives."""
-        text = 'Delete packages'
-        link = Link('+delete-packages', text, icon='trash-icon')
+        text = "Delete packages"
+        link = Link("+delete-packages", text, icon="trash-icon")
 
         # This link should not be available for copy archives or
         # archives without any sources.
@@ -579,34 +561,43 @@ class ArchiveMenuMixin:
             link.enabled = False
         return link
 
-    @enabled_with_permission('launchpad.AnyPerson')
+    @enabled_with_permission("launchpad.AnyPerson")
     def copy(self):
         """Display a copy menu option for non-copy archives."""
-        text = 'Copy packages'
-        link = Link('+copy-packages', text, icon='package-sync')
+        text = "Copy packages"
+        link = Link("+copy-packages", text, icon="package-sync")
 
         # This link should not be available for copy archives.
         if self.context.is_copy:
             link.enabled = False
         return link
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def edit_dependencies(self):
-        text = 'Edit PPA dependencies'
+        text = "Edit PPA dependencies"
         view = self.context
         return Link(
-            '+edit-dependencies', text, icon='edit',
-            enabled=view.context.is_active)
+            "+edit-dependencies",
+            text,
+            icon="edit",
+            enabled=view.context.is_active,
+        )
 
 
 class ArchiveNavigationMenu(NavigationMenu, ArchiveMenuMixin):
     """Overview Menu for IArchive."""
 
     usedfor = IArchive
-    facet = 'overview'
-    links = ['admin', 'builds', 'builds_building',
-             'builds_pending', 'builds_successful',
-             'packages', 'ppa']
+    facet = "overview"
+    links = [
+        "admin",
+        "builds",
+        "builds_building",
+        "builds_pending",
+        "builds_successful",
+        "packages",
+        "ppa",
+    ]
 
 
 class IArchiveIndexActionsMenu(Interface):
@@ -615,10 +606,17 @@ class IArchiveIndexActionsMenu(Interface):
 
 class ArchiveIndexActionsMenu(NavigationMenu, ArchiveMenuMixin):
     """Archive index navigation menu."""
+
     usedfor = IArchiveIndexActionsMenu
-    facet = 'overview'
-    links = ['admin', 'edit', 'edit_dependencies',
-             'manage_subscribers', 'packages', 'delete_ppa']
+    facet = "overview"
+    links = [
+        "admin",
+        "edit",
+        "edit_dependencies",
+        "manage_subscribers",
+        "packages",
+        "delete_ppa",
+    ]
 
 
 class IArchivePackagesActionMenu(Interface):
@@ -627,9 +625,10 @@ class IArchivePackagesActionMenu(Interface):
 
 class ArchivePackagesActionMenu(NavigationMenu, ArchiveMenuMixin):
     """An action menu for archive package-related actions."""
+
     usedfor = IArchivePackagesActionMenu
-    facet = 'overview'
-    links = ['copy', 'delete']
+    facet = "overview"
+    links = ["copy", "delete"]
 
 
 class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
@@ -640,16 +639,18 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
         # the current user has lp.Edit then add a link to +edit to fix
         # this.
         if not self.context.publish and self.context.is_active:
-            can_edit = check_permission('launchpad.Edit', self.context)
+            can_edit = check_permission("launchpad.Edit", self.context)
             notification = "Publishing has been disabled for this archive."
             if can_edit:
-                edit_url = canonical_url(self.context) + '/+edit'
+                edit_url = canonical_url(self.context) + "/+edit"
                 notification += (
-                    " <a href=%s>(re-enable publishing)</a>" % edit_url)
+                    " <a href=%s>(re-enable publishing)</a>" % edit_url
+                )
             if self.context.private:
                 notification += (
                     " Since this archive is private, no builds are "
-                    "being dispatched.")
+                    "being dispatched."
+                )
             self.request.response.addNotification(structured(notification))
         super().initialize()
         # Set properties for SourcesListEntriesWidget.
@@ -666,25 +667,29 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
 
         def package_plural(control):
             if control == 1:
-                return 'package'
-            return 'packages'
+                return "package"
+            return "packages"
 
         # Calculate the label for the package counters respecting
         # singular/plural forms.
         number_of_sources = self.context.number_of_sources
-        source_label = '%s source %s' % (
-            number_of_sources, package_plural(number_of_sources))
+        source_label = "%s source %s" % (
+            number_of_sources,
+            package_plural(number_of_sources),
+        )
 
         number_of_binaries = self.context.number_of_binaries
-        binary_label = '%s binary %s' % (
-            number_of_binaries, package_plural(number_of_binaries))
+        binary_label = "%s binary %s" % (
+            number_of_binaries,
+            package_plural(number_of_binaries),
+        )
 
         used = self.context.estimated_size
         if self.context.authorized_size:
             # Quota is stored in MiB, convert it to bytes.
-            quota = self.context.authorized_size * (2 ** 20)
+            quota = self.context.authorized_size * (2**20)
             # Calculate the usage factor and limit it to 100%.
-            used_factor = (float(used) / quota)
+            used_factor = float(used) / quota
             if used_factor > 1:
                 used_factor = 1
         else:
@@ -694,9 +699,9 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
         # Calculate the appropriate CSS class to be used with the usage
         # factor. Highlight it (in red) if usage is over 90% of the quota.
         if used_factor > 0.90:
-            used_css_class = 'red'
+            used_css_class = "red"
         else:
-            used_css_class = 'green'
+            used_css_class = "green"
 
         # Usage percentage with 2 degrees of precision (more than enough
         # for humans).
@@ -710,7 +715,8 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
             used=used,
             used_percentage=used_percentage,
             used_css_class=used_css_class,
-            quota=quota)
+            quota=quota,
+        )
 
     @property
     def archive_label(self):
@@ -721,9 +727,9 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
         not worth splitting off into a separate template or macro)
         """
         if self.context.is_ppa:
-            return 'PPA'
+            return "PPA"
         else:
-            return 'archive'
+            return "archive"
 
     @cachedproperty
     def build_counters(self):
@@ -741,7 +747,7 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
         The dependencies section is presented if there are any dependency set
         or if the user has permission to change it.
         """
-        can_edit = check_permission('launchpad.Edit', self.context)
+        can_edit = check_permission("launchpad.Edit", self.context)
         return can_edit or len(self.dependencies) > 0
 
     @property
@@ -755,15 +761,17 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
         disabled_dependencies = [
             archive_dependency
             for archive_dependency in self.dependencies
-            if not archive_dependency.dependency.enabled]
-        can_edit = check_permission('launchpad.Edit', self.context)
+            if not archive_dependency.dependency.enabled
+        ]
+        can_edit = check_permission("launchpad.Edit", self.context)
         return can_edit and len(disabled_dependencies) > 0
 
     @cachedproperty
     def package_copy_requests(self):
         """Return any package copy requests associated with this archive."""
-        copy_requests = getUtility(
-            IPackageCopyRequestSet).getByTargetArchive(self.context)
+        copy_requests = getUtility(IPackageCopyRequestSet).getByTargetArchive(
+            self.context
+        )
         return list(copy_requests)
 
     @property
@@ -773,7 +781,9 @@ class ArchiveViewBase(LaunchpadView, SourcesListEntriesWidget):
             return None
 
         if self.context.status in (
-            ArchiveStatus.DELETED, ArchiveStatus.DELETING):
+            ArchiveStatus.DELETED,
+            ArchiveStatus.DELETING,
+        ):
             return "This %s has been deleted." % self.archive_label
         else:
             return "This %s has been disabled." % self.archive_label
@@ -794,33 +804,47 @@ class ArchiveSeriesVocabularyFactory:
         series_terms = []
         for distroseries in context.series_with_sources:
             series_terms.append(
-                SimpleTerm(distroseries, token=distroseries.name,
-                           title=distroseries.displayname))
+                SimpleTerm(
+                    distroseries,
+                    token=distroseries.name,
+                    title=distroseries.displayname,
+                )
+            )
         return SimpleVocabulary(series_terms)
 
 
 class SeriesFilterWidget(LaunchpadDropdownWidget):
     """Redefining default display value as 'Any series'."""
+
     _messageNoValue = _("any", "Any series")
 
 
 class StatusFilterWidget(LaunchpadDropdownWidget):
     """Redefining default display value as 'Any status'."""
+
     _messageNoValue = _("any", "Any status")
 
 
 class IPPAPackageFilter(Interface):
     """The interface used as the schema for the package filtering form."""
-    name_filter = TextLine(
-        title=_("Package name contains"), required=False)
+
+    name_filter = TextLine(title=_("Package name contains"), required=False)
 
     series_filter = Choice(
-        source=ArchiveSeriesVocabularyFactory(), required=False)
+        source=ArchiveSeriesVocabularyFactory(), required=False
+    )
 
-    status_filter = Choice(vocabulary=SimpleVocabulary((
-        SimpleTerm(active_publishing_status, 'published', 'Published'),
-        SimpleTerm(inactive_publishing_status, 'superseded', 'Superseded'),
-        )), required=False)
+    status_filter = Choice(
+        vocabulary=SimpleVocabulary(
+            (
+                SimpleTerm(active_publishing_status, "published", "Published"),
+                SimpleTerm(
+                    inactive_publishing_status, "superseded", "Superseded"
+                ),
+            )
+        ),
+        required=False,
+    )
 
 
 class ArchiveSourcePackageListViewBase(ArchiveViewBase, LaunchpadFormView):
@@ -841,9 +865,10 @@ class ArchiveSourcePackageListViewBase(ArchiveViewBase, LaunchpadFormView):
 
     @property
     def specified_name_filter(self):
-        """Return the specified name filter if one was specified """
+        """Return the specified name filter if one was specified"""
         requested_name_filter = self.request.query_string_params.get(
-            'field.name_filter')
+            "field.name_filter"
+        )
 
         if requested_name_filter and requested_name_filter[0]:
             return requested_name_filter[0]
@@ -856,16 +881,16 @@ class ArchiveSourcePackageListViewBase(ArchiveViewBase, LaunchpadFormView):
         This is needed because zope's form library does not consider
         query string params (GET params) during a post request.
         """
-        field_name = 'field.' + filter_name
+        field_name = "field." + filter_name
         requested_filter = self.request.query_string_params.get(field_name)
 
         # If an empty filter was specified, then it's explicitly
         # been set to empty - so we use None.
-        if requested_filter == ['']:
+        if requested_filter == [""]:
             return None
 
         # If the requested filter is none, then we use the default.
-        default_filter_attr = 'default_' + filter_name
+        default_filter_attr = "default_" + filter_name
         if requested_filter is None:
             return getattr(self, default_filter_attr)
 
@@ -880,23 +905,26 @@ class ArchiveSourcePackageListViewBase(ArchiveViewBase, LaunchpadFormView):
     @property
     def plain_status_filter_widget(self):
         """Render a <select> control with no <div>s around it."""
-        return self.widgets['status_filter'].renderValue(
-            self.getSelectedFilterValue('status_filter'))
+        return self.widgets["status_filter"].renderValue(
+            self.getSelectedFilterValue("status_filter")
+        )
 
     @property
     def plain_series_filter_widget(self):
         """Render a <select> control with no <div>s around it."""
-        return self.widgets['series_filter'].renderValue(
-            self.getSelectedFilterValue('series_filter'))
+        return self.widgets["series_filter"].renderValue(
+            self.getSelectedFilterValue("series_filter")
+        )
 
     @property
     def filtered_sources(self):
         """Return the source results for display after filtering."""
         return self.context.getPublishedSources(
             name=self.specified_name_filter,
-            status=self.getSelectedFilterValue('status_filter'),
-            distroseries=self.getSelectedFilterValue('series_filter'),
-            eager_load=True)
+            status=self.getSelectedFilterValue("status_filter"),
+            distroseries=self.getSelectedFilterValue("series_filter"),
+            eager_load=True,
+        )
 
     @property
     def default_status_filter(self):
@@ -904,8 +932,11 @@ class ArchiveSourcePackageListViewBase(ArchiveViewBase, LaunchpadFormView):
 
         Subclasses of ArchiveViewBase can override this when required.
         """
-        return self.widgets['status_filter'].vocabulary.getTermByToken(
-            'published').value
+        return (
+            self.widgets["status_filter"]
+            .vocabulary.getTermByToken("published")
+            .value
+        )
 
     @property
     def default_series_filter(self):
@@ -946,30 +977,36 @@ class ArchiveView(ArchiveSourcePackageListViewBase):
         """Redirect if our context is a main archive."""
         if self.context.is_main:
             self.request.response.redirect(
-                canonical_url(self.context.distribution))
+                canonical_url(self.context.distribution)
+            )
             return
         super().initialize()
 
     @property
     def displayname_edit_widget(self):
-        display_name = IArchive['displayname']
+        display_name = IArchive["displayname"]
         title = "Edit the displayname"
         return TextLineEditorWidget(
-            self.context, display_name, title, 'h1', max_width='95%',
-            truncate_lines=1)
+            self.context,
+            display_name,
+            title,
+            "h1",
+            max_width="95%",
+            truncate_lines=1,
+        )
 
     @property
     def default_series_filter(self):
         """Return the distroseries identified by the user-agent."""
         version_number = get_user_agent_distroseries(
-            self.request.getHeader('HTTP_USER_AGENT'))
+            self.request.getHeader("HTTP_USER_AGENT")
+        )
 
         # Check if this version is one of the available
         # distroseries for this archive:
-        vocabulary = self.widgets['series_filter'].vocabulary
+        vocabulary = self.widgets["series_filter"].vocabulary
         for term in vocabulary:
-            if (term.value is not None and
-                term.value.version == version_number):
+            if term.value is not None and term.value.version == version_number:
                 return term.value
 
         # Otherwise we default to 'any'
@@ -982,19 +1019,24 @@ class ArchiveView(ArchiveSourcePackageListViewBase):
         if self.context.is_ppa:
             linkify_text = not self.context.owner.is_probationary
         archive = self.context
-        description = IArchive['description']
+        description = IArchive["description"]
         title = self.archive_label + " description"
         # Don't hide empty archive descriptions.  Even though the interface
         # says they are required, the model doesn't.
         return TextAreaEditorWidget(
-            archive, description, title, hide_empty=False,
-            linkify_text=linkify_text)
+            archive,
+            description,
+            title,
+            hide_empty=False,
+            linkify_text=linkify_text,
+        )
 
     @cachedproperty
     def latest_updates(self):
         """Return the last five published sources for this archive."""
         sources = self.context.getPublishedSources(
-            status=PackagePublishingStatus.PUBLISHED)
+            status=PackagePublishingStatus.PUBLISHED
+        )
         sources.order_by(Desc(SourcePackagePublishingHistory.datepublished))
         result_tuples = sources[:5]
 
@@ -1005,43 +1047,48 @@ class ArchiveView(ArchiveSourcePackageListViewBase):
         # each status is too long for use here, so define a dict of
         # concise status descriptions that will fit in a small area.
         status_names = {
-            'FULLYBUILT': 'Successfully built',
-            'FULLYBUILT_PENDING': 'Successfully built',
-            'NEEDSBUILD': 'Waiting to build',
-            'FAILEDTOBUILD': 'Failed to build:',
-            'BUILDING': 'Currently building',
-            'UPLOADING': 'Currently uploading',
-            }
+            "FULLYBUILT": "Successfully built",
+            "FULLYBUILT_PENDING": "Successfully built",
+            "NEEDSBUILD": "Waiting to build",
+            "FAILEDTOBUILD": "Failed to build:",
+            "BUILDING": "Currently building",
+            "UPLOADING": "Currently uploading",
+        }
 
         now = datetime.now(tz=pytz.UTC)
         source_ids = [result_tuple.id for result_tuple in result_tuples]
         summaries = getUtility(
-            IPublishingSet).getBuildStatusSummariesForSourceIdsAndArchive(
-                source_ids, self.context)
+            IPublishingSet
+        ).getBuildStatusSummariesForSourceIdsAndArchive(
+            source_ids, self.context
+        )
         for source_id, status_summary in summaries.items():
-            date_published = status_summary['date_published']
-            source_package_name = status_summary['source_package_name']
-            current_status = status_summary['status']
+            date_published = status_summary["date_published"]
+            source_package_name = status_summary["source_package_name"]
+            current_status = status_summary["status"]
             duration = now - date_published
 
             # We'd like to include the builds in the latest updates
             # iff the build failed.
             builds = []
             if current_status == BuildSetStatus.FAILEDTOBUILD:
-                builds = status_summary['builds']
-
-            latest_updates_list.append({
-                'source_id': source_id,
-                'date_published': date_published,
-                'title': source_package_name,
-                'status': status_names[current_status.title],
-                'status_class': current_status.title,
-                'duration': duration,
-                'builds': builds,
-                })
+                builds = status_summary["builds"]
+
+            latest_updates_list.append(
+                {
+                    "source_id": source_id,
+                    "date_published": date_published,
+                    "title": source_package_name,
+                    "status": status_names[current_status.title],
+                    "status_class": current_status.title,
+                    "duration": duration,
+                    "builds": builds,
+                }
+            )
 
         latest_updates_list.sort(
-            key=lambda x: (x['date_published'], x['source_id']), reverse=True)
+            key=lambda x: (x["date_published"], x["source_id"]), reverse=True
+        )
         return latest_updates_list
 
     def num_updates_over_last_days(self, num_days=30):
@@ -1049,19 +1096,22 @@ class ArchiveView(ArchiveSourcePackageListViewBase):
         now = datetime.now(tz=pytz.UTC)
         created_since = now - timedelta(num_days)
         return self.context.getPublishedSources(
-            created_since_date=created_since).count()
+            created_since_date=created_since
+        ).count()
 
     @property
     def num_pkgs_building(self):
         """Return the number of building/waiting to build packages."""
-        pkgs_building_count, pkgs_waiting_count = (
-            self.context.num_pkgs_building)
+        (
+            pkgs_building_count,
+            pkgs_waiting_count,
+        ) = self.context.num_pkgs_building
         # The total is just used for conditionals in the template.
         return {
-            'building': pkgs_building_count,
-            'waiting': pkgs_waiting_count,
-            'total': pkgs_building_count + pkgs_waiting_count,
-            }
+            "building": pkgs_building_count,
+            "waiting": pkgs_waiting_count,
+            "total": pkgs_building_count + pkgs_waiting_count,
+        }
 
 
 @implementer(IArchivePackagesActionMenu)
@@ -1080,7 +1130,8 @@ class ArchivePackagesView(ArchiveSourcePackageListViewBase):
     def series_list_string(self):
         """Return an English string of the distroseries."""
         return english_list(
-            series.displayname for series in self.series_with_sources)
+            series.displayname for series in self.series_with_sources
+        )
 
     @property
     def is_copy(self):
@@ -1100,11 +1151,11 @@ class ArchivePackagesView(ArchiveSourcePackageListViewBase):
         # related objects.
         pcjs = [removeSecurityProxy(ppcj).context for ppcj in ppcjs]
         # Pre-load related Jobs.
-        jobs = load_related(Job, pcjs, ['job_id'])
+        jobs = load_related(Job, pcjs, ["job_id"])
         # Pre-load related requesters.
-        load_related(Person, jobs, ['requester_id'])
+        load_related(Person, jobs, ["requester_id"])
         # Pre-load related source archives.
-        load_related(Archive, pcjs, ['source_archive_id'])
+        load_related(Archive, pcjs, ["source_archive_id"])
 
         return ppcjs.config(limit=5)
 
@@ -1117,11 +1168,11 @@ class ArchivePackagesView(ArchiveSourcePackageListViewBase):
         job_source = getUtility(IPlainPackageCopyJobSource)
         count = job_source.getIncompleteJobsForArchive(self.context).count()
         if count > 5:
-            return 'Showing 5 of %s' % count
+            return "Showing 5 of %s" % count
 
     @cachedproperty
     def has_append_perm(self):
-        return check_permission('launchpad.Append', self.context)
+        return check_permission("launchpad.Append", self.context)
 
 
 class ArchiveSourceSelectionFormView(ArchiveSourcePackageListViewBase):
@@ -1136,15 +1187,14 @@ class ArchiveSourceSelectionFormView(ArchiveSourcePackageListViewBase):
 
         This should be called during actions of subclasses.
         """
-        query_string = self.request.get('QUERY_STRING', '')
+        query_string = self.request.get("QUERY_STRING", "")
         if query_string:
             self.next_url = "%s?%s" % (self.request.URL, query_string)
         else:
             self.next_url = self.request.URL
 
     def setUpWidgets(self, context=None):
-        """Setup our custom widget which depends on the filter widget values.
-        """
+        """Setup our custom widget depending on the filter widget values."""
         # To create the selected sources field, we need to define a
         # vocabulary based on the currently selected sources (using self
         # batched_sources) but this itself requires the current values of
@@ -1155,9 +1205,13 @@ class ArchiveSourceSelectionFormView(ArchiveSourcePackageListViewBase):
         self.form_fields += self.createSelectedSourcesField()
 
         self.widgets += form.setUpWidgets(
-            self.form_fields.select('selected_sources'),
-            self.prefix, self.context, self.request,
-            data=self.initial_values, ignore_request=False)
+            self.form_fields.select("selected_sources"),
+            self.prefix,
+            self.context,
+            self.request,
+            data=self.initial_values,
+            ignore_request=False,
+        )
 
     def focusedElementScript(self):
         """Override `LaunchpadFormView`.
@@ -1165,7 +1219,7 @@ class ArchiveSourceSelectionFormView(ArchiveSourcePackageListViewBase):
         Ensure focus is only set if there are sources actually presented.
         """
         if not self.has_sources_for_display:
-            return ''
+            return ""
         return LaunchpadFormView.focusedElementScript(self)
 
     def createSelectedSourcesField(self):
@@ -1180,26 +1234,33 @@ class ArchiveSourceSelectionFormView(ArchiveSourcePackageListViewBase):
         for pub in self.batched_sources:
             terms.append(SimpleTerm(pub, str(pub.id), pub.displayname))
         return form.Fields(
-            List(__name__='selected_sources',
-                 title=_('Available sources'),
-                 value_type=Choice(vocabulary=SimpleVocabulary(terms)),
-                 required=False,
-                 default=[],
-                 description=_('Select one or more sources to be submitted '
-                               'to an action.')))
+            List(
+                __name__="selected_sources",
+                title=_("Available sources"),
+                value_type=Choice(vocabulary=SimpleVocabulary(terms)),
+                required=False,
+                default=[],
+                description=_(
+                    "Select one or more sources to be submitted "
+                    "to an action."
+                ),
+            )
+        )
 
     @property
     def action_url(self):
         """The forms should post to themselves, including GET params."""
-        return "%s?%s" % (self.request.getURL(), self.request['QUERY_STRING'])
+        return "%s?%s" % (self.request.getURL(), self.request["QUERY_STRING"])
 
 
 class IArchivePackageDeletionForm(IPPAPackageFilter):
     """Schema used to delete packages within an archive."""
 
     deletion_comment = TextLine(
-        title=_("Deletion comment"), required=False,
-        description=_("The reason why the package is being deleted."))
+        title=_("Deletion comment"),
+        required=False,
+        description=_("The reason why the package is being deleted."),
+    )
 
 
 class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
@@ -1211,12 +1272,13 @@ class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
 
     schema = IArchivePackageDeletionForm
     custom_widget_deletion_comment = CustomWidgetFactory(
-        StrippedTextWidget, displayWidth=50)
-    label = 'Delete packages'
+        StrippedTextWidget, displayWidth=50
+    )
+    label = "Delete packages"
 
     @property
     def label(self):
-        return 'Delete packages from %s' % self.context.displayname
+        return "Delete packages from %s" % self.context.displayname
 
     @property
     def default_status_filter(self):
@@ -1232,8 +1294,9 @@ class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
         """
         return self.context.getSourcesForDeletion(
             name=self.specified_name_filter,
-            status=self.getSelectedFilterValue('status_filter'),
-            distroseries=self.getSelectedFilterValue('series_filter'))
+            status=self.getSelectedFilterValue("status_filter"),
+            distroseries=self.getSelectedFilterValue("series_filter"),
+        )
 
     @cachedproperty
     def has_sources(self):
@@ -1251,10 +1314,10 @@ class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
         Ensure that at least one source is selected and a deletion_comment
         is given.
         """
-        form.getWidgetsData(self.widgets, 'field', data)
+        form.getWidgetsData(self.widgets, "field", data)
 
-        if len(data.get('selected_sources', [])) == 0:
-            self.setFieldError('selected_sources', 'No sources selected.')
+        if len(data.get("selected_sources", [])) == 0:
+            self.setFieldError("selected_sources", "No sources selected.")
 
     @action(_("Request Deletion"), name="delete", validator="validate_delete")
     def delete_action(self, action, data):
@@ -1266,22 +1329,27 @@ class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
         if len(self.errors) != 0:
             return
 
-        comment = data.get('deletion_comment')
-        selected_sources = data.get('selected_sources')
+        comment = data.get("deletion_comment")
+        selected_sources = data.get("selected_sources")
 
         # Perform deletion of the source and its binaries.
         publishing_set = getUtility(IPublishingSet)
         publishing_set.requestDeletion(selected_sources, self.user, comment)
 
         # Present a page notification describing the action.
-        messages = [structured(
-            '<p>Source and binaries deleted by %s:', self.user.displayname)]
+        messages = [
+            structured(
+                "<p>Source and binaries deleted by %s:", self.user.displayname
+            )
+        ]
         for source in selected_sources:
-            messages.append(structured('<br/>%s', source.displayname))
-        messages.append(structured(
-            '</p>\n<p>Deletion comment: %s</p>', comment))
+            messages.append(structured("<br/>%s", source.displayname))
+        messages.append(
+            structured("</p>\n<p>Deletion comment: %s</p>", comment)
+        )
         notification = structured(
-            '\n'.join([msg.escapedtext for msg in messages]))
+            "\n".join([msg.escapedtext for msg in messages])
+        )
         self.request.response.addNotification(notification)
 
         self.setNextURL()
@@ -1289,18 +1357,28 @@ class ArchivePackageDeletionView(ArchiveSourceSelectionFormView):
 
 class DestinationArchiveDropdownWidget(LaunchpadDropdownWidget):
     """Redefining default display value as 'This PPA'."""
+
     _messageNoValue = _("vocabulary-copy-to-context-ppa", "This PPA")
 
 
 class DestinationSeriesDropdownWidget(LaunchpadDropdownWidget):
     """Redefining default display value as 'The same series'."""
+
     _messageNoValue = _("vocabulary-copy-to-same-series", "The same series")
 
 
-def copy_asynchronously(source_pubs, dest_archive, dest_series, dest_pocket,
-                        include_binaries, dest_url=None,
-                        dest_display_name=None, person=None,
-                        check_permissions=True, sponsored=None):
+def copy_asynchronously(
+    source_pubs,
+    dest_archive,
+    dest_series,
+    dest_pocket,
+    include_binaries,
+    dest_url=None,
+    dest_display_name=None,
+    person=None,
+    check_permissions=True,
+    sponsored=None,
+):
     """Schedule jobs to copy packages later.
 
     :return: A `structured` with human-readable feedback about the
@@ -1310,25 +1388,35 @@ def copy_asynchronously(source_pubs, dest_archive, dest_series, dest_pocket,
     """
     if check_permissions:
         check_copy_permissions(
-            person, dest_archive, dest_series, dest_pocket, source_pubs)
+            person, dest_archive, dest_series, dest_pocket, source_pubs
+        )
 
     job_source = getUtility(IPlainPackageCopyJobSource)
     for spph in source_pubs:
         job_source.create(
-            spph.source_package_name, spph.archive, dest_archive,
+            spph.source_package_name,
+            spph.archive,
+            dest_archive,
             dest_series if dest_series is not None else spph.distroseries,
-            dest_pocket, include_binaries=include_binaries,
+            dest_pocket,
+            include_binaries=include_binaries,
             package_version=spph.sourcepackagerelease.version,
             copy_policy=PackageCopyPolicy.INSECURE,
-            requester=person, sponsored=sponsored, unembargo=True,
-            source_distroseries=spph.distroseries, source_pocket=spph.pocket)
+            requester=person,
+            sponsored=sponsored,
+            unembargo=True,
+            source_distroseries=spph.distroseries,
+            source_pocket=spph.pocket,
+        )
 
     return copy_asynchronously_message(
-        len(source_pubs), dest_archive, dest_url, dest_display_name)
+        len(source_pubs), dest_archive, dest_url, dest_display_name
+    )
 
 
-def copy_asynchronously_message(source_pubs_count, dest_archive, dest_url=None,
-                                dest_display_name=None):
+def copy_asynchronously_message(
+    source_pubs_count, dest_archive, dest_url=None, dest_display_name=None
+):
     """Return a message detailing the sync action.
 
     :param source_pubs_count: The number of source pubs requested for syncing.
@@ -1339,26 +1427,33 @@ def copy_asynchronously_message(source_pubs_count, dest_archive, dest_url=None,
         Defaults to the target archive's display name.
     """
     if dest_url is None:
-        dest_url = canonical_url(dest_archive) + '/+packages'
+        dest_url = canonical_url(dest_archive) + "/+packages"
 
     if dest_display_name is None:
         dest_display_name = dest_archive.displayname
 
     package_or_packages = get_plural_text(
-        source_pubs_count, "package", "packages")
+        source_pubs_count, "package", "packages"
+    )
     if source_pubs_count == 0:
         return structured(
             'Requested sync of %s %s to <a href="%s">%s</a>.',
-            source_pubs_count, package_or_packages, dest_url,
-            dest_display_name)
+            source_pubs_count,
+            package_or_packages,
+            dest_url,
+            dest_display_name,
+        )
     else:
-        this_or_these = get_plural_text(
-            source_pubs_count, "this", "these")
+        this_or_these = get_plural_text(source_pubs_count, "this", "these")
         return structured(
             'Requested sync of %s %s to <a href="%s">%s</a>.<br />'
             "Please allow some time for %s to be processed.",
-            source_pubs_count, package_or_packages, dest_url,
-            dest_display_name, this_or_these)
+            source_pubs_count,
+            package_or_packages,
+            dest_url,
+            dest_display_name,
+            this_or_these,
+        )
 
 
 def render_cannotcopy_as_html(cannotcopy_exception):
@@ -1378,17 +1473,30 @@ def render_cannotcopy_as_html(cannotcopy_exception):
         <ul>
         %s
         </ul>
-        """ % (intro, "<li>%s</li>" * len(error_lines))
+        """ % (
+        intro,
+        "<li>%s</li>" * len(error_lines),
+    )
     return structured(html_text, *error_lines)
 
 
 class PackageCopyingMixin:
     """A mixin class that adds helpers for package copying."""
 
-    def do_copy(self, sources_field_name, source_pubs, dest_archive,
-                dest_series, dest_pocket, include_binaries,
-                dest_url=None, dest_display_name=None, person=None,
-                check_permissions=True, sponsored_person=None):
+    def do_copy(
+        self,
+        sources_field_name,
+        source_pubs,
+        dest_archive,
+        dest_series,
+        dest_pocket,
+        include_binaries,
+        dest_url=None,
+        dest_display_name=None,
+        person=None,
+        check_permissions=True,
+        sponsored_person=None,
+    ):
         """Copy packages and add appropriate feedback to the browser page.
 
         This will copy asynchronously, scheduling jobs that will be
@@ -1418,44 +1526,56 @@ class PackageCopyingMixin:
         """
         try:
             notification = copy_asynchronously(
-                source_pubs, dest_archive, dest_series, dest_pocket,
-                include_binaries, dest_url=dest_url,
-                dest_display_name=dest_display_name, person=person,
+                source_pubs,
+                dest_archive,
+                dest_series,
+                dest_pocket,
+                include_binaries,
+                dest_url=dest_url,
+                dest_display_name=dest_display_name,
+                person=person,
                 check_permissions=check_permissions,
-                sponsored=sponsored_person)
+                sponsored=sponsored_person,
+            )
         except CannotCopy as error:
             self.setFieldError(
-                sources_field_name, render_cannotcopy_as_html(error))
+                sources_field_name, render_cannotcopy_as_html(error)
+            )
             return False
 
         self.request.response.addNotification(notification)
         return True
 
 
-class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
-                                PackageCopyingMixin):
+class ArchivePackageCopyingView(
+    ArchiveSourceSelectionFormView, PackageCopyingMixin
+):
     """Archive package copying view class.
 
     This view presents a package selection slot in a POST form implementing
     a copying action that can be performed upon a set of selected packages.
     """
+
     schema = IPPAPackageFilter
     custom_widget_destination_archive = DestinationArchiveDropdownWidget
     custom_widget_destination_series = DestinationSeriesDropdownWidget
     custom_widget_include_binaries = LaunchpadRadioWidget
-    label = 'Copy packages'
+    label = "Copy packages"
 
     @property
     def label(self):
-        return 'Copy packages from %s' % self.context.displayname
+        return "Copy packages from %s" % self.context.displayname
 
     default_pocket = PackagePublishingPocket.RELEASE
 
     @property
     def default_status_filter(self):
         """Present published records by default."""
-        return self.widgets['status_filter'].vocabulary.getTermByToken(
-            'published').value
+        return (
+            self.widgets["status_filter"]
+            .vocabulary.getTermByToken("published")
+            .value
+        )
 
     def setUpFields(self):
         """Override `ArchiveSourceSelectionFormView`.
@@ -1464,10 +1584,11 @@ class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
         """
         ArchiveSourceSelectionFormView.setUpFields(self)
         self.form_fields = (
-            self.createDestinationArchiveField() +
-            self.createDestinationSeriesField() +
-            self.createIncludeBinariesField() +
-            self.form_fields)
+            self.createDestinationArchiveField()
+            + self.createDestinationSeriesField()
+            + self.createIncludeBinariesField()
+            + self.form_fields
+        )
 
     @cachedproperty
     def ppas_for_user(self):
@@ -1489,20 +1610,24 @@ class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
         # XXX cprov 2009-07-17 bug=385503: copies cannot be properly traced
         # that's why we explicitly don't allow them to be done via the UI
         # in main archives, only PPAs.
-        return (self.context.is_ppa and
-                self.context.checkArchivePermission(self.user))
+        return self.context.is_ppa and self.context.checkArchivePermission(
+            self.user
+        )
 
     def createDestinationArchiveField(self):
         """Create the 'destination_archive' field."""
         # Do not include the context PPA in the dropdown widget.
         ppas = [ppa for ppa in self.ppas_for_user if self.context != ppa]
         return form.Fields(
-            Choice(__name__='destination_archive',
-                   title=_('Destination PPA'),
-                   vocabulary=make_archive_vocabulary(ppas),
-                   description=_("Select the destination PPA."),
-                   missing_value=self.context,
-                   required=not self.can_copy_to_context_ppa))
+            Choice(
+                __name__="destination_archive",
+                title=_("Destination PPA"),
+                vocabulary=make_archive_vocabulary(ppas),
+                description=_("Select the destination PPA."),
+                missing_value=self.context,
+                required=not self.can_copy_to_context_ppa,
+            )
+        )
 
     def createDestinationSeriesField(self):
         """Create the 'destination_series' field."""
@@ -1516,13 +1641,17 @@ class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
             if series.status == SeriesStatus.OBSOLETE:
                 continue
             terms.append(
-                SimpleTerm(series, str(series.name), series.displayname))
+                SimpleTerm(series, str(series.name), series.displayname)
+            )
         return form.Fields(
-            Choice(__name__='destination_series',
-                   title=_('Destination series'),
-                   vocabulary=SimpleVocabulary(terms),
-                   description=_("Select the destination series."),
-                   required=False))
+            Choice(
+                __name__="destination_series",
+                title=_("Destination series"),
+                vocabulary=SimpleVocabulary(terms),
+                description=_("Select the destination series."),
+                required=False,
+            )
+        )
 
     def createIncludeBinariesField(self):
         """Create the 'include_binaries' field.
@@ -1538,20 +1667,27 @@ class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
         option when rendered.
         """
         rebuild_sources = SimpleTerm(
-                False, 'REBUILD_SOURCES', _('Rebuild the copied sources'))
+            False, "REBUILD_SOURCES", _("Rebuild the copied sources")
+        )
         copy_binaries = SimpleTerm(
-            True, 'COPY_BINARIES', _('Copy existing binaries'))
+            True, "COPY_BINARIES", _("Copy existing binaries")
+        )
         terms = [rebuild_sources, copy_binaries]
 
         return form.Fields(
-            Choice(__name__='include_binaries',
-                   title=_('Copy options'),
-                   vocabulary=SimpleVocabulary(terms),
-                   description=_("How the selected sources should be copied "
-                                 "to the destination archive."),
-                   missing_value=rebuild_sources,
-                   default=False,
-                   required=True))
+            Choice(
+                __name__="include_binaries",
+                title=_("Copy options"),
+                vocabulary=SimpleVocabulary(terms),
+                description=_(
+                    "How the selected sources should be copied "
+                    "to the destination archive."
+                ),
+                missing_value=rebuild_sources,
+                default=False,
+                required=True,
+            )
+        )
 
     @action(_("Update"), name="update")
     def update_action(self, action, data):
@@ -1571,22 +1707,27 @@ class ArchivePackageCopyingView(ArchiveSourceSelectionFormView,
         if `do_copy` succeeds, an informational message is set containing
         the copied packages.
         """
-        selected_sources = data.get('selected_sources')
-        destination_archive = data.get('destination_archive')
-        destination_series = data.get('destination_series')
-        include_binaries = data.get('include_binaries')
+        selected_sources = data.get("selected_sources")
+        destination_archive = data.get("destination_archive")
+        destination_series = data.get("destination_series")
+        include_binaries = data.get("include_binaries")
         destination_pocket = self.default_pocket
 
         if len(selected_sources) == 0:
-            self.setFieldError('selected_sources', 'No sources selected.')
+            self.setFieldError("selected_sources", "No sources selected.")
             return
 
         # PackageCopyingMixin.do_copy() does the work of copying and
         # setting up on-page notifications.
         if self.do_copy(
-            'selected_sources', selected_sources, destination_archive,
-            destination_series, destination_pocket, include_binaries,
-            person=self.user):
+            "selected_sources",
+            selected_sources,
+            destination_archive,
+            destination_series,
+            destination_pocket,
+            include_binaries,
+            person=self.user,
+        ):
             # The copy worked so we can redirect back to the page to
             # show the result.
             self.setNextURL()
@@ -1607,11 +1748,14 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
 
     custom_widget_selected_dependencies = CustomWidgetFactory(
         PlainMultiCheckBoxWidget,
-        cssClass='line-through-when-checked ppa-dependencies')
+        cssClass="line-through-when-checked ppa-dependencies",
+    )
     custom_widget_primary_dependencies = CustomWidgetFactory(
-        LaunchpadRadioWidget, cssClass='highlight-selected')
+        LaunchpadRadioWidget, cssClass="highlight-selected"
+    )
     custom_widget_primary_components = CustomWidgetFactory(
-        LaunchpadRadioWidget, cssClass='highlight-selected')
+        LaunchpadRadioWidget, cssClass="highlight-selected"
+    )
 
     label = "Edit PPA dependencies"
     page_title = label
@@ -1632,10 +1776,11 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
         LaunchpadFormView.setUpFields(self)
 
         self.form_fields = (
-            self.createSelectedDependenciesField() +
-            self.createPrimaryDependenciesField() +
-            self.createPrimaryComponentsField() +
-            self.form_fields)
+            self.createSelectedDependenciesField()
+            + self.createPrimaryDependenciesField()
+            + self.createPrimaryComponentsField()
+            + self.form_fields
+        )
 
     def focusedElementScript(self):
         """Override `LaunchpadFormView`.
@@ -1660,23 +1805,30 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
             dependency = archive_dependency.dependency
             if not dependency.is_ppa:
                 continue
-            if check_permission('launchpad.View', dependency):
+            if check_permission("launchpad.View", dependency):
                 dependency_label = structured(
                     '<a href="%s">%s</a>',
-                    canonical_url(dependency), archive_dependency.title)
+                    canonical_url(dependency),
+                    archive_dependency.title,
+                )
             else:
                 dependency_label = archive_dependency.title
             term = SimpleTerm(
-                dependency, dependency.reference, dependency_label)
+                dependency, dependency.reference, dependency_label
+            )
             terms.append(term)
         return form.Fields(
-            List(__name__='selected_dependencies',
-                 title=_('Extra dependencies'),
-                 value_type=Choice(vocabulary=SimpleVocabulary(terms)),
-                 required=False,
-                 default=[],
-                 description=_(
-                    'Select one or more dependencies to be removed.')))
+            List(
+                __name__="selected_dependencies",
+                title=_("Extra dependencies"),
+                value_type=Choice(vocabulary=SimpleVocabulary(terms)),
+                required=False,
+                default=[],
+                description=_(
+                    "Select one or more dependencies to be removed."
+                ),
+            )
+        )
 
     def createPrimaryDependenciesField(self):
         """Create the 'primary_dependencies' field.
@@ -1695,49 +1847,65 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
         option when rendered.
         """
         release = SimpleTerm(
-            PackagePublishingPocket.RELEASE, 'RELEASE',
-            _('Basic (only released packages).'))
+            PackagePublishingPocket.RELEASE,
+            "RELEASE",
+            _("Basic (only released packages)."),
+        )
         security = SimpleTerm(
-            PackagePublishingPocket.SECURITY, 'SECURITY',
-            _('Security (basic dependencies and important security '
-              'updates).'))
+            PackagePublishingPocket.SECURITY,
+            "SECURITY",
+            _(
+                "Security (basic dependencies and important security "
+                "updates)."
+            ),
+        )
         updates = SimpleTerm(
-            PackagePublishingPocket.UPDATES, 'UPDATES',
-            _('Default (security dependencies and recommended updates).'))
+            PackagePublishingPocket.UPDATES,
+            "UPDATES",
+            _("Default (security dependencies and recommended updates)."),
+        )
         proposed = SimpleTerm(
-            PackagePublishingPocket.PROPOSED, 'PROPOSED',
-            _('Proposed (default dependencies and proposed updates).'))
+            PackagePublishingPocket.PROPOSED,
+            "PROPOSED",
+            _("Proposed (default dependencies and proposed updates)."),
+        )
         backports = SimpleTerm(
-            PackagePublishingPocket.BACKPORTS, 'BACKPORTS',
-            _('Backports (default dependencies and unsupported updates).'))
+            PackagePublishingPocket.BACKPORTS,
+            "BACKPORTS",
+            _("Backports (default dependencies and unsupported updates)."),
+        )
 
         terms = [release, security, updates, proposed, backports]
 
         primary_dependency = self.context.getArchiveDependency(
-            self.context.distribution.main_archive)
+            self.context.distribution.main_archive
+        )
         if primary_dependency is None:
             default_value = default_pocket_dependency
         else:
             default_value = primary_dependency.pocket
 
         primary_dependency_vocabulary = SimpleVocabulary(terms)
-        current_term = primary_dependency_vocabulary.getTerm(
-            default_value)
+        current_term = primary_dependency_vocabulary.getTerm(default_value)
 
         return form.Fields(
-            Choice(__name__='primary_dependencies',
-                   title=_(
-                    "%s dependencies"
-                    % self.context.distribution.displayname),
-                   vocabulary=primary_dependency_vocabulary,
-                   description=_(
+            Choice(
+                __name__="primary_dependencies",
+                title=_(
+                    "%s dependencies" % self.context.distribution.displayname
+                ),
+                vocabulary=primary_dependency_vocabulary,
+                description=_(
                     "Select which packages of the %s primary archive "
                     "should be used as build-dependencies when building "
                     "sources in this PPA."
-                    % self.context.distribution.displayname),
-                   missing_value=current_term,
-                   default=default_value,
-                   required=True))
+                    % self.context.distribution.displayname
+                ),
+                missing_value=current_term,
+                default=default_value,
+                required=True,
+            )
+        )
 
     def createPrimaryComponentsField(self):
         """Create the 'primary_components' field.
@@ -1753,46 +1921,63 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
         components' option when rendered. Other components, such as 'main',
         or 'contrib' will be added to the list of options if they are used.
         """
-        multiverse = getUtility(IComponentSet)['multiverse']
+        multiverse = getUtility(IComponentSet)["multiverse"]
 
         all_components = SimpleTerm(
-            multiverse, 'ALL_COMPONENTS',
-            _('Use all %s components available.' %
-              self.context.distribution.displayname))
+            multiverse,
+            "ALL_COMPONENTS",
+            _(
+                "Use all %s components available."
+                % self.context.distribution.displayname
+            ),
+        )
         follow_primary = SimpleTerm(
-            None, 'FOLLOW_PRIMARY',
-            _('Use the same components used for each source in the %s '
-              'primary archive.' % self.context.distribution.displayname))
+            None,
+            "FOLLOW_PRIMARY",
+            _(
+                "Use the same components used for each source in the %s "
+                "primary archive." % self.context.distribution.displayname
+            ),
+        )
 
         primary_dependency = self.context.getArchiveDependency(
-            self.context.distribution.main_archive)
+            self.context.distribution.main_archive
+        )
         if primary_dependency is None:
             default_value = getUtility(IComponentSet)[
-                default_component_dependency_name]
+                default_component_dependency_name
+            ]
         else:
             default_value = primary_dependency.component
 
         terms = [all_components, follow_primary]
         if default_value and default_value != multiverse:
             current_component = SimpleTerm(
-                default_value, 'OTHER_COMPONENT',
-                _('Unsupported component (%s)' % default_value.name))
+                default_value,
+                "OTHER_COMPONENT",
+                _("Unsupported component (%s)" % default_value.name),
+            )
             terms.append(current_component)
         primary_components_vocabulary = SimpleVocabulary(terms)
         current_term = primary_components_vocabulary.getTerm(default_value)
 
         return form.Fields(
-            Choice(__name__='primary_components',
-                   title=_('%s components' %
-                           self.context.distribution.displayname),
-                   vocabulary=primary_components_vocabulary,
-                   description=_("Which %s components of the archive pool "
-                                 "should be used when fetching build "
-                                 "dependencies." %
-                                 self.context.distribution.displayname),
-                   missing_value=current_term,
-                   default=default_value,
-                   required=True))
+            Choice(
+                __name__="primary_components",
+                title=_(
+                    "%s components" % self.context.distribution.displayname
+                ),
+                vocabulary=primary_components_vocabulary,
+                description=_(
+                    "Which %s components of the archive pool "
+                    "should be used when fetching build "
+                    "dependencies." % self.context.distribution.displayname
+                ),
+                missing_value=current_term,
+                default=default_value,
+                required=True,
+            )
+        )
 
     @cachedproperty
     def has_dependencies(self):
@@ -1801,11 +1986,11 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
 
     @property
     def messages(self):
-        return '\n'.join(map(get_escapedtext, self._messages))
+        return "\n".join(map(get_escapedtext, self._messages))
 
     def _remove_dependencies(self, data):
         """Perform the removal of the selected dependencies."""
-        selected_dependencies = data.get('selected_dependencies', [])
+        selected_dependencies = data.get("selected_dependencies", [])
 
         if len(selected_dependencies) == 0:
             return
@@ -1815,41 +2000,50 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
             self.context.removeArchiveDependency(dependency)
 
         # Present a page notification describing the action.
-        self._messages.append('<p>Dependencies removed:')
+        self._messages.append("<p>Dependencies removed:")
         for dependency in selected_dependencies:
             self._messages.append(
-                structured('<br/>%s', dependency.displayname))
-        self._messages.append('</p>')
+                structured("<br/>%s", dependency.displayname)
+            )
+        self._messages.append("</p>")
 
     def _add_ppa_dependencies(self, data):
         """Record the selected dependency."""
-        dependency_candidate = data.get('dependency_candidate')
+        dependency_candidate = data.get("dependency_candidate")
         if dependency_candidate is None:
             return
 
         self.context.addArchiveDependency(
-            dependency_candidate, PackagePublishingPocket.RELEASE,
-            getUtility(IComponentSet)['main'])
+            dependency_candidate,
+            PackagePublishingPocket.RELEASE,
+            getUtility(IComponentSet)["main"],
+        )
 
-        self._messages.append(structured(
-            '<p>Dependency added: %s</p>', dependency_candidate.displayname))
+        self._messages.append(
+            structured(
+                "<p>Dependency added: %s</p>", dependency_candidate.displayname
+            )
+        )
 
     def _add_primary_dependencies(self, data):
         """Record the selected dependency."""
         # Received values.
-        dependency_pocket = data.get('primary_dependencies')
-        dependency_component = data.get('primary_components')
+        dependency_pocket = data.get("primary_dependencies")
+        dependency_component = data.get("primary_components")
 
         # Check if the given values correspond to the default scenario
         # for the context archive.
         default_component_dependency = getUtility(IComponentSet)[
-            default_component_dependency_name]
+            default_component_dependency_name
+        ]
         is_default_dependency = (
-            dependency_pocket == default_pocket_dependency and
-            dependency_component == default_component_dependency)
+            dependency_pocket == default_pocket_dependency
+            and dependency_component == default_component_dependency
+        )
 
         primary_dependency = self.context.getArchiveDependency(
-            self.context.distribution.main_archive)
+            self.context.distribution.main_archive
+        )
 
         # No action is required if there is no primary_dependency
         # override set and the given values match it.
@@ -1858,27 +2052,36 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
 
         # Similarly, no action is required if the given values match
         # the existing primary_dependency override.
-        if (primary_dependency is not None and
-            primary_dependency.pocket == dependency_pocket and
-            primary_dependency.component == dependency_component):
+        if (
+            primary_dependency is not None
+            and primary_dependency.pocket == dependency_pocket
+            and primary_dependency.component == dependency_component
+        ):
             return
 
         # Remove any primary dependency overrides.
         if primary_dependency is not None:
             self.context.removeArchiveDependency(
-                self.context.distribution.main_archive)
+                self.context.distribution.main_archive
+            )
 
         if is_default_dependency:
             self._messages.append(
-                '<p>Default primary dependencies restored.</p>')
+                "<p>Default primary dependencies restored.</p>"
+            )
             return
 
         # Install the required primary archive dependency override.
         primary_dependency = self.context.addArchiveDependency(
-            self.context.distribution.main_archive, dependency_pocket,
-            dependency_component)
-        self._messages.append(structured(
-            '<p>Primary dependency added: %s</p>', primary_dependency.title))
+            self.context.distribution.main_archive,
+            dependency_pocket,
+            dependency_component,
+        )
+        self._messages.append(
+            structured(
+                "<p>Primary dependency added: %s</p>", primary_dependency.title
+            )
+        )
 
     @action(_("Save"), name="save")
     def save_action(self, action, data):
@@ -1896,14 +2099,13 @@ class ArchiveEditDependenciesView(ArchiveViewBase, LaunchpadFormView):
         try:
             self._add_ppa_dependencies(data)
         except ArchiveDependencyError as e:
-            self.setFieldError('dependency_candidate', str(e))
+            self.setFieldError("dependency_candidate", str(e))
             return
         self._remove_dependencies(data)
 
         # Issue a notification if anything was changed.
         if len(self.messages) > 0:
-            self.request.response.addNotification(
-                structured(self.messages))
+            self.request.response.addNotification(structured(self.messages))
         # Redirect after POST.
         self.next_url = self.request.URL
 
@@ -1912,11 +2114,11 @@ class ArchiveActivateView(LaunchpadFormView):
     """PPA activation view class."""
 
     schema = IArchive
-    field_names = ('name', 'displayname', 'description')
+    field_names = ("name", "displayname", "description")
     custom_widget_description = CustomWidgetFactory(TextAreaWidget, height=3)
     custom_widget_name = CustomWidgetFactory(PPANameWidget, label="URL")
-    label = 'Activate a Personal Package Archive'
-    page_title = 'Activate PPA'
+    label = "Activate a Personal Package Archive"
+    page_title = "Activate PPA"
 
     @property
     def ubuntu(self):
@@ -1925,7 +2127,7 @@ class ArchiveActivateView(LaunchpadFormView):
     @cachedproperty
     def visible_ppas(self):
         ppas = self.context.getVisiblePPAs(self.user)
-        precache_permission_for_objects(self.request, 'launchpad.View', ppas)
+        precache_permission_for_objects(self.request, "launchpad.View", ppas)
         return ppas
 
     @property
@@ -1934,7 +2136,7 @@ class ArchiveActivateView(LaunchpadFormView):
         # Suggest a default value of "ppa" for the name for the
         # first PPA activation.
         if self.context.archive is None:
-            return {'name': 'ppa'}
+            return {"name": "ppa"}
         return {}
 
     def setUpFields(self):
@@ -1948,9 +2150,11 @@ class ArchiveActivateView(LaunchpadFormView):
 
         if self.context.archive is None:
             accepted = Bool(
-                __name__='accepted',
+                __name__="accepted",
                 title=_("I have read and accepted the PPA Terms of Use."),
-                required=True, default=False)
+                required=True,
+                default=False,
+            )
             self.form_fields += form.Fields(accepted)
 
     def validate(self, data):
@@ -1960,22 +2164,27 @@ class ArchiveActivateView(LaunchpadFormView):
 
         default_ppa = self.context.archive
 
-        proposed_name = data.get('name')
+        proposed_name = data.get("name")
         if proposed_name is None and default_ppa is not None:
             self.addError(
-                'The default PPA is already activated. Please specify a '
-                'name for the new PPA and resubmit the form.')
+                "The default PPA is already activated. Please specify a "
+                "name for the new PPA and resubmit the form."
+            )
 
         errors = validate_ppa(
-            self.context, self.ubuntu, proposed_name,
-            private=self.is_private_team)
+            self.context,
+            self.ubuntu,
+            proposed_name,
+            private=self.is_private_team,
+        )
         if errors is not None:
             self.addError(errors)
 
-        if default_ppa is None and not data.get('accepted'):
+        if default_ppa is None and not data.get("accepted"):
             self.setFieldError(
-                'accepted',
-                "PPA Terms of Service must be accepted to activate a PPA.")
+                "accepted",
+                "PPA Terms of Service must be accepted to activate a PPA.",
+            )
 
     @action(_("Activate"), name="activate")
     def save_action(self, action, data):
@@ -1983,12 +2192,16 @@ class ArchiveActivateView(LaunchpadFormView):
         # 'name' field is omitted from the form data for default PPAs and
         # it's dealt with by IArchive.new(), which will use the default
         # PPA name.
-        name = data.get('name', None)
-        displayname = data['displayname']
-        description = data['description']
+        name = data.get("name", None)
+        displayname = data["displayname"]
+        description = data["description"]
         ppa = self.context.createPPA(
-            self.ubuntu, name, displayname, description,
-            private=self.is_private_team)
+            self.ubuntu,
+            name,
+            displayname,
+            description,
+            private=self.is_private_team,
+        )
         self.next_url = canonical_url(ppa)
 
     @property
@@ -2025,20 +2238,21 @@ class BaseArchiveEditView(LaunchpadEditFormView, ArchiveViewBase):
     @action(_("Save"), name="save", validator="validate_save")
     def save_action(self, action, data):
         # Archive is enabled and user wants it disabled.
-        if self.context.enabled == True and data['enabled'] == False:
+        if self.context.enabled == True and data["enabled"] == False:
             self.context.disable()
         # Archive is disabled and user wants it enabled.
-        if self.context.enabled == False and data['enabled'] == True:
+        if self.context.enabled == False and data["enabled"] == True:
             self.context.enable()
         # IArchive.enabled is a read-only property that cannot be set
         # directly.
-        del(data['enabled'])
-        new_processors = data.get('processors')
+        del data["enabled"]
+        new_processors = data.get("processors")
         if new_processors is not None:
             if set(self.context.processors) != set(new_processors):
                 self.context.setProcessors(
-                    new_processors, check_permissions=True, user=self.user)
-            del data['processors']
+                    new_processors, check_permissions=True, user=self.user
+                )
+            del data["processors"]
         self.updateContextFromData(data)
         self.next_url = canonical_url(self.context)
 
@@ -2048,13 +2262,13 @@ class BaseArchiveEditView(LaunchpadEditFormView, ArchiveViewBase):
 
     def validate_save(self, action, data):
         """Check that we're not reenabling a deleted archive.."""
-        form.getWidgetsData(self.widgets, 'field', data)
+        form.getWidgetsData(self.widgets, "field", data)
 
         # Deleted PPAs can't be reactivated.
-        if ((data.get('enabled') or data.get('publish'))
-            and not self.context.is_active):
-            self.setFieldError(
-                "enabled", "Deleted PPAs can't be enabled.")
+        if (
+            data.get("enabled") or data.get("publish")
+        ) and not self.context.is_active:
+            self.setFieldError("enabled", "Deleted PPAs can't be enabled.")
 
 
 class EnableProcessorsMixin:
@@ -2064,52 +2278,63 @@ class EnableProcessorsMixin:
         """Creates the 'processors' field."""
         terms = []
         disabled = []
-        if check_permission('launchpad.Admin', self.context):
+        if check_permission("launchpad.Admin", self.context):
             can_modify = lambda proc: True
         else:
             can_modify = lambda proc: not proc.restricted
-        for processor in sorted(available_processors, key=attrgetter('name')):
-            terms.append(SimpleTerm(
-                processor, token=processor.name,
-                title="%s (%s)" % (processor.title, processor.name)))
+        for processor in sorted(available_processors, key=attrgetter("name")):
+            terms.append(
+                SimpleTerm(
+                    processor,
+                    token=processor.name,
+                    title="%s (%s)" % (processor.title, processor.name),
+                )
+            )
             if not can_modify(processor):
                 disabled.append(processor)
-        old_field = IArchive['processors']
+        old_field = IArchive["processors"]
         widget = CustomWidgetFactory(
-            LabeledMultiCheckBoxWidget, disabled_items=disabled)
+            LabeledMultiCheckBoxWidget, disabled_items=disabled
+        )
         return form.Fields(
-            List(__name__=old_field.__name__,
-                 title=old_field.title,
-                 value_type=Choice(vocabulary=SimpleVocabulary(terms)),
-                 required=False,
-                 description=old_field.description if description is None
-                     else description),
-             render_context=self.render_context, custom_widget=widget)
+            List(
+                __name__=old_field.__name__,
+                title=old_field.title,
+                value_type=Choice(vocabulary=SimpleVocabulary(terms)),
+                required=False,
+                description=old_field.description
+                if description is None
+                else description,
+            ),
+            render_context=self.render_context,
+            custom_widget=widget,
+        )
 
 
 class ArchiveEditView(BaseArchiveEditView, EnableProcessorsMixin):
 
     field_names = [
-        'displayname',
-        'description',
-        'enabled',
-        'publish',
-        'build_debug_symbols',
-        'publish_debug_symbols',
-        ]
+        "displayname",
+        "description",
+        "enabled",
+        "publish",
+        "build_debug_symbols",
+        "publish_debug_symbols",
+    ]
     custom_widget_description = CustomWidgetFactory(
-        TextAreaWidget, height=10, width=30)
-    page_title = 'Change details'
+        TextAreaWidget, height=10, width=30
+    )
+    page_title = "Change details"
 
     @property
     def label(self):
-        return 'Edit %s' % self.context.displayname
+        return "Edit %s" % self.context.displayname
 
     @property
     def initial_values(self):
         return {
-            'processors': self.context.processors,
-            }
+            "processors": self.context.processors,
+        }
 
     def setUpFields(self):
         """Override `LaunchpadEditFormView`.
@@ -2121,90 +2346,99 @@ class ArchiveEditView(BaseArchiveEditView, EnableProcessorsMixin):
             self.context.available_processors,
             "The architectures on which the archive can build. Some "
             "architectures are restricted and may only be enabled or "
-            "disabled by administrators.")
+            "disabled by administrators.",
+        )
 
     def validate(self, data):
-        if 'processors' in data:
+        if "processors" in data:
             available_processors = set(self.context.available_processors)
-            widget = self.widgets['processors']
+            widget = self.widgets["processors"]
             for processor in self.context.processors:
-                if processor not in data['processors']:
+                if processor not in data["processors"]:
                     if processor not in available_processors:
                         # This processor is not currently available for
                         # selection, but is enabled.  Leave it untouched.
-                        data['processors'].append(processor)
+                        data["processors"].append(processor)
                     elif processor.name in widget.disabled_items:
                         # This processor is restricted and currently
                         # enabled.  Leave it untouched.
-                        data['processors'].append(processor)
+                        data["processors"].append(processor)
 
 
 class ArchiveAdminView(BaseArchiveEditView, EnableProcessorsMixin):
 
     field_names = [
-        'enabled',
-        'private',
-        'suppress_subscription_notifications',
-        'require_virtualized',
-        'permit_obsolete_series_uploads',
-        'authorized_size',
-        'relative_build_score',
-        'external_dependencies',
-        'publishing_method',
-        'repository_format',
-        ]
+        "enabled",
+        "private",
+        "suppress_subscription_notifications",
+        "require_virtualized",
+        "permit_obsolete_series_uploads",
+        "authorized_size",
+        "relative_build_score",
+        "external_dependencies",
+        "publishing_method",
+        "repository_format",
+    ]
     custom_widget_external_dependencies = CustomWidgetFactory(
-        TextAreaWidget, height=3)
+        TextAreaWidget, height=3
+    )
     custom_widget_publishing_method = LaunchpadDropdownWidget
     custom_widget_repository_format = LaunchpadDropdownWidget
-    page_title = 'Administer'
+    page_title = "Administer"
 
     @property
     def label(self):
-        return 'Administer %s' % self.context.displayname
+        return "Administer %s" % self.context.displayname
 
     def validate_save(self, action, data):
         """Validate the save action on ArchiveAdminView."""
         super().validate_save(action, data)
 
-        if data.get('private') != self.context.private:
+        if data.get("private") != self.context.private:
             # The privacy is being switched.
             if not self.context.getPublishedSources().is_empty():
                 self.setFieldError(
-                    'private',
-                    'This archive already has published sources. It is '
-                    'not possible to switch the privacy.')
+                    "private",
+                    "This archive already has published sources. It is "
+                    "not possible to switch the privacy.",
+                )
 
-        if self.owner_is_private_team and not data['private']:
+        if self.owner_is_private_team and not data["private"]:
             self.setFieldError(
-                'private',
-                'Private teams may not have public archives.')
+                "private", "Private teams may not have public archives."
+            )
 
         # Check the external_dependencies field.
-        ext_deps = data.get('external_dependencies')
+        ext_deps = data.get("external_dependencies")
         if ext_deps is not None:
             errors = validate_external_dependencies(ext_deps)
             if len(errors) != 0:
                 error_text = "\n".join(errors)
-                self.setFieldError('external_dependencies', error_text)
+                self.setFieldError("external_dependencies", error_text)
 
-        if data.get('publishing_method') != self.context.publishing_method:
+        if data.get("publishing_method") != self.context.publishing_method:
             # The publishing method is being switched.
-            if (not self.context.getPublishedSources().is_empty() or
-                    not self.context.getAllPublishedBinaries().is_empty()):
+            if (
+                not self.context.getPublishedSources().is_empty()
+                or not self.context.getAllPublishedBinaries().is_empty()
+            ):
                 self.setFieldError(
-                    'publishing_method',
-                    'This archive already has published packages. It is '
-                    'not possible to switch the publishing method.')
+                    "publishing_method",
+                    "This archive already has published packages. It is "
+                    "not possible to switch the publishing method.",
+                )
 
-        if data.get('repository_format') != self.context.repository_format:
+        if data.get("repository_format") != self.context.repository_format:
             # The repository format is being switched.
-            if (not self.context.getPublishedSources().is_empty() or
-                    not self.context.getAllPublishedBinaries().is_empty()):
+            if (
+                not self.context.getPublishedSources().is_empty()
+                or not self.context.getAllPublishedBinaries().is_empty()
+            ):
                 self.setFieldError(
-                    'repository_format',
-                    'This archive already has published packages. It is '
-                    'not possible to switch the repository format.')
+                    "repository_format",
+                    "This archive already has published packages. It is "
+                    "not possible to switch the repository format.",
+                )
 
     @property
     def owner_is_private_team(self):
@@ -2232,7 +2466,9 @@ class ArchiveDeleteView(LaunchpadFormView):
     @property
     def can_be_deleted(self):
         return self.context.status not in (
-            ArchiveStatus.DELETING, ArchiveStatus.DELETED)
+            ArchiveStatus.DELETING,
+            ArchiveStatus.DELETED,
+        )
 
     @property
     def waiting_for_deletion(self):
@@ -2253,4 +2489,5 @@ class ArchiveDeleteView(LaunchpadFormView):
         self.context.delete(self.user)
         self.request.response.addInfoNotification(
             "Deletion of '%s' has been requested and the repository will be "
-            "removed shortly." % self.context.title)
+            "removed shortly." % self.context.title
+        )
diff --git a/lib/lp/soyuz/browser/archivepermission.py b/lib/lp/soyuz/browser/archivepermission.py
index dde0672..fa0d327 100644
--- a/lib/lp/soyuz/browser/archivepermission.py
+++ b/lib/lp/soyuz/browser/archivepermission.py
@@ -4,8 +4,8 @@
 """Browser views for archivepermission."""
 
 __all__ = [
-    'ArchivePermissionURL',
-    ]
+    "ArchivePermissionURL",
+]
 
 from zope.interface import implementer
 
@@ -16,6 +16,7 @@ from lp.soyuz.enums import ArchivePermissionType
 @implementer(ICanonicalUrlData)
 class ArchivePermissionURL:
     """Dynamic URL declaration for `IArchivePermission`."""
+
     rootsite = None
 
     def __init__(self, context):
@@ -33,7 +34,8 @@ class ArchivePermissionURL:
             perm_type = "+queue-admin"
         else:
             raise AssertionError(
-                "Unknown permission type %s" % self.context.permission)
+                "Unknown permission type %s" % self.context.permission
+            )
 
         username = self.context.person.name
 
@@ -41,11 +43,13 @@ class ArchivePermissionURL:
             item = "type=component&item=%s" % self.context.component_name
         elif self.context.source_package_name is not None:
             item = (
-                "type=packagename&item=%s" % self.context.source_package_name)
+                "type=packagename&item=%s" % self.context.source_package_name
+            )
         elif self.context.package_set_name is not None:
-            item = ("type=packageset&item=%s&series=%s" %
-                    (self.context.package_set_name,
-                     self.context.distro_series_name))
+            item = "type=packageset&item=%s&series=%s" % (
+                self.context.package_set_name,
+                self.context.distro_series_name,
+            )
         elif self.context.pocket is not None:
             item = "type=pocket&item=%s" % self.context.pocket.name
             # Queue admin permissions for pockets may be granted by series.
@@ -54,6 +58,7 @@ class ArchivePermissionURL:
         else:
             raise AssertionError(
                 "One of component, sourcepackagename or package set should "
-                "be set")
+                "be set"
+            )
 
         return "%s/%s?%s" % (perm_type, username, item)
diff --git a/lib/lp/soyuz/browser/archivesubscription.py b/lib/lp/soyuz/browser/archivesubscription.py
index e472bde..de0bb9a 100644
--- a/lib/lp/soyuz/browser/archivesubscription.py
+++ b/lib/lp/soyuz/browser/archivesubscription.py
@@ -4,12 +4,12 @@
 """Browser views related to archive subscriptions."""
 
 __all__ = [
-    'ArchiveSubscribersView',
-    'PersonalArchiveSubscription',
-    'PersonArchiveSubscriptionView',
-    'PersonArchiveSubscriptionsView',
-    'traverse_archive_subscription_for_subscriber',
-    ]
+    "ArchiveSubscribersView",
+    "PersonalArchiveSubscription",
+    "PersonArchiveSubscriptionView",
+    "PersonArchiveSubscriptionsView",
+    "traverse_archive_subscription_for_subscriber",
+]
 
 import datetime
 
@@ -18,44 +18,29 @@ from zope.component import getUtility
 from zope.formlib import form
 from zope.formlib.widget import CustomWidgetFactory
 from zope.formlib.widgets import TextWidget
-from zope.interface import (
-    implementer,
-    Interface,
-    )
-from zope.schema import (
-    Date,
-    Text,
-    )
+from zope.interface import Interface, implementer
+from zope.schema import Date, Text
 
 from lp import _
 from lp.app.browser.launchpadform import (
-    action,
     LaunchpadEditFormView,
     LaunchpadFormView,
-    )
+    action,
+)
 from lp.app.widgets.date import DateWidget
 from lp.app.widgets.popup import PersonPickerWidget
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.fields import PersonChoice
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
+from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.services.webapp.authorization import precache_permission_for_objects
-from lp.services.webapp.batching import (
-    BatchNavigator,
-    StormRangeFactory,
-    )
-from lp.services.webapp.publisher import (
-    canonical_url,
-    LaunchpadView,
-    )
+from lp.services.webapp.batching import BatchNavigator, StormRangeFactory
+from lp.services.webapp.publisher import LaunchpadView, canonical_url
 from lp.soyuz.browser.sourceslist import SourcesListEntriesWidget
 from lp.soyuz.interfaces.archive import IArchiveSet
 from lp.soyuz.interfaces.archivesubscriber import (
     IArchiveSubscriberSet,
     IPersonalArchiveSubscription,
-    )
+)
 
 
 def archive_subscription_ui_adapter(archive_subscription):
@@ -91,8 +76,11 @@ def traverse_archive_subscription_for_subscriber(subscriber, archive_id):
     subscription = None
     archive = getUtility(IArchiveSet).get(archive_id)
     if archive:
-        subscription = getUtility(IArchiveSubscriberSet).getBySubscriber(
-            subscriber, archive=archive).first()
+        subscription = (
+            getUtility(IArchiveSubscriberSet)
+            .getBySubscriber(subscriber, archive=archive)
+            .first()
+        )
 
     if subscription is None:
         return None
@@ -107,31 +95,43 @@ class IArchiveSubscriberUI(Interface):
     we simply want to use a date field when users create or edit new
     subscriptions.
     """
+
     subscriber = PersonChoice(
-        title=_("Subscriber"), required=True, vocabulary='ValidPersonOrTeam',
-        description=_("The person or team to grant access."))
+        title=_("Subscriber"),
+        required=True,
+        vocabulary="ValidPersonOrTeam",
+        description=_("The person or team to grant access."),
+    )
 
     date_expires = Date(
-        title=_("Date of Expiration"), required=False,
-        description=_("The date when the access will expire. "
-                      "Leave this blank for access that should "
-                      "never expire."))
+        title=_("Date of Expiration"),
+        required=False,
+        description=_(
+            "The date when the access will expire. "
+            "Leave this blank for access that should "
+            "never expire."
+        ),
+    )
 
     description = Text(
-        title=_("Description"), required=False,
-        description=_("Optional notes about this access."))
+        title=_("Description"),
+        required=False,
+        description=_("Optional notes about this access."),
+    )
 
 
 class ArchiveSubscribersView(LaunchpadFormView):
     """A view for listing and creating archive subscribers."""
 
     schema = IArchiveSubscriberUI
-    field_names = ['subscriber', 'date_expires', 'description']
+    field_names = ["subscriber", "date_expires", "description"]
     custom_widget_description = CustomWidgetFactory(
-        TextWidget, displayWidth=40)
+        TextWidget, displayWidth=40
+    )
     custom_widget_date_expires = DateWidget
     custom_widget_subscriber = CustomWidgetFactory(
-        PersonPickerWidget, header="Select the subscriber")
+        PersonPickerWidget, header="Select the subscriber"
+    )
 
     @property
     def label(self):
@@ -144,17 +144,19 @@ class ArchiveSubscribersView(LaunchpadFormView):
         # managing the subscribers.
         if not self.context.private:
             self.request.response.addNotification(
-                "Only private archives can have subscribers.")
-            self.request.response.redirect(
-                canonical_url(self.context))
+                "Only private archives can have subscribers."
+            )
+            self.request.response.redirect(canonical_url(self.context))
             return
 
         super().initialize()
         subscription_set = getUtility(IArchiveSubscriberSet)
         self.subscriptions = subscription_set.getByArchive(self.context)
         self.batchnav = BatchNavigator(
-            self.subscriptions, self.request,
-            range_factory=StormRangeFactory(self.subscriptions))
+            self.subscriptions,
+            self.request,
+            range_factory=StormRangeFactory(self.subscriptions),
+        )
 
     @cachedproperty
     def current_subscriptions_batch(self):
@@ -166,15 +168,18 @@ class ArchiveSubscribersView(LaunchpadFormView):
         # If the user can see this view, then they must have Append
         # permission on the archive, which grants View permission on all its
         # subscriptions.  Skip slow privacy checks.
-        precache_permission_for_objects(self.request, 'launchpad.View', batch)
+        precache_permission_for_objects(self.request, "launchpad.View", batch)
         ids = [subscription.subscriber_id for subscription in batch]
         subscribers = list(
             getUtility(IPersonSet).getPrecachedPersonsFromIDs(
-                ids, need_validity=True))
+                ids, need_validity=True
+            )
+        )
         # People who can manage subscriptions to this archive are entitled
         # to at least limited visibility of its existing subscribers.
         precache_permission_for_objects(
-            self.request, 'launchpad.LimitedView', subscribers)
+            self.request, "launchpad.LimitedView", subscribers
+        )
         return batch
 
     @cachedproperty
@@ -187,47 +192,52 @@ class ArchiveSubscribersView(LaunchpadFormView):
 
         Also ensures that the expiry date is in the future.
         """
-        form.getWidgetsData(self.widgets, 'field', data)
-        subscriber = data.get('subscriber')
-        date_expires = data.get('date_expires')
+        form.getWidgetsData(self.widgets, "field", data)
+        subscriber = data.get("subscriber")
+        date_expires = data.get("date_expires")
 
         if subscriber is not None:
             subscriber_set = getUtility(IArchiveSubscriberSet)
             current_subscription = subscriber_set.getBySubscriber(
-                subscriber, archive=self.context)
+                subscriber, archive=self.context
+            )
 
             # XXX noodles 20090212 bug=246200: use bool() when it gets fixed
             # in storm.
             if current_subscription.any() is not None:
-                self.setFieldError('subscriber',
-                    "%s is already subscribed." % subscriber.displayname)
+                self.setFieldError(
+                    "subscriber",
+                    "%s is already subscribed." % subscriber.displayname,
+                )
 
         if date_expires:
             if date_expires < datetime.date.today():
-                self.setFieldError('date_expires',
-                    "The expiry date must be in the future.")
+                self.setFieldError(
+                    "date_expires", "The expiry date must be in the future."
+                )
 
-    @action("Add", name="add",
-            validator="validate_new_subscription")
+    @action("Add", name="add", validator="validate_new_subscription")
     def create_subscription(self, action, data):
         """Create a subscription for the supplied user."""
         # As we present a date selection to the user for expiry, we
         # need to convert the value into a datetime with UTC:
-        date_expires = data['date_expires']
+        date_expires = data["date_expires"]
         if date_expires:
             date_expires = datetime.datetime(
                 date_expires.year,
                 date_expires.month,
                 date_expires.day,
-                tzinfo=pytz.timezone('UTC'))
+                tzinfo=pytz.timezone("UTC"),
+            )
         self.context.newSubscription(
-            data['subscriber'],
+            data["subscriber"],
             self.user,
-            description=data['description'],
-            date_expires=date_expires)
+            description=data["description"],
+            date_expires=date_expires,
+        )
 
-        subscriber_individuals = data['subscriber'].displayname
-        if data['subscriber'].is_team:
+        subscriber_individuals = data["subscriber"].displayname
+        if data["subscriber"].is_team:
             subscriber_individuals = "Members of " + subscriber_individuals
 
         notification = (
@@ -235,11 +245,11 @@ class ArchiveSubscribersView(LaunchpadFormView):
             "software from %(archive)s. "
             "%(subscriber_individuals)s will be notified of the access "
             " via email."
-            ) % {
-                'subscriber': data['subscriber'].displayname,
-                'archive': self.context.displayname,
-                'subscriber_individuals': subscriber_individuals,
-                }
+        ) % {
+            "subscriber": data["subscriber"].displayname,
+            "archive": self.context.displayname,
+            "subscriber_individuals": subscriber_individuals,
+        }
 
         self.request.response.addNotification(notification)
 
@@ -251,9 +261,10 @@ class ArchiveSubscriptionEditView(LaunchpadEditFormView):
     """A view for editing and canceling an archive subscriber."""
 
     schema = IArchiveSubscriberUI
-    field_names = ['date_expires', 'description']
+    field_names = ["date_expires", "description"]
     custom_widget_description = CustomWidgetFactory(
-        TextWidget, displayWidth=40)
+        TextWidget, displayWidth=40
+    )
     custom_widget_date_expires = DateWidget
 
     @property
@@ -263,43 +274,46 @@ class ArchiveSubscriptionEditView(LaunchpadEditFormView):
 
     def validate_update_subscription(self, action, data):
         """Ensure that the date of expiry is not in the past."""
-        form.getWidgetsData(self.widgets, 'field', data)
-        date_expires = data.get('date_expires')
+        form.getWidgetsData(self.widgets, "field", data)
+        date_expires = data.get("date_expires")
 
         if date_expires:
             if date_expires < datetime.date.today():
-                self.setFieldError('date_expires',
-                    "The expiry date must be in the future.")
+                self.setFieldError(
+                    "date_expires", "The expiry date must be in the future."
+                )
 
-    @action(
-        'Save', name='update', validator="validate_update_subscription")
+    @action("Save", name="update", validator="validate_update_subscription")
     def update_subscription(self, action, data):
         """Update the context subscription with the new data."""
         # As we present a date selection to the user for expiry, we
         # need to convert the value into a datetime with UTC:
-        date_expires = data['date_expires']
+        date_expires = data["date_expires"]
 
         if date_expires:
-            data['date_expires'] = datetime.datetime(
+            data["date_expires"] = datetime.datetime(
                 date_expires.year,
                 date_expires.month,
                 date_expires.day,
-                tzinfo=pytz.timezone('UTC'))
+                tzinfo=pytz.timezone("UTC"),
+            )
 
         self.updateContextFromData(data)
 
         notification = "The access for %s has been updated." % (
-            self.context.subscriber.displayname)
+            self.context.subscriber.displayname
+        )
         self.request.response.addNotification(notification)
 
-    @action('Revoke access', name='cancel')
+    @action("Revoke access", name="cancel")
     def cancel_subscription(self, action, data):
         """Cancel the context subscription."""
         self.context.cancel(self.user)
 
         notification = "You have revoked %s's access to %s." % (
             self.context.subscriber.displayname,
-            self.context.archive.displayname)
+            self.context.archive.displayname,
+        )
         self.request.response.addNotification(notification)
 
     @property
@@ -329,7 +343,8 @@ class PersonArchiveSubscriptionsView(LaunchpadView):
         """
         subscriber_set = getUtility(IArchiveSubscriberSet)
         subs_with_tokens = list(
-            subscriber_set.getBySubscriberWithActiveToken(self.context))
+            subscriber_set.getBySubscriberWithActiveToken(self.context)
+        )
 
         # ArchiveSubscriber.archive is preloaded.
         archives = [subscriber.archive for subscriber, _ in subs_with_tokens]
@@ -340,7 +355,7 @@ class PersonArchiveSubscriptionsView(LaunchpadView):
         # check results
         viewable_archives = []
         non_viewable_archives = []
-        archive_set = getUtility(IArchiveSet) # type: IArchiveSet
+        archive_set = getUtility(IArchiveSet)  # type: IArchiveSet
         for archive, has_view_permission in archive_set.checkViewPermission(
             archives, self.user
         ).items():
@@ -349,14 +364,10 @@ class PersonArchiveSubscriptionsView(LaunchpadView):
             else:
                 non_viewable_archives.append(archive)
         precache_permission_for_objects(
-            None,
-            'launchpad.View',
-            viewable_archives, result=True
+            None, "launchpad.View", viewable_archives, result=True
         )
         precache_permission_for_objects(
-            None,
-            'launchpad.View',
-            non_viewable_archives, result=False
+            None, "launchpad.View", non_viewable_archives, result=False
         )
 
         # Turn the result set into a list of dicts so it can be easily
@@ -372,9 +383,11 @@ class PersonArchiveSubscriptionsView(LaunchpadView):
             unique_archives.add(subscription.archive)
 
             personal_subscription = PersonalArchiveSubscription(
-                self.context, subscription.archive)
-            personal_subscription_tokens.append({
-                'subscription': personal_subscription, 'token': token})
+                self.context, subscription.archive
+            )
+            personal_subscription_tokens.append(
+                {"subscription": personal_subscription, "token": token}
+            )
 
         return personal_subscription_tokens
 
@@ -402,18 +415,19 @@ class PersonArchiveSubscriptionView(LaunchpadView, SourcesListEntriesWidget):
         # If an activation was requested and there isn't a currently
         # active token, then create a token, provide a notification
         # and redirect.
-        if self.request.form.get('activate') and not self.active_token:
+        if self.request.form.get("activate") and not self.active_token:
             self.context.archive.newAuthToken(self.context.subscriber)
             self.request.response.redirect(self.request.getURL())
         # Otherwise, if a regeneration was requested and there is an
         # active token, then cancel the old token, create a new one,
         # provide a notification and redirect.
-        elif self.request.form.get('regenerate') and self.active_token:
+        elif self.request.form.get("regenerate") and self.active_token:
             self.active_token.deactivate()
             self.context.archive.newAuthToken(self.context.subscriber)
             self.request.response.addNotification(
                 "Launchpad has generated the new password you requested "
                 "for your access to the archive %s. Please follow "
                 "the instructions below to update your custom "
-                "\"sources.list\"." % self.context.archive.displayname)
+                '"sources.list".' % self.context.archive.displayname
+            )
             self.request.response.redirect(self.request.getURL())
diff --git a/lib/lp/soyuz/browser/binarypackagerelease.py b/lib/lp/soyuz/browser/binarypackagerelease.py
index 7ba1ce9..42f6521 100644
--- a/lib/lp/soyuz/browser/binarypackagerelease.py
+++ b/lib/lp/soyuz/browser/binarypackagerelease.py
@@ -2,19 +2,16 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'BinaryPackageReleaseNavigation',
-    'BinaryPackageView',
-    ]
+    "BinaryPackageReleaseNavigation",
+    "BinaryPackageView",
+]
 
 from lp.services.webapp import Navigation
-from lp.services.webapp.publisher import (
-    canonical_url,
-    LaunchpadView,
-    )
+from lp.services.webapp.publisher import LaunchpadView, canonical_url
 from lp.soyuz.browser.packagerelationship import (
     PackageRelationshipSet,
     relationship_builder,
-    )
+)
 from lp.soyuz.interfaces.binarypackagerelease import IBinaryPackageRelease
 
 
@@ -65,7 +62,8 @@ class BinaryPackageView(LaunchpadView):
         for reference in self.context.built_using_references:
             spr = reference.source_package_release
             sp = spr.upload_distroseries.getSourcePackage(
-                spr.sourcepackagename)
+                spr.sourcepackagename
+            )
             sp_url = canonical_url(sp) if sp is not None else None
-            relationship_set.add(spr.name, '=', spr.version, sp_url)
+            relationship_set.add(spr.name, "=", spr.version, sp_url)
         return relationship_set
diff --git a/lib/lp/soyuz/browser/build.py b/lib/lp/soyuz/browser/build.py
index be9c034..27d3b5b 100644
--- a/lib/lp/soyuz/browser/build.py
+++ b/lib/lp/soyuz/browser/build.py
@@ -4,17 +4,17 @@
 """Browser views for builds."""
 
 __all__ = [
-    'BuildBreadcrumb',
-    'BuildCancelView',
-    'BuildContextMenu',
-    'BuildNavigation',
-    'BuildRecordsView',
-    'BuildRescoringView',
-    'BuildUrl',
-    'BuildView',
-    'DistributionBuildRecordsView',
-    'get_build_by_id_str',
-    ]
+    "BuildBreadcrumb",
+    "BuildCancelView",
+    "BuildContextMenu",
+    "BuildNavigation",
+    "BuildRecordsView",
+    "BuildRescoringView",
+    "BuildUrl",
+    "BuildView",
+    "DistributionBuildRecordsView",
+    "get_build_by_id_str",
+]
 
 
 from itertools import groupby
@@ -22,56 +22,41 @@ from operator import attrgetter
 
 from lazr.batchnavigator import ListRangeFactory
 from zope.component import getUtility
-from zope.interface import (
-    implementer,
-    Interface,
-    )
+from zope.interface import Interface, implementer
 from zope.security.interfaces import Unauthorized
 from zope.security.proxy import removeSecurityProxy
 
 from lp import _
-from lp.app.browser.launchpadform import (
-    action,
-    LaunchpadFormView,
-    )
-from lp.app.errors import (
-    NotFoundError,
-    UnexpectedFormData,
-    )
-from lp.buildmaster.enums import (
-    BuildQueueStatus,
-    BuildStatus,
-    )
+from lp.app.browser.launchpadform import LaunchpadFormView, action
+from lp.app.errors import NotFoundError, UnexpectedFormData
+from lp.buildmaster.enums import BuildQueueStatus, BuildStatus
 from lp.buildmaster.interfaces.buildfarmjob import (
     IBuildFarmJobDB,
     InconsistentBuildFarmJobError,
     ISpecificBuildFarmJobSource,
-    )
+)
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.services.librarian.browser import (
     FileNavigationMixin,
     ProxiedLibraryFileAlias,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import (
-    canonical_url,
     ContextMenu,
-    enabled_with_permission,
     GetitemNavigation,
     LaunchpadView,
     Link,
-    )
-from lp.services.webapp.batching import (
-    BatchNavigator,
-    StormRangeFactory,
-    )
+    canonical_url,
+    enabled_with_permission,
+)
+from lp.services.webapp.batching import BatchNavigator, StormRangeFactory
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.services.webapp.interfaces import ICanonicalUrlData
 from lp.soyuz.enums import PackageUploadStatus
 from lp.soyuz.interfaces.binarypackagebuild import (
     IBinaryPackageBuild,
     IBuildRescoreForm,
-    )
+)
 
 
 def get_build_by_id_str(utility, id_str):
@@ -107,6 +92,7 @@ class BuildUrl:
     Copy archives will be presented under the archives page:
        /ubuntu/+archive/my-special-archive/+build/1234
     """
+
     rootsite = None
 
     def __init__(self, context):
@@ -129,10 +115,11 @@ class BuildNavigation(GetitemNavigation, FileNavigationMixin):
 
 
 class BuildContextMenu(ContextMenu):
-    """Overview menu for build records """
+    """Overview menu for build records"""
+
     usedfor = IBinaryPackageBuild
 
-    links = ['ppa', 'records', 'retry', 'rescore', 'cancel']
+    links = ["ppa", "records", "retry", "rescore", "cancel"]
 
     @property
     def is_ppa_build(self):
@@ -141,37 +128,41 @@ class BuildContextMenu(ContextMenu):
 
     def ppa(self):
         return Link(
-            canonical_url(self.context.archive), text='View PPA',
-            enabled=self.is_ppa_build)
+            canonical_url(self.context.archive),
+            text="View PPA",
+            enabled=self.is_ppa_build,
+        )
 
     def records(self):
         return Link(
-            canonical_url(self.context.archive, view_name='+builds'),
-            text='View build records', enabled=self.is_ppa_build)
+            canonical_url(self.context.archive, view_name="+builds"),
+            text="View build records",
+            enabled=self.is_ppa_build,
+        )
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def retry(self):
         """Only enabled for build records that are active."""
-        text = 'Retry this build'
+        text = "Retry this build"
         return Link(
-            '+retry', text, icon='retry',
-            enabled=self.context.can_be_retried)
+            "+retry", text, icon="retry", enabled=self.context.can_be_retried
+        )
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def rescore(self):
         """Only enabled for pending build records."""
-        text = 'Rescore build'
+        text = "Rescore build"
         return Link(
-            '+rescore', text, icon='edit',
-            enabled=self.context.can_be_rescored)
+            "+rescore", text, icon="edit", enabled=self.context.can_be_rescored
+        )
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def cancel(self):
         """Only enabled for pending/active virtual builds."""
-        text = 'Cancel build'
+        text = "Cancel build"
         return Link(
-            '+cancel', text, icon='edit',
-            enabled=self.context.can_be_cancelled)
+            "+cancel", text, icon="edit", enabled=self.context.can_be_cancelled
+        )
 
 
 class BuildBreadcrumb(Breadcrumb):
@@ -183,12 +174,13 @@ class BuildBreadcrumb(Breadcrumb):
         # name and version. But for distro archives there are already
         # breadcrumbs for both, so we omit them.
         if self.context.archive.is_ppa or self.context.archive.is_copy:
-            return '%s build of %s %s' % (
+            return "%s build of %s %s" % (
                 self.context.arch_tag,
                 self.context.source_package_release.sourcepackagename.name,
-                self.context.source_package_release.version)
+                self.context.source_package_release.version,
+            )
         else:
-            return '%s build' % self.context.arch_tag
+            return "%s build" % self.context.arch_tag
 
 
 class BuildView(LaunchpadView):
@@ -213,22 +205,25 @@ class BuildView(LaunchpadView):
         """
         return [
             binarypackagerelease.title
-            for binarypackagerelease, binarypackagename
-                in self.context.getBinaryPackageNamesForDisplay()]
+            for binarypackagerelease, _ in (
+                self.context.getBinaryPackageNamesForDisplay()
+            )
+        ]
 
     @cachedproperty
     def has_published_binaries(self):
         """Whether or not binaries were already published for this build."""
         # Binaries imported by gina (missing `PackageUpload` record)
         # are always published.
-        imported_binaries = (
-            self.package_upload is None and
-            bool(self.context.binarypackages))
+        imported_binaries = self.package_upload is None and bool(
+            self.context.binarypackages
+        )
         # Binaries uploaded from the buildds are published when the
         # corresponding `PackageUpload` status is DONE.
         uploaded_binaries = (
-            self.package_upload is not None and
-            self.package_upload.status == PackageUploadStatus.DONE)
+            self.package_upload is not None
+            and self.package_upload.status == PackageUploadStatus.DONE
+        )
 
         if imported_binaries or uploaded_binaries:
             return True
@@ -256,7 +251,7 @@ class BuildView(LaunchpadView):
         if component is not None:
             return component.name
         else:
-            return 'unknown'
+            return "unknown"
 
     @cachedproperty
     def files(self):
@@ -266,9 +261,9 @@ class BuildView(LaunchpadView):
 
         return [
             ProxiedLibraryFileAlias(alias, self.context)
-            for bpr, bpf, alias, content
-                in self.context.getBinaryFilesForDisplay()
-                if not alias.deleted]
+            for _, _, alias, _ in self.context.getBinaryFilesForDisplay()
+            if not alias.deleted
+        ]
 
     @property
     def dispatch_time_estimate_available(self):
@@ -278,8 +273,10 @@ class BuildView(LaunchpadView):
         in state WAITING.
         """
         return (
-            self.context.status == BuildStatus.NEEDSBUILD and
-            self.context.buildqueue_record.status == BuildQueueStatus.WAITING)
+            self.context.status == BuildStatus.NEEDSBUILD
+            and self.context.buildqueue_record.status
+            == BuildQueueStatus.WAITING
+        )
 
     @cachedproperty
     def eta(self):
@@ -320,22 +317,23 @@ class BuildRetryView(BuildView):
 
     @property
     def label(self):
-        return 'Retry %s' % self.context.title
+        return "Retry %s" % self.context.title
 
     def retry_build(self):
         """Check user confirmation and perform the build record retry."""
         if not self.context.can_be_retried:
             self.request.response.addErrorNotification(
-                'Build can not be retried')
+                "Build can not be retried"
+            )
         else:
-            action = self.request.form.get('RETRY', None)
+            action = self.request.form.get("RETRY", None)
             # No action, return None to present the form again.
             if action is None:
                 return
 
             # Invoke context method to retry the build record.
             self.context.retry()
-            self.request.response.addInfoNotification('Build has been queued')
+            self.request.response.addInfoNotification("Build has been queued")
 
         self.request.response.redirect(canonical_url(self.context))
 
@@ -347,7 +345,7 @@ class BuildRescoringView(LaunchpadFormView):
 
     @property
     def label(self):
-        return 'Rescore %s' % self.context.title
+        return "Rescore %s" % self.context.title
 
     def initialize(self):
         """See `ILaunchpadFormView`.
@@ -370,10 +368,9 @@ class BuildRescoringView(LaunchpadFormView):
     @action(_("Rescore"), name="rescore")
     def action_rescore(self, action, data):
         """Set the given score value."""
-        score = data.get('priority')
+        score = data.get("priority")
         self.context.rescore(score)
-        self.request.response.addNotification(
-            "Build rescored to %s." % score)
+        self.request.response.addNotification("Build rescored to %s." % score)
 
 
 class BuildCancelView(LaunchpadFormView):
@@ -387,6 +384,7 @@ class BuildCancelView(LaunchpadFormView):
     @property
     def cancel_url(self):
         return canonical_url(self.context)
+
     next_url = cancel_url
 
     @action("Cancel build", name="cancel")
@@ -395,7 +393,8 @@ class BuildCancelView(LaunchpadFormView):
         self.context.cancel()
         if self.context.status == BuildStatus.CANCELLING:
             self.request.response.addNotification(
-                "Build cancellation in progress.")
+                "Build cancellation in progress."
+            )
         elif self.context.status == BuildStatus.CANCELLED:
             self.request.response.addNotification("Build cancelled.")
         else:
@@ -406,7 +405,8 @@ def setupCompleteBuilds(batch):
     """Pre-populate new object with buildqueue items."""
     builds = getSpecificJobs(batch)
     getUtility(IBuildQueueSet).preloadForBuildFarmJobs(
-        [build for build in builds if build is not None])
+        [build for build in builds if build is not None]
+    )
     return builds
 
 
@@ -417,17 +417,19 @@ def getSpecificJobs(jobs):
     If the job is already a specific job, it will be returned unchanged.
     """
     builds = []
-    key = attrgetter('job_type.name')
+    key = attrgetter("job_type.name")
     nonspecific_jobs = sorted(
-        (job for job in jobs if IBuildFarmJobDB.providedBy(job)), key=key)
+        (job for job in jobs if IBuildFarmJobDB.providedBy(job)), key=key
+    )
     job_builds = {}
     for job_type_name, grouped_jobs in groupby(nonspecific_jobs, key=key):
         # Fetch the jobs in batches grouped by their job type.
-        source = getUtility(
-            ISpecificBuildFarmJobSource, job_type_name)
-        builds = [build for build
-            in source.getByBuildFarmJobs(list(grouped_jobs))
-            if build is not None]
+        source = getUtility(ISpecificBuildFarmJobSource, job_type_name)
+        builds = [
+            build
+            for build in source.getByBuildFarmJobs(list(grouped_jobs))
+            if build is not None
+        ]
         for build in builds:
             try:
                 job_builds[build.build_farm_job.id] = build
@@ -442,11 +444,13 @@ def getSpecificJobs(jobs):
     # Return the corresponding builds.
     try:
         return [
-            job_builds[job.id]
-            if IBuildFarmJobDB.providedBy(job) else job for job in jobs]
+            job_builds[job.id] if IBuildFarmJobDB.providedBy(job) else job
+            for job in jobs
+        ]
     except KeyError:
         raise InconsistentBuildFarmJobError(
-            "Could not find all the related specific jobs.")
+            "Could not find all the related specific jobs."
+        )
 
 
 class BuildRecordsView(LaunchpadView):
@@ -458,7 +462,7 @@ class BuildRecordsView(LaunchpadView):
     DistroSeries, DistroArchSeries and SourcePackage view classes.
     """
 
-    page_title = 'Builds'
+    page_title = "Builds"
 
     # Currently most build records views are interested in binaries
     # only, but subclasses can set this if desired.
@@ -468,7 +472,7 @@ class BuildRecordsView(LaunchpadView):
 
     @property
     def label(self):
-        return 'Builds for %s' % self.context.displayname
+        return "Builds for %s" % self.context.displayname
 
     def setupBuildList(self):
         """Setup a batched build records list.
@@ -477,10 +481,10 @@ class BuildRecordsView(LaunchpadView):
         invoke it in template.
         """
         # recover selected build state
-        state_tag = self.request.get('build_state', '')
-        self.text = self.request.get('build_text', None)
+        state_tag = self.request.get("build_state", "")
+        self.text = self.request.get("build_text", None)
 
-        if self.text == '':
+        if self.text == "":
             self.text = None
 
         # build self.state & self.available_states structures
@@ -494,23 +498,29 @@ class BuildRecordsView(LaunchpadView):
 
         # request context build records according to the selected state
         builds = self.context.getBuildRecords(
-            build_state=self.state, name=self.text, arch_tag=self.arch_tag,
-            user=self.user, binary_only=binary_only)
+            build_state=self.state,
+            name=self.text,
+            arch_tag=self.arch_tag,
+            user=self.user,
+            binary_only=binary_only,
+        )
         self.batchnav = BatchNavigator(
-            builds, self.request, range_factory=self.range_factory(builds))
+            builds, self.request, range_factory=self.range_factory(builds)
+        )
         # We perform this extra step because we don't want to issue one
         # extra query to retrieve the BuildQueue for each Build (batch item)
         # A more elegant approach should be extending Batching class and
         # integrating the fix into it. However the current solution is
         # simpler and shorter, producing the same result. cprov 20060810
         self.complete_builds = setupCompleteBuilds(
-            self.batchnav.currentBatch())
+            self.batchnav.currentBatch()
+        )
 
     @property
     def arch_tag(self):
         """Return the architecture tag from the request."""
-        arch_tag = self.request.get('arch_tag', None)
-        if arch_tag == '' or arch_tag == 'all':
+        arch_tag = self.request.get("arch_tag", None)
+        if arch_tag == "" or arch_tag == "all":
             return None
         else:
             return arch_tag
@@ -520,12 +530,13 @@ class BuildRecordsView(LaunchpadView):
         """Return the architecture options for the context."""
         # Guard against contexts that cannot tell us the available
         # distroarchseries.
-        if hasattr(self.context, 'architectures') is False:
+        if hasattr(self.context, "architectures") is False:
             return []
 
         # Grab all the architecture tags for the context.
         arch_tags = [
-            arch.architecturetag for arch in self.context.architectures]
+            arch.architecturetag for arch in self.context.architectures
+        ]
 
         # We cannot assume that the arch_tags will be distinct, so
         # create a distinct and sorted list:
@@ -533,22 +544,24 @@ class BuildRecordsView(LaunchpadView):
 
         # Create the initial 'all architectures' option.
         if self.arch_tag is None:
-            selected = 'selected'
+            selected = "selected"
         else:
             selected = None
         options = [
-            dict(name='All architectures', value='all', selected=selected)]
+            dict(name="All architectures", value="all", selected=selected)
+        ]
 
         # Create the options for the select box, ensuring to mark
         # the currently selected one.
         for arch_tag in arch_tags:
             if arch_tag == self.arch_tag:
-                selected = 'selected'
+                selected = "selected"
             else:
                 selected = None
 
             options.append(
-                dict(name=arch_tag, value=arch_tag, selected=selected))
+                dict(name=arch_tag, value=arch_tag, selected=selected)
+            )
 
         return options
 
@@ -566,21 +579,21 @@ class BuildRecordsView(LaunchpadView):
         """
         # Default states map.
         state_map = {
-            'built': BuildStatus.FULLYBUILT,
-            'failed': BuildStatus.FAILEDTOBUILD,
-            'depwait': BuildStatus.MANUALDEPWAIT,
-            'chrootwait': BuildStatus.CHROOTWAIT,
-            'superseded': BuildStatus.SUPERSEDED,
-            'uploadfail': BuildStatus.FAILEDTOUPLOAD,
-            'all': None,
-            }
+            "built": BuildStatus.FULLYBUILT,
+            "failed": BuildStatus.FAILEDTOBUILD,
+            "depwait": BuildStatus.MANUALDEPWAIT,
+            "chrootwait": BuildStatus.CHROOTWAIT,
+            "superseded": BuildStatus.SUPERSEDED,
+            "uploadfail": BuildStatus.FAILEDTOUPLOAD,
+            "all": None,
+        }
         # Include pristine (not yet assigned to a builder) builds,
         # if requested.
         if self.show_builder_info:
             extra_state_map = {
-                'building': BuildStatus.BUILDING,
-                'pending': BuildStatus.NEEDSBUILD,
-                }
+                "building": BuildStatus.BUILDING,
+                "pending": BuildStatus.NEEDSBUILD,
+            }
             state_map.update(**extra_state_map)
 
         # Look up the corresponding state or fall back to the default
@@ -590,7 +603,8 @@ class BuildRecordsView(LaunchpadView):
                 self.state = state_map[tag]
             except (KeyError, TypeError):
                 raise UnexpectedFormData(
-                    'No suitable state found for value "%s"' % tag)
+                    'No suitable state found for value "%s"' % tag
+                )
         else:
             self.state = self.default_build_state
 
@@ -601,15 +615,16 @@ class BuildRecordsView(LaunchpadView):
             if state:
                 name = state.title.strip()
             else:
-                name = 'All states'
+                name = "All states"
 
             if state == self.state:
-                selected = 'selected'
+                selected = "selected"
             else:
                 selected = None
 
             self.available_states.append(
-                dict(name=name, value=tag, selected=selected))
+                dict(name=name, value=tag, selected=selected)
+            )
 
     @property
     def default_build_state(self):
diff --git a/lib/lp/soyuz/browser/distributionsourcepackagerelease.py b/lib/lp/soyuz/browser/distributionsourcepackagerelease.py
index f2fa74a..8e00ff2 100644
--- a/lib/lp/soyuz/browser/distributionsourcepackagerelease.py
+++ b/lib/lp/soyuz/browser/distributionsourcepackagerelease.py
@@ -2,11 +2,11 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistributionSourcePackageReleaseBreadcrumb',
-    'DistributionSourcePackageReleaseNavigation',
-    'DistributionSourcePackageReleasePublishingHistoryView',
-    'DistributionSourcePackageReleaseView',
-    ]
+    "DistributionSourcePackageReleaseBreadcrumb",
+    "DistributionSourcePackageReleaseNavigation",
+    "DistributionSourcePackageReleasePublishingHistoryView",
+    "DistributionSourcePackageReleaseView",
+]
 
 import operator
 
@@ -15,14 +15,14 @@ from lazr.restful.utils import smartquote
 from lp.archivepublisher.debversion import Version
 from lp.registry.browser.distributionsourcepackage import (
     PublishingHistoryViewMixin,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import (
-    canonical_url,
     LaunchpadView,
     Navigation,
+    canonical_url,
     stepthrough,
-    )
+)
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.soyuz.adapters.proxiedsourcefiles import ProxiedSourceLibraryFileAlias
 from lp.soyuz.browser.build import get_build_by_id_str
@@ -30,7 +30,7 @@ from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.distributionsourcepackagerelease import (
     IDistributionSourcePackageRelease,
-    )
+)
 
 
 class DistributionSourcePackageReleaseBreadcrumb(Breadcrumb):
@@ -44,18 +44,20 @@ class DistributionSourcePackageReleaseBreadcrumb(Breadcrumb):
 class DistributionSourcePackageReleaseNavigation(Navigation):
     usedfor = IDistributionSourcePackageRelease
 
-    @stepthrough('+build')
+    @stepthrough("+build")
     def traverse_build(self, name):
         build = get_build_by_id_str(IBinaryPackageBuildSet, name)
-        if (build is None
-            or build.archive not in
-                self.context.distribution.all_distro_archives
-            or build.source_package_release !=
-                self.context.sourcepackagerelease):
+        if (
+            build is None
+            or build.archive
+            not in self.context.distribution.all_distro_archives
+            or build.source_package_release
+            != self.context.sourcepackagerelease
+        ):
             return None
         return build
 
-    @stepthrough('+latestbuild')
+    @stepthrough("+latestbuild")
     def redirect_latestbuild(self, name):
         build = self.context.getBuildsByArchTag(name).first()
         if build is not None:
@@ -64,7 +66,7 @@ class DistributionSourcePackageReleaseNavigation(Navigation):
 
 
 class DistributionSourcePackageReleaseView(LaunchpadView):
-    """View logic for `DistributionSourcePackageRelease` objects. """
+    """View logic for `DistributionSourcePackageRelease` objects."""
 
     usedfor = IDistributionSourcePackageRelease
 
@@ -92,7 +94,7 @@ class DistributionSourcePackageReleaseView(LaunchpadView):
             publishing
             for publishing in self._cached_publishing_history
             if publishing.status == PackagePublishingStatus.PUBLISHED
-            ]
+        ]
 
     @property
     def files(self):
@@ -100,8 +102,10 @@ class DistributionSourcePackageReleaseView(LaunchpadView):
         last_publication = self._cached_publishing_history[0]
         return [
             ProxiedSourceLibraryFileAlias(
-                source_file.libraryfile, last_publication)
-            for source_file in self.context.files]
+                source_file.libraryfile, last_publication
+            )
+            for source_file in self.context.files
+        ]
 
     @cachedproperty
     def sponsor(self):
@@ -133,15 +137,19 @@ class DistributionSourcePackageReleaseView(LaunchpadView):
         # Build a local list of `IBinaryPackageBuilds` ordered by ascending
         # 'architecture_tag'.
         cached_builds = sorted(
-            self.context.builds, key=operator.attrgetter('arch_tag'))
+            self.context.builds, key=operator.attrgetter("arch_tag")
+        )
 
         # Build a list of unique `IDistroSeries` related with the local
         # builds ordered by descending version.
         def distroseries_sort_key(item):
             return Version(item.version)
+
         sorted_distroseries = sorted(
             {build.distro_series for build in cached_builds},
-            key=distroseries_sort_key, reverse=True)
+            key=distroseries_sort_key,
+            reverse=True,
+        )
 
         # Group builds as dictionaries.
         distroseries_builds = []
@@ -150,21 +158,23 @@ class DistributionSourcePackageReleaseView(LaunchpadView):
                 build
                 for build in cached_builds
                 if build.distro_series == distroseries
-                ]
+            ]
             distroseries_builds.append(
-                {'distroseries': distroseries, 'builds': builds})
+                {"distroseries": distroseries, "builds": builds}
+            )
 
         return distroseries_builds
 
 
 class DistributionSourcePackageReleasePublishingHistoryView(
-        LaunchpadView, PublishingHistoryViewMixin):
+    LaunchpadView, PublishingHistoryViewMixin
+):
     """Presenting `DistributionSourcePackageRelease` publishing history."""
 
     usedfor = IDistributionSourcePackageRelease
 
-    page_title = 'Publishing history'
+    page_title = "Publishing history"
 
     @property
     def label(self):
-        return 'Publishing history of %s' % smartquote(self.context.title)
+        return "Publishing history of %s" % smartquote(self.context.title)
diff --git a/lib/lp/soyuz/browser/distroarchseries.py b/lib/lp/soyuz/browser/distroarchseries.py
index df67c22..d3dae16 100644
--- a/lib/lp/soyuz/browser/distroarchseries.py
+++ b/lib/lp/soyuz/browser/distroarchseries.py
@@ -2,39 +2,33 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistroArchSeriesActionMenu',
-    'DistroArchSeriesAddView',
-    'DistroArchSeriesAdminView',
-    'DistroArchSeriesBreadcrumb',
-    'DistroArchSeriesPackageSearchView',
-    'DistroArchSeriesNavigation',
-    'DistroArchSeriesView',
-    ]
+    "DistroArchSeriesActionMenu",
+    "DistroArchSeriesAddView",
+    "DistroArchSeriesAdminView",
+    "DistroArchSeriesBreadcrumb",
+    "DistroArchSeriesPackageSearchView",
+    "DistroArchSeriesNavigation",
+    "DistroArchSeriesView",
+]
 
 from lazr.restful.interface import copy_field
 from lazr.restful.utils import smartquote
-from zope.interface import (
-    implementer,
-    Interface,
-    )
+from zope.interface import Interface, implementer
 
 from lp import _
 from lp.app.browser.launchpadform import (
-    action,
     LaunchpadEditFormView,
     LaunchpadFormView,
-    )
+    action,
+)
 from lp.services.webapp import GetitemNavigation
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.services.webapp.menu import (
-    enabled_with_permission,
     Link,
     NavigationMenu,
-    )
-from lp.services.webapp.publisher import (
-    canonical_url,
-    stepto,
-    )
+    enabled_with_permission,
+)
+from lp.services.webapp.publisher import canonical_url, stepto
 from lp.soyuz.browser.packagesearch import PackageSearchViewBase
 from lp.soyuz.interfaces.distroarchseries import IDistroArchSeries
 
@@ -43,7 +37,7 @@ class DistroArchSeriesNavigation(GetitemNavigation):
 
     usedfor = IDistroArchSeries
 
-    @stepto('+source-filter')
+    @stepto("+source-filter")
     def traverse_source_filter(self):
         """Traverse to the `IDistroArchSeriesFilter` for this DAS, if any."""
         return self.context.getSourceFilter()
@@ -63,21 +57,22 @@ class IDistroArchSeriesActionMenu(Interface):
 
 class DistroArchSeriesActionMenu(NavigationMenu):
     """Action menu for distro arch series."""
+
     usedfor = IDistroArchSeriesActionMenu
     facet = "overview"
-    links = ['admin', 'builds']
+    links = ["admin", "builds"]
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def admin(self):
-        text = 'Administer'
-        return Link('+admin', text, icon='edit')
+        text = "Administer"
+        return Link("+admin", text, icon="edit")
 
     # Search link not necessary, because there's a search form on
     # the overview page.
 
     def builds(self):
-        text = 'Show builds'
-        return Link('+builds', text, icon='info')
+        text = "Show builds"
+        return Link("+builds", text, icon="info")
 
 
 class DistroArchSeriesPackageSearchView(PackageSearchViewBase):
@@ -98,18 +93,18 @@ class DistroArchSeriesView(DistroArchSeriesPackageSearchView):
 
 
 class DistroArchSeriesAddSchema(IDistroArchSeries):
-    processor = copy_field(IDistroArchSeries['processor'], readonly=False)
+    processor = copy_field(IDistroArchSeries["processor"], readonly=False)
 
 
 class DistroArchSeriesAddView(LaunchpadFormView):
 
     schema = DistroArchSeriesAddSchema
-    field_names = ['architecturetag', 'processor', 'official']
+    field_names = ["architecturetag", "processor", "official"]
 
     @property
     def label(self):
         """See `LaunchpadFormView`"""
-        return 'Add a port of %s' % self.context.title
+        return "Add a port of %s" % self.context.title
 
     @property
     def page_title(self):
@@ -121,12 +116,15 @@ class DistroArchSeriesAddView(LaunchpadFormView):
         """See `LaunchpadFormView`."""
         return canonical_url(self.context)
 
-    @action(_('Continue'), name='continue')
+    @action(_("Continue"), name="continue")
     def create_action(self, action, data):
         """Create a new Port."""
         distroarchseries = self.context.newArch(
-            data['architecturetag'], data['processor'], data['official'],
-            self.user)
+            data["architecturetag"],
+            data["processor"],
+            data["official"],
+            self.user,
+        )
         self.next_url = canonical_url(distroarchseries)
 
 
@@ -135,16 +133,15 @@ class DistroArchSeriesAdminView(LaunchpadEditFormView):
 
     schema = IDistroArchSeries
 
-    field_names = ['architecturetag', 'official', 'enabled']
+    field_names = ["architecturetag", "official", "enabled"]
 
-    @action(_('Change'), name='update')
+    @action(_("Change"), name="update")
     def change_details(self, action, data):
         """Update with details from the form."""
         modified = self.updateContextFromData(data)
 
         if modified:
-            self.request.response.addNotification(
-                "Successfully updated")
+            self.request.response.addNotification("Successfully updated")
 
         return modified
 
diff --git a/lib/lp/soyuz/browser/distroarchseriesbinarypackage.py b/lib/lp/soyuz/browser/distroarchseriesbinarypackage.py
index e793fec..d3d70f1 100644
--- a/lib/lp/soyuz/browser/distroarchseriesbinarypackage.py
+++ b/lib/lp/soyuz/browser/distroarchseriesbinarypackage.py
@@ -2,19 +2,16 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistroArchSeriesBinaryPackageNavigation',
-    'DistroArchSeriesBinaryPackageView',
-    ]
+    "DistroArchSeriesBinaryPackageNavigation",
+    "DistroArchSeriesBinaryPackageView",
+]
 
 from lazr.restful.utils import smartquote
 
-from lp.services.webapp import (
-    GetitemNavigation,
-    LaunchpadView,
-    )
+from lp.services.webapp import GetitemNavigation, LaunchpadView
 from lp.soyuz.interfaces.distroarchseriesbinarypackage import (
     IDistroArchSeriesBinaryPackage,
-    )
+)
 
 
 class DistroArchSeriesBinaryPackageNavigation(GetitemNavigation):
@@ -23,7 +20,6 @@ class DistroArchSeriesBinaryPackageNavigation(GetitemNavigation):
 
 
 class DistroArchSeriesBinaryPackageView(LaunchpadView):
-
     @property
     def page_title(self):
         return smartquote(self.context.title)
diff --git a/lib/lp/soyuz/browser/distroarchseriesbinarypackagerelease.py b/lib/lp/soyuz/browser/distroarchseriesbinarypackagerelease.py
index 335d278..be4ac49 100644
--- a/lib/lp/soyuz/browser/distroarchseriesbinarypackagerelease.py
+++ b/lib/lp/soyuz/browser/distroarchseriesbinarypackagerelease.py
@@ -2,21 +2,18 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistroArchSeriesBinaryPackageReleaseBreadcrumb',
-    'DistroArchSeriesBinaryPackageReleaseNavigation',
-    'DistroArchSeriesBinaryPackageReleaseView',
-    ]
+    "DistroArchSeriesBinaryPackageReleaseBreadcrumb",
+    "DistroArchSeriesBinaryPackageReleaseNavigation",
+    "DistroArchSeriesBinaryPackageReleaseView",
+]
 
 from lazr.restful.utils import smartquote
 
-from lp.services.webapp import (
-    LaunchpadView,
-    Navigation,
-    )
+from lp.services.webapp import LaunchpadView, Navigation
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.soyuz.interfaces.distroarchseriesbinarypackagerelease import (
     IDistroArchSeriesBinaryPackageRelease,
-    )
+)
 
 
 class DistroArchSeriesBinaryPackageReleaseBreadcrumb(Breadcrumb):
@@ -32,7 +29,6 @@ class DistroArchSeriesBinaryPackageReleaseNavigation(Navigation):
 
 
 class DistroArchSeriesBinaryPackageReleaseView(LaunchpadView):
-
     def __init__(self, context, request):
         self.context = context
         self.request = request
diff --git a/lib/lp/soyuz/browser/distroseries.py b/lib/lp/soyuz/browser/distroseries.py
index 82efc87..e0c654d 100644
--- a/lib/lp/soyuz/browser/distroseries.py
+++ b/lib/lp/soyuz/browser/distroseries.py
@@ -2,9 +2,9 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistroSeriesBuildsView',
-    'DistroSeriesQueueView',
-    ]
+    "DistroSeriesBuildsView",
+    "DistroSeriesQueueView",
+]
 
 from lp.soyuz.browser.build import BuildRecordsView
 from lp.soyuz.browser.queue import QueueItemsView
@@ -24,5 +24,5 @@ class DistroSeriesBuildsView(BuildRecordsView):
 class DistroSeriesQueueView(QueueItemsView):
     """A View to show an `IDistroSeries` object's uploads."""
 
-    label = 'Upload queue'
+    label = "Upload queue"
     page_title = label
diff --git a/lib/lp/soyuz/browser/distroseriesbinarypackage.py b/lib/lp/soyuz/browser/distroseriesbinarypackage.py
index 91a0e8f..780a88e 100644
--- a/lib/lp/soyuz/browser/distroseriesbinarypackage.py
+++ b/lib/lp/soyuz/browser/distroseriesbinarypackage.py
@@ -2,21 +2,18 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DistroSeriesBinaryPackageBreadcrumb',
-    'DistroSeriesBinaryPackageNavigation',
-    'DistroSeriesBinaryPackageView',
-    ]
+    "DistroSeriesBinaryPackageBreadcrumb",
+    "DistroSeriesBinaryPackageNavigation",
+    "DistroSeriesBinaryPackageView",
+]
 
 from lazr.restful.utils import smartquote
 
-from lp.services.webapp import (
-    LaunchpadView,
-    Navigation,
-    )
+from lp.services.webapp import LaunchpadView, Navigation
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.soyuz.interfaces.distroseriesbinarypackage import (
     IDistroSeriesBinaryPackage,
-    )
+)
 
 
 class DistroSeriesBinaryPackageNavigation(Navigation):
@@ -26,13 +23,13 @@ class DistroSeriesBinaryPackageNavigation(Navigation):
 
 class DistroSeriesBinaryPackageBreadcrumb(Breadcrumb):
     """Builds a breadcrumb for an `IDistroSeriesBinaryPackage`."""
+
     @property
     def text(self):
         return self.context.binarypackagename.name
 
 
 class DistroSeriesBinaryPackageView(LaunchpadView):
-
     def __init__(self, context, request):
         self.context = context
         self.request = request
diff --git a/lib/lp/soyuz/browser/livefs.py b/lib/lp/soyuz/browser/livefs.py
index 5409eea..0fea4d9 100644
--- a/lib/lp/soyuz/browser/livefs.py
+++ b/lib/lp/soyuz/browser/livefs.py
@@ -4,73 +4,64 @@
 """LiveFS views."""
 
 __all__ = [
-    'LiveFSAddView',
-    'LiveFSDeleteView',
-    'LiveFSEditView',
-    'LiveFSNavigation',
-    'LiveFSNavigationMenu',
-    'LiveFSView',
-    ]
+    "LiveFSAddView",
+    "LiveFSDeleteView",
+    "LiveFSEditView",
+    "LiveFSNavigation",
+    "LiveFSNavigationMenu",
+    "LiveFSView",
+]
 
 import json
 
 from lazr.restful import ResourceJSONEncoder
-from lazr.restful.interface import (
-    copy_field,
-    use_template,
-    )
+from lazr.restful.interface import copy_field, use_template
 from zope.component import getUtility
 from zope.interface import Interface
-from zope.schema import (
-    Choice,
-    Text,
-    )
+from zope.schema import Choice, Text
 
 from lp.app.browser.launchpadform import (
-    action,
     LaunchpadEditFormView,
     LaunchpadFormView,
-    )
+    action,
+)
 from lp.app.browser.lazrjs import (
     InlinePersonEditPickerWidget,
     TextLineEditorWidget,
-    )
+)
 from lp.app.browser.tales import format_link
 from lp.app.widgets.itemswidgets import LaunchpadRadioWidget
 from lp.code.vocabularies.sourcepackagerecipe import BuildableDistroSeries
 from lp.registry.interfaces.series import SeriesStatus
 from lp.services.features import getFeatureFlag
 from lp.services.webapp import (
-    canonical_url,
-    enabled_with_permission,
     LaunchpadView,
     Link,
     Navigation,
     NavigationMenu,
+    canonical_url,
+    enabled_with_permission,
     stepthrough,
-    )
+)
 from lp.services.webapp.authorization import check_permission
-from lp.services.webapp.breadcrumb import (
-    Breadcrumb,
-    NameBreadcrumb,
-    )
+from lp.services.webapp.breadcrumb import Breadcrumb, NameBreadcrumb
 from lp.services.webhooks.browser import WebhookTargetNavigationMixin
 from lp.soyuz.browser.build import get_build_by_id_str
 from lp.soyuz.interfaces.livefs import (
-    ILiveFS,
-    ILiveFSSet,
     LIVEFS_FEATURE_FLAG,
     LIVEFS_WEBHOOKS_FEATURE_FLAG,
+    ILiveFS,
+    ILiveFSSet,
     LiveFSFeatureDisabled,
     NoSuchLiveFS,
-    )
+)
 from lp.soyuz.interfaces.livefsbuild import ILiveFSBuildSet
 
 
 class LiveFSNavigation(WebhookTargetNavigationMixin, Navigation):
     usedfor = ILiveFS
 
-    @stepthrough('+build')
+    @stepthrough("+build")
     def traverse_build(self, name):
         build = get_build_by_id_str(ILiveFSBuildSet, name)
         if build is None or build.livefs != self.context:
@@ -79,13 +70,14 @@ class LiveFSNavigation(WebhookTargetNavigationMixin, Navigation):
 
 
 class LiveFSBreadcrumb(NameBreadcrumb):
-
     @property
     def inside(self):
         return Breadcrumb(
             self.context.owner,
             url=canonical_url(self.context.owner, view_name="+livefs"),
-            text="Live filesystems", inside=self.context.owner)
+            text="Live filesystems",
+            inside=self.context.owner,
+        )
 
 
 class LiveFSNavigationMenu(NavigationMenu):
@@ -93,27 +85,30 @@ class LiveFSNavigationMenu(NavigationMenu):
 
     usedfor = ILiveFS
 
-    facet = 'overview'
+    facet = "overview"
 
-    links = ('admin', 'edit', 'webhooks', 'delete')
+    links = ("admin", "edit", "webhooks", "delete")
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def admin(self):
-        return Link('+admin', 'Administer live filesystem', icon='edit')
+        return Link("+admin", "Administer live filesystem", icon="edit")
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def edit(self):
-        return Link('+edit', 'Edit live filesystem', icon='edit')
+        return Link("+edit", "Edit live filesystem", icon="edit")
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def webhooks(self):
         return Link(
-            '+webhooks', 'Manage webhooks', icon='edit',
-            enabled=bool(getFeatureFlag(LIVEFS_WEBHOOKS_FEATURE_FLAG)))
+            "+webhooks",
+            "Manage webhooks",
+            icon="edit",
+            enabled=bool(getFeatureFlag(LIVEFS_WEBHOOKS_FEATURE_FLAG)),
+        )
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def delete(self):
-        return Link('+delete', 'Delete live filesystem', icon='trash-icon')
+        return Link("+delete", "Delete live filesystem", icon="trash-icon")
 
 
 class LiveFSView(LaunchpadView):
@@ -122,10 +117,10 @@ class LiveFSView(LaunchpadView):
     @property
     def page_title(self):
         return "%(name)s's %(livefs_name)s live filesystem in %(series)s" % {
-            'name': self.context.owner.displayname,
-            'livefs_name': self.context.name,
-            'series': self.context.distro_series.fullseriesname,
-            }
+            "name": self.context.owner.displayname,
+            "livefs_name": self.context.name,
+            "series": self.context.distro_series.fullseriesname,
+        }
 
     label = page_title
 
@@ -136,18 +131,24 @@ class LiveFSView(LaunchpadView):
     @property
     def person_picker(self):
         field = copy_field(
-            ILiveFS['owner'],
-            vocabularyName='UserTeamsParticipationPlusSelfSimpleDisplay')
+            ILiveFS["owner"],
+            vocabularyName="UserTeamsParticipationPlusSelfSimpleDisplay",
+        )
         return InlinePersonEditPickerWidget(
-            self.context, field, format_link(self.context.owner),
-            header='Change owner', step_title='Select a new owner')
+            self.context,
+            field,
+            format_link(self.context.owner),
+            header="Change owner",
+            step_title="Select a new owner",
+        )
 
     @property
     def name_widget(self):
-        name = ILiveFS['name']
+        name = ILiveFS["name"]
         title = "Edit the live filesystem name"
         return TextLineEditorWidget(
-            self.context, name, title, 'h1', max_width='95%', truncate_lines=1)
+            self.context, name, title, "h1", max_width="95%", truncate_lines=1
+        )
 
     @property
     def sorted_metadata_items(self):
@@ -169,10 +170,12 @@ def builds_for_livefs(livefs):
     Builds that the user does not have permission to see are excluded.
     """
     builds = [
-        build for build in livefs.pending_builds
-        if check_permission('launchpad.View', build)]
+        build
+        for build in livefs.pending_builds
+        if check_permission("launchpad.View", build)
+    ]
     for build in livefs.completed_builds:
-        if not check_permission('launchpad.View', build):
+        if not check_permission("launchpad.View", build):
             continue
         builds.append(build)
         if len(builds) >= 10:
@@ -183,40 +186,46 @@ def builds_for_livefs(livefs):
 class ILiveFSEditSchema(Interface):
     """Schema for adding or editing a live filesystem."""
 
-    use_template(ILiveFS, include=[
-        'owner',
-        'name',
-        'require_virtualized',
-        'relative_build_score',
-        'keep_binary_files_days',
-        ])
+    use_template(
+        ILiveFS,
+        include=[
+            "owner",
+            "name",
+            "require_virtualized",
+            "relative_build_score",
+            "keep_binary_files_days",
+        ],
+    )
     distro_series = Choice(
-        vocabulary='BuildableDistroSeries', title='Distribution series')
+        vocabulary="BuildableDistroSeries", title="Distribution series"
+    )
     metadata = Text(
-        title='Live filesystem build metadata',
+        title="Live filesystem build metadata",
         description=(
-            'A JSON dictionary of data about the image.  Entries here will '
-            'be passed to the builder.'))
+            "A JSON dictionary of data about the image.  Entries here will "
+            "be passed to the builder."
+        ),
+    )
 
 
 class LiveFSMetadataValidatorMixin:
     """Class to validate that live filesystem properties are valid."""
 
     def validate(self, data):
-        if data['metadata']:
+        if data["metadata"]:
             try:
-                json.loads(data['metadata'])
+                json.loads(data["metadata"])
             except Exception as e:
-                self.setFieldError('metadata', str(e))
+                self.setFieldError("metadata", str(e))
 
 
 class LiveFSAddView(LiveFSMetadataValidatorMixin, LaunchpadFormView):
     """View for creating live filesystems."""
 
-    title = label = 'Create a new live filesystem'
+    title = label = "Create a new live filesystem"
 
     schema = ILiveFSEditSchema
-    field_names = ['owner', 'name', 'distro_series', 'metadata']
+    field_names = ["owner", "name", "distro_series", "metadata"]
     custom_widget_distro_series = LaunchpadRadioWidget
 
     def initialize(self):
@@ -228,38 +237,45 @@ class LiveFSAddView(LiveFSMetadataValidatorMixin, LaunchpadFormView):
     @property
     def initial_values(self):
         series = [
-            term.value for term in BuildableDistroSeries()
-            if term.value.status in (
-                SeriesStatus.CURRENT, SeriesStatus.DEVELOPMENT)][0]
+            term.value
+            for term in BuildableDistroSeries()
+            if term.value.status
+            in (SeriesStatus.CURRENT, SeriesStatus.DEVELOPMENT)
+        ][0]
         return {
-            'owner': self.user,
-            'distro_series': series,
-            'metadata': '{}',
-            }
+            "owner": self.user,
+            "distro_series": series,
+            "metadata": "{}",
+        }
 
     @property
     def cancel_url(self):
         return canonical_url(self.context)
 
-    @action('Create live filesystem', name='create')
+    @action("Create live filesystem", name="create")
     def request_action(self, action, data):
         livefs = getUtility(ILiveFSSet).new(
-            self.user, data['owner'], data['distro_series'], data['name'],
-            json.loads(data['metadata']))
+            self.user,
+            data["owner"],
+            data["distro_series"],
+            data["name"],
+            json.loads(data["metadata"]),
+        )
         self.next_url = canonical_url(livefs)
 
     def validate(self, data):
         super().validate(data)
-        owner = data.get('owner', None)
-        distro_series = data['distro_series']
-        name = data.get('name', None)
+        owner = data.get("owner", None)
+        distro_series = data["distro_series"]
+        name = data.get("name", None)
         if owner and name:
             if getUtility(ILiveFSSet).exists(owner, distro_series, name):
                 self.setFieldError(
-                    'name',
-                    'There is already a live filesystem for %s owned by %s '
-                    'with this name.' % (
-                        distro_series.displayname, owner.displayname))
+                    "name",
+                    "There is already a live filesystem for %s owned by %s "
+                    "with this name."
+                    % (distro_series.displayname, owner.displayname),
+                )
 
 
 class BaseLiveFSEditView(LaunchpadEditFormView):
@@ -270,7 +286,7 @@ class BaseLiveFSEditView(LaunchpadEditFormView):
     def cancel_url(self):
         return canonical_url(self.context)
 
-    @action('Update live filesystem', name='update')
+    @action("Update live filesystem", name="update")
     def request_action(self, action, data):
         self.updateContextFromData(data)
         self.next_url = canonical_url(self.context)
@@ -286,23 +302,23 @@ class LiveFSAdminView(BaseLiveFSEditView):
 
     @property
     def title(self):
-        return 'Administer %s live filesystem' % self.context.name
+        return "Administer %s live filesystem" % self.context.name
 
     label = title
 
     field_names = [
-        'require_virtualized',
-        'relative_build_score',
-        'keep_binary_files_days',
-        ]
+        "require_virtualized",
+        "relative_build_score",
+        "keep_binary_files_days",
+    ]
 
     @property
     def initial_values(self):
         return {
-            'require_virtualized': self.context.require_virtualized,
-            'relative_build_score': self.context.relative_build_score,
-            'keep_binary_files_days': self.context.keep_binary_files_days,
-            }
+            "require_virtualized": self.context.require_virtualized,
+            "relative_build_score": self.context.relative_build_score,
+            "keep_binary_files_days": self.context.keep_binary_files_days,
+        }
 
 
 class LiveFSEditView(LiveFSMetadataValidatorMixin, BaseLiveFSEditView):
@@ -310,44 +326,49 @@ class LiveFSEditView(LiveFSMetadataValidatorMixin, BaseLiveFSEditView):
 
     @property
     def title(self):
-        return 'Edit %s live filesystem' % self.context.name
+        return "Edit %s live filesystem" % self.context.name
 
     label = title
 
-    field_names = ['owner', 'name', 'distro_series', 'metadata']
+    field_names = ["owner", "name", "distro_series", "metadata"]
     custom_widget_distro_series = LaunchpadRadioWidget
 
     @property
     def initial_values(self):
         return {
-            'distro_series': self.context.distro_series,
-            'metadata': json.dumps(
-                self.context.metadata, ensure_ascii=False,
-                cls=ResourceJSONEncoder),
-            }
+            "distro_series": self.context.distro_series,
+            "metadata": json.dumps(
+                self.context.metadata,
+                ensure_ascii=False,
+                cls=ResourceJSONEncoder,
+            ),
+        }
 
     def updateContextFromData(self, data, context=None, notify_modified=True):
         """See `LaunchpadEditFormView`."""
-        if 'metadata' in data:
-            data['metadata'] = json.loads(data['metadata'])
+        if "metadata" in data:
+            data["metadata"] = json.loads(data["metadata"])
         super().updateContextFromData(
-            data, context=context, notify_modified=notify_modified)
+            data, context=context, notify_modified=notify_modified
+        )
 
     def validate(self, data):
         super().validate(data)
-        owner = data.get('owner', None)
-        distro_series = data['distro_series']
-        name = data.get('name', None)
+        owner = data.get("owner", None)
+        distro_series = data["distro_series"]
+        name = data.get("name", None)
         if owner and name:
             try:
                 livefs = getUtility(ILiveFSSet).getByName(
-                    owner, distro_series, name)
+                    owner, distro_series, name
+                )
                 if livefs != self.context:
                     self.setFieldError(
-                        'name',
-                        'There is already a live filesystem for %s owned by '
-                        '%s with this name.' % (
-                            distro_series.displayname, owner.displayname))
+                        "name",
+                        "There is already a live filesystem for %s owned by "
+                        "%s with this name."
+                        % (distro_series.displayname, owner.displayname),
+                    )
             except NoSuchLiveFS:
                 pass
 
@@ -357,7 +378,7 @@ class LiveFSDeleteView(BaseLiveFSEditView):
 
     @property
     def title(self):
-        return 'Delete %s live filesystem' % self.context.name
+        return "Delete %s live filesystem" % self.context.name
 
     label = title
 
@@ -367,7 +388,7 @@ class LiveFSDeleteView(BaseLiveFSEditView):
     def has_builds(self):
         return not self.context.builds.is_empty()
 
-    @action('Delete live filesystem', name='delete')
+    @action("Delete live filesystem", name="delete")
     def delete_action(self, action, data):
         owner = self.context.owner
         self.context.destroySelf()
diff --git a/lib/lp/soyuz/browser/livefsbuild.py b/lib/lp/soyuz/browser/livefsbuild.py
index 78c4f1f..4041d07 100644
--- a/lib/lp/soyuz/browser/livefsbuild.py
+++ b/lib/lp/soyuz/browser/livefsbuild.py
@@ -4,31 +4,28 @@
 """LiveFSBuild views."""
 
 __all__ = [
-    'LiveFSBuildContextMenu',
-    'LiveFSBuildNavigation',
-    'LiveFSBuildView',
-    ]
+    "LiveFSBuildContextMenu",
+    "LiveFSBuildNavigation",
+    "LiveFSBuildView",
+]
 
 from zope.interface import Interface
 
-from lp.app.browser.launchpadform import (
-    action,
-    LaunchpadFormView,
-    )
+from lp.app.browser.launchpadform import LaunchpadFormView, action
 from lp.buildmaster.enums import BuildQueueStatus
 from lp.services.librarian.browser import (
     FileNavigationMixin,
     ProxiedLibraryFileAlias,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import (
-    canonical_url,
     ContextMenu,
-    enabled_with_permission,
     LaunchpadView,
     Link,
     Navigation,
-    )
+    canonical_url,
+    enabled_with_permission,
+)
 from lp.soyuz.interfaces.binarypackagebuild import IBuildRescoreForm
 from lp.soyuz.interfaces.livefsbuild import ILiveFSBuild
 
@@ -42,21 +39,27 @@ class LiveFSBuildContextMenu(ContextMenu):
 
     usedfor = ILiveFSBuild
 
-    facet = 'overview'
+    facet = "overview"
 
-    links = ('cancel', 'rescore')
+    links = ("cancel", "rescore")
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def cancel(self):
         return Link(
-            '+cancel', 'Cancel build', icon='remove',
-            enabled=self.context.can_be_cancelled)
+            "+cancel",
+            "Cancel build",
+            icon="remove",
+            enabled=self.context.can_be_cancelled,
+        )
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def rescore(self):
         return Link(
-            '+rescore', 'Rescore build', icon='edit',
-            enabled=self.context.can_be_rescored)
+            "+rescore",
+            "Rescore build",
+            icon="edit",
+            enabled=self.context.can_be_rescored,
+        )
 
 
 class LiveFSBuildView(LaunchpadView):
@@ -109,7 +112,9 @@ class LiveFSBuildView(LaunchpadView):
 
         return [
             ProxiedLibraryFileAlias(alias, self.context)
-            for _, alias, _ in self.context.getFiles() if not alias.deleted]
+            for _, alias, _ in self.context.getFiles()
+            if not alias.deleted
+        ]
 
     @cachedproperty
     def has_files(self):
@@ -128,14 +133,15 @@ class LiveFSBuildCancelView(LaunchpadFormView):
     class schema(Interface):
         """Schema for cancelling a build."""
 
-    page_title = label = 'Cancel build'
+    page_title = label = "Cancel build"
 
     @property
     def cancel_url(self):
         return canonical_url(self.context)
+
     next_url = cancel_url
 
-    @action('Cancel build', name='cancel')
+    @action("Cancel build", name="cancel")
     def request_action(self, action, data):
         """Cancel the build."""
         self.context.cancel()
@@ -146,27 +152,29 @@ class LiveFSBuildRescoreView(LaunchpadFormView):
 
     schema = IBuildRescoreForm
 
-    page_title = label = 'Rescore build'
+    page_title = label = "Rescore build"
 
     def __call__(self):
         if self.context.can_be_rescored:
             return super().__call__()
         self.request.response.addWarningNotification(
-            "Cannot rescore this build because it is not queued.")
+            "Cannot rescore this build because it is not queued."
+        )
         self.request.response.redirect(canonical_url(self.context))
 
     @property
     def cancel_url(self):
         return canonical_url(self.context)
+
     next_url = cancel_url
 
-    @action('Rescore build', name='rescore')
+    @action("Rescore build", name="rescore")
     def request_action(self, action, data):
         """Rescore the build."""
-        score = data.get('priority')
+        score = data.get("priority")
         self.context.rescore(score)
-        self.request.response.addNotification('Build rescored to %s.' % score)
+        self.request.response.addNotification("Build rescored to %s." % score)
 
     @property
     def initial_values(self):
-        return {'score': str(self.context.buildqueue_record.lastscore)}
+        return {"score": str(self.context.buildqueue_record.lastscore)}
diff --git a/lib/lp/soyuz/browser/packagerelationship.py b/lib/lp/soyuz/browser/packagerelationship.py
index 8123efc..2c8e729 100644
--- a/lib/lp/soyuz/browser/packagerelationship.py
+++ b/lib/lp/soyuz/browser/packagerelationship.py
@@ -4,10 +4,10 @@
 """Package relationships."""
 
 __all__ = [
-    'relationship_builder',
-    'PackageRelationship',
-    'PackageRelationshipSet',
-    ]
+    "relationship_builder",
+    "PackageRelationship",
+    "PackageRelationshipSet",
+]
 
 import operator as std_operator
 
@@ -18,7 +18,7 @@ from lp.services.webapp import canonical_url
 from lp.soyuz.interfaces.packagerelationship import (
     IPackageRelationship,
     IPackageRelationshipSet,
-    )
+)
 
 
 def relationship_builder(relationship_line, getter):
@@ -34,20 +34,21 @@ def relationship_builder(relationship_line, getter):
         return relationship_set
 
     parsed_relationships = [
-        token[0] for token in PkgRelation.parse_relations(relationship_line)]
+        token[0] for token in PkgRelation.parse_relations(relationship_line)
+    ]
 
     for rel in parsed_relationships:
-        name = rel['name']
+        name = rel["name"]
         target_object = getter(name)
         if target_object is not None:
             url = canonical_url(target_object)
         else:
             url = None
-        if rel['version'] is None:
-            operator = ''
-            version = ''
+        if rel["version"] is None:
+            operator = ""
+            version = ""
         else:
-            operator, version = rel['version']
+            operator, version = rel["version"]
         relationship_set.add(name, operator, version, url)
 
     return relationship_set
@@ -77,13 +78,11 @@ class PackageRelationshipSet:
 
     def add(self, name, operator, version, url):
         """See IPackageRelationshipSet."""
-        self.contents.append(
-            PackageRelationship(name, operator, version, url))
+        self.contents.append(PackageRelationship(name, operator, version, url))
 
     def has_items(self):
         """See IPackageRelationshipSet."""
         return len(self.contents) != 0
 
     def __iter__(self):
-        return iter(sorted(
-            self.contents, key=std_operator.attrgetter('name')))
+        return iter(sorted(self.contents, key=std_operator.attrgetter("name")))
diff --git a/lib/lp/soyuz/browser/packagesearch.py b/lib/lp/soyuz/browser/packagesearch.py
index ba37878..edc189d 100644
--- a/lib/lp/soyuz/browser/packagesearch.py
+++ b/lib/lp/soyuz/browser/packagesearch.py
@@ -1,9 +1,7 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = [
-    'PackageSearchViewBase'
-    ]
+__all__ = ["PackageSearchViewBase"]
 
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp.batching import BatchNavigator
@@ -24,7 +22,7 @@ class PackageSearchViewBase(LaunchpadView):
             self.text = self.text.strip()
             # We need to ensure the form on the refreshed page shows the
             # correct text.
-            self.request.form['text'] = self.text
+            self.request.form["text"] = self.text
 
     @property
     def search_requested(self):
@@ -60,4 +58,4 @@ class PackageSearchViewBase(LaunchpadView):
         """Call the context specific search."""
         raise NotImplementedError(
             "do_context_specific_search needs to be implemented in sub-class"
-            )
+        )
diff --git a/lib/lp/soyuz/browser/packageset.py b/lib/lp/soyuz/browser/packageset.py
index 4ef6909..51c0c74 100644
--- a/lib/lp/soyuz/browser/packageset.py
+++ b/lib/lp/soyuz/browser/packageset.py
@@ -4,8 +4,8 @@
 """Browser views for packagesets."""
 
 __all__ = [
-    'PackagesetSetNavigation',
-    ]
+    "PackagesetSetNavigation",
+]
 
 from zope.component import getUtility
 
@@ -16,6 +16,7 @@ from lp.soyuz.interfaces.packageset import IPackagesetSet
 
 class PackagesetSetNavigation(GetitemNavigation):
     """Navigation methods for PackagesetSet."""
+
     usedfor = IPackagesetSet
 
     def traverse(self, distribution_name):
diff --git a/lib/lp/soyuz/browser/publishing.py b/lib/lp/soyuz/browser/publishing.py
index 92db14a..b936640 100644
--- a/lib/lp/soyuz/browser/publishing.py
+++ b/lib/lp/soyuz/browser/publishing.py
@@ -4,11 +4,11 @@
 """Browser views for Soyuz publishing records."""
 
 __all__ = [
-    'BinaryPublishingRecordView',
-    'SourcePublicationURL',
-    'SourcePublishingRecordSelectableView',
-    'SourcePublishingRecordView',
-    ]
+    "BinaryPublishingRecordView",
+    "SourcePublicationURL",
+    "SourcePublishingRecordSelectableView",
+    "SourcePublishingRecordView",
+]
 
 from operator import attrgetter
 
@@ -20,16 +20,13 @@ from lp.archiveuploader.utils import re_isadeb
 from lp.services.librarian.browser import (
     FileNavigationMixin,
     ProxiedLibraryFileAlias,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import Navigation
 from lp.services.webapp.authorization import check_permission
 from lp.services.webapp.escaping import structured
 from lp.services.webapp.interfaces import ICanonicalUrlData
-from lp.services.webapp.publisher import (
-    canonical_url,
-    LaunchpadView,
-    )
+from lp.services.webapp.publisher import LaunchpadView, canonical_url
 from lp.soyuz.adapters.proxiedsourcefiles import ProxiedSourceLibraryFileAlias
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.interfaces.binarypackagebuild import BuildSetStatus
@@ -38,12 +35,13 @@ from lp.soyuz.interfaces.publishing import (
     IBinaryPackagePublishingHistory,
     IPublishingSet,
     ISourcePackagePublishingHistory,
-    )
+)
 
 
 @implementer(ICanonicalUrlData)
 class PublicationURLBase:
     """Dynamic URL declaration for `I*PackagePublishingHistory`"""
+
     rootsite = None
 
     def __init__(self, context):
@@ -56,6 +54,7 @@ class PublicationURLBase:
 
 class SourcePublicationURL(PublicationURLBase):
     """Dynamic URL declaration for `ISourcePackagePublishingHistory`"""
+
     @property
     def path(self):
         return "+sourcepub/%s" % self.context.id
@@ -63,13 +62,15 @@ class SourcePublicationURL(PublicationURLBase):
 
 class BinaryPublicationURL(PublicationURLBase):
     """Dynamic URL declaration for `IBinaryPackagePublishingHistory`"""
+
     @property
     def path(self):
         return "+binarypub/%s" % self.context.id
 
 
-class SourcePackagePublishingHistoryNavigation(Navigation,
-                                               FileNavigationMixin):
+class SourcePackagePublishingHistoryNavigation(
+    Navigation, FileNavigationMixin
+):
     usedfor = ISourcePackagePublishingHistory
 
 
@@ -111,11 +112,11 @@ class BasePublishingRecordView(LaunchpadView):
     # new value to the PackagePublishingStatus enumeration but do not update
     # this map.
     timestamp_map = {
-        PackagePublishingStatus.DELETED: 'dateremoved',
-        PackagePublishingStatus.OBSOLETE: 'scheduleddeletiondate',
-        PackagePublishingStatus.PENDING: 'datecreated',
-        PackagePublishingStatus.PUBLISHED: 'datepublished',
-        PackagePublishingStatus.SUPERSEDED: 'datesuperseded'
+        PackagePublishingStatus.DELETED: "dateremoved",
+        PackagePublishingStatus.OBSOLETE: "scheduleddeletiondate",
+        PackagePublishingStatus.PENDING: "datecreated",
+        PackagePublishingStatus.PUBLISHED: "datepublished",
+        PackagePublishingStatus.SUPERSEDED: "datesuperseded",
     }
 
     @property
@@ -188,15 +189,17 @@ class BasePublishingRecordView(LaunchpadView):
         """Return the removal comment or 'None provided'."""
         removal_comment = self.context.removal_comment
         if removal_comment is None or not removal_comment.strip():
-            removal_comment = 'None provided.'
+            removal_comment = "None provided."
 
         return removal_comment
 
     @property
     def phased_update_percentage(self):
         """Return the formatted phased update percentage, or empty."""
-        if (self.is_binary and
-            self.context.phased_update_percentage is not None):
+        if (
+            self.is_binary
+            and self.context.phased_update_percentage is not None
+        ):
             return "%d%% of users" % self.context.phased_update_percentage
         return ""
 
@@ -210,7 +213,7 @@ class BasePublishingRecordView(LaunchpadView):
         archive = self.context.copied_from_archive
         if archive is None:
             return False
-        return archive.is_ppa and check_permission('launchpad.View', archive)
+        return archive.is_ppa and check_permission("launchpad.View", archive)
 
 
 class SourcePublishingRecordView(BasePublishingRecordView):
@@ -224,20 +227,20 @@ class SourcePublishingRecordView(BasePublishingRecordView):
     @property
     def builds_successful_and_published(self):
         """Return whether all builds were successful and published."""
-        status = self.build_status_summary['status']
+        status = self.build_status_summary["status"]
         return status == BuildSetStatus.FULLYBUILT
 
     @property
     def builds_successful_and_pending(self):
         """Return whether builds were successful but not all published."""
-        status = self.build_status_summary['status']
+        status = self.build_status_summary["status"]
         return status == BuildSetStatus.FULLYBUILT_PENDING
 
     @property
     def pending_builds(self):
         """Return a list of successful builds pending publication."""
         if self.builds_successful_and_pending:
-            return self.build_status_summary['builds']
+            return self.build_status_summary["builds"]
         else:
             return []
 
@@ -245,13 +248,13 @@ class SourcePublishingRecordView(BasePublishingRecordView):
     def build_status_img_src(self):
         """Return the image path for the current build status summary."""
         image_map = {
-            BuildSetStatus.BUILDING: '/@@/processing',
-            BuildSetStatus.NEEDSBUILD: '/@@/build-needed',
-            BuildSetStatus.FAILEDTOBUILD: '/@@/no',
-            BuildSetStatus.FULLYBUILT_PENDING: '/@@/build-success-publishing'
-            }
+            BuildSetStatus.BUILDING: "/@@/processing",
+            BuildSetStatus.NEEDSBUILD: "/@@/build-needed",
+            BuildSetStatus.FAILEDTOBUILD: "/@@/no",
+            BuildSetStatus.FULLYBUILT_PENDING: "/@@/build-success-publishing",
+        }
 
-        return image_map.get(self.build_status_summary['status'], '/@@/yes')
+        return image_map.get(self.build_status_summary["status"], "/@@/yes")
 
     def wasCopied(self):
         """Whether or not a source is published in its original location.
@@ -294,7 +297,8 @@ class SourcePublishingRecordView(BasePublishingRecordView):
         """Return list of dictionaries representing published files."""
         files = sorted(
             self.context.getSourceAndBinaryLibraryFiles(),
-            key=attrgetter('filename'))
+            key=attrgetter("filename"),
+        )
         result = []
         urls = set()
         for library_file in files:
@@ -310,13 +314,15 @@ class SourcePublishingRecordView(BasePublishingRecordView):
             custom_dict["filename"] = library_file.filename
             custom_dict["filesize"] = library_file.content.filesize
             if re_isadeb.match(library_file.filename):
-                custom_dict['class'] = 'binary'
+                custom_dict["class"] = "binary"
                 custom_dict["url"] = ProxiedLibraryFileAlias(
-                    library_file, self.context.archive).http_url
+                    library_file, self.context.archive
+                ).http_url
             else:
-                custom_dict['class'] = 'source'
+                custom_dict["class"] = "source"
                 custom_dict["url"] = ProxiedSourceLibraryFileAlias(
-                    library_file, self.context).http_url
+                    library_file, self.context
+                ).http_url
 
             result.append(custom_dict)
 
@@ -327,7 +333,8 @@ class SourcePublishingRecordView(BasePublishingRecordView):
         package_diffs = self.context.sourcepackagerelease.package_diffs
         return [
             ProxiedPackageDiff(package_diff, self.context.archive)
-            for package_diff in package_diffs]
+            for package_diff in package_diffs
+        ]
 
     @property
     def built_packages(self):
@@ -340,7 +347,8 @@ class SourcePublishingRecordView(BasePublishingRecordView):
         """
         publishing_set = getUtility(IPublishingSet)
         return publishing_set.getBuiltPackagesSummaryForSourcePublication(
-            self.context)
+            self.context
+        )
 
     @cachedproperty
     def builds(self):
@@ -360,7 +368,7 @@ class SourcePublishingRecordView(BasePublishingRecordView):
         if not archive.is_ppa:
             return False
 
-        return check_permission('launchpad.View', archive)
+        return check_permission("launchpad.View", archive)
 
     @property
     def recipe_build_details(self):
@@ -370,16 +378,20 @@ class SourcePublishingRecordView(BasePublishingRecordView):
         sprb = self.context.sourcepackagerelease.source_package_recipe_build
         if sprb is not None:
             if sprb.recipe is None:
-                recipe = 'deleted recipe'
+                recipe = "deleted recipe"
             else:
                 recipe = structured(
                     'recipe <a href="%s">%s</a>',
-                    canonical_url(sprb.recipe), sprb.recipe.name)
+                    canonical_url(sprb.recipe),
+                    sprb.recipe.name,
+                )
             return structured(
                 '<a href="%s">Built</a> by %s for <a href="%s">%s</a>',
-                    canonical_url(sprb), recipe,
-                    canonical_url(sprb.requester),
-                    sprb.requester.displayname).escapedtext
+                canonical_url(sprb),
+                recipe,
+                canonical_url(sprb.requester),
+                sprb.requester.displayname,
+            ).escapedtext
         return None
 
 
diff --git a/lib/lp/soyuz/browser/queue.py b/lib/lp/soyuz/browser/queue.py
index ba6650a..2fbcc60 100644
--- a/lib/lp/soyuz/browser/queue.py
+++ b/lib/lp/soyuz/browser/queue.py
@@ -4,9 +4,9 @@
 """Browser views for package queue."""
 
 __all__ = [
-    'PackageUploadNavigation',
-    'QueueItemsView',
-    ]
+    "PackageUploadNavigation",
+    "QueueItemsView",
+]
 
 from operator import attrgetter
 
@@ -14,35 +14,20 @@ from lazr.delegates import delegate_to
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
-from lp.app.errors import (
-    NotFoundError,
-    UnexpectedFormData,
-    )
+from lp.app.errors import NotFoundError, UnexpectedFormData
 from lp.registry.interfaces.person import IPersonSet
-from lp.services.database.bulk import (
-    load_referencing,
-    load_related,
-    )
+from lp.services.database.bulk import load_referencing, load_related
 from lp.services.job.model.job import Job
 from lp.services.librarian.browser import (
     FileNavigationMixin,
     ProxiedLibraryFileAlias,
-    )
-from lp.services.librarian.model import (
-    LibraryFileAlias,
-    LibraryFileContent,
-    )
-from lp.services.webapp import (
-    GetitemNavigation,
-    LaunchpadView,
-    )
+)
+from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
+from lp.services.webapp import GetitemNavigation, LaunchpadView
 from lp.services.webapp.authorization import check_permission
 from lp.services.webapp.batching import BatchNavigator
 from lp.services.webapp.escaping import structured
-from lp.soyuz.enums import (
-    PackagePublishingPriority,
-    PackageUploadStatus,
-    )
+from lp.soyuz.enums import PackagePublishingPriority, PackageUploadStatus
 from lp.soyuz.interfaces.archivepermission import IArchivePermissionSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
@@ -53,17 +38,13 @@ from lp.soyuz.interfaces.queue import (
     IPackageUploadSet,
     QueueAdminUnauthorizedError,
     QueueInconsistentStateError,
-    )
+)
 from lp.soyuz.interfaces.section import ISectionSet
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
-from lp.soyuz.model.files import (
-    BinaryPackageFile,
-    SourcePackageReleaseFile,
-    )
+from lp.soyuz.model.files import BinaryPackageFile, SourcePackageReleaseFile
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
-
 QUEUE_SIZE = 30
 
 
@@ -84,10 +65,10 @@ class QueueItemsView(LaunchpadView):
         """
 
         # recover selected queue state and name filter
-        self.name_filter = self.request.get('queue_text', '')
+        self.name_filter = self.request.get("queue_text", "")
 
         try:
-            state_value = int(self.request.get('queue_state', ''))
+            state_value = int(self.request.get("queue_state", ""))
         except ValueError:
             state_value = PackageUploadStatus.NEW.value
 
@@ -95,7 +76,8 @@ class QueueItemsView(LaunchpadView):
             self.state = PackageUploadStatus.items[state_value]
         except KeyError:
             raise UnexpectedFormData(
-                'No suitable status found for value "%s"' % state_value)
+                'No suitable status found for value "%s"' % state_value
+            )
 
         self.queue = self.context.getPackageUploadQueue(self.state)
 
@@ -105,19 +87,22 @@ class QueueItemsView(LaunchpadView):
             PackageUploadStatus.REJECTED,
             PackageUploadStatus.DONE,
             PackageUploadStatus.UNAPPROVED,
-            ]
+        ]
 
         self.filtered_options = []
 
         for state in valid_states:
-            selected = (state == self.state)
+            selected = state == self.state
             self.filtered_options.append(
-                dict(name=state.title, value=state.value, selected=selected))
+                dict(name=state.title, value=state.value, selected=selected)
+            )
 
         queue_items = self.context.getPackageUploads(
-            status=self.state, name=self.name_filter)
+            status=self.state, name=self.name_filter
+        )
         self.batchnav = BatchNavigator(
-            queue_items, self.request, size=QUEUE_SIZE)
+            queue_items, self.request, size=QUEUE_SIZE
+        )
 
     def builds_dict(self, upload_ids, binary_files):
         """Return a dictionary of PackageUploadBuild keyed on build ID.
@@ -125,14 +110,17 @@ class QueueItemsView(LaunchpadView):
         :param upload_ids: A list of PackageUpload IDs.
         :param binary_files: A list of BinaryPackageReleaseFiles.
         """
-        build_ids = [binary_file.binarypackagerelease.build.id
-                     for binary_file in binary_files]
+        build_ids = [
+            binary_file.binarypackagerelease.build.id
+            for binary_file in binary_files
+        ]
         upload_set = getUtility(IPackageUploadSet)
         package_upload_builds = upload_set.getBuildByBuildIDs(build_ids)
         package_upload_builds_dict = {}
         for package_upload_build in package_upload_builds:
             package_upload_builds_dict[
-                package_upload_build.build.id] = package_upload_build
+                package_upload_build.build.id
+            ] = package_upload_build
         return package_upload_builds_dict
 
     def binary_files_dict(self, package_upload_builds_dict, binary_files):
@@ -146,7 +134,8 @@ class QueueItemsView(LaunchpadView):
         binary_package_names = set()
         for binary_file in binary_files:
             binary_package_names.add(
-                binary_file.binarypackagerelease.binarypackagename.id)
+                binary_file.binarypackagerelease.binarypackagename.id
+            )
             build_id = binary_file.binarypackagerelease.build.id
             upload_id = package_upload_builds_dict[build_id].packageupload.id
             if upload_id not in build_upload_files:
@@ -159,7 +148,8 @@ class QueueItemsView(LaunchpadView):
         source_upload_files = {}
         for source_file in source_files:
             upload_id = package_upload_source_dict[
-                source_file.sourcepackagerelease.id].packageupload.id
+                source_file.sourcepackagerelease.id
+            ].packageupload.id
             if upload_id not in source_upload_files:
                 source_upload_files[upload_id] = []
             source_upload_files[upload_id].append(source_file)
@@ -172,9 +162,11 @@ class QueueItemsView(LaunchpadView):
         # getNotNewByIDs can't handle a security-wrapped list of
         # integers.
         archive_ids = removeSecurityProxy(
-            self.context.distribution.all_distro_archive_ids)
+            self.context.distribution.all_distro_archive_ids
+        )
         old_binary_packages = name_set.getNotNewByNames(
-            binary_package_names, self.context, archive_ids)
+            binary_package_names, self.context, archive_ids
+        )
         # Listify to avoid repeated queries.
         return list(old_binary_packages)
 
@@ -185,21 +177,27 @@ class QueueItemsView(LaunchpadView):
         """
         sprs = [spr for spr in source_package_releases if spr is not None]
         return getUtility(IPackagesetSet).getForPackages(
-            self.context, {spr.sourcepackagenameID for spr in sprs})
+            self.context, {spr.sourcepackagenameID for spr in sprs}
+        )
 
     def loadPackageCopyJobs(self, uploads):
         """Batch-load `PackageCopyJob`s and related information."""
         # PackageUploadSet.getAll preloads the PackageCopyJobs themselves,
         # along with their related archives and distributions.
         package_copy_jobs = {
-            removeSecurityProxy(upload.package_copy_job) for upload in uploads
-            if upload.package_copy_job_id is not None}
+            removeSecurityProxy(upload.package_copy_job)
+            for upload in uploads
+            if upload.package_copy_job_id is not None
+        }
         archives = {pcj.source_archive for pcj in package_copy_jobs}
         person_ids = [archive.ownerID for archive in archives]
-        jobs = load_related(Job, package_copy_jobs, ['job_id'])
+        jobs = load_related(Job, package_copy_jobs, ["job_id"])
         person_ids.extend(job.requester_id for job in jobs)
-        list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
-            person_ids, need_validity=True))
+        list(
+            getUtility(IPersonSet).getPrecachedPersonsFromIDs(
+                person_ids, need_validity=True
+            )
+        )
 
     def decoratedQueueBatch(self):
         """Return the current batch, converted to decorated objects.
@@ -222,35 +220,42 @@ class QueueItemsView(LaunchpadView):
         pubs = sum((removeSecurityProxy(u.builds) for u in uploads), [])
 
         source_sprs = load_related(
-            SourcePackageRelease, puses, ['sourcepackagerelease_id'])
-        bpbs = load_related(BinaryPackageBuild, pubs, ['build_id'])
-        bprs = load_referencing(BinaryPackageRelease, bpbs, ['buildID'])
+            SourcePackageRelease, puses, ["sourcepackagerelease_id"]
+        )
+        bpbs = load_related(BinaryPackageBuild, pubs, ["build_id"])
+        bprs = load_referencing(BinaryPackageRelease, bpbs, ["buildID"])
         source_files = load_referencing(
-            SourcePackageReleaseFile, source_sprs, ['sourcepackagereleaseID'])
+            SourcePackageReleaseFile, source_sprs, ["sourcepackagereleaseID"]
+        )
         binary_files = load_referencing(
-            BinaryPackageFile, bprs, ['binarypackagereleaseID'])
+            BinaryPackageFile, bprs, ["binarypackagereleaseID"]
+        )
         file_lfas = load_related(
-            LibraryFileAlias, source_files + binary_files, ['libraryfileID'])
-        load_related(LibraryFileContent, file_lfas, ['contentID'])
+            LibraryFileAlias, source_files + binary_files, ["libraryfileID"]
+        )
+        load_related(LibraryFileContent, file_lfas, ["contentID"])
 
         # Get a dictionary of lists of binary files keyed by upload ID.
         package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)
 
         build_upload_files, binary_package_names = self.binary_files_dict(
-            package_upload_builds_dict, binary_files)
+            package_upload_builds_dict, binary_files
+        )
 
         # Get a dictionary of lists of source files keyed by upload ID.
         package_upload_source_dict = {}
         for pus in puses:
             package_upload_source_dict[pus.sourcepackagerelease_id] = pus
         source_upload_files = self.source_files_dict(
-            package_upload_source_dict, source_files)
+            package_upload_source_dict, source_files
+        )
 
         # Get a list of binary package names that already exist in
         # the distribution.  This avoids multiple queries to is_new
         # on IBinaryPackageRelease.
         self.old_binary_packages = self.calculateOldBinaries(
-            binary_package_names)
+            binary_package_names
+        )
 
         package_sets = self.getPackagesetsFor(source_sprs)
 
@@ -258,14 +263,17 @@ class QueueItemsView(LaunchpadView):
 
         return [
             CompletePackageUpload(
-                item, build_upload_files, source_upload_files, package_sets)
-            for item in uploads]
+                item, build_upload_files, source_upload_files, package_sets
+            )
+            for item in uploads
+        ]
 
     def is_new(self, binarypackagerelease):
         """Return True if the binarypackagerelease has no ancestry."""
         return (
             binarypackagerelease.binarypackagename
-            not in self.old_binary_packages)
+            not in self.old_binary_packages
+        )
 
     def availableActions(self):
         """Return the available actions according to the selected queue state.
@@ -276,14 +284,15 @@ class QueueItemsView(LaunchpadView):
         mutable_states = [
             PackageUploadStatus.NEW,
             PackageUploadStatus.REJECTED,
-            PackageUploadStatus.UNAPPROVED
-            ]
+            PackageUploadStatus.UNAPPROVED,
+        ]
 
         # Return actions only for supported states and require
         # edit permission.
-        if (self.state in mutable_states and
-            check_permission('launchpad.Edit', self.queue)):
-            return ['Accept', 'Reject']
+        if self.state in mutable_states and check_permission(
+            "launchpad.Edit", self.queue
+        ):
+            return ["Accept", "Reject"]
 
         # No actions for unsupported states.
         return []
@@ -299,18 +308,18 @@ class QueueItemsView(LaunchpadView):
             return
 
         # Also bail out if an unauthorised user is faking submissions.
-        if not check_permission('launchpad.Edit', self.queue):
-            self.error = 'You do not have permission to act on queue items.'
+        if not check_permission("launchpad.Edit", self.queue):
+            self.error = "You do not have permission to act on queue items."
             return
 
         # Retrieve the form data.
-        accept = self.request.form.get('Accept', '')
-        reject = self.request.form.get('Reject', '')
-        rejection_comment = self.request.form.get('rejection_comment', '')
-        component_override = self.request.form.get('component_override', '')
-        section_override = self.request.form.get('section_override', '')
-        priority_override = self.request.form.get('priority_override', '')
-        queue_ids = self.request.form.get('QUEUE_ID', '')
+        accept = self.request.form.get("Accept", "")
+        reject = self.request.form.get("Reject", "")
+        rejection_comment = self.request.form.get("rejection_comment", "")
+        component_override = self.request.form.get("component_override", "")
+        section_override = self.request.form.get("section_override", "")
+        priority_override = self.request.form.get("priority_override", "")
+        queue_ids = self.request.form.get("QUEUE_ID", "")
 
         # If no boxes were checked, bail out.
         if (not accept and not reject) or not queue_ids:
@@ -318,7 +327,7 @@ class QueueItemsView(LaunchpadView):
 
         # If we're asked to reject with no comment, bail.
         if reject and not rejection_comment:
-            self.error = 'Rejection comment required.'
+            self.error = "Rejection comment required."
             return
 
         # Determine if there is a source override requested.
@@ -335,11 +344,14 @@ class QueueItemsView(LaunchpadView):
         # override to or from.
         permission_set = getUtility(IArchivePermissionSet)
         component_permissions = permission_set.componentsForQueueAdmin(
-            self.context.main_archive, self.user)
+            self.context.main_archive, self.user
+        )
         allowed_components = {
-            permission.component for permission in component_permissions}
+            permission.component for permission in component_permissions
+        }
         pocket_permissions = permission_set.pocketsForQueueAdmin(
-            self.context.main_archive, self.user)
+            self.context.main_archive, self.user
+        )
 
         try:
             if section_override:
@@ -373,40 +385,55 @@ class QueueItemsView(LaunchpadView):
             queue_item = queue_set.get(int(queue_id))
             # First check that the user has rights to accept/reject this
             # item by virtue of which component it has.
-            if not check_permission('launchpad.Edit', queue_item):
+            if not check_permission("launchpad.Edit", queue_item):
                 existing_component_names = ", ".join(
-                    component.name for component in queue_item.components)
+                    component.name for component in queue_item.components
+                )
                 failure.append(
                     "FAILED: %s (You have no rights to %s component(s) "
-                    "'%s')" % (queue_item.displayname,
-                               action,
-                               existing_component_names))
+                    "'%s')"
+                    % (
+                        queue_item.displayname,
+                        action,
+                        existing_component_names,
+                    )
+                )
                 continue
 
             # Sources and binaries are mutually exclusive when it comes to
             # overriding, so only one of these will be set.
             try:
                 for permission in pocket_permissions:
-                    if (permission.pocket == queue_item.pocket and
-                        permission.distroseries in (
-                            None, queue_item.distroseries)):
+                    if (
+                        permission.pocket == queue_item.pocket
+                        and permission.distroseries
+                        in (None, queue_item.distroseries)
+                    ):
                         item_allowed_components = (
-                            queue_item.distroseries.upload_components)
+                            queue_item.distroseries.upload_components
+                        )
                 else:
                     item_allowed_components = allowed_components
                 source_overridden = queue_item.overrideSource(
-                    new_component, new_section, item_allowed_components)
-                binary_changes = [{
-                    "component": new_component,
-                    "section": new_section,
-                    "priority": new_priority,
-                    }]
+                    new_component, new_section, item_allowed_components
+                )
+                binary_changes = [
+                    {
+                        "component": new_component,
+                        "section": new_section,
+                        "priority": new_priority,
+                    }
+                ]
                 binary_overridden = queue_item.overrideBinaries(
-                    binary_changes, item_allowed_components)
-            except (QueueAdminUnauthorizedError,
-                    QueueInconsistentStateError) as info:
-                failure.append("FAILED: %s (%s)" %
-                               (queue_item.displayname, info))
+                    binary_changes, item_allowed_components
+                )
+            except (
+                QueueAdminUnauthorizedError,
+                QueueInconsistentStateError,
+            ) as info:
+                failure.append(
+                    "FAILED: %s (%s)" % (queue_item.displayname, info)
+                )
                 continue
 
             feedback_interpolations = {
@@ -414,25 +441,30 @@ class QueueItemsView(LaunchpadView):
                 "component": "(unchanged)",
                 "section": "(unchanged)",
                 "priority": "(unchanged)",
-                }
+            }
             if new_component:
-                feedback_interpolations['component'] = new_component.name
+                feedback_interpolations["component"] = new_component.name
             if new_section:
-                feedback_interpolations['section'] = new_section.name
+                feedback_interpolations["section"] = new_section.name
             if new_priority:
                 feedback_interpolations[
-                    'priority'] = new_priority.title.lower()
+                    "priority"
+                ] = new_priority.title.lower()
 
             try:
-                if action == 'accept':
+                if action == "accept":
                     queue_item.acceptFromQueue(user=self.user)
-                elif action == 'reject':
+                elif action == "reject":
                     queue_item.rejectFromQueue(
-                        user=self.user, comment=rejection_comment)
-            except (QueueAdminUnauthorizedError,
-                    QueueInconsistentStateError) as info:
-                failure.append('FAILED: %s (%s)' %
-                               (queue_item.displayname, info))
+                        user=self.user, comment=rejection_comment
+                    )
+            except (
+                QueueAdminUnauthorizedError,
+                QueueInconsistentStateError,
+            ) as info:
+                failure.append(
+                    "FAILED: %s (%s)" % (queue_item.displayname, info)
+                )
             else:
                 if source_overridden:
                     desc = "%(name)s(%(component)s/%(section)s)"
@@ -440,8 +472,7 @@ class QueueItemsView(LaunchpadView):
                     desc = "%(name)s(%(component)s/%(section)s/%(priority)s)"
                 else:
                     desc = "%(name)s"
-                success.append(
-                    "OK: " + desc % feedback_interpolations)
+                success.append("OK: " + desc % feedback_interpolations)
 
         for message in success:
             self.request.response.addInfoNotification(message)
@@ -460,8 +491,7 @@ class QueueItemsView(LaunchpadView):
         Return an iterable of possible sections for the context distroseries
         sorted by their name.
         """
-        return sorted(
-            self.context.sections, key=attrgetter('name'))
+        return sorted(self.context.sections, key=attrgetter("name"))
 
     def priorities(self):
         """An iterable of priorities from PackagePublishingPriority."""
@@ -480,6 +510,7 @@ class CompletePackageUpload:
     the class are cached here to reduce the number of queries that the +queue
     template has to make.  Others are added here exclusively.
     """
+
     # These need to be predeclared to avoid delegates taking them over.
     # Would be nice if there was a way of allowing writes to just work
     # (i.e. no proxying of __set__).
@@ -490,8 +521,13 @@ class CompletePackageUpload:
     contains_build = None
     sourcepackagerelease = None
 
-    def __init__(self, packageupload, build_upload_files,
-                 source_upload_files, package_sets):
+    def __init__(
+        self,
+        packageupload,
+        build_upload_files,
+        source_upload_files,
+        package_sets,
+    ):
         self.pocket = packageupload.pocket
         self.date_created = packageupload.date_created
         self.context = packageupload
@@ -512,15 +548,17 @@ class CompletePackageUpload:
         if self.contains_source:
             self.sourcepackagerelease = self.sources[0].sourcepackagerelease
             self.package_sets = package_sets.get(
-                self.sourcepackagerelease.sourcepackagenameID, [])
+                self.sourcepackagerelease.sourcepackagenameID, []
+            )
         else:
             self.package_sets = []
 
     @property
     def display_package_sets(self):
         """Package sets, if any, for display on the +queue page."""
-        return ' '.join(sorted(
-            packageset.name for packageset in self.package_sets))
+        return " ".join(
+            sorted(packageset.name for packageset in self.package_sets)
+        )
 
     @property
     def display_component(self):
@@ -554,25 +592,28 @@ class CompletePackageUpload:
         if title is None:
             title = alt
         return structured(
-            '<img alt="[%s]" src="/@@/%s" title="%s" />', alt, icon, title)
+            '<img alt="[%s]" src="/@@/%s" title="%s" />', alt, icon, title
+        )
 
     def composeIconList(self):
         """List icons that should be shown for this upload."""
         ddtp = "Debian Description Translation Project Indexes"
         potential_icons = [
-            (self.contains_source, ("Source", 'package-source')),
-            (self.contains_build, ("Build", 'package-binary', "Binary")),
-            (self.package_copy_job, ("Sync", 'package-sync')),
-            (self.contains_translation, ("Translation", 'translation-file')),
-            (self.contains_installer, ("Installer", 'ubuntu-icon')),
-            (self.contains_upgrader, ("Upgrader", 'ubuntu-icon')),
-            (self.contains_ddtp, (ddtp, 'ubuntu-icon')),
-            (self.contains_uefi, ("UEFI Objects for Signing", 'ubuntu-icon')),
-            (self.contains_signing, ("Objects for Signing", 'ubuntu-icon')),
-            ]
+            (self.contains_source, ("Source", "package-source")),
+            (self.contains_build, ("Build", "package-binary", "Binary")),
+            (self.package_copy_job, ("Sync", "package-sync")),
+            (self.contains_translation, ("Translation", "translation-file")),
+            (self.contains_installer, ("Installer", "ubuntu-icon")),
+            (self.contains_upgrader, ("Upgrader", "ubuntu-icon")),
+            (self.contains_ddtp, (ddtp, "ubuntu-icon")),
+            (self.contains_uefi, ("UEFI Objects for Signing", "ubuntu-icon")),
+            (self.contains_signing, ("Objects for Signing", "ubuntu-icon")),
+        ]
         return [
             self.composeIcon(*details)
-            for condition, details in potential_icons if condition]
+            for condition, details in potential_icons
+            if condition
+        ]
 
     def composeNameAndChangesLink(self):
         """Compose HTML: upload name and link to changes file."""
@@ -581,19 +622,25 @@ class CompletePackageUpload:
         else:
             return structured(
                 '<a href="%s" title="Changes file for %s">%s</a>',
-                self.proxiedFile(self.changesfile).http_url, self.displayname,
-                self.displayname)
+                self.proxiedFile(self.changesfile).http_url,
+                self.displayname,
+                self.displayname,
+            )
 
     @property
     def icons_and_name(self):
         """Icon list and name, linked to changes file if appropriate."""
         iconlist_id = "queue%d-iconlist" % self.id
         icons = self.composeIconList()
-        icon_string = structured('\n'.join(['%s'] * len(icons)), *icons)
+        icon_string = structured("\n".join(["%s"] * len(icons)), *icons)
         link = self.composeNameAndChangesLink()
         return structured(
             """<div id="%s"> %s %s (%s)</div>""",
-            iconlist_id, icon_string, link, self.displayarchs).escapedtext
+            iconlist_id,
+            icon_string,
+            link,
+            self.displayarchs,
+        ).escapedtext
 
     def proxiedFile(self, libraryfile):
         """Return a librarian file proxied in the context of this upload."""
diff --git a/lib/lp/soyuz/browser/sourcepackage.py b/lib/lp/soyuz/browser/sourcepackage.py
index 549d7a7..fa58346 100644
--- a/lib/lp/soyuz/browser/sourcepackage.py
+++ b/lib/lp/soyuz/browser/sourcepackage.py
@@ -4,9 +4,9 @@
 """Browser views for source package builds."""
 
 __all__ = [
-    'SourcePackageChangelogView',
-    'SourcePackageCopyrightView',
-    ]
+    "SourcePackageChangelogView",
+    "SourcePackageCopyrightView",
+]
 
 from lazr.restful.utils import smartquote
 from zope.component import getUtility
@@ -15,11 +15,8 @@ from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.distroseries import IDistroSeriesSet
 from lp.registry.interfaces.distroseriesdifference import (
     IDistroSeriesDifferenceSource,
-    )
-from lp.services.webapp import (
-    LaunchpadView,
-    Navigation,
-    )
+)
+from lp.services.webapp import LaunchpadView, Navigation
 
 
 class SourcePackageChangelogView(LaunchpadView):
@@ -45,15 +42,16 @@ class SourcePackageCopyrightView(LaunchpadView):
 
 
 class SourcePackageDifferenceView(Navigation):
-    """A view to traverse to a DistroSeriesDifference.
-    """
+    """A view to traverse to a DistroSeriesDifference."""
 
     def traverse(self, parent_distro_name):
-        parent_distro = getUtility(
-            IDistributionSet).getByName(parent_distro_name)
-        parent_series = getUtility(
-            IDistroSeriesSet).queryByName(
-                parent_distro, self.request.stepstogo.consume())
+        parent_distro = getUtility(IDistributionSet).getByName(
+            parent_distro_name
+        )
+        parent_series = getUtility(IDistroSeriesSet).queryByName(
+            parent_distro, self.request.stepstogo.consume()
+        )
         dsd_source = getUtility(IDistroSeriesDifferenceSource)
         return dsd_source.getByDistroSeriesNameAndParentSeries(
-            self.context.distroseries, self.context.name, parent_series)
+            self.context.distroseries, self.context.name, parent_series
+        )
diff --git a/lib/lp/soyuz/browser/sourcepackagebuilds.py b/lib/lp/soyuz/browser/sourcepackagebuilds.py
index 1496fab..40a1056 100644
--- a/lib/lp/soyuz/browser/sourcepackagebuilds.py
+++ b/lib/lp/soyuz/browser/sourcepackagebuilds.py
@@ -4,8 +4,8 @@
 """Browser views for source package builds."""
 
 __all__ = [
-    'SourcePackageBuildsView',
-    ]
+    "SourcePackageBuildsView",
+]
 
 
 from lp.soyuz.browser.build import BuildRecordsView
diff --git a/lib/lp/soyuz/browser/sourcepackagerelease.py b/lib/lp/soyuz/browser/sourcepackagerelease.py
index 95eb952..b535799 100644
--- a/lib/lp/soyuz/browser/sourcepackagerelease.py
+++ b/lib/lp/soyuz/browser/sourcepackagerelease.py
@@ -4,16 +4,13 @@
 """Browser view for a sourcepackagerelease"""
 
 __all__ = [
-    'linkify_changelog',
-    'SourcePackageReleaseView',
-    ]
+    "linkify_changelog",
+    "SourcePackageReleaseView",
+]
 
 import re
 
-from lp.app.browser.stringformatter import (
-    FormattersAPI,
-    linkify_bug_numbers,
-    )
+from lp.app.browser.stringformatter import FormattersAPI, linkify_bug_numbers
 from lp.services.webapp import LaunchpadView
 from lp.services.webapp.escaping import html_escape
 
@@ -45,7 +42,7 @@ def linkify_changelog(user, changelog, preloaded_person_data=None):
     numbers mentioned.
     """
     if changelog is None:
-        return ''
+        return ""
 
     # Remove any email addresses if the user is not logged in.
     changelog = obfuscate_email(user, changelog)
@@ -66,7 +63,6 @@ def linkify_changelog(user, changelog, preloaded_person_data=None):
 
 
 class SourcePackageReleaseView(LaunchpadView):
-
     @property
     def changelog_entry(self):
         """Return a linkified changelog entry."""
@@ -81,8 +77,8 @@ class SourcePackageReleaseView(LaunchpadView):
     def highlighted_copyright(self):
         """Return the copyright with markup that highlights paths and URLs."""
         if not self.context.copyright:
-            return ''
+            return ""
         # Match any string with 2 or more non-consecutive slashes in it.
-        pattern = re.compile(r'([\S]+/[\S]+/[\S]+)')
+        pattern = re.compile(r"([\S]+/[\S]+/[\S]+)")
         highlight = r'<span class="highlight">\1</span>'
         return pattern.sub(highlight, self.context.copyright)
diff --git a/lib/lp/soyuz/browser/sourceslist.py b/lib/lp/soyuz/browser/sourceslist.py
index 6a63a44..2552de7 100644
--- a/lib/lp/soyuz/browser/sourceslist.py
+++ b/lib/lp/soyuz/browser/sourceslist.py
@@ -8,10 +8,7 @@ from zope.component import getUtility
 from zope.formlib.interfaces import IInputWidget
 from zope.formlib.utility import setUpWidget
 from zope.schema import Choice
-from zope.schema.vocabulary import (
-    SimpleTerm,
-    SimpleVocabulary,
-    )
+from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
 
 from lp import _
 from lp.services.browser_helpers import get_user_agent_distroseries
@@ -35,10 +32,11 @@ class SourcesListEntries:
 class SourcesListEntriesView(LaunchpadView):
     """Renders sources.list entries with a Javascript menu."""
 
-    template = ViewPageTemplateFile('../templates/sources-list-entries.pt')
+    template = ViewPageTemplateFile("../templates/sources-list-entries.pt")
 
-    def __init__(self, context, request, initially_without_selection=False,
-        comment=None):
+    def __init__(
+        self, context, request, initially_without_selection=False, comment=None
+    ):
         self._initially_without_selection = initially_without_selection
         self.comment = comment
         super().__init__(context, request)
@@ -47,9 +45,9 @@ class SourcesListEntriesView(LaunchpadView):
         self.terms = []
         for series in self.context.valid_series:
             distro_version = "%(series_name)s (%(series_version)s)" % {
-                'series_name': series.displayname,
-                'series_version': series.version
-                }
+                "series_name": series.displayname,
+                "series_version": series.version,
+            }
             self.terms.append(SimpleTerm(series, series.name, distro_version))
 
         # If the call-site requested that the widget be displayed initially
@@ -58,20 +56,31 @@ class SourcesListEntriesView(LaunchpadView):
         # a distroseries.
         if self._initially_without_selection or self.default_series is None:
             initial_selection = "Choose your %s version" % (
-                    self.context.distribution.displayname)
-            self.terms.insert(0, SimpleTerm(
-                None, self.initial_value_without_selection,
-                initial_selection))
-
-        field = Choice(__name__='series', title=_("Distro Series"),
-                       vocabulary=SimpleVocabulary(self.terms), required=True)
-        setUpWidget(self, 'series',  field, IInputWidget)
+                self.context.distribution.displayname
+            )
+            self.terms.insert(
+                0,
+                SimpleTerm(
+                    None,
+                    self.initial_value_without_selection,
+                    initial_selection,
+                ),
+            )
+
+        field = Choice(
+            __name__="series",
+            title=_("Distro Series"),
+            vocabulary=SimpleVocabulary(self.terms),
+            required=True,
+        )
+        setUpWidget(self, "series", field, IInputWidget)
         self.series_widget.extra = "onChange='updateSeries(this);'"
 
     @property
     def initial_value_without_selection(self):
         return "YOUR_%s_VERSION_HERE" % (
-            self.context.distribution.displayname.upper())
+            self.context.distribution.displayname.upper()
+        )
 
     @property
     def plain_series_widget(self):
@@ -104,15 +113,18 @@ class SourcesListEntriesView(LaunchpadView):
         # number, we check for a corresponding valid distroseries and, if one
         # is found, return it's name.
         version_number = get_user_agent_distroseries(
-            self.request.getHeader('HTTP_USER_AGENT'))
+            self.request.getHeader("HTTP_USER_AGENT")
+        )
 
         if version_number is not None:
 
             # Finally, check if this version is one of the available
             # distroseries for this archive:
             for term in self.terms:
-                if (term.value is not None and
-                    term.value.version == version_number):
+                if (
+                    term.value is not None
+                    and term.value.version == version_number
+                ):
                     return term.value
 
         # If we were not able to get the users distribution series, then
@@ -143,25 +155,33 @@ class SourcesListEntriesWidget:
         """Setup and return the sources list entries widget."""
         if self.active_token is None:
             entries = SourcesListEntries(
-                self.archive.distribution, self.archive_url,
-                self.archive.series_with_sources)
+                self.archive.distribution,
+                self.archive_url,
+                self.archive.series_with_sources,
+            )
             return SourcesListEntriesView(entries, self.request)
         else:
             comment = "Personal access of %s (%s) to %s" % (
                 self.sources_list_user.displayname,
-                self.sources_list_user.name, self.archive.displayname)
+                self.sources_list_user.name,
+                self.archive.displayname,
+            )
             entries = SourcesListEntries(
-                self.archive.distribution, self.active_token.archive_url,
-                self.archive.series_with_sources)
+                self.archive.distribution,
+                self.active_token.archive_url,
+                self.archive.series_with_sources,
+            )
             return SourcesListEntriesView(
-                entries, self.request, comment=comment)
+                entries, self.request, comment=comment
+            )
 
     @cachedproperty
     def active_token(self):
         """Return the corresponding current token for this subscription."""
         token_set = getUtility(IArchiveAuthTokenSet)
         return token_set.getActiveTokenForArchiveAndPerson(
-            self.archive, self.sources_list_user)
+            self.archive, self.sources_list_user
+        )
 
     @property
     def archive_url(self):
diff --git a/lib/lp/soyuz/browser/tests/test_archive.py b/lib/lp/soyuz/browser/tests/test_archive.py
index 040a1e4..b2fd564 100644
--- a/lib/lp/soyuz/browser/tests/test_archive.py
+++ b/lib/lp/soyuz/browser/tests/test_archive.py
@@ -1,13 +1,9 @@
 # Copyright 2014-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from fixtures import FakeLogger
 import soupmatchers
-from testtools.matchers import (
-    MatchesSetwise,
-    MatchesStructure,
-    Not,
-    )
+from fixtures import FakeLogger
+from testtools.matchers import MatchesSetwise, MatchesStructure, Not
 from zope.component import getUtility
 
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
@@ -16,16 +12,13 @@ from lp.services.webapp import canonical_url
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.interfaces.archive import CannotModifyArchiveProcessor
 from lp.testing import (
+    TestCaseWithFactory,
     admin_logged_in,
     login_person,
     person_logged_in,
     record_two_runs,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    LaunchpadFunctionalLayer,
-    )
+)
+from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 from lp.testing.pages import extract_text
 from lp.testing.views import create_initialized_view
@@ -38,19 +31,21 @@ class TestArchiveIndexView(TestCaseWithFactory):
     def test_index_page_without_packages(self):
         ppa = self.factory.makeArchive()
         self.factory.makeSourcePackagePublishingHistory(
-            archive=ppa, status=PackagePublishingStatus.DELETED)
+            archive=ppa, status=PackagePublishingStatus.DELETED
+        )
         owner = login_person(ppa.owner)
-        browser = self.getUserBrowser(
-            canonical_url(ppa), user=owner)
+        browser = self.getUserBrowser(canonical_url(ppa), user=owner)
         html = browser.contents
         empty_package_msg_exists = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'no packages message', 'div',
-                attrs={'id': 'empty-result'}),
+                "no packages message", "div", attrs={"id": "empty-result"}
+            ),
         )
         self.assertThat(
-            html, Not(empty_package_msg_exists),
-            'Message "No matching package for (...)" should not appear')
+            html,
+            Not(empty_package_msg_exists),
+            'Message "No matching package for (...)" should not appear',
+        )
 
 
 class TestArchiveEditView(TestCaseWithFactory):
@@ -65,38 +60,48 @@ class TestArchiveEditView(TestCaseWithFactory):
         proc_amd64 = getUtility(IProcessorSet).getByName("amd64")
         self.factory.makeDistroArchSeries(
             distroseries=self.ubuntu.getSeries("breezy-autotest"),
-            architecturetag="amd64", processor=proc_amd64)
+            architecturetag="amd64",
+            processor=proc_amd64,
+        )
 
     def assertArchiveProcessors(self, archive, names):
         with person_logged_in(archive.owner):
             self.assertContentEqual(
-                names, [processor.name for processor in archive.processors])
+                names, [processor.name for processor in archive.processors]
+            )
 
     def assertProcessorControls(self, processors_control, enabled, disabled):
         matchers = [
             MatchesStructure.byEquality(optionValue=name, disabled=False)
-            for name in enabled]
-        matchers.extend([
-            MatchesStructure.byEquality(optionValue=name, disabled=True)
-            for name in disabled])
+            for name in enabled
+        ]
+        matchers.extend(
+            [
+                MatchesStructure.byEquality(optionValue=name, disabled=True)
+                for name in disabled
+            ]
+        )
         self.assertThat(processors_control.controls, MatchesSetwise(*matchers))
 
     def test_display_processors(self):
         ppa = self.factory.makeArchive()
         owner = login_person(ppa.owner)
         browser = self.getUserBrowser(
-            canonical_url(ppa) + "/+edit", user=owner)
+            canonical_url(ppa) + "/+edit", user=owner
+        )
         processors = browser.getControl(name="field.processors")
         self.assertContentEqual(
             ["Intel 386 (386)", "AMD 64bit (amd64)", "HPPA Processor (hppa)"],
-            [extract_text(option) for option in processors.displayOptions])
+            [extract_text(option) for option in processors.displayOptions],
+        )
         self.assertContentEqual(["386", "amd64", "hppa"], processors.options)
 
     def test_edit_processors(self):
         ppa = self.factory.makeArchive()
         self.assertArchiveProcessors(ppa, ["386", "amd64", "hppa"])
         browser = self.getUserBrowser(
-            canonical_url(ppa) + "/+edit", user=ppa.owner)
+            canonical_url(ppa) + "/+edit", user=ppa.owner
+        )
         processors = browser.getControl(name="field.processors")
         self.assertContentEqual(["386", "amd64", "hppa"], processors.value)
         processors.value = ["386", "amd64"]
@@ -113,11 +118,13 @@ class TestArchiveEditView(TestCaseWithFactory):
         proc_386 = getUtility(IProcessorSet).getByName("386")
         proc_amd64 = getUtility(IProcessorSet).getByName("amd64")
         proc_armel = self.factory.makeProcessor(
-            name="armel", restricted=True, build_by_default=False)
+            name="armel", restricted=True, build_by_default=False
+        )
         ppa = self.factory.makeArchive()
         ppa.setProcessors([proc_386, proc_amd64, proc_armel])
         browser = self.getUserBrowser(
-            canonical_url(ppa) + "/+edit", user=ppa.owner)
+            canonical_url(ppa) + "/+edit", user=ppa.owner
+        )
         processors = browser.getControl(name="field.processors")
         self.assertContentEqual(["386", "amd64"], processors.value)
         processors.value = ["amd64"]
@@ -129,18 +136,23 @@ class TestArchiveEditView(TestCaseWithFactory):
         # checkbox in the UI, and the processor cannot be enabled.
         self.useFixture(FakeLogger())
         proc_armhf = self.factory.makeProcessor(
-            name="armhf", restricted=True, build_by_default=False)
+            name="armhf", restricted=True, build_by_default=False
+        )
         self.factory.makeDistroArchSeries(
             distroseries=self.ubuntu.getSeries("breezy-autotest"),
-            architecturetag="armhf", processor=proc_armhf)
+            architecturetag="armhf",
+            processor=proc_armhf,
+        )
         ppa = self.factory.makeArchive()
         self.assertArchiveProcessors(ppa, ["386", "amd64", "hppa"])
         browser = self.getUserBrowser(
-            canonical_url(ppa) + "/+edit", user=ppa.owner)
+            canonical_url(ppa) + "/+edit", user=ppa.owner
+        )
         processors = browser.getControl(name="field.processors")
         self.assertContentEqual(["386", "amd64", "hppa"], processors.value)
         self.assertProcessorControls(
-            processors, ["386", "amd64", "hppa"], ["armhf"])
+            processors, ["386", "amd64", "hppa"], ["armhf"]
+        )
         # Even if the user works around the disabled checkbox and forcibly
         # enables it, they can't enable the restricted processor.
         for control in processors.controls:
@@ -148,7 +160,8 @@ class TestArchiveEditView(TestCaseWithFactory):
                 del control._control.attrs["disabled"]
         processors.value = ["386", "amd64", "armhf"]
         self.assertRaises(
-            CannotModifyArchiveProcessor, browser.getControl("Save").click)
+            CannotModifyArchiveProcessor, browser.getControl("Save").click
+        )
 
     def test_edit_processors_restricted_already_enabled(self):
         # A restricted processor that is already enabled is shown with a
@@ -159,20 +172,25 @@ class TestArchiveEditView(TestCaseWithFactory):
         proc_386 = getUtility(IProcessorSet).getByName("386")
         proc_amd64 = getUtility(IProcessorSet).getByName("amd64")
         proc_armhf = self.factory.makeProcessor(
-            name="armhf", restricted=True, build_by_default=False)
+            name="armhf", restricted=True, build_by_default=False
+        )
         self.factory.makeDistroArchSeries(
             distroseries=self.ubuntu.getSeries("breezy-autotest"),
-            architecturetag="armhf", processor=proc_armhf)
+            architecturetag="armhf",
+            processor=proc_armhf,
+        )
         ppa = self.factory.makeArchive()
         ppa.setProcessors([proc_386, proc_amd64, proc_armhf])
         self.assertArchiveProcessors(ppa, ["386", "amd64", "armhf"])
         browser = self.getUserBrowser(
-            canonical_url(ppa) + "/+edit", user=ppa.owner)
+            canonical_url(ppa) + "/+edit", user=ppa.owner
+        )
         processors = browser.getControl(name="field.processors")
         # armhf is checked but disabled.
         self.assertContentEqual(["386", "amd64", "armhf"], processors.value)
         self.assertProcessorControls(
-            processors, ["386", "amd64", "hppa"], ["armhf"])
+            processors, ["386", "amd64", "hppa"], ["armhf"]
+        )
         processors.value = ["386"]
         browser.getControl("Save").click()
         self.assertArchiveProcessors(ppa, ["386", "armhf"])
@@ -188,14 +206,19 @@ class TestArchiveCopyPackagesView(TestCaseWithFactory):
 
         def create_targets():
             self.factory.makeArchive(
-                owner=self.factory.makeTeam(members=[person]))
+                owner=self.factory.makeTeam(members=[person])
+            )
             archive = self.factory.makeArchive()
             with admin_logged_in():
-                archive.newComponentUploader(person, 'main')
+                archive.newComponentUploader(person, "main")
+
         nb_objects = 2
         login_person(person)
         recorder1, recorder2 = record_two_runs(
             lambda: create_initialized_view(
-                source, '+copy-packages', principal=person),
-            create_targets, nb_objects)
+                source, "+copy-packages", principal=person
+            ),
+            create_targets,
+            nb_objects,
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
diff --git a/lib/lp/soyuz/browser/tests/test_archive_admin_view.py b/lib/lp/soyuz/browser/tests/test_archive_admin_view.py
index 21f624c..d31966c 100644
--- a/lib/lp/soyuz/browser/tests/test_archive_admin_view.py
+++ b/lib/lp/soyuz/browser/tests/test_archive_admin_view.py
@@ -8,16 +8,9 @@ from zope.testbrowser.browser import LinkNotFoundError
 from lp.services.webapp import canonical_url
 from lp.services.webapp.servers import LaunchpadTestRequest
 from lp.soyuz.browser.archive import ArchiveAdminView
-from lp.soyuz.enums import (
-    ArchivePublishingMethod,
-    ArchiveRepositoryFormat,
-    )
+from lp.soyuz.enums import ArchivePublishingMethod, ArchiveRepositoryFormat
 from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
-from lp.testing import (
-    login,
-    login_person,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, login, login_person
 from lp.testing.layers import LaunchpadFunctionalLayer
 
 
@@ -31,22 +24,23 @@ class TestArchiveAdminView(TestCaseWithFactory):
         self.ppa = self.factory.makeArchive()
         # Login as an admin to ensure access to the view's context
         # object.
-        login('admin@xxxxxxxxxxxxx')
+        login("admin@xxxxxxxxxxxxx")
 
     def initialize_admin_view(self, archive, fields):
         """Initialize the admin view to set the privacy.."""
-        method = 'POST'
+        method = "POST"
         form = {
-            'field.enabled': 'on',
-            'field.actions.save': 'Save',
-            'field.private': 'on' if archive.private else 'off',
-            'field.publishing_method': archive.publishing_method.title,
-            'field.repository_format': archive.repository_format.title,
-            }
+            "field.enabled": "on",
+            "field.actions.save": "Save",
+            "field.private": "on" if archive.private else "off",
+            "field.publishing_method": archive.publishing_method.title,
+            "field.repository_format": archive.repository_format.title,
+        }
         form.update(fields)
 
-        view = ArchiveAdminView(self.ppa, LaunchpadTestRequest(
-            method=method, form=form))
+        view = ArchiveAdminView(
+            self.ppa, LaunchpadTestRequest(method=method, form=form)
+        )
         view.initialize()
         return view
 
@@ -63,10 +57,14 @@ class TestArchiveAdminView(TestCaseWithFactory):
         ppa_url = canonical_url(self.ppa)
         browser = self.getUserBrowser(ppa_url, user=self.ppa.owner)
         self.assertRaises(
-            LinkNotFoundError, browser.getLink, "Administer archive")
+            LinkNotFoundError, browser.getLink, "Administer archive"
+        )
         self.assertRaises(
-            Unauthorized, self.getUserBrowser, ppa_url + "/+admin",
-            user=self.ppa.owner)
+            Unauthorized,
+            self.getUserBrowser,
+            ppa_url + "/+admin",
+            user=self.ppa.owner,
+        )
 
     def test_set_private_without_packages(self):
         # If a ppa does not have packages published, it is possible to
@@ -89,9 +87,10 @@ class TestArchiveAdminView(TestCaseWithFactory):
         view = self.initialize_admin_view(self.ppa, {"field.private": "on"})
         self.assertEqual(1, len(view.errors))
         self.assertEqual(
-            'This archive already has published sources. '
-            'It is not possible to switch the privacy.',
-            view.errors[0])
+            "This archive already has published sources. "
+            "It is not possible to switch the privacy.",
+            view.errors[0],
+        )
 
     def test_set_public_with_packages(self):
         # A PPA that does have (or had) packages published is presented
@@ -102,52 +101,63 @@ class TestArchiveAdminView(TestCaseWithFactory):
         view = self.initialize_admin_view(self.ppa, {"field.private": "off"})
         self.assertEqual(1, len(view.errors))
         self.assertEqual(
-            'This archive already has published sources. '
-            'It is not possible to switch the privacy.',
-            view.errors[0])
+            "This archive already has published sources. "
+            "It is not possible to switch the privacy.",
+            view.errors[0],
+        )
 
     def test_set_publishing_method_without_packages(self):
         # If a PPA does not have packages published, it is possible to
         # update the publishing_method attribute.
         self.assertEqual(
-            ArchivePublishingMethod.LOCAL, self.ppa.publishing_method)
+            ArchivePublishingMethod.LOCAL, self.ppa.publishing_method
+        )
         view = self.initialize_admin_view(
-            self.ppa, {"field.publishing_method": "ARTIFACTORY"})
+            self.ppa, {"field.publishing_method": "ARTIFACTORY"}
+        )
         self.assertEqual(0, len(view.errors))
         self.assertEqual(
-            ArchivePublishingMethod.ARTIFACTORY, self.ppa.publishing_method)
+            ArchivePublishingMethod.ARTIFACTORY, self.ppa.publishing_method
+        )
 
     def test_set_publishing_method_with_packages(self):
         # If a PPA has packages published, it is impossible to update the
         # publishing_method attribute.
         self.publish_to_ppa(self.ppa)
         view = self.initialize_admin_view(
-            self.ppa, {"field.publishing_method": "ARTIFACTORY"})
+            self.ppa, {"field.publishing_method": "ARTIFACTORY"}
+        )
         self.assertEqual(1, len(view.errors))
         self.assertEqual(
             "This archive already has published packages. "
             "It is not possible to switch the publishing method.",
-            view.errors[0])
+            view.errors[0],
+        )
 
     def test_set_repository_format_without_packages(self):
         # If a PPA does not have packages published, it is possible to
         # update the repository_format attribute.
         self.assertEqual(
-            ArchiveRepositoryFormat.DEBIAN, self.ppa.repository_format)
+            ArchiveRepositoryFormat.DEBIAN, self.ppa.repository_format
+        )
         view = self.initialize_admin_view(
-            self.ppa, {"field.repository_format": "PYTHON"})
+            self.ppa, {"field.repository_format": "PYTHON"}
+        )
         self.assertEqual(0, len(view.errors))
         self.assertEqual(
-            ArchiveRepositoryFormat.PYTHON, self.ppa.repository_format)
+            ArchiveRepositoryFormat.PYTHON, self.ppa.repository_format
+        )
 
     def test_set_repository_format_with_packages(self):
         # If a PPA has packages published, it is impossible to update the
         # repository_format attribute.
         self.publish_to_ppa(self.ppa)
         view = self.initialize_admin_view(
-            self.ppa, {"field.repository_format": "PYTHON"})
+            self.ppa, {"field.repository_format": "PYTHON"}
+        )
         self.assertEqual(1, len(view.errors))
         self.assertEqual(
             "This archive already has published packages. "
             "It is not possible to switch the repository format.",
-            view.errors[0])
+            view.errors[0],
+        )
diff --git a/lib/lp/soyuz/browser/tests/test_archive_packages.py b/lib/lp/soyuz/browser/tests/test_archive_packages.py
index ea6d517..4302bbf 100644
--- a/lib/lp/soyuz/browser/tests/test_archive_packages.py
+++ b/lib/lp/soyuz/browser/tests/test_archive_packages.py
@@ -4,18 +4,14 @@
 """Unit tests for TestP3APackages."""
 
 __all__ = [
-    'TestP3APackages',
-    'TestPPAPackages',
-    ]
+    "TestP3APackages",
+    "TestPPAPackages",
+]
 
 import re
 
 import soupmatchers
-from testtools.matchers import (
-    Equals,
-    LessThan,
-    Not,
-    )
+from testtools.matchers import Equals, LessThan, Not
 from zope.component import getUtility
 from zope.security.interfaces import Unauthorized
 from zope.security.proxy import removeSecurityProxy
@@ -30,18 +26,15 @@ from lp.soyuz.browser.archive import ArchiveNavigationMenu
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.interfaces.packagecopyjob import IPlainPackageCopyJobSource
 from lp.testing import (
+    RequestTimelineCollector,
+    TestCaseWithFactory,
     celebrity_logged_in,
     login,
     login_person,
     person_logged_in,
     record_two_runs,
-    RequestTimelineCollector,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    LaunchpadFunctionalLayer,
-    )
+)
+from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 from lp.testing.pages import get_feedback_messages
 from lp.testing.sampledata import ADMIN_EMAIL
@@ -55,28 +48,30 @@ class TestP3APackages(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.private_ppa = self.factory.makeArchive(description='Foo')
-        login('admin@xxxxxxxxxxxxx')
+        self.private_ppa = self.factory.makeArchive(description="Foo")
+        login("admin@xxxxxxxxxxxxx")
         self.private_ppa.private = True
-        self.joe = self.factory.makePerson(name='joe')
-        self.fred = self.factory.makePerson(name='fred')
-        self.mary = self.factory.makePerson(name='mary')
+        self.joe = self.factory.makePerson(name="joe")
+        self.fred = self.factory.makePerson(name="fred")
+        self.mary = self.factory.makePerson(name="mary")
         login_person(self.private_ppa.owner)
         self.private_ppa.newSubscription(self.joe, self.private_ppa.owner)
-        self.private_ppa.newComponentUploader(self.mary, 'main')
+        self.private_ppa.newComponentUploader(self.mary, "main")
 
     def test_packages_unauthorized(self):
-        """A person with no subscription will not be able to view +packages
-        """
+        """A person with no subscription will not be able to view +packages"""
         login_person(self.fred)
         self.assertRaises(
-            Unauthorized, create_initialized_view, self.private_ppa,
-            "+packages")
+            Unauthorized,
+            create_initialized_view,
+            self.private_ppa,
+            "+packages",
+        )
 
     def test_packages_authorized_for_commercial_admin_with_subscription(self):
         # A commercial admin should always be able to see +packages even
         # if they have a subscription.
-        login('admin@xxxxxxxxxxxxx')
+        login("admin@xxxxxxxxxxxxx")
         admins = getUtility(ILaunchpadCelebrities).commercial_admin
         admins.addMember(self.joe, admins)
         login_person(self.joe)
@@ -112,7 +107,8 @@ class TestPPAPackages(TestCaseWithFactory):
     def getPackagesView(self, query_string=None):
         ppa = self.factory.makeArchive()
         return create_initialized_view(
-            ppa, "+packages", query_string=query_string)
+            ppa, "+packages", query_string=query_string
+        )
 
     def assertNotifications(self, ppa, notification, person=None):
         # Assert that while requesting a 'ppa' page as 'person', the
@@ -120,12 +116,16 @@ class TestPPAPackages(TestCaseWithFactory):
         if person is not None:
             login_person(ppa.owner)
             principal = LaunchpadPrincipal(
-                ppa.owner.account.id, ppa.owner.displayname,
-                ppa.owner.displayname, ppa.owner)
+                ppa.owner.account.id,
+                ppa.owner.displayname,
+                ppa.owner.displayname,
+                ppa.owner,
+            )
         else:
             principal = None
         page = create_initialized_view(
-            ppa, "+packages", principal=principal).render()
+            ppa, "+packages", principal=principal
+        ).render()
         notifications = get_feedback_messages(page)
         self.assertIn(notification, notifications)
 
@@ -138,7 +138,8 @@ class TestPPAPackages(TestCaseWithFactory):
             ppa,
             "Publishing has been disabled for this archive. (re-enable "
             "publishing)",
-            person=ppa.owner)
+            person=ppa.owner,
+        )
 
     def test_warning_for_disabled_publishing_with_private_ppa(self):
         # Ensure that a notification is shown when archive.publish
@@ -150,7 +151,8 @@ class TestPPAPackages(TestCaseWithFactory):
             "Publishing has been disabled for this archive. (re-enable "
             "publishing) Since this archive is private, no builds are being "
             "dispatched.",
-            person=ppa.owner)
+            person=ppa.owner,
+        )
 
     def test_warning_for_disabled_publishing_with_anonymous_user(self):
         # The warning notification doesn't mention the Change details
@@ -158,43 +160,53 @@ class TestPPAPackages(TestCaseWithFactory):
         ppa = self.factory.makeArchive()
         removeSecurityProxy(ppa).publish = False
         self.assertNotifications(
-            ppa, 'Publishing has been disabled for this archive.')
+            ppa, "Publishing has been disabled for this archive."
+        )
 
     def test_page_show_singular_pending_builds(self):
         ppa = self.factory.makeArchive()
         self.factory.makeBinaryPackageBuild(
-            archive=ppa, status=BuildStatus.NEEDSBUILD)
+            archive=ppa, status=BuildStatus.NEEDSBUILD
+        )
         owner = login_person(ppa.owner)
         browser = self.getUserBrowser(
-            canonical_url(ppa) + '/+packages', user=owner)
+            canonical_url(ppa) + "/+packages", user=owner
+        )
         html = browser.contents
         pending_build_exists = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'pending build', 'p',
-                text=re.compile(r'(?s).*(pending\s*build\.)')),
+                "pending build",
+                "p",
+                text=re.compile(r"(?s).*(pending\s*build\.)"),
+            ),
         )
         self.assertThat(
-            html, pending_build_exists,
-            'Pending builds message was not found')
+            html, pending_build_exists, "Pending builds message was not found"
+        )
 
     def test_page_show_plural_pending_builds(self):
         ppa = self.factory.makeArchive()
         self.factory.makeBinaryPackageBuild(
-            archive=ppa, status=BuildStatus.NEEDSBUILD)
+            archive=ppa, status=BuildStatus.NEEDSBUILD
+        )
         self.factory.makeBinaryPackageBuild(
-            archive=ppa, status=BuildStatus.NEEDSBUILD)
+            archive=ppa, status=BuildStatus.NEEDSBUILD
+        )
         owner = login_person(ppa.owner)
         browser = self.getUserBrowser(
-            canonical_url(ppa) + '/+packages', user=owner)
+            canonical_url(ppa) + "/+packages", user=owner
+        )
         html = browser.contents
         pending_build_exists = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'pending build', 'p',
-                text=re.compile(r'(?s).*(pending\s*builds\.)')),
+                "pending build",
+                "p",
+                text=re.compile(r"(?s).*(pending\s*builds\.)"),
+            ),
         )
         self.assertThat(
-            html, pending_build_exists,
-            'Pending builds message was not found')
+            html, pending_build_exists, "Pending builds message was not found"
+        )
 
     def test_ppa_packages_menu_is_enabled(self):
         joe = self.factory.makePerson()
@@ -205,51 +217,63 @@ class TestPPAPackages(TestCaseWithFactory):
         self.assertTrue(menu.packages().enabled)
 
     def test_specified_name_filter_works(self):
-        view = self.getPackagesView('field.name_filter=blah')
-        self.assertEqual('blah', view.specified_name_filter)
+        view = self.getPackagesView("field.name_filter=blah")
+        self.assertEqual("blah", view.specified_name_filter)
 
     def test_page_with_filter_parameter_shows_message(self):
         ppa = self.factory.makeArchive()
         self.factory.makeSourcePackagePublishingHistory(archive=ppa)
         owner = login_person(ppa.owner)
         browser = self.getUserBrowser(
-            canonical_url(ppa) + '/+packages?field.name_filter=unknown_name',
-            user=owner)
+            canonical_url(ppa) + "/+packages?field.name_filter=unknown_name",
+            user=owner,
+        )
         html = browser.contents
         empty_package_msg_exists = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'no matching packages message', 'div',
+                "no matching packages message",
+                "div",
                 text=re.compile(
-                    r"\s*No matching package for 'unknown_name'\s*"),
-                attrs={'id': 'empty-result'}),
+                    r"\s*No matching package for 'unknown_name'\s*"
+                ),
+                attrs={"id": "empty-result"},
+            ),
         )
         self.assertThat(
-            html, empty_package_msg_exists,
-            'Message "No matching package for (...)" should appear')
+            html,
+            empty_package_msg_exists,
+            'Message "No matching package for (...)" should appear',
+        )
 
     def test_page_without_filter_parameter_doesnt_show_message(self):
         ppa = self.factory.makeArchive()
         self.factory.makeSourcePackagePublishingHistory(
-            archive=ppa, status=PackagePublishingStatus.DELETED)
+            archive=ppa, status=PackagePublishingStatus.DELETED
+        )
         owner = login_person(ppa.owner)
         browser = self.getUserBrowser(
-            canonical_url(ppa) + '/+packages', user=owner)
+            canonical_url(ppa) + "/+packages", user=owner
+        )
         html = browser.contents
         empty_package_msg_exists = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'no matching packages message', 'div',
-                attrs={'id': 'empty-result'}),
+                "no matching packages message",
+                "div",
+                attrs={"id": "empty-result"},
+            ),
         )
         self.assertThat(
-            html, Not(empty_package_msg_exists),
-            'Message "No matching package for (...)" should *NOT* appear')
+            html,
+            Not(empty_package_msg_exists),
+            'Message "No matching package for (...)" should *NOT* appear',
+        )
 
     def test_specified_name_filter_returns_none_on_omission(self):
         view = self.getPackagesView()
         self.assertIs(None, view.specified_name_filter)
 
     def test_specified_name_filter_returns_none_on_empty_filter(self):
-        view = self.getPackagesView('field.name_filter=')
+        view = self.getPackagesView("field.name_filter=")
         self.assertIs(None, view.specified_name_filter)
 
     def test_source_query_counts(self):
@@ -286,9 +310,11 @@ class TestPPAPackages(TestCaseWithFactory):
         with person_logged_in(viewer):
             for i in range(2):
                 pkg = self.factory.makeSourcePackagePublishingHistory(
-                    archive=ppa)
-                self.factory.makeSourcePackagePublishingHistory(archive=ppa,
-                    distroseries=pkg.distroseries)
+                    archive=ppa
+                )
+                self.factory.makeSourcePackagePublishingHistory(
+                    archive=ppa, distroseries=pkg.distroseries
+                )
             url = canonical_url(ppa) + "/+packages"
         browser.open(url)
         self.assertThat(collector, HasQueryCount(LessThan(expected_count)))
@@ -307,8 +333,7 @@ class TestPPAPackages(TestCaseWithFactory):
             # short-circuit prevents the packages iteration happening at
             # all and we're not actually measuring scaling
             # appropriately.
-            pkg = self.factory.makeBinaryPackagePublishingHistory(
-                archive=ppa)
+            pkg = self.factory.makeBinaryPackagePublishingHistory(archive=ppa)
             url = canonical_url(ppa) + "/+packages"
         browser.open(url)
         self.assertThat(collector, HasQueryCount(LessThan(query_baseline)))
@@ -322,7 +347,8 @@ class TestPPAPackages(TestCaseWithFactory):
         with person_logged_in(viewer):
             for i in range(3):
                 pkg = self.factory.makeBinaryPackagePublishingHistory(
-                    archive=ppa, distroarchseries=pkg.distroarchseries)
+                    archive=ppa, distroarchseries=pkg.distroarchseries
+                )
             url = canonical_url(ppa) + "/+packages"
         browser.open(url)
         self.assertThat(collector, HasQueryCount(Equals(expected_count)))
@@ -334,7 +360,7 @@ class TestPPAPackagesJobNotifications(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.ws_version = 'devel'
+        self.ws_version = "devel"
         self.person = self.factory.makePerson()
         self.archive = self.factory.makeArchive(owner=self.person)
 
@@ -344,11 +370,15 @@ class TestPPAPackagesJobNotifications(TestCaseWithFactory):
         requester = self.factory.makePerson()
         source = getUtility(IPlainPackageCopyJobSource)
         job = source.create(
-            package_name=package_name, source_archive=source_archive,
-            target_archive=self.archive, target_distroseries=distroseries,
+            package_name=package_name,
+            source_archive=source_archive,
+            target_archive=self.archive,
+            target_distroseries=distroseries,
             target_pocket=PackagePublishingPocket.RELEASE,
-            package_version="1.0-1", include_binaries=True,
-            requester=requester)
+            package_version="1.0-1",
+            include_binaries=True,
+            requester=requester,
+        )
         job.start()
         if failed:
             job.fail()
@@ -357,135 +387,173 @@ class TestPPAPackagesJobNotifications(TestCaseWithFactory):
     def getPackagesView(self, query_string=None):
         ppa = self.factory.makeArchive()
         return create_initialized_view(
-            ppa, "+packages", query_string=query_string)
+            ppa, "+packages", query_string=query_string
+        )
 
     def test_job_notifications_display_failed(self):
-        job = self.makeJob('package_1', failed=True)
+        job = self.makeJob("package_1", failed=True)
         # Manually poke an error message.
         removeSecurityProxy(job).extendMetadata(
-            {'error_message': 'Job failed!'})
+            {"error_message": "Job failed!"}
+        )
         with person_logged_in(self.archive.owner):
             view = create_initialized_view(
-                self.archive, "+packages", principal=self.archive.owner)
+                self.archive, "+packages", principal=self.archive.owner
+            )
             html = view.render()
         packages_matches = soupmatchers.HTMLContains(
             # Check the main title.
             soupmatchers.Tag(
-                'job summary', 'a',
-                text=re.compile('Copying.*'),
-                attrs={'class': re.compile('job-summary')}),
+                "job summary",
+                "a",
+                text=re.compile("Copying.*"),
+                attrs={"class": re.compile("job-summary")},
+            ),
             # Check the link to the source archive.
             soupmatchers.Tag(
-                'copied from', 'a',
+                "copied from",
+                "a",
                 text=job.source_archive.displayname,
-                attrs={'class': re.compile('copied-from')}),
+                attrs={"class": re.compile("copied-from")},
+            ),
             # Check the presence of the link to remove the notification.
             soupmatchers.Tag(
-                'no remove notification link', 'a',
-                text=re.compile(r'\s*Remove notification\s*'),
-                attrs={'class': re.compile('remove-notification')}),
+                "no remove notification link",
+                "a",
+                text=re.compile(r"\s*Remove notification\s*"),
+                attrs={"class": re.compile("remove-notification")},
+            ),
             # Check the presence of the error message.
             soupmatchers.Tag(
-                'job error msg', 'div',
-                text='Job failed!',
-                attrs={'class': re.compile('job-failed-error-msg')}),
-            )
+                "job error msg",
+                "div",
+                text="Job failed!",
+                attrs={"class": re.compile("job-failed-error-msg")},
+            ),
+        )
         self.assertThat(html, packages_matches)
 
     def test_job_notifications_display_in_progress_not_allowed(self):
         other_person = self.factory.makePerson()
-        self.makeJob('package_1', failed=True)
+        self.makeJob("package_1", failed=True)
         with person_logged_in(other_person):
             view = create_initialized_view(
-                self.archive, "+packages", principal=other_person)
+                self.archive, "+packages", principal=other_person
+            )
             html = view.render()
         packages_not_matches = soupmatchers.HTMLContains(
             # Check the absence of the link remove the notification.
             soupmatchers.Tag(
-                'no remove notification link', 'a',
-                text=re.compile(r'\s*Remove notification\s*'),
-                attrs={'class': re.compile('remove-notification')}),
-            )
+                "no remove notification link",
+                "a",
+                text=re.compile(r"\s*Remove notification\s*"),
+                attrs={"class": re.compile("remove-notification")},
+            ),
+        )
         self.assertThat(html, Not(packages_not_matches))
 
     def test_job_notifications_display_in_progress(self):
-        job = self.makeJob('package_1', failed=False)
+        job = self.makeJob("package_1", failed=False)
         with person_logged_in(self.archive.owner):
             view = create_initialized_view(
-                self.archive, "+packages", principal=self.archive.owner)
+                self.archive, "+packages", principal=self.archive.owner
+            )
             html = view.render()
         packages_matches = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'job summary', 'a',
-                text=re.compile('Copying.*'),
-                attrs={'class': re.compile('job-summary')}),
+                "job summary",
+                "a",
+                text=re.compile("Copying.*"),
+                attrs={"class": re.compile("job-summary")},
+            ),
             soupmatchers.Tag(
-                'copied from', 'a',
+                "copied from",
+                "a",
                 text=job.source_archive.displayname,
-                attrs={'class': re.compile('copied-from')}),
-            )
+                attrs={"class": re.compile("copied-from")},
+            ),
+        )
         packages_not_matches = soupmatchers.HTMLContains(
             # Check the absence of the link remove the notification.
             soupmatchers.Tag(
-                'remove notification link', 'a',
-                text=re.compile(r'\s*Remove notification\s*'),
-                attrs={'class': re.compile('remove-notification')}),
-            )
+                "remove notification link",
+                "a",
+                text=re.compile(r"\s*Remove notification\s*"),
+                attrs={"class": re.compile("remove-notification")},
+            ),
+        )
         self.assertThat(html, packages_matches)
         self.assertThat(html, Not(packages_not_matches))
 
     def test_job_notifications_display_multiple(self):
-        job1 = self.makeJob('package_1')
-        job2 = self.makeJob('package_2', failed=True)
-        job3 = self.makeJob('package_3')
+        job1 = self.makeJob("package_1")
+        job2 = self.makeJob("package_2", failed=True)
+        job3 = self.makeJob("package_3")
         with person_logged_in(self.archive.owner):
             view = create_initialized_view(
-                self.archive, "+packages", principal=self.archive.owner)
+                self.archive, "+packages", principal=self.archive.owner
+            )
             html = view.render()
         packages_matches = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'job1', 'div',
-                attrs={'class': 'pending-job', 'job_id': job1.id}),
+                "job1",
+                "div",
+                attrs={"class": "pending-job", "job_id": job1.id},
+            ),
             soupmatchers.Tag(
-                'job2', 'div',
-                attrs={'class': 'pending-job', 'job_id': job2.id}),
+                "job2",
+                "div",
+                attrs={"class": "pending-job", "job_id": job2.id},
+            ),
             soupmatchers.Tag(
-                'job3', 'div',
-                attrs={'class': 'pending-job', 'job_id': job3.id}),
-            )
+                "job3",
+                "div",
+                attrs={"class": "pending-job", "job_id": job3.id},
+            ),
+        )
         self.assertThat(html, packages_matches)
         self.assertEqual(
-            [], BeautifulSoup(html).find_all(
-                'span', text=re.compile('Showing 5 of .')))
+            [],
+            BeautifulSoup(html).find_all(
+                "span", text=re.compile("Showing 5 of .")
+            ),
+        )
 
     def test_job_notifications_display_multiple_is_capped(self):
-        jobs = [self.makeJob('package%d' % i) for i in range(7)]
+        jobs = [self.makeJob("package%d" % i) for i in range(7)]
         with person_logged_in(self.archive.owner):
             view = create_initialized_view(
-                self.archive, "+packages", principal=self.archive.owner)
+                self.archive, "+packages", principal=self.archive.owner
+            )
             soup = BeautifulSoup(view.render())
-        self.assertEqual([],
+        self.assertEqual(
+            [],
             soup.find_all(
-                'div', attrs={'class': 'pending-job', 'job_id': jobs[-1].id}))
-        showing_tags = soup.find_all(
-            'span', text=re.compile('Showing 5 of .'))
+                "div", attrs={"class": "pending-job", "job_id": jobs[-1].id}
+            ),
+        )
+        showing_tags = soup.find_all("span", text=re.compile("Showing 5 of ."))
         self.assertEqual(
-            ['Showing 5 of 7'], [tag.string for tag in showing_tags])
+            ["Showing 5 of 7"], [tag.string for tag in showing_tags]
+        )
 
     def test_job_notifications_display_owner_is_team(self):
         team = self.factory.makeTeam()
         removeSecurityProxy(self.archive).owner = team
-        job = self.makeJob('package_1', failed=False)
+        job = self.makeJob("package_1", failed=False)
         with person_logged_in(self.archive.owner):
             view = create_initialized_view(
-                self.archive, "+packages", principal=self.archive.owner)
+                self.archive, "+packages", principal=self.archive.owner
+            )
             html = view.render()
         packages_matches = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'copied by', 'a',
+                "copied by",
+                "a",
                 text=job.job.requester.displayname,
-                attrs={'class': re.compile('copied-by')}),
-            )
+                attrs={"class": re.compile("copied-by")},
+            ),
+        )
         self.assertThat(html, packages_matches)
 
 
@@ -500,28 +568,36 @@ class TestP3APackagesQueryCount(TestCaseWithFactory):
         self.person = self.factory.makePerson()
 
         self.private_ppa = self.factory.makeArchive(
-            owner=self.team, private=True)
+            owner=self.team, private=True
+        )
         self.private_ppa.newSubscription(
-            self.person, registrant=self.team.teamowner)
+            self.person, registrant=self.team.teamowner
+        )
         self.distroseries = self.factory.makeDistroSeries(
-            distribution=self.private_ppa.distribution)
+            distribution=self.private_ppa.distribution
+        )
 
     def createPackage(self):
-        with celebrity_logged_in('admin'):
+        with celebrity_logged_in("admin"):
             pkg = self.factory.makeBinaryPackagePublishingHistory(
                 distroarchseries=self.factory.makeDistroArchSeries(
-                    distroseries=self.distroseries),
+                    distroseries=self.distroseries
+                ),
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.private_ppa)
+                archive=self.private_ppa,
+            )
         return pkg
 
     def test_ppa_index_queries_count(self):
         def ppa_index_render():
             with person_logged_in(self.team.teamowner):
                 view = create_initialized_view(
-                    self.private_ppa, '+index', principal=self.team.teamowner)
+                    self.private_ppa, "+index", principal=self.team.teamowner
+                )
                 view.page_title = "title"
                 view.render()
+
         recorder1, recorder2 = record_two_runs(
-            ppa_index_render, self.createPackage, 2, 3)
+            ppa_index_render, self.createPackage, 2, 3
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
diff --git a/lib/lp/soyuz/browser/tests/test_archive_webservice.py b/lib/lp/soyuz/browser/tests/test_archive_webservice.py
index 3c059ab..47ed467 100644
--- a/lib/lp/soyuz/browser/tests/test_archive_webservice.py
+++ b/lib/lp/soyuz/browser/tests/test_archive_webservice.py
@@ -1,9 +1,9 @@
 # Copyright 2010-2020 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from datetime import timedelta
 import json
 import os.path
+from datetime import timedelta
 
 import responses
 from testtools.matchers import (
@@ -14,7 +14,7 @@ from testtools.matchers import (
     MatchesListwise,
     MatchesRegex,
     MatchesStructure,
-    )
+)
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
@@ -26,19 +26,19 @@ from lp.soyuz.enums import (
     ArchivePermissionType,
     ArchivePurpose,
     PackagePublishingStatus,
-    )
+)
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.packagecopyjob import IPlainPackageCopyJobSource
 from lp.soyuz.model.archivepermission import ArchivePermission
 from lp.testing import (
-    admin_logged_in,
     ANONYMOUS,
+    TestCaseWithFactory,
+    admin_logged_in,
     api_url,
     login,
     person_logged_in,
     record_two_runs,
-    TestCaseWithFactory,
-    )
+)
 from lp.testing.gpgkeys import gpgkeysdir
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import HasQueryCount
@@ -53,32 +53,43 @@ class TestArchiveWebservice(TestCaseWithFactory):
         with admin_logged_in() as _admin:
             admin = _admin
             self.archive = self.factory.makeArchive(
-                purpose=ArchivePurpose.PRIMARY)
+                purpose=ArchivePurpose.PRIMARY
+            )
             distroseries = self.factory.makeDistroSeries(
-                distribution=self.archive.distribution)
+                distribution=self.archive.distribution
+            )
             person = self.factory.makePerson()
         self.main_archive_url = api_url(self.archive)
         self.distroseries_url = api_url(distroseries)
         self.person_url = api_url(person)
         self.ws = webservice_for_person(
-            admin, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            admin,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
 
     def test_checkUpload_bad_pocket(self):
         # Make sure a 403 error and not an OOPS is returned when
         # CannotUploadToPocket is raised when calling checkUpload.
         response = self.ws.named_get(
-            self.main_archive_url, "checkUpload",
+            self.main_archive_url,
+            "checkUpload",
             distroseries=self.distroseries_url,
             sourcepackagename="mozilla-firefox",
             pocket="Updates",
             component="restricted",
-            person=self.person_url)
-        self.assertThat(response, MatchesStructure.byEquality(
-            status=403,
-            body=(
-                b"Not permitted to upload to the UPDATES pocket in a series "
-                b"in the 'DEVELOPMENT' state.")))
+            person=self.person_url,
+        )
+        self.assertThat(
+            response,
+            MatchesStructure.byEquality(
+                status=403,
+                body=(
+                    b"Not permitted to upload to the UPDATES pocket in a "
+                    b"series in the 'DEVELOPMENT' state."
+                ),
+            ),
+        )
 
     def test_getAllPermissions_constant_query_count(self):
         # getAllPermissions has a query count constant in the number of
@@ -86,15 +97,18 @@ class TestArchiveWebservice(TestCaseWithFactory):
         def create_permission():
             with admin_logged_in():
                 ArchivePermission(
-                    archive=self.archive, person=self.factory.makePerson(),
+                    archive=self.archive,
+                    person=self.factory.makePerson(),
                     component=getUtility(IComponentSet)["main"],
-                    permission=ArchivePermissionType.UPLOAD)
+                    permission=ArchivePermissionType.UPLOAD,
+                )
 
         def get_permissions():
             self.ws.named_get(self.main_archive_url, "getAllPermissions")
 
         recorder1, recorder2 = record_two_runs(
-            get_permissions, create_permission, 1)
+            get_permissions, create_permission, 1
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_delete(self):
@@ -102,22 +116,28 @@ class TestArchiveWebservice(TestCaseWithFactory):
             ppa = self.factory.makeArchive(purpose=ArchivePurpose.PPA)
             ppa_url = api_url(ppa)
             ws = webservice_for_person(
-                ppa.owner, permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                ppa.owner,
+                permission=OAuthPermission.WRITE_PRIVATE,
+                default_api_version="devel",
+            )
 
         # DELETE on an archive resource doesn't actually remove it
         # immediately, but it asks the publisher to delete it later.
         self.assertEqual(
-            'Active', self.getWebserviceJSON(ws, ppa_url)['status'])
+            "Active", self.getWebserviceJSON(ws, ppa_url)["status"]
+        )
         self.assertEqual(200, ws.delete(ppa_url).status)
         self.assertEqual(
-            'Deleting', self.getWebserviceJSON(ws, ppa_url)['status'])
+            "Deleting", self.getWebserviceJSON(ws, ppa_url)["status"]
+        )
 
         # Deleting the PPA again fails.
         self.assertThat(
             ws.delete(ppa_url),
             MatchesStructure.byEquality(
-                status=400, body=b"Archive already deleted."))
+                status=400, body=b"Archive already deleted."
+            ),
+        )
 
     def test_delete_is_restricted(self):
         with admin_logged_in():
@@ -126,7 +146,8 @@ class TestArchiveWebservice(TestCaseWithFactory):
             ws = webservice_for_person(
                 self.factory.makePerson(),
                 permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                default_api_version="devel",
+            )
 
         # A random user can't delete someone else's PPA.
         self.assertEqual(401, ws.delete(ppa_url).status)
@@ -136,8 +157,10 @@ class TestArchiveWebservice(TestCaseWithFactory):
             archive = self.factory.makeArchive()
             archive_url = api_url(archive)
             ws = webservice_for_person(
-                archive.owner, permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                archive.owner,
+                permission=OAuthPermission.WRITE_PRIVATE,
+                default_api_version="devel",
+            )
 
         ws_archive = self.getWebserviceJSON(ws, archive_url)
 
@@ -149,12 +172,15 @@ class TestArchiveWebservice(TestCaseWithFactory):
             archive = self.factory.makeArchive()
             archive_url = api_url(archive)
             ws = webservice_for_person(
-                archive.owner, permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                archive.owner,
+                permission=OAuthPermission.WRITE_PRIVATE,
+                default_api_version="devel",
+            )
 
         # Setting it to False works.
         response = ws.patch(
-            archive_url, "application/json", json.dumps({"publish": False}))
+            archive_url, "application/json", json.dumps({"publish": False})
+        )
         self.assertEqual(209, response.status)
 
         ws_archive = self.getWebserviceJSON(ws, archive_url)
@@ -165,16 +191,19 @@ class TestArchiveWebservice(TestCaseWithFactory):
             archive = self.factory.makeArchive()
             archive_url = api_url(archive)
             ws = webservice_for_person(
-                archive.owner, permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                archive.owner,
+                permission=OAuthPermission.WRITE_PRIVATE,
+                default_api_version="devel",
+            )
             response = ws.patch(
-                archive_url, "application/json",
-                json.dumps({"publish": False}))
+                archive_url, "application/json", json.dumps({"publish": False})
+            )
             self.assertEqual(209, response.status)
 
         # Setting it back to True works because archive is Active.
         ws.patch(
-            archive_url, "application/json", json.dumps({"publish": True}))
+            archive_url, "application/json", json.dumps({"publish": True})
+        )
 
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertTrue(ws_archive["publish"])
@@ -184,21 +213,24 @@ class TestArchiveWebservice(TestCaseWithFactory):
             archive = self.factory.makeArchive()
             archive_url = api_url(archive)
             ws = webservice_for_person(
-                archive.owner, permission=OAuthPermission.WRITE_PRIVATE,
-                default_api_version="devel")
+                archive.owner,
+                permission=OAuthPermission.WRITE_PRIVATE,
+                default_api_version="devel",
+            )
             response = ws.patch(
-                    archive_url, "application/json",
-                    json.dumps({"publish": False}))
+                archive_url, "application/json", json.dumps({"publish": False})
+            )
             self.assertEqual(209, response.status)
             response = ws.delete(archive_url)
             self.assertEqual(200, response.status)
             ws_archive = self.getWebserviceJSON(ws, archive_url)
-            self.assertEqual('Deleting', ws_archive["status"])
+            self.assertEqual("Deleting", ws_archive["status"])
 
             # Setting it to True with archive status
             # different from Active won't work.
             response = ws.patch(
-                archive_url, "application/json", json.dumps({"publish": True}))
+                archive_url, "application/json", json.dumps({"publish": True})
+            )
 
             self.assertEqual(400, response.status)
             self.assertEqual(b"Deleted PPAs can't be enabled.", response.body)
@@ -222,10 +254,12 @@ class TestSigningKey(TestCaseWithFactory):
             secret_key = gpghandler.importSecretKey(key_file.read())
         public_key = gpghandler.retrieveKey(secret_key.fingerprint)
         public_key_data = public_key.export()
-        removeSecurityProxy(archive).signing_key_fingerprint = (
-            public_key.fingerprint)
+        removeSecurityProxy(
+            archive
+        ).signing_key_fingerprint = public_key.fingerprint
         key_url = gpghandler.getURLForKeyInServer(
-            public_key.fingerprint, action="get")
+            public_key.fingerprint, action="get"
+        )
         responses.add("GET", key_url, body=public_key_data)
         gpghandler.resetLocalState()
         return public_key.fingerprint, public_key_data
@@ -237,7 +271,8 @@ class TestSigningKey(TestCaseWithFactory):
         fingerprint, public_key_data = self._setUpSigningKey(archive)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            self.factory.makePerson(), default_api_version="devel")
+            self.factory.makePerson(), default_api_version="devel"
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertEqual(fingerprint, ws_archive["signing_key_fingerprint"])
         response = ws.named_get(archive_url, "getSigningKeyData")
@@ -254,8 +289,10 @@ class TestSigningKey(TestCaseWithFactory):
             archive.newSubscription(subscriber, archive.owner)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            subscriber, permission=OAuthPermission.READ_PRIVATE,
-            default_api_version="devel")
+            subscriber,
+            permission=OAuthPermission.READ_PRIVATE,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertEqual(fingerprint, ws_archive["signing_key_fingerprint"])
         response = ws.named_get(archive_url, "getSigningKeyData")
@@ -270,12 +307,15 @@ class TestSigningKey(TestCaseWithFactory):
         fingerprint, public_key_data = self._setUpSigningKey(archive)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.READ_PRIVATE,
-            default_api_version="devel")
+            self.factory.makePerson(),
+            permission=OAuthPermission.READ_PRIVATE,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertEqual(
             "tag:launchpad.net:2008:redacted",
-            ws_archive["signing_key_fingerprint"])
+            ws_archive["signing_key_fingerprint"],
+        )
         response = ws.named_get(archive_url, "getSigningKeyData")
         self.assertEqual(401, response.status)
 
@@ -289,12 +329,15 @@ class TestExternalDependencies(TestCaseWithFactory):
         archive = self.factory.makeArchive()
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC)
+            self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertIsNone(ws_archive["external_dependencies"])
         response = ws.patch(
-            archive_url, "application/json",
-            json.dumps({"external_dependencies": "random"}))
+            archive_url,
+            "application/json",
+            json.dumps({"external_dependencies": "random"}),
+        )
         self.assertEqual(401, response.status)
 
     def test_external_dependencies_owner(self):
@@ -302,12 +345,15 @@ class TestExternalDependencies(TestCaseWithFactory):
         archive = self.factory.makeArchive()
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC)
+            archive.owner, permission=OAuthPermission.WRITE_PUBLIC
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertIsNone(ws_archive["external_dependencies"])
         response = ws.patch(
-            archive_url, "application/json",
-            json.dumps({"external_dependencies": "random"}))
+            archive_url,
+            "application/json",
+            json.dumps({"external_dependencies": "random"}),
+        )
         self.assertEqual(401, response.status)
 
     def test_external_dependencies_ppa_owner_invalid(self):
@@ -317,15 +363,22 @@ class TestExternalDependencies(TestCaseWithFactory):
         archive = self.factory.makeArchive(owner=ppa_admin)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC)
+            archive.owner, permission=OAuthPermission.WRITE_PUBLIC
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertIsNone(ws_archive["external_dependencies"])
         response = ws.patch(
-            archive_url, "application/json",
-            json.dumps({"external_dependencies": "random"}))
-        self.assertThat(response, MatchesStructure(
-            status=Equals(400),
-            body=Contains(b"Invalid external dependencies")))
+            archive_url,
+            "application/json",
+            json.dumps({"external_dependencies": "random"}),
+        )
+        self.assertThat(
+            response,
+            MatchesStructure(
+                status=Equals(400),
+                body=Contains(b"Invalid external dependencies"),
+            ),
+        )
 
     def test_external_dependencies_ppa_owner_valid(self):
         """PPA admins can look and touch."""
@@ -334,15 +387,21 @@ class TestExternalDependencies(TestCaseWithFactory):
         archive = self.factory.makeArchive(owner=ppa_admin)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC)
+            archive.owner, permission=OAuthPermission.WRITE_PUBLIC
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertIsNone(ws_archive["external_dependencies"])
         response = ws.patch(
-            archive_url, "application/json",
-            json.dumps({
-                "external_dependencies":
-                    "deb http://example.org suite components",
-                }))
+            archive_url,
+            "application/json",
+            json.dumps(
+                {
+                    "external_dependencies": (
+                        "deb http://example.org suite components"
+                    ),
+                }
+            ),
+        )
         self.assertEqual(209, response.status)
 
 
@@ -357,18 +416,31 @@ class TestArchiveDependencies(TestCaseWithFactory):
         archive_url = api_url(archive)
         dependency_url = api_url(dependency)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            self.factory.makePerson(),
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_dependencies = self.getWebserviceJSON(
-            ws, ws_archive["dependencies_collection_link"])
+            ws, ws_archive["dependencies_collection_link"]
+        )
         self.assertEqual([], ws_dependencies["entries"])
         response = ws.named_post(
-            archive_url, "addArchiveDependency",
-            dependency=dependency_url, pocket="Release", component="main")
-        self.assertThat(response, MatchesStructure(
-            status=Equals(401),
-            body=MatchesRegex(br".*addArchiveDependency.*launchpad.Edit.*")))
+            archive_url,
+            "addArchiveDependency",
+            dependency=dependency_url,
+            pocket="Release",
+            component="main",
+        )
+        self.assertThat(
+            response,
+            MatchesStructure(
+                status=Equals(401),
+                body=MatchesRegex(
+                    rb".*addArchiveDependency.*launchpad.Edit.*"
+                ),
+            ),
+        )
 
     def test_addArchiveDependency_owner(self):
         """Archive owners can add archive dependencies."""
@@ -377,28 +449,48 @@ class TestArchiveDependencies(TestCaseWithFactory):
         archive_url = api_url(archive)
         dependency_url = api_url(dependency)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            archive.owner,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_dependencies = self.getWebserviceJSON(
-            ws, ws_archive["dependencies_collection_link"])
+            ws, ws_archive["dependencies_collection_link"]
+        )
         self.assertEqual([], ws_dependencies["entries"])
         response = ws.named_post(
-            archive_url, "addArchiveDependency",
-            dependency=dependency_url, pocket="Release", component="asdf")
+            archive_url,
+            "addArchiveDependency",
+            dependency=dependency_url,
+            pocket="Release",
+            component="asdf",
+        )
         self.assertThat(
             response,
-            MatchesStructure(status=Equals(404), body=Contains(b"asdf")))
+            MatchesStructure(status=Equals(404), body=Contains(b"asdf")),
+        )
         response = ws.named_post(
-            archive_url, "addArchiveDependency",
-            dependency=dependency_url, pocket="Release", component="main")
+            archive_url,
+            "addArchiveDependency",
+            dependency=dependency_url,
+            pocket="Release",
+            component="main",
+        )
         self.assertEqual(201, response.status)
         archive_dependency_url = response.getHeader("Location")
         ws_dependencies = self.getWebserviceJSON(
-            ws, ws_archive["dependencies_collection_link"])
-        self.assertThat(ws_dependencies["entries"], MatchesListwise([
-            ContainsDict({"self_link": Equals(archive_dependency_url)}),
-            ]))
+            ws, ws_archive["dependencies_collection_link"]
+        )
+        self.assertThat(
+            ws_dependencies["entries"],
+            MatchesListwise(
+                [
+                    ContainsDict(
+                        {"self_link": Equals(archive_dependency_url)}
+                    ),
+                ]
+            ),
+        )
 
     def test_addArchiveDependency_invalid(self):
         """Invalid requests generate a BadRequest error."""
@@ -406,17 +498,27 @@ class TestArchiveDependencies(TestCaseWithFactory):
         dependency = self.factory.makeArchive()
         with person_logged_in(archive.owner):
             archive.addArchiveDependency(
-                dependency, PackagePublishingPocket.RELEASE)
+                dependency, PackagePublishingPocket.RELEASE
+            )
         archive_url = api_url(archive)
         dependency_url = api_url(dependency)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            archive.owner,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         response = ws.named_post(
-            archive_url, "addArchiveDependency",
-            dependency=dependency_url, pocket="Release")
-        self.assertThat(response, MatchesStructure.byEquality(
-            status=400, body=b"This dependency is already registered."))
+            archive_url,
+            "addArchiveDependency",
+            dependency=dependency_url,
+            pocket="Release",
+        )
+        self.assertThat(
+            response,
+            MatchesStructure.byEquality(
+                status=400, body=b"This dependency is already registered."
+            ),
+        )
 
     def test_removeArchiveDependency_random_user(self):
         """Normal users cannot remove archive dependencies."""
@@ -424,18 +526,27 @@ class TestArchiveDependencies(TestCaseWithFactory):
         dependency = self.factory.makeArchive()
         with person_logged_in(archive.owner):
             archive.addArchiveDependency(
-                dependency, PackagePublishingPocket.RELEASE)
+                dependency, PackagePublishingPocket.RELEASE
+            )
         archive_url = api_url(archive)
         dependency_url = api_url(dependency)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            self.factory.makePerson(),
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         response = ws.named_post(
-            archive_url, "removeArchiveDependency", dependency=dependency_url)
-        self.assertThat(response, MatchesStructure(
-            status=Equals(401),
-            body=MatchesRegex(
-                br".*removeArchiveDependency.*launchpad.Edit.*")))
+            archive_url, "removeArchiveDependency", dependency=dependency_url
+        )
+        self.assertThat(
+            response,
+            MatchesStructure(
+                status=Equals(401),
+                body=MatchesRegex(
+                    rb".*removeArchiveDependency.*launchpad.Edit.*"
+                ),
+            ),
+        )
 
     def test_removeArchiveDependency_owner(self):
         """Archive owners can remove archive dependencies."""
@@ -443,18 +554,23 @@ class TestArchiveDependencies(TestCaseWithFactory):
         dependency = self.factory.makeArchive()
         with person_logged_in(archive.owner):
             archive.addArchiveDependency(
-                dependency, PackagePublishingPocket.RELEASE)
+                dependency, PackagePublishingPocket.RELEASE
+            )
         archive_url = api_url(archive)
         dependency_url = api_url(dependency)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            archive.owner,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         response = ws.named_post(
-            archive_url, "removeArchiveDependency", dependency=dependency_url)
+            archive_url, "removeArchiveDependency", dependency=dependency_url
+        )
         self.assertEqual(200, response.status)
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_dependencies = self.getWebserviceJSON(
-            ws, ws_archive["dependencies_collection_link"])
+            ws, ws_archive["dependencies_collection_link"]
+        )
         self.assertEqual([], ws_dependencies["entries"])
 
 
@@ -472,7 +588,8 @@ class TestProcessors(TestCaseWithFactory):
         ws = webservice_for_person(ppa_admin, default_api_version="beta")
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         self.assertNotIn(
-            "enabled_restricted_processors_collection_link", ws_archive)
+            "enabled_restricted_processors_collection_link", ws_archive
+        )
 
     def test_erpAvailableInDevel(self):
         """The enabled_restricted_processors property is in devel."""
@@ -483,59 +600,80 @@ class TestProcessors(TestCaseWithFactory):
         ws = webservice_for_person(ppa_admin, default_api_version="devel")
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_erp = self.getWebserviceJSON(
-            ws, ws_archive["enabled_restricted_processors_collection_link"])
+            ws, ws_archive["enabled_restricted_processors_collection_link"]
+        )
         self.assertEqual([], ws_erp["entries"])
 
     def test_processors(self):
         """Attributes about processors are available."""
         self.factory.makeProcessor(
-            'new-arm', 'New ARM Title', 'New ARM Description')
+            "new-arm", "New ARM Title", "New ARM Description"
+        )
         ws = webservice_for_person(
-            self.factory.makePerson(), default_api_version="devel")
+            self.factory.makePerson(), default_api_version="devel"
+        )
         response = ws.named_get("/+processors", "getByName", name="new-arm")
         self.assertEqual(200, response.status)
-        self.assertThat(response.jsonBody(), ContainsDict({
-            "name": Equals("new-arm"),
-            "title": Equals("New ARM Title"),
-            "description": Equals("New ARM Description"),
-            }))
+        self.assertThat(
+            response.jsonBody(),
+            ContainsDict(
+                {
+                    "name": Equals("new-arm"),
+                    "title": Equals("New ARM Title"),
+                    "description": Equals("New ARM Description"),
+                }
+            ),
+        )
 
     def setProcessors(self, user, archive_url, names):
         ws = webservice_for_person(
-            user, permission=OAuthPermission.WRITE_PUBLIC)
+            user, permission=OAuthPermission.WRITE_PUBLIC
+        )
         return ws.named_post(
-            archive_url, 'setProcessors',
-            processors=['/+processors/%s' % name for name in names],
-            api_version='devel')
+            archive_url,
+            "setProcessors",
+            processors=["/+processors/%s" % name for name in names],
+            api_version="devel",
+        )
 
     def assertProcessors(self, user, archive_url, names):
-        body = webservice_for_person(user).get(
-            archive_url + '/processors', api_version='devel').jsonBody()
+        body = (
+            webservice_for_person(user)
+            .get(archive_url + "/processors", api_version="devel")
+            .jsonBody()
+        )
         self.assertContentEqual(
-            names, [entry['name'] for entry in body['entries']])
+            names, [entry["name"] for entry in body["entries"]]
+        )
 
     def test_setProcessors_admin(self):
         """An admin can add a new processor to the enabled restricted set."""
         ppa_admin_team = getUtility(ILaunchpadCelebrities).ppa_admin
         ppa_admin = self.factory.makePerson(member_of=[ppa_admin_team])
         self.factory.makeProcessor(
-            'arm', 'ARM', 'ARM', restricted=True, build_by_default=False)
+            "arm", "ARM", "ARM", restricted=True, build_by_default=False
+        )
         ppa_url = api_url(self.factory.makeArchive(purpose=ArchivePurpose.PPA))
-        self.assertProcessors(ppa_admin, ppa_url, ['386', 'hppa', 'amd64'])
+        self.assertProcessors(ppa_admin, ppa_url, ["386", "hppa", "amd64"])
 
-        response = self.setProcessors(ppa_admin, ppa_url, ['386', 'arm'])
+        response = self.setProcessors(ppa_admin, ppa_url, ["386", "arm"])
         self.assertEqual(200, response.status)
-        self.assertProcessors(ppa_admin, ppa_url, ['386', 'arm'])
+        self.assertProcessors(ppa_admin, ppa_url, ["386", "arm"])
 
     def test_setProcessors_non_owner_forbidden(self):
         """Only PPA admins and archive owners can call setProcessors."""
         self.factory.makeProcessor(
-            'unrestricted', 'Unrestricted', 'Unrestricted', restricted=False,
-            build_by_default=False)
+            "unrestricted",
+            "Unrestricted",
+            "Unrestricted",
+            restricted=False,
+            build_by_default=False,
+        )
         ppa_url = api_url(self.factory.makeArchive(purpose=ArchivePurpose.PPA))
 
         response = self.setProcessors(
-            self.factory.makePerson(), ppa_url, ['386', 'unrestricted'])
+            self.factory.makePerson(), ppa_url, ["386", "unrestricted"]
+        )
         self.assertEqual(401, response.status)
 
     def test_setProcessors_owner(self):
@@ -543,61 +681,73 @@ class TestProcessors(TestCaseWithFactory):
         archive = self.factory.makeArchive(purpose=ArchivePurpose.PPA)
         ppa_url = api_url(archive)
         owner = archive.owner
-        self.assertProcessors(owner, ppa_url, ['386', 'hppa', 'amd64'])
+        self.assertProcessors(owner, ppa_url, ["386", "hppa", "amd64"])
 
-        response = self.setProcessors(owner, ppa_url, ['386'])
+        response = self.setProcessors(owner, ppa_url, ["386"])
         self.assertEqual(200, response.status)
-        self.assertProcessors(owner, ppa_url, ['386'])
+        self.assertProcessors(owner, ppa_url, ["386"])
 
-        response = self.setProcessors(owner, ppa_url, ['386', 'amd64'])
+        response = self.setProcessors(owner, ppa_url, ["386", "amd64"])
         self.assertEqual(200, response.status)
-        self.assertProcessors(owner, ppa_url, ['386', 'amd64'])
+        self.assertProcessors(owner, ppa_url, ["386", "amd64"])
 
     def test_setProcessors_owner_restricted_forbidden(self):
         """The archive owner cannot enable/disable restricted processors."""
         ppa_admin_team = getUtility(ILaunchpadCelebrities).ppa_admin
         ppa_admin = self.factory.makePerson(member_of=[ppa_admin_team])
         self.factory.makeProcessor(
-            'arm', 'ARM', 'ARM', restricted=True, build_by_default=False)
+            "arm", "ARM", "ARM", restricted=True, build_by_default=False
+        )
         archive = self.factory.makeArchive(purpose=ArchivePurpose.PPA)
         ppa_url = api_url(archive)
         owner = archive.owner
 
-        response = self.setProcessors(owner, ppa_url, ['386', 'arm'])
+        response = self.setProcessors(owner, ppa_url, ["386", "arm"])
         self.assertEqual(403, response.status)
 
         # If a PPA admin enables arm, the owner cannot disable it.
-        response = self.setProcessors(ppa_admin, ppa_url, ['386', 'arm'])
+        response = self.setProcessors(ppa_admin, ppa_url, ["386", "arm"])
         self.assertEqual(200, response.status)
-        self.assertProcessors(owner, ppa_url, ['386', 'arm'])
+        self.assertProcessors(owner, ppa_url, ["386", "arm"])
 
-        response = self.setProcessors(owner, ppa_url, ['386'])
+        response = self.setProcessors(owner, ppa_url, ["386"])
         self.assertEqual(403, response.status)
 
     def test_enableRestrictedProcessor(self):
         """A new processor can be added to the enabled restricted set."""
         archive = self.factory.makeArchive()
         arm = self.factory.makeProcessor(
-            name='arm', restricted=True, build_by_default=False)
+            name="arm", restricted=True, build_by_default=False
+        )
         ppa_admin_team = getUtility(ILaunchpadCelebrities).ppa_admin
         ppa_admin = self.factory.makePerson(member_of=[ppa_admin_team])
         archive_url = api_url(archive)
         arm_url = api_url(arm)
         ws = webservice_for_person(
-            ppa_admin, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            ppa_admin,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_erp = self.getWebserviceJSON(
-            ws, ws_archive["enabled_restricted_processors_collection_link"])
+            ws, ws_archive["enabled_restricted_processors_collection_link"]
+        )
         self.assertEqual([], ws_erp["entries"])
         response = ws.named_post(
-            archive_url, "enableRestrictedProcessor", processor=arm_url)
+            archive_url, "enableRestrictedProcessor", processor=arm_url
+        )
         self.assertEqual(200, response.status)
         ws_erp = self.getWebserviceJSON(
-            ws, ws_archive["enabled_restricted_processors_collection_link"])
-        self.assertThat(ws_erp["entries"], MatchesListwise([
-            ContainsDict({"self_link": EndsWith(arm_url)}),
-            ]))
+            ws, ws_archive["enabled_restricted_processors_collection_link"]
+        )
+        self.assertThat(
+            ws_erp["entries"],
+            MatchesListwise(
+                [
+                    ContainsDict({"self_link": EndsWith(arm_url)}),
+                ]
+            ),
+        )
 
     def test_enableRestrictedProcessor_owner(self):
         """A new processor can be added to the enabled restricted set.
@@ -606,20 +756,29 @@ class TestProcessors(TestCaseWithFactory):
         """
         archive = self.factory.makeArchive()
         arm = self.factory.makeProcessor(
-            name='arm', restricted=True, build_by_default=False)
+            name="arm", restricted=True, build_by_default=False
+        )
         archive_url = api_url(archive)
         arm_url = api_url(arm)
         ws = webservice_for_person(
-            archive.owner, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            archive.owner,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_erp = self.getWebserviceJSON(
-            ws, ws_archive["enabled_restricted_processors_collection_link"])
+            ws, ws_archive["enabled_restricted_processors_collection_link"]
+        )
         self.assertEqual([], ws_erp["entries"])
         response = ws.named_post(
-            archive_url, "enableRestrictedProcessor", processor=arm_url)
-        self.assertThat(response, MatchesStructure(
-            status=Equals(401), body=Contains(b"'launchpad.Admin'")))
+            archive_url, "enableRestrictedProcessor", processor=arm_url
+        )
+        self.assertThat(
+            response,
+            MatchesStructure(
+                status=Equals(401), body=Contains(b"'launchpad.Admin'")
+            ),
+        )
 
     def test_enableRestrictedProcessor_nonPrivUser(self):
         """A new processor can be added to the enabled restricted set.
@@ -628,20 +787,29 @@ class TestProcessors(TestCaseWithFactory):
         """
         archive = self.factory.makeArchive()
         arm = self.factory.makeProcessor(
-            name='arm', restricted=True, build_by_default=False)
+            name="arm", restricted=True, build_by_default=False
+        )
         archive_url = api_url(archive)
         arm_url = api_url(arm)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            self.factory.makePerson(),
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         ws_archive = self.getWebserviceJSON(ws, archive_url)
         ws_erp = self.getWebserviceJSON(
-            ws, ws_archive["enabled_restricted_processors_collection_link"])
+            ws, ws_archive["enabled_restricted_processors_collection_link"]
+        )
         self.assertEqual([], ws_erp["entries"])
         response = ws.named_post(
-            archive_url, "enableRestrictedProcessor", processor=arm_url)
-        self.assertThat(response, MatchesStructure(
-            status=Equals(401), body=Contains(b"'launchpad.Admin'")))
+            archive_url, "enableRestrictedProcessor", processor=arm_url
+        )
+        self.assertThat(
+            response,
+            MatchesStructure(
+                status=Equals(401), body=Contains(b"'launchpad.Admin'")
+            ),
+        )
 
 
 class TestCopyPackage(TestCaseWithFactory):
@@ -654,38 +822,65 @@ class TestCopyPackage(TestCaseWithFactory):
         sponsored_dude = self.factory.makePerson()
         source_archive = self.factory.makeArchive()
         target_archive = self.factory.makeArchive(
-            purpose=ArchivePurpose.PRIMARY)
+            purpose=ArchivePurpose.PRIMARY
+        )
         source = self.factory.makeSourcePackagePublishingHistory(
-            archive=source_archive, status=PackagePublishingStatus.PUBLISHED)
+            archive=source_archive, status=PackagePublishingStatus.PUBLISHED
+        )
         source_name = source.source_package_name
         version = source.source_package_version
         to_pocket = PackagePublishingPocket.RELEASE
         to_series = self.factory.makeDistroSeries(
-            distribution=target_archive.distribution)
+            distribution=target_archive.distribution
+        )
         with person_logged_in(target_archive.owner):
             target_archive.newComponentUploader(uploader_dude, "universe")
-        return (source, source_archive, source_name, target_archive,
-                to_pocket, to_series, uploader_dude, sponsored_dude, version)
+        return (
+            source,
+            source_archive,
+            source_name,
+            target_archive,
+            to_pocket,
+            to_series,
+            uploader_dude,
+            sponsored_dude,
+            version,
+        )
 
     def test_copyPackage(self):
         """Basic smoke test"""
-        (source, source_archive, source_name, target_archive, to_pocket,
-         to_series, uploader_dude, sponsored_dude,
-         version) = self.setup_data()
+        (
+            source,
+            source_archive,
+            source_name,
+            target_archive,
+            to_pocket,
+            to_series,
+            uploader_dude,
+            sponsored_dude,
+            version,
+        ) = self.setup_data()
 
         target_archive_url = api_url(target_archive)
         source_archive_url = api_url(source_archive)
         sponsored_dude_url = api_url(sponsored_dude)
         ws = webservice_for_person(
-            uploader_dude, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            uploader_dude,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
 
         response = ws.named_post(
-            target_archive_url, "copyPackage",
-            source_name=source_name, version=version,
-            from_archive=source_archive_url, to_pocket=to_pocket.name,
-            to_series=to_series.name, include_binaries=False,
-            sponsored=sponsored_dude_url)
+            target_archive_url,
+            "copyPackage",
+            source_name=source_name,
+            version=version,
+            from_archive=source_archive_url,
+            to_pocket=to_pocket.name,
+            to_series=to_series.name,
+            include_binaries=False,
+            sponsored=sponsored_dude_url,
+        )
         self.assertEqual(200, response.status)
 
         login(ANONYMOUS)
@@ -695,22 +890,39 @@ class TestCopyPackage(TestCaseWithFactory):
         self.assertFalse(copy_job.move)
 
     def test_copyPackage_move(self):
-        (source, source_archive, source_name, target_archive, to_pocket,
-         to_series, uploader, _, version) = self.setup_data()
+        (
+            source,
+            source_archive,
+            source_name,
+            target_archive,
+            to_pocket,
+            to_series,
+            uploader,
+            _,
+            version,
+        ) = self.setup_data()
         with person_logged_in(source_archive.owner):
             source_archive.newComponentUploader(uploader, "main")
 
         target_archive_url = api_url(target_archive)
         source_archive_url = api_url(source_archive)
         ws = webservice_for_person(
-            uploader, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            uploader,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
 
         response = ws.named_post(
-            target_archive_url, "copyPackage",
-            source_name=source_name, version=version,
-            from_archive=source_archive_url, to_pocket=to_pocket.name,
-            to_series=to_series.name, include_binaries=False, move=True)
+            target_archive_url,
+            "copyPackage",
+            source_name=source_name,
+            version=version,
+            from_archive=source_archive_url,
+            to_pocket=to_pocket.name,
+            to_series=to_series.name,
+            include_binaries=False,
+            move=True,
+        )
         self.assertEqual(200, response.status)
 
         login(ANONYMOUS)
@@ -721,24 +933,39 @@ class TestCopyPackage(TestCaseWithFactory):
 
     def test_copyPackages(self):
         """Basic smoke test"""
-        (source, source_archive, source_name, target_archive, to_pocket,
-         to_series, uploader_dude, sponsored_dude,
-         version) = self.setup_data()
+        (
+            source,
+            source_archive,
+            source_name,
+            target_archive,
+            to_pocket,
+            to_series,
+            uploader_dude,
+            sponsored_dude,
+            version,
+        ) = self.setup_data()
         from_series = source.distroseries
 
         target_archive_url = api_url(target_archive)
         source_archive_url = api_url(source_archive)
         sponsored_dude_url = api_url(sponsored_dude)
         ws = webservice_for_person(
-            uploader_dude, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            uploader_dude,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
 
         response = ws.named_post(
-            target_archive_url, "copyPackages",
-            source_names=[source_name], from_archive=source_archive_url,
-            to_pocket=to_pocket.name, to_series=to_series.name,
-            from_series=from_series.name, include_binaries=False,
-            sponsored=sponsored_dude_url)
+            target_archive_url,
+            "copyPackages",
+            source_names=[source_name],
+            from_archive=source_archive_url,
+            to_pocket=to_pocket.name,
+            to_series=to_series.name,
+            from_series=from_series.name,
+            include_binaries=False,
+            sponsored=sponsored_dude_url,
+        )
         self.assertEqual(200, response.status)
 
         login(ANONYMOUS)
@@ -772,14 +999,17 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         private_archive = self.factory.makeArchive(private=True)
         with admin_logged_in():
             self.factory.makeBinaryPackagePublishingHistory(
-                archive=private_archive)
+                archive=private_archive
+            )
         subscriber = self.factory.makePerson()
         with person_logged_in(private_archive.owner):
             private_archive.newSubscription(subscriber, private_archive.owner)
         archive_url = api_url(private_archive)
         ws = webservice_for_person(
-            subscriber, permission=OAuthPermission.READ_PRIVATE,
-            default_api_version="devel")
+            subscriber,
+            permission=OAuthPermission.READ_PRIVATE,
+            default_api_version="devel",
+        )
         response = ws.named_get(archive_url, "getPublishedBinaries")
         self.assertEqual(200, response.status)
         self.assertEqual(1, response.jsonBody()["total_size"])
@@ -788,8 +1018,10 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         private_archive = self.factory.makeArchive(private=True)
         archive_url = api_url(private_archive)
         ws = webservice_for_person(
-            self.factory.makePerson(), permission=OAuthPermission.READ_PRIVATE,
-            default_api_version="devel")
+            self.factory.makePerson(),
+            permission=OAuthPermission.READ_PRIVATE,
+            default_api_version="devel",
+        )
         response = ws.named_get(archive_url, "getPublishedBinaries")
         self.assertEqual(401, response.status)
 
@@ -797,11 +1029,14 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         datecreated = self.factory.getUniqueDate()
         later_date = datecreated + timedelta(minutes=1)
         self.factory.makeBinaryPackagePublishingHistory(
-            archive=self.archive, datecreated=datecreated)
+            archive=self.archive, datecreated=datecreated
+        )
         ws = webservice_for_person(self.person, default_api_version="beta")
         response = ws.named_get(
-            self.archive_url, "getPublishedBinaries",
-            created_since_date=later_date.isoformat())
+            self.archive_url,
+            "getPublishedBinaries",
+            created_since_date=later_date.isoformat(),
+        )
         self.assertEqual(200, response.status)
         self.assertEqual(0, response.jsonBody()["total_size"])
 
@@ -810,7 +1045,8 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         self.factory.makeBinaryPackagePublishingHistory(archive=self.archive)
         ws = webservice_for_person(self.person, default_api_version="beta")
         response = ws.named_get(
-            self.archive_url, "getPublishedBinaries", ordered=False)
+            self.archive_url, "getPublishedBinaries", ordered=False
+        )
         self.assertEqual(200, response.status)
         self.assertEqual(2, response.jsonBody()["total_size"])
 
@@ -823,11 +1059,13 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         def create_bpph():
             with admin_logged_in():
                 self.factory.makeBinaryPackagePublishingHistory(
-                    archive=self.archive)
+                    archive=self.archive
+                )
 
         def get_binaries():
             webservice.named_get(
-                archive_url, 'getPublishedBinaries').jsonBody()
+                archive_url, "getPublishedBinaries"
+            ).jsonBody()
 
         recorder1, recorder2 = record_two_runs(get_binaries, create_bpph, 1)
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
@@ -841,15 +1079,17 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
             archive.newComponentUploader(uploader, archive.default_component)
         archive_url = api_url(archive)
         ws = webservice_for_person(
-            uploader, permission=OAuthPermission.READ_PRIVATE)
+            uploader, permission=OAuthPermission.READ_PRIVATE
+        )
 
         def create_bpph():
             with admin_logged_in():
                 self.factory.makeBinaryPackagePublishingHistory(
-                    archive=archive)
+                    archive=archive
+                )
 
         def get_binaries():
-            ws.named_get(archive_url, 'getPublishedBinaries').jsonBody()
+            ws.named_get(archive_url, "getPublishedBinaries").jsonBody()
 
         recorder1, recorder2 = record_two_runs(get_binaries, create_bpph, 1)
         # XXX cjwatson 2019-07-01: There are still some O(n) queries from
@@ -858,7 +1098,8 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         # need to arrange for AuthorizationBase.forwardCheckAuthenticated to
         # be able to use iter_authorization's cache.
         self.assertThat(
-            recorder2, HasQueryCount(Equals(recorder1.count + 3), recorder1))
+            recorder2, HasQueryCount(Equals(recorder1.count + 3), recorder1)
+        )
 
     def test_getPublishedBinaries_filter_by_component(self):
         # self.archive cannot be used, as this is a PPA, which only
@@ -867,18 +1108,21 @@ class TestGetPublishedBinaries(TestCaseWithFactory):
         archive_url = api_url(archive)
         for component in ("main", "main", "universe"):
             self.factory.makeBinaryPackagePublishingHistory(
-                archive=archive, component=component)
+                archive=archive, component=component
+            )
         ws = webservice_for_person(self.person, default_api_version="devel")
 
         for component, expected_count in (
-            ("main", 2), ("universe", 1), ("restricted", 0)):
+            ("main", 2),
+            ("universe", 1),
+            ("restricted", 0),
+        ):
             response = ws.named_get(
-                archive_url, "getPublishedBinaries",
-                component_name=component)
+                archive_url, "getPublishedBinaries", component_name=component
+            )
 
             self.assertEqual(200, response.status)
-            self.assertEqual(
-                expected_count, response.jsonBody()["total_size"])
+            self.assertEqual(expected_count, response.jsonBody()["total_size"])
             for entry in response.jsonBody()["entries"]:
                 self.assertEqual(component, entry["component_name"])
 
@@ -900,25 +1144,33 @@ class TestRemoveCopyNotification(TestCaseWithFactory):
         requester = self.factory.makePerson()
         source = getUtility(IPlainPackageCopyJobSource)
         job = source.create(
-            package_name="foo", source_archive=source_archive,
-            target_archive=self.archive, target_distroseries=distroseries,
+            package_name="foo",
+            source_archive=source_archive,
+            target_archive=self.archive,
+            target_distroseries=distroseries,
             target_pocket=PackagePublishingPocket.RELEASE,
-            package_version="1.0-1", include_binaries=True,
-            requester=requester)
+            package_version="1.0-1",
+            include_binaries=True,
+            requester=requester,
+        )
         job.start()
         job.fail()
 
         ws = webservice_for_person(
-            self.person, permission=OAuthPermission.WRITE_PUBLIC,
-            default_api_version="devel")
+            self.person,
+            permission=OAuthPermission.WRITE_PUBLIC,
+            default_api_version="devel",
+        )
         response = ws.named_post(
-            self.archive_url, "removeCopyNotification", job_id=job.id)
+            self.archive_url, "removeCopyNotification", job_id=job.id
+        )
         self.assertEqual(200, response.status)
 
         login(ANONYMOUS)
         source = getUtility(IPlainPackageCopyJobSource)
         self.assertEqual(
-            None, source.getIncompleteJobsForArchive(self.archive).any())
+            None, source.getIncompleteJobsForArchive(self.archive).any()
+        )
 
 
 class TestArchiveSet(TestCaseWithFactory):
@@ -928,25 +1180,53 @@ class TestArchiveSet(TestCaseWithFactory):
 
     def test_getByReference(self):
         random = self.factory.makePerson()
-        body = webservice_for_person(None).named_get(
-            '/archives', 'getByReference', reference='ubuntu',
-            api_version='devel').jsonBody()
-        self.assertEqual(body['reference'], 'ubuntu')
-        body = webservice_for_person(random).named_get(
-            '/archives', 'getByReference', reference='ubuntu',
-            api_version='devel').jsonBody()
-        self.assertEqual(body['reference'], 'ubuntu')
+        body = (
+            webservice_for_person(None)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference="ubuntu",
+                api_version="devel",
+            )
+            .jsonBody()
+        )
+        self.assertEqual(body["reference"], "ubuntu")
+        body = (
+            webservice_for_person(random)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference="ubuntu",
+                api_version="devel",
+            )
+            .jsonBody()
+        )
+        self.assertEqual(body["reference"], "ubuntu")
 
     def test_getByReference_ppa(self):
-        body = webservice_for_person(None).named_get(
-            '/archives', 'getByReference', reference='~cprov/ubuntu/ppa',
-            api_version='devel').jsonBody()
-        self.assertEqual(body['reference'], '~cprov/ubuntu/ppa')
+        body = (
+            webservice_for_person(None)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference="~cprov/ubuntu/ppa",
+                api_version="devel",
+            )
+            .jsonBody()
+        )
+        self.assertEqual(body["reference"], "~cprov/ubuntu/ppa")
 
     def test_getByReference_invalid(self):
-        body = webservice_for_person(None).named_get(
-            '/archives', 'getByReference', reference='~cprov/ubuntu',
-            api_version='devel').jsonBody()
+        body = (
+            webservice_for_person(None)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference="~cprov/ubuntu",
+                api_version="devel",
+            )
+            .jsonBody()
+        )
         self.assertIs(None, body)
 
     def test_getByReference_private(self):
@@ -955,15 +1235,36 @@ class TestArchiveSet(TestCaseWithFactory):
             owner = archive.owner
             reference = archive.reference
             random = self.factory.makePerson()
-        body = webservice_for_person(None).named_get(
-            '/archives', 'getByReference', reference=reference,
-            api_version='devel').jsonBody()
+        body = (
+            webservice_for_person(None)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference=reference,
+                api_version="devel",
+            )
+            .jsonBody()
+        )
         self.assertIs(None, body)
-        body = webservice_for_person(random).named_get(
-            '/archives', 'getByReference', reference=reference,
-            api_version='devel').jsonBody()
+        body = (
+            webservice_for_person(random)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference=reference,
+                api_version="devel",
+            )
+            .jsonBody()
+        )
         self.assertIs(None, body)
-        body = webservice_for_person(owner).named_get(
-            '/archives', 'getByReference', reference=reference,
-            api_version='devel').jsonBody()
-        self.assertEqual(body['reference'], reference)
+        body = (
+            webservice_for_person(owner)
+            .named_get(
+                "/archives",
+                "getByReference",
+                reference=reference,
+                api_version="devel",
+            )
+            .jsonBody()
+        )
+        self.assertEqual(body["reference"], reference)
diff --git a/lib/lp/soyuz/browser/tests/test_archivesubscription.py b/lib/lp/soyuz/browser/tests/test_archivesubscription.py
index 31daf54..81b059b 100644
--- a/lib/lp/soyuz/browser/tests/test_archivesubscription.py
+++ b/lib/lp/soyuz/browser/tests/test_archivesubscription.py
@@ -3,21 +3,18 @@
 
 """Unit tests for ArchiveSubscribersView."""
 
-from soupmatchers import (
-    HTMLContains,
-    Tag,
-    )
+from soupmatchers import HTMLContains, Tag
 from zope.component import getUtility
 
 from lp.registry.enums import PersonVisibility
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.webapp import canonical_url
 from lp.testing import (
+    TestCaseWithFactory,
     login_person,
     person_logged_in,
     record_two_runs,
-    TestCaseWithFactory,
-    )
+)
 from lp.testing.layers import LaunchpadFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 from lp.testing.views import create_initialized_view
@@ -31,10 +28,11 @@ class TestArchiveSubscribersView(TestCaseWithFactory):
     def setUp(self):
         super().setUp()
         self.p3a_owner = self.factory.makePerson()
-        admin = getUtility(IPersonSet).getByEmail('admin@canonical.com')
+        admin = getUtility(IPersonSet).getByEmail("admin@canonical.com")
         with person_logged_in(admin):
             self.private_ppa = self.factory.makeArchive(
-                owner=self.p3a_owner, private=True, name='p3a')
+                owner=self.p3a_owner, private=True, name="p3a"
+            )
         with person_logged_in(self.p3a_owner):
             for count in range(3):
                 subscriber = self.factory.makePerson()
@@ -44,27 +42,38 @@ class TestArchiveSubscribersView(TestCaseWithFactory):
         # The page has the usual batch navigation links.
         with person_logged_in(self.p3a_owner):
             view = create_initialized_view(
-                self.private_ppa, '+subscriptions', principal=self.p3a_owner)
+                self.private_ppa, "+subscriptions", principal=self.p3a_owner
+            )
             html = view.render()
         has_batch_navigation = HTMLContains(
-            Tag('batch navigation links', 'td',
-                attrs={'class': 'batch-navigation-links'}, count=2))
+            Tag(
+                "batch navigation links",
+                "td",
+                attrs={"class": "batch-navigation-links"},
+                count=2,
+            )
+        )
         self.assertThat(html, has_batch_navigation)
 
     def test_constant_query_count(self):
         def create_subscribers():
             self.private_ppa.newSubscription(
-                self.factory.makePerson(), self.p3a_owner)
+                self.factory.makePerson(), self.p3a_owner
+            )
             self.private_ppa.newSubscription(
                 self.factory.makeTeam(
                     visibility=PersonVisibility.PRIVATE,
-                    members=[self.p3a_owner]),
-                self.p3a_owner)
+                    members=[self.p3a_owner],
+                ),
+                self.p3a_owner,
+            )
 
-        self.pushConfig('launchpad', default_batch_size=75)
-        url = canonical_url(self.private_ppa, view_name='+subscriptions')
+        self.pushConfig("launchpad", default_batch_size=75)
+        url = canonical_url(self.private_ppa, view_name="+subscriptions")
         recorder1, recorder2 = record_two_runs(
             lambda: self.getUserBrowser(url, user=self.p3a_owner),
-            create_subscribers, 2,
-            login_method=lambda: login_person(self.p3a_owner))
+            create_subscribers,
+            2,
+            login_method=lambda: login_person(self.p3a_owner),
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
diff --git a/lib/lp/soyuz/browser/tests/test_breadcrumbs.py b/lib/lp/soyuz/browser/tests/test_breadcrumbs.py
index 23875f3..cec6be8 100644
--- a/lib/lp/soyuz/browser/tests/test_breadcrumbs.py
+++ b/lib/lp/soyuz/browser/tests/test_breadcrumbs.py
@@ -6,20 +6,16 @@ from zope.component import getUtility
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.services.webapp.publisher import canonical_url
 from lp.soyuz.browser.archivesubscription import PersonalArchiveSubscription
-from lp.testing import (
-    login,
-    login_person,
-    )
+from lp.testing import login, login_person
 from lp.testing.breadcrumbs import BaseBreadcrumbTestCase
 
 
 class TestDistroArchSeriesBreadcrumb(BaseBreadcrumbTestCase):
-
     def setUp(self):
         super().setUp()
-        self.ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
-        self.hoary = self.ubuntu.getSeries('hoary')
-        self.hoary_i386 = self.hoary['i386']
+        self.ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
+        self.hoary = self.ubuntu.getSeries("hoary")
+        self.hoary_i386 = self.hoary["i386"]
 
     def test_distroarchseries(self):
         das_url = canonical_url(self.hoary_i386)
@@ -36,7 +32,7 @@ class TestDistroArchSeriesBreadcrumb(BaseBreadcrumbTestCase):
 
     def test_distroarchseriesbinarypackagerelease(self):
         pmount_hoary_i386 = self.hoary_i386.getBinaryPackage("pmount")
-        pmount_release = pmount_hoary_i386['0.1-1']
+        pmount_release = pmount_hoary_i386["0.1-1"]
         pmount_release_url = canonical_url(pmount_release)
         crumbs = self.getBreadcrumbsForObject(pmount_release)
         self.assertEqual(crumbs[-1].url, pmount_release_url)
@@ -44,13 +40,12 @@ class TestDistroArchSeriesBreadcrumb(BaseBreadcrumbTestCase):
 
 
 class TestArchiveSubscriptionBreadcrumb(BaseBreadcrumbTestCase):
-
     def setUp(self):
         super().setUp()
 
         # Create a private ppa
         self.ppa = self.factory.makeArchive()
-        login('foo.bar@canonical.com')
+        login("foo.bar@canonical.com")
         self.ppa.private = True
 
         owner = self.ppa.owner
@@ -58,12 +53,15 @@ class TestArchiveSubscriptionBreadcrumb(BaseBreadcrumbTestCase):
         self.ppa_subscription = self.ppa.newSubscription(owner, owner)
         self.ppa_token = self.ppa.newAuthToken(owner)
         self.personal_archive_subscription = PersonalArchiveSubscription(
-            owner, self.ppa)
+            owner, self.ppa
+        )
 
     def test_personal_archive_subscription(self):
         subscription_url = canonical_url(self.personal_archive_subscription)
         crumbs = self.getBreadcrumbsForObject(
-            self.personal_archive_subscription)
+            self.personal_archive_subscription
+        )
         self.assertEqual(subscription_url, crumbs[-1].url)
         self.assertEqual(
-            "Access to %s" % self.ppa.displayname, crumbs[-1].text)
+            "Access to %s" % self.ppa.displayname, crumbs[-1].text
+        )
diff --git a/lib/lp/soyuz/browser/tests/test_build_views.py b/lib/lp/soyuz/browser/tests/test_build_views.py
index 176b21e..7b9390c 100644
--- a/lib/lp/soyuz/browser/tests/test_build_views.py
+++ b/lib/lp/soyuz/browser/tests/test_build_views.py
@@ -2,21 +2,11 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 import soupmatchers
-from testtools.matchers import (
-    MatchesException,
-    Not,
-    Raises,
-    )
-from zope.component import (
-    getMultiAdapter,
-    getUtility,
-    )
+from testtools.matchers import MatchesException, Not, Raises
+from zope.component import getMultiAdapter, getUtility
 from zope.security.proxy import removeSecurityProxy
 
-from lp.buildmaster.enums import (
-    BuildQueueStatus,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildQueueStatus, BuildStatus
 from lp.registry.interfaces.person import IPersonSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.series import SeriesStatus
@@ -30,11 +20,11 @@ from lp.soyuz.interfaces.archivepermission import IArchivePermissionSet
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.packageset import IPackagesetSet
 from lp.testing import (
-    admin_logged_in,
     ANONYMOUS,
-    person_logged_in,
     TestCaseWithFactory,
-    )
+    admin_logged_in,
+    person_logged_in,
+)
 from lp.testing.layers import LaunchpadFunctionalLayer
 from lp.testing.sampledata import ADMIN_EMAIL
 from lp.testing.views import create_initialized_view
@@ -59,7 +49,7 @@ class TestBuildViews(TestCaseWithFactory):
         removeSecurityProxy(archive).require_virtualized = False
         build = self.factory.makeBinaryPackageBuild(archive=archive)
         view = create_initialized_view(build, name="+index")
-        self.assertEqual('multiverse', view.component_name)
+        self.assertEqual("multiverse", view.component_name)
 
     def test_view_without_component(self):
         # Production has some buggy builds without source publications.
@@ -67,10 +57,13 @@ class TestBuildViews(TestCaseWithFactory):
         spph = self.factory.makeSourcePackagePublishingHistory()
         other_das = self.factory.makeDistroArchSeries()
         build = getUtility(IBinaryPackageBuildSet).new(
-            spph.sourcepackagerelease, spph.archive, other_das,
-            PackagePublishingPocket.RELEASE)
+            spph.sourcepackagerelease,
+            spph.archive,
+            other_das,
+            PackagePublishingPocket.RELEASE,
+        )
         view = create_initialized_view(build, name="+index")
-        self.assertEqual('unknown', view.component_name)
+        self.assertEqual("unknown", view.component_name)
 
     def test_build_menu_primary(self):
         # The menu presented in the build page depends on the targeted
@@ -81,8 +74,8 @@ class TestBuildViews(TestCaseWithFactory):
         build = self.factory.makeBinaryPackageBuild(archive=archive)
         build_menu = BuildContextMenu(build)
         self.assertEqual(
-            build_menu.links,
-            ['ppa', 'records', 'retry', 'rescore', 'cancel'])
+            build_menu.links, ["ppa", "records", "retry", "rescore", "cancel"]
+        )
         self.assertFalse(build_menu.is_ppa_build)
         self.assertFalse(build_menu.ppa().enabled)
         # Cancel is not enabled on non-virtual builds.
@@ -92,13 +85,14 @@ class TestBuildViews(TestCaseWithFactory):
         # The 'PPA' action-menu item will be enabled if we target the build
         # to a PPA.
         ppa = self.factory.makeArchive(
-            purpose=ArchivePurpose.PPA, virtualized=True)
+            purpose=ArchivePurpose.PPA, virtualized=True
+        )
         build = self.factory.makeBinaryPackageBuild(archive=ppa)
         build.queueBuild()
         build_menu = BuildContextMenu(build)
         self.assertEqual(
-            build_menu.links,
-            ['ppa', 'records', 'retry', 'rescore', 'cancel'])
+            build_menu.links, ["ppa", "records", "retry", "rescore", "cancel"]
+        )
         self.assertTrue(build_menu.is_ppa_build)
         self.assertTrue(build_menu.ppa().enabled)
         # Cancel is enabled on virtual builds if the user is in the
@@ -112,15 +106,17 @@ class TestBuildViews(TestCaseWithFactory):
         # build for a released distroseries cannot be retried.
         archive = self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY)
         build = self.factory.makeBinaryPackageBuild(
-            archive=archive, status=BuildStatus.FAILEDTOBUILD)
+            archive=archive, status=BuildStatus.FAILEDTOBUILD
+        )
         distroseries = build.distro_arch_series.distroseries
         with person_logged_in(self.admin):
             distroseries.status = SeriesStatus.CURRENT
         build_view = getMultiAdapter(
-            (build, self.empty_request), name="+index")
+            (build, self.empty_request), name="+index"
+        )
         self.assertFalse(build_view.is_ppa)
         self.assertEqual(build.buildqueue_record, None)
-        self.assertEqual(build_view.component_name, 'multiverse')
+        self.assertEqual(build_view.component_name, "multiverse")
         self.assertFalse(build.can_be_retried)
         self.assertBuildMenuRetryIsExpected(build, build.archive.owner, False)
 
@@ -128,7 +124,8 @@ class TestBuildViews(TestCaseWithFactory):
         # PPA builds can always be retried, no matter what status the
         # distroseries has.
         build = self.factory.makeBinaryPackageBuild(
-            status=BuildStatus.FAILEDTOBUILD)
+            status=BuildStatus.FAILEDTOBUILD
+        )
         self.assertTrue(build.can_be_retried)
         # Anonymous, therefore supposed to be disallowed
         self.assertBuildMenuRetryIsExpected(build, ANONYMOUS, False)
@@ -139,7 +136,8 @@ class TestBuildViews(TestCaseWithFactory):
         # permits it to be re-tried.
         archive = self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY)
         build = self.factory.makeBinaryPackageBuild(
-            archive=archive, status=BuildStatus.FAILEDTOBUILD)
+            archive=archive, status=BuildStatus.FAILEDTOBUILD
+        )
         with person_logged_in(self.admin):
             self.assertTrue(build.can_be_retried)
         nopriv = getUtility(IPersonSet).getByName("no-priv")
@@ -147,7 +145,8 @@ class TestBuildViews(TestCaseWithFactory):
         self.assertBuildMenuRetryIsExpected(build, nopriv, False)
         # But they can as a member of launchpad-buildd-admins
         buildd_admins = getUtility(IPersonSet).getByName(
-            "launchpad-buildd-admins")
+            "launchpad-buildd-admins"
+        )
         with person_logged_in(self.admin):
             buildd_admins.addMember(nopriv, nopriv)
         self.assertBuildMenuRetryIsExpected(build, nopriv, True)
@@ -158,17 +157,22 @@ class TestBuildViews(TestCaseWithFactory):
         team = self.factory.makeTeam()
         archive = self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY)
         build = self.factory.makeBinaryPackageBuild(
-            archive=archive, status=BuildStatus.FAILEDTOBUILD)
+            archive=archive, status=BuildStatus.FAILEDTOBUILD
+        )
         with person_logged_in(self.admin):
             packageset = getUtility(IPackagesetSet).new(
-                'rebuild', 'test', team,
-                distroseries=build.distro_arch_series.distroseries)
+                "rebuild",
+                "test",
+                team,
+                distroseries=build.distro_arch_series.distroseries,
+            )
             packageset.add((build.source_package_release.sourcepackagename,))
         # The team doesn't have permission until we grant it
         self.assertBuildMenuRetryIsExpected(build, team.teamowner, False)
         with person_logged_in(self.admin):
             getUtility(IArchivePermissionSet).newPackagesetUploader(
-                archive, team, packageset)
+                archive, team, packageset
+            )
         self.assertBuildMenuRetryIsExpected(build, team.teamowner, True)
 
     def test_build_view_package_upload(self):
@@ -177,41 +181,48 @@ class TestBuildViews(TestCaseWithFactory):
         # a build were not yet collected.
         build = self.factory.makeBinaryPackageBuild()
         build_view = getMultiAdapter(
-            (build, self.empty_request), name="+index")
+            (build, self.empty_request), name="+index"
+        )
         self.assertEqual(build_view.package_upload, None)
         self.assertFalse(build_view.has_published_binaries)
         package_upload = build.distro_series.createQueueEntry(
-            PackagePublishingPocket.UPDATES, build.archive,
-            'changes.txt', b'my changes')
+            PackagePublishingPocket.UPDATES,
+            build.archive,
+            "changes.txt",
+            b"my changes",
+        )
         with person_l