
launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad-buildd:black into launchpad-buildd:master

 

Colin Watson has proposed merging ~cjwatson/launchpad-buildd:black into launchpad-buildd:master.

Commit message:
Apply black and isort

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad-buildd/+git/launchpad-buildd/+merge/436975

This is of course very long, but it's entirely mechanical apart from the changes to `.pre-commit-config.yaml` and a few cases where I rewrapped strings by hand.

I've got bored of formatting code manually.
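
For anyone who wants to reproduce the rewrite locally, here is an illustrative sketch (not part of this diff) using the Python APIs of black and isort directly rather than the pre-commit hooks added below; the 79-column line length and the isort "black" profile are assumptions taken from this branch's configuration:

    import black
    import isort

    SOURCE = (
        "from optparse import OptionParser\n"
        "import os\n"
        "print('RUN %r' % args)\n"
    )

    # Format with black, then normalise the imports with isort's
    # black-compatible profile, matching the hook order in
    # .pre-commit-config.yaml.
    formatted = black.format_str(SOURCE, mode=black.Mode(line_length=79))
    formatted = isort.code(formatted, profile="black", line_length=79)
    print(formatted, end="")

In practice the same result comes from `pre-commit run --all-files` once the updated hooks are installed.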
-- 
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad-buildd:black into launchpad-buildd:master.
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..01b1c51
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# apply black and isort
+b7e61dd8bbcff898b9a500da005b4f5b0853c4ac
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 62e4683..bd58fbb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,10 +7,6 @@ repos:
     -   id: check-xml
     -   id: check-yaml
     -   id: debug-statements
--   repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
-    hooks:
-    -   id: flake8
 -   repo: https://github.com/asottile/pyupgrade
     rev: v2.31.0
     hooks:
@@ -19,3 +15,17 @@ repos:
         # lpbuildd-git-proxy is copied into the build chroot and run there,
         # so it has different compatibility constraints.
         exclude: ^bin/lpbuildd-git-proxy$
+-   repo: https://github.com/psf/black
+    rev: 22.12.0
+    hooks:
+    -   id: black
+-   repo: https://github.com/PyCQA/isort
+    rev: 5.12.0
+    hooks:
+    -   id: isort
+        name: isort
+        args: [--profile, black]
+-   repo: https://github.com/PyCQA/flake8
+    rev: 4.0.1
+    hooks:
+    -   id: flake8
diff --git a/bin/buildrecipe b/bin/buildrecipe
index 9094490..6cfa10f 100755
--- a/bin/buildrecipe
+++ b/bin/buildrecipe
@@ -4,7 +4,6 @@
 
 """A script that builds a package from a recipe and a chroot."""
 
-from optparse import OptionParser
 import os
 import pwd
 import socket
@@ -12,11 +11,11 @@ import stat
 import subprocess
 import sys
 import tempfile
+from optparse import OptionParser
 from textwrap import dedent
 
 from debian.deb822 import Deb822
 
-
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD_TREE = 201
@@ -31,16 +30,24 @@ def call_report(args, env):
 
     :return: The process exit status.
     """
-    print('RUN %r' % args)
+    print("RUN %r" % args)
     return subprocess.call(args, env=env)
 
 
 class RecipeBuilder:
     """Builds a package from a recipe."""
 
-    def __init__(self, build_id, author_name, author_email,
-                 suite, distroseries_name, component, archive_purpose,
-                 git=False):
+    def __init__(
+        self,
+        build_id,
+        author_name,
+        author_email,
+        suite,
+        distroseries_name,
+        component,
+        archive_purpose,
+        git=False,
+    ):
         """Constructor.
 
         :param build_id: The id of the build (a str).
@@ -52,7 +59,7 @@ class RecipeBuilder:
         """
         self.build_id = build_id
         if isinstance(author_name, bytes):
-            author_name = author_name.decode('utf-8')
+            author_name = author_name.decode("utf-8")
         self.author_name = author_name
         self.author_email = author_email
         self.archive_purpose = archive_purpose
@@ -60,24 +67,26 @@ class RecipeBuilder:
         self.distroseries_name = distroseries_name
         self.suite = suite
         self.git = git
-        self.chroot_path = get_build_path(build_id, 'chroot-autobuild')
-        self.work_dir_relative = os.environ['HOME'] + '/work'
-        self.work_dir = os.path.join(self.chroot_path,
-                                     self.work_dir_relative[1:])
-
-        self.tree_path = os.path.join(self.work_dir, 'tree')
-        self.apt_dir_relative = os.path.join(self.work_dir_relative, 'apt')
-        self.apt_dir = os.path.join(self.work_dir, 'apt')
+        self.chroot_path = get_build_path(build_id, "chroot-autobuild")
+        self.work_dir_relative = os.environ["HOME"] + "/work"
+        self.work_dir = os.path.join(
+            self.chroot_path, self.work_dir_relative[1:]
+        )
+
+        self.tree_path = os.path.join(self.work_dir, "tree")
+        self.apt_dir_relative = os.path.join(self.work_dir_relative, "apt")
+        self.apt_dir = os.path.join(self.work_dir, "apt")
         self.username = pwd.getpwuid(os.getuid())[0]
         self.apt_sources_list_dir = os.path.join(
-            self.chroot_path, "etc/apt/sources.list.d")
+            self.chroot_path, "etc/apt/sources.list.d"
+        )
 
     def install(self):
         """Install all the requirements for building recipes.
 
         :return: A retcode from apt.
         """
-        return self.chroot(['apt-get', 'install', '-y', 'lsb-release'])
+        return self.chroot(["apt-get", "install", "-y", "lsb-release"])
 
     # XXX cjwatson 2021-11-23: Use shutil.which instead once we can assume
     # Python >= 3.3.
@@ -101,102 +110,136 @@ class RecipeBuilder:
         :return: a retcode from `bzr dailydeb` or `git-build-recipe`.
         """
         assert not os.path.exists(self.tree_path)
-        recipe_path = os.path.join(self.work_dir, 'recipe')
-        manifest_path = os.path.join(self.tree_path, 'manifest')
+        recipe_path = os.path.join(self.work_dir, "recipe")
+        manifest_path = os.path.join(self.tree_path, "manifest")
         with open(recipe_path) as recipe_file:
             recipe = recipe_file.read()
         # As of bzr 2.2, a defined identity is needed.  In this case, we're
         # using buildd@<hostname>.
         hostname = socket.gethostname()
-        email = 'buildd@%s' % hostname
-        lsb_release = subprocess.Popen([
-            'sudo', '/usr/sbin/chroot', self.chroot_path, 'lsb_release',
-            '-r', '-s'], stdout=subprocess.PIPE, universal_newlines=True)
+        email = "buildd@%s" % hostname
+        lsb_release = subprocess.Popen(
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                self.chroot_path,
+                "lsb_release",
+                "-r",
+                "-s",
+            ],
+            stdout=subprocess.PIPE,
+            universal_newlines=True,
+        )
         distroseries_version = lsb_release.communicate()[0].rstrip()
         assert lsb_release.returncode == 0
 
         if self.git:
-            print('Git version:')
-            subprocess.check_call(['git', '--version'])
-            print(subprocess.check_output(
-                ['dpkg-query', '-W', 'git-build-recipe'],
-                universal_newlines=True).rstrip('\n').replace('\t', ' '))
+            print("Git version:")
+            subprocess.check_call(["git", "--version"])
+            print(
+                subprocess.check_output(
+                    ["dpkg-query", "-W", "git-build-recipe"],
+                    universal_newlines=True,
+                )
+                .rstrip("\n")
+                .replace("\t", " ")
+            )
         else:
-            print('Bazaar versions:')
-            subprocess.check_call(['bzr', 'version'])
-            subprocess.check_call(['bzr', 'plugins'])
+            print("Bazaar versions:")
+            subprocess.check_call(["bzr", "version"])
+            subprocess.check_call(["bzr", "plugins"])
 
-        print('Building recipe:')
+        print("Building recipe:")
         print(recipe)
         sys.stdout.flush()
         env = {
-            'DEBEMAIL': self.author_email,
-            'DEBFULLNAME': self.author_name.encode('utf-8'),
-            'EMAIL': email,
-            'LANG': 'C.UTF-8',
-            }
+            "DEBEMAIL": self.author_email,
+            "DEBFULLNAME": self.author_name.encode("utf-8"),
+            "EMAIL": email,
+            "LANG": "C.UTF-8",
+        }
         if self.git:
-            cmd = ['git-build-recipe']
-        elif self._is_command_on_path('brz-build-daily-recipe'):
-            cmd = ['brz-build-daily-recipe']
+            cmd = ["git-build-recipe"]
+        elif self._is_command_on_path("brz-build-daily-recipe"):
+            cmd = ["brz-build-daily-recipe"]
         else:
-            cmd = ['bzr', '-Derror', 'dailydeb']
-        cmd.extend([
-            '--safe', '--no-build',
-            '--manifest', manifest_path,
-            '--distribution', self.distroseries_name,
-            '--allow-fallback-to-native',
-            '--append-version', '~ubuntu%s.1' % distroseries_version,
-            recipe_path, self.tree_path,
-            ])
+            cmd = ["bzr", "-Derror", "dailydeb"]
+        cmd.extend(
+            [
+                "--safe",
+                "--no-build",
+                "--manifest",
+                manifest_path,
+                "--distribution",
+                self.distroseries_name,
+                "--allow-fallback-to-native",
+                "--append-version",
+                "~ubuntu%s.1" % distroseries_version,
+                recipe_path,
+                self.tree_path,
+            ]
+        )
         retcode = call_report(cmd, env=env)
         if retcode != 0:
             return retcode
-        (source,) = (name for name in os.listdir(self.tree_path)
-                     if os.path.isdir(os.path.join(self.tree_path, name)))
+        (source,) = (
+            name
+            for name in os.listdir(self.tree_path)
+            if os.path.isdir(os.path.join(self.tree_path, name))
+        )
         self.source_dir_relative = os.path.join(
-            self.work_dir_relative, 'tree', source)
+            self.work_dir_relative, "tree", source
+        )
         return retcode
 
     def getPackageName(self):
         source_dir = os.path.join(
-            self.chroot_path, self.source_dir_relative.lstrip('/'))
-        changelog = os.path.join(source_dir, 'debian/changelog')
-        return open(
-            changelog, errors='replace').readline().split(' ')[0]
+            self.chroot_path, self.source_dir_relative.lstrip("/")
+        )
+        changelog = os.path.join(source_dir, "debian/changelog")
+        return open(changelog, errors="replace").readline().split(" ")[0]
 
     def getSourceControl(self):
         """Return the parsed source control stanza from the source tree."""
         source_dir = os.path.join(
-            self.chroot_path, self.source_dir_relative.lstrip('/'))
+            self.chroot_path, self.source_dir_relative.lstrip("/")
+        )
         # Open as bytes to allow debian.deb822 to apply its own encoding
         # handling.  We'll get text back from it.
         with open(
-                os.path.join(source_dir, 'debian/control'),
-                'rb') as control_file:
+            os.path.join(source_dir, "debian/control"), "rb"
+        ) as control_file:
             # Don't let Deb822.iter_paragraphs use apt_pkg.TagFile
             # internally, since that only handles real tag files and not the
             # slightly more permissive syntax of debian/control which also
             # allows comments.
-            return next(Deb822.iter_paragraphs(
-                control_file, use_apt_pkg=False))
+            return next(
+                Deb822.iter_paragraphs(control_file, use_apt_pkg=False)
+            )
 
     def makeDummyDsc(self, package):
         control = self.getSourceControl()
-        with open(os.path.join(
-                self.apt_dir, "%s.dsc" % package), "w") as dummy_dsc:
+        with open(
+            os.path.join(self.apt_dir, "%s.dsc" % package), "w"
+        ) as dummy_dsc:
             print(
-                dedent("""\
+                dedent(
+                    """\
                     Format: 1.0
                     Source: %(package)s
                     Architecture: any
                     Version: 99:0
-                    Maintainer: invalid@xxxxxxxxxxx""") % {"package": package},
-                file=dummy_dsc)
+                    Maintainer: invalid@xxxxxxxxxxx"""
+                )
+                % {"package": package},
+                file=dummy_dsc,
+            )
             for field in (
-                    "Build-Depends", "Build-Depends-Indep",
-                    "Build-Conflicts", "Build-Conflicts-Indep",
-                    ):
+                "Build-Depends",
+                "Build-Depends-Indep",
+                "Build-Conflicts",
+                "Build-Conflicts-Indep",
+            ):
                 if field in control:
                     print(f"{field}: {control[field]}", file=dummy_dsc)
             print(file=dummy_dsc)
@@ -205,7 +248,8 @@ class RecipeBuilder:
         conf_path = os.path.join(self.apt_dir, "ftparchive.conf")
         with open(conf_path, "w") as conf:
             print(
-                dedent("""\
+                dedent(
+                    """\
                     Dir::ArchiveDir "%(apt_dir)s";
                     Default::Sources::Compress ". bzip2";
                     BinDirectory "%(apt_dir)s" { Sources "Sources"; };
@@ -215,21 +259,33 @@ class RecipeBuilder:
                         Suite "invalid";
                         Codename "invalid";
                         Description "buildrecipe temporary archive";
-                    };""") % {"apt_dir": self.apt_dir},
-                file=conf)
+                    };"""
+                )
+                % {"apt_dir": self.apt_dir},
+                file=conf,
+            )
         ftparchive_env = dict(os.environ)
         ftparchive_env.pop("APT_CONFIG", None)
         ret = subprocess.call(
             ["apt-ftparchive", "-q=2", "generate", conf_path],
-            env=ftparchive_env)
+            env=ftparchive_env,
+        )
         if ret != 0:
             return ret
 
         with open(os.path.join(self.apt_dir, "Release"), "w") as release:
             return subprocess.call(
-                ["apt-ftparchive", "-q=2", "-c", conf_path, "release",
-                 self.apt_dir],
-                stdout=release, env=ftparchive_env)
+                [
+                    "apt-ftparchive",
+                    "-q=2",
+                    "-c",
+                    conf_path,
+                    "release",
+                    self.apt_dir,
+                ],
+                stdout=release,
+                env=ftparchive_env,
+            )
 
     def enableAptArchive(self):
         """Enable the dummy apt archive.
@@ -241,20 +297,28 @@ class RecipeBuilder:
         """
         tmp_list_path = os.path.join(self.apt_dir, "buildrecipe-archive.list")
         tmp_list_path_relative = os.path.join(
-            self.apt_dir_relative, "buildrecipe-archive.list")
+            self.apt_dir_relative, "buildrecipe-archive.list"
+        )
         with open(tmp_list_path, "w") as tmp_list:
-            print("deb-src [trusted=yes] file://%s ./" % self.apt_dir_relative,
-                  file=tmp_list)
-        ret = self.chroot([
-                'apt-get',
-                '-o', 'Dir::Etc::sourcelist=%s' % tmp_list_path_relative,
-                '-o', 'APT::Get::List-Cleanup=false',
-                'update',
-                ])
+            print(
+                "deb-src [trusted=yes] file://%s ./" % self.apt_dir_relative,
+                file=tmp_list,
+            )
+        ret = self.chroot(
+            [
+                "apt-get",
+                "-o",
+                "Dir::Etc::sourcelist=%s" % tmp_list_path_relative,
+                "-o",
+                "APT::Get::List-Cleanup=false",
+                "update",
+            ]
+        )
         if ret == 0:
             list_path = os.path.join(
-                self.apt_sources_list_dir, "buildrecipe-archive.list")
-            return subprocess.call(['sudo', 'mv', tmp_list_path, list_path])
+                self.apt_sources_list_dir, "buildrecipe-archive.list"
+            )
+            return subprocess.call(["sudo", "mv", tmp_list_path, list_path])
         return ret
 
     def setUpAptArchive(self, package):
@@ -273,20 +337,22 @@ class RecipeBuilder:
         """Install the build-depends of the source tree."""
         package = self.getPackageName()
         currently_building_contents = (
-            'Package: %s\n'
-            'Suite: %s\n'
-            'Component: %s\n'
-            'Purpose: %s\n'
-            'Build-Debug-Symbols: no\n' %
-            (package, self.suite, self.component, self.archive_purpose))
-        with tempfile.NamedTemporaryFile(mode='w+') as currently_building:
+            "Package: %s\n"
+            "Suite: %s\n"
+            "Component: %s\n"
+            "Purpose: %s\n"
+            "Build-Debug-Symbols: no\n"
+            % (package, self.suite, self.component, self.archive_purpose)
+        )
+        with tempfile.NamedTemporaryFile(mode="w+") as currently_building:
             currently_building.write(currently_building_contents)
             currently_building.flush()
             os.fchmod(currently_building.fileno(), 0o644)
-            self.copy_in(currently_building.name, '/CurrentlyBuilding')
+            self.copy_in(currently_building.name, "/CurrentlyBuilding")
         self.setUpAptArchive(package)
         return self.chroot(
-            ['apt-get', 'build-dep', '-y', '--only-source', package])
+            ["apt-get", "build-dep", "-y", "--only-source", package]
+        )
 
     def chroot(self, args, echo=False):
         """Run a command in the chroot.
@@ -295,11 +361,14 @@ class RecipeBuilder:
         :return: the status code.
         """
         if echo:
-            print("Running in chroot: %s" %
-                  ' '.join("'%s'" % arg for arg in args))
+            print(
+                "Running in chroot: %s"
+                % " ".join("'%s'" % arg for arg in args)
+            )
             sys.stdout.flush()
         return subprocess.call(
-            ['sudo', '/usr/sbin/chroot', self.chroot_path] + args)
+            ["sudo", "/usr/sbin/chroot", self.chroot_path] + args
+        )
 
     def copy_in(self, source_path, target_path):
         """Copy a file into the target environment.
@@ -318,20 +387,37 @@ class RecipeBuilder:
         # in the target.
         mode = stat.S_IMODE(os.stat(source_path).st_mode)
         full_target_path = os.path.join(
-            self.chroot_path, target_path.lstrip("/"))
+            self.chroot_path, target_path.lstrip("/")
+        )
         subprocess.check_call(
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "%o" % mode,
-             source_path, full_target_path])
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "%o" % mode,
+                source_path,
+                full_target_path,
+            ]
+        )
 
     def buildSourcePackage(self):
         """Build the source package.
 
         :return: a retcode from dpkg-buildpackage.
         """
-        retcode = self.chroot([
-            'su', '-c',
-            'cd %s && /usr/bin/dpkg-buildpackage -i -I -us -uc -S -sa'
-            % self.source_dir_relative, self.username])
+        retcode = self.chroot(
+            [
+                "su",
+                "-c",
+                "cd %s && /usr/bin/dpkg-buildpackage -i -I -us -uc -S -sa"
+                % self.source_dir_relative,
+                self.username,
+            ]
+        )
         for filename in os.listdir(self.tree_path):
             path = os.path.join(self.tree_path, filename)
             if os.path.isfile(path):
@@ -346,17 +432,22 @@ def get_build_path(build_id, *extra):
     :param extra: the extra path segments within the build directory.
     :return: the generated path.
     """
-    return os.path.join(
-        os.environ["HOME"], "build-" + build_id, *extra)
+    return os.path.join(os.environ["HOME"], "build-" + build_id, *extra)
 
 
 def main():
-    parser = OptionParser(usage=(
-        "usage: %prog BUILD-ID AUTHOR-NAME AUTHOR-EMAIL SUITE "
-        "DISTROSERIES-NAME COMPONENT ARCHIVE-PURPOSE"))
+    parser = OptionParser(
+        usage=(
+            "usage: %prog BUILD-ID AUTHOR-NAME AUTHOR-EMAIL SUITE "
+            "DISTROSERIES-NAME COMPONENT ARCHIVE-PURPOSE"
+        )
+    )
     parser.add_option(
-        "--git", default=False, action="store_true",
-        help="build a git recipe (default: bzr)")
+        "--git",
+        default=False,
+        action="store_true",
+        help="build a git recipe (default: bzr)",
+    )
     options, args = parser.parse_args()
 
     builder = RecipeBuilder(*args, git=options.git)
@@ -371,5 +462,5 @@ def main():
     return RETCODE_SUCCESS
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())
diff --git a/bin/check-implicit-pointer-functions b/bin/check-implicit-pointer-functions
index a8fcd95..36dbec3 100755
--- a/bin/check-implicit-pointer-functions
+++ b/bin/check-implicit-pointer-functions
@@ -5,8 +5,8 @@
 
 """Scan for compiler warnings that are likely to cause 64-bit problems."""
 
-from argparse import ArgumentParser
 import sys
+from argparse import ArgumentParser
 
 from lpbuildd.check_implicit_pointer_functions import filter_log
 
@@ -14,11 +14,17 @@ from lpbuildd.check_implicit_pointer_functions import filter_log
 def main():
     parser = ArgumentParser(description=__doc__)
     parser.add_argument(
-        "--inline", default=False, action="store_true",
-        help="Pass through input, inserting errors in-line")
+        "--inline",
+        default=False,
+        action="store_true",
+        help="Pass through input, inserting errors in-line",
+    )
     parser.add_argument(
-        "--warnonly", default=False, action="store_true",
-        help="Exit zero even if problems are found")
+        "--warnonly",
+        default=False,
+        action="store_true",
+        help="Exit zero even if problems are found",
+    )
     args = parser.parse_args()
     stdin = sys.stdin
     stdout = sys.stdout
diff --git a/bin/in-target b/bin/in-target
index 664936d..0cdf8f2 100755
--- a/bin/in-target
+++ b/bin/in-target
@@ -7,10 +7,7 @@
 
 import sys
 
-from lpbuildd.target.cli import (
-    configure_logging,
-    parse_args,
-    )
+from lpbuildd.target.cli import configure_logging, parse_args
 
 
 def main():
diff --git a/bin/lpbuildd-git-proxy b/bin/lpbuildd-git-proxy
index f1ad451..578a78c 100755
--- a/bin/lpbuildd-git-proxy
+++ b/bin/lpbuildd-git-proxy
@@ -15,12 +15,17 @@ from urllib.parse import urlparse
 def main():
     proxy_url = urlparse(os.environ["http_proxy"])
     proxy_arg = "PROXY:%s:%s:%s" % (
-        proxy_url.hostname, sys.argv[1], sys.argv[2])
+        proxy_url.hostname,
+        sys.argv[1],
+        sys.argv[2],
+    )
     if proxy_url.port:
         proxy_arg += ",proxyport=%s" % proxy_url.port
     if proxy_url.username:
         proxy_arg += ",proxyauth=%s:%s" % (
-            proxy_url.username, proxy_url.password)
+            proxy_url.username,
+            proxy_url.password,
+        )
     os.execvp("socat", ["socat", "STDIO", proxy_arg])
 
 
diff --git a/bin/test_buildd_generatetranslationtemplates b/bin/test_buildd_generatetranslationtemplates
index 9cce305..ffaf819 100755
--- a/bin/test_buildd_generatetranslationtemplates
+++ b/bin/test_buildd_generatetranslationtemplates
@@ -17,11 +17,11 @@ if len(sys.argv) != 2:
 
 chroot_sha1 = sys.argv[1]
 
-proxy = ServerProxy('http://localhost:8221/rpc')
+proxy = ServerProxy("http://localhost:8221/rpc")
 print(proxy.info())
 print(proxy.status())
-buildid = '1-2'
-build_type = 'translation-templates'
+buildid = "1-2"
+build_type = "translation-templates"
 filemap = {}
-args = {'branch_url': 'no-branch-here-sorry'}
+args = {"branch_url": "no-branch-here-sorry"}
 print(proxy.build(buildid, build_type, chroot_sha1, filemap, args))
diff --git a/bin/test_buildd_recipe b/bin/test_buildd_recipe
index 9090688..a1b4a26 100755
--- a/bin/test_buildd_recipe
+++ b/bin/test_buildd_recipe
@@ -8,40 +8,52 @@
 import sys
 from xmlrpc.client import ServerProxy
 
-country_code = 'us'
-apt_cacher_ng_host = 'stumpy'
-distroseries_name = 'maverick'
+country_code = "us"
+apt_cacher_ng_host = "stumpy"
+distroseries_name = "maverick"
 recipe_text = """# bzr-builder format 0.2 deb-version {debupstream}-0~{revno}
 http://bazaar.launchpad.dev/~ppa-user/+junk/wakeonlan"""
 
 
 def deb_line(host, suites):
-    prefix = 'deb http://'
+    prefix = "deb http://"
     if apt_cacher_ng_host is not None:
-        prefix += '%s:3142/' % apt_cacher_ng_host
-    return f'{prefix}{host} {distroseries_name} {suites}'
+        prefix += "%s:3142/" % apt_cacher_ng_host
+    return f"{prefix}{host} {distroseries_name} {suites}"
 
 
-proxy = ServerProxy('http://localhost:8221/rpc')
-print(proxy.echo('Hello World'))
+proxy = ServerProxy("http://localhost:8221/rpc")
+print(proxy.echo("Hello World"))
 print(proxy.info())
 status = proxy.status()
 print(status)
-if status[0] != 'BuilderStatus.IDLE':
+if status[0] != "BuilderStatus.IDLE":
     print("Aborting due to non-IDLE builder.")
     sys.exit(1)
-print(proxy.build(
-    '1-2', 'sourcepackagerecipe', '1ef177161c3cb073e66bf1550931c6fbaa0a94b0',
-    {}, {'author_name': 'Steve\u1234',
-         'author_email': 'stevea@xxxxxxxxxxx',
-         'suite': distroseries_name,
-         'distroseries_name': distroseries_name,
-         'ogrecomponent': 'universe',
-         'archive_purpose': 'puppies',
-         'recipe_text': recipe_text,
-         'archives': [
-            deb_line('%s.archive.ubuntu.com/ubuntu' % country_code,
-                     'main universe'),
-            deb_line('ppa.launchpad.net/launchpad/bzr-builder-dev/ubuntu',
-                     'main'),
-            ]}))
+print(
+    proxy.build(
+        "1-2",
+        "sourcepackagerecipe",
+        "1ef177161c3cb073e66bf1550931c6fbaa0a94b0",
+        {},
+        {
+            "author_name": "Steve\u1234",
+            "author_email": "stevea@xxxxxxxxxxx",
+            "suite": distroseries_name,
+            "distroseries_name": distroseries_name,
+            "ogrecomponent": "universe",
+            "archive_purpose": "puppies",
+            "recipe_text": recipe_text,
+            "archives": [
+                deb_line(
+                    "%s.archive.ubuntu.com/ubuntu" % country_code,
+                    "main universe",
+                ),
+                deb_line(
+                    "ppa.launchpad.net/launchpad/bzr-builder-dev/ubuntu",
+                    "main",
+                ),
+            ],
+        },
+    )
+)
diff --git a/buildd-genconfig b/buildd-genconfig
index 0ad028d..f991f6e 100755
--- a/buildd-genconfig
+++ b/buildd-genconfig
@@ -3,47 +3,64 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from optparse import OptionParser
 import os
+from optparse import OptionParser
 
 archtag = os.popen("dpkg --print-architecture").read().strip()
 
 parser = OptionParser()
 parser.add_option(
-    "-n", "--name", dest="NAME",
+    "-n",
+    "--name",
+    dest="NAME",
     help="the name for this buildd",
     metavar="NAME",
-    default="default")
+    default="default",
+)
 
 parser.add_option(
-    "-H", "--host", dest="BINDHOST",
+    "-H",
+    "--host",
+    dest="BINDHOST",
     help="the IP/host this buildd binds to",
     metavar="HOSTNAME",
-    default="localhost")
+    default="localhost",
+)
 
 parser.add_option(
-    "-p", "--port", dest="BINDPORT",
+    "-p",
+    "--port",
+    dest="BINDPORT",
     help="the port this buildd binds to",
     metavar="PORT",
-    default="8221")
+    default="8221",
+)
 
 parser.add_option(
-    "-a", "--arch", dest="ARCHTAG",
+    "-a",
+    "--arch",
+    dest="ARCHTAG",
     help="the arch tag this buildd claims",
     metavar="ARCHTAG",
-    default=archtag)
+    default=archtag,
+)
 
 parser.add_option(
-    "-t", "--template", dest="TEMPLATE",
+    "-t",
+    "--template",
+    dest="TEMPLATE",
     help="the template file to use",
     metavar="FILE",
-    default="/usr/share/launchpad-buildd/template-buildd.conf")
+    default="/usr/share/launchpad-buildd/template-buildd.conf",
+)
 
 parser.add_option(
-    "--proxy-port", dest="PROXYPORT",
+    "--proxy-port",
+    dest="PROXYPORT",
     help="the port the local builder proxy binds to",
     metavar="PORT",
-    default="8222")
+    default="8222",
+)
 
 (options, args) = parser.parse_args()
 
@@ -55,10 +72,9 @@ replacements = {
     "@ARCHTAG@": options.ARCHTAG,
     "@BINDPORT@": options.BINDPORT,
     "@PROXYPORT@": options.PROXYPORT,
-    }
+}
 
 for replacement_key in replacements:
-    template = template.replace(replacement_key,
-                                replacements[replacement_key])
+    template = template.replace(replacement_key, replacements[replacement_key])
 
 print(template.strip())
diff --git a/charm/reactive/launchpad-buildd.py b/charm/reactive/launchpad-buildd.py
index 4e88d3e..c2ada48 100644
--- a/charm/reactive/launchpad-buildd.py
+++ b/charm/reactive/launchpad-buildd.py
@@ -12,7 +12,7 @@ from charms.reactive import (
     set_state,
     when,
     when_not,
-    )
+)
 
 
 @only_once
diff --git a/debian/changelog b/debian/changelog
index 8869014..af71f9c 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+launchpad-buildd (230) UNRELEASED; urgency=medium
+
+  * Apply black and isort.
+
+ -- Colin Watson <cjwatson@xxxxxxxxxx>  Tue, 07 Feb 2023 20:02:04 +0000
+
 launchpad-buildd (229) focal; urgency=medium
 
   * Call dpkg-architecture with -a rather than setting DEB_HOST_ARCH, to
diff --git a/debian/upgrade-config b/debian/upgrade-config
index 9594a2e..33f38cb 100755
--- a/debian/upgrade-config
+++ b/debian/upgrade-config
@@ -6,25 +6,22 @@
 """Upgrade a launchpad-buildd configuration file."""
 
 try:
-    from configparser import (
-        ConfigParser as SafeConfigParser,
-        NoOptionError,
-        NoSectionError,
-        )
+    from configparser import ConfigParser as SafeConfigParser
+    from configparser import NoOptionError, NoSectionError
 except ImportError:
     from ConfigParser import (
         SafeConfigParser,
         NoOptionError,
         NoSectionError,
-        )
+    )
+
 import os
 import re
-import sys
 import subprocess
+import sys
 
 import apt_pkg
 
-
 apt_pkg.init()
 
 (old_version, conf_file) = sys.argv[1:]
@@ -34,8 +31,8 @@ bin_path = "/usr/share/launchpad-buildd/slavebin"
 
 def upgrade_to_12():
     print("Upgrading %s to version 12" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev12~"])
-    in_file = open(conf_file+"-prev12~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev12~"])
+    in_file = open(conf_file + "-prev12~")
     out_file = open(conf_file, "w")
     for line in in_file:
         if line.startswith("[debianmanager]"):
@@ -49,8 +46,8 @@ def upgrade_to_12():
 
 def upgrade_to_34():
     print("Upgrading %s to version 34" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev34~"])
-    in_file = open(conf_file+"-prev34~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev34~"])
+    in_file = open(conf_file + "-prev34~")
     out_file = open(conf_file, "w")
     for line in in_file:
         if line.startswith("[debianmanager]"):
@@ -62,8 +59,8 @@ def upgrade_to_34():
 
 def upgrade_to_39():
     print("Upgrading %s to version 39" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev39~"])
-    in_file = open(conf_file+"-prev39~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev39~"])
+    in_file = open(conf_file + "-prev39~")
     out_file = open(conf_file, "w")
     for line in in_file:
         if line.startswith("sbuildargs"):
@@ -77,8 +74,8 @@ def upgrade_to_39():
 
 def upgrade_to_57():
     print("Upgrading %s to version 57" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev57~"])
-    in_file = open(conf_file+"-prev57~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev57~"])
+    in_file = open(conf_file + "-prev57~")
     out_file = open(conf_file, "w")
     # We want to move all the sbuild lines to a new
     # 'binarypackagemanager' section at the end.
@@ -88,7 +85,7 @@ def upgrade_to_57():
             binarypackage_lines.append(line)
         else:
             out_file.write(line)
-    out_file.write('[binarypackagemanager]\n')
+    out_file.write("[binarypackagemanager]\n")
     for line in binarypackage_lines:
         out_file.write(line)
     in_file.close()
@@ -97,41 +94,43 @@ def upgrade_to_57():
 
 def upgrade_to_58():
     print("Upgrading %s to version 58" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev58~"])
-    in_file = open(conf_file+"-prev58~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev58~"])
+    in_file = open(conf_file + "-prev58~")
     out_file = open(conf_file, "w")
     out_file.write(in_file.read())
     out_file.write(
-        '\n[sourcepackagerecipemanager]\n'
-        'buildrecipepath = %s/buildrecipe\n' % bin_path)
+        "\n[sourcepackagerecipemanager]\n"
+        "buildrecipepath = %s/buildrecipe\n" % bin_path
+    )
 
 
 def upgrade_to_59():
     print("Upgrading %s to version 59" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev59~"])
-    in_file = open(conf_file+"-prev59~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev59~"])
+    in_file = open(conf_file + "-prev59~")
     out_file = open(conf_file, "w")
     out_file.write(in_file.read())
     out_file.write(
-        '\n[translationtemplatesmanager]\n'
-        'generatepath = %s/generate-translation-templates\n'
-        'resultarchive = translation-templates.tar.gz\n' % bin_path)
+        "\n[translationtemplatesmanager]\n"
+        "generatepath = %s/generate-translation-templates\n"
+        "resultarchive = translation-templates.tar.gz\n" % bin_path
+    )
 
 
 def upgrade_to_63():
     print("Upgrading %s to version 63" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev63~"])
-    in_file = open(conf_file+"-prev63~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev63~"])
+    in_file = open(conf_file + "-prev63~")
     out_file = open(conf_file, "w")
     for line in in_file:
-        if not line.startswith('ogrepath'):
+        if not line.startswith("ogrepath"):
             out_file.write(line)
 
 
 def upgrade_to_110():
     print("Upgrading %s to version 110" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev110~"])
-    in_file = open(conf_file+"-prev110~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev110~"])
+    in_file = open(conf_file + "-prev110~")
     out_file = open(conf_file, "w")
     for line in in_file:
         if line.startswith("[allmanagers]"):
@@ -143,16 +142,17 @@ def upgrade_to_110():
 
 def upgrade_to_115():
     print("Upgrading %s to version 115" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev115~"])
+    subprocess.call(["mv", conf_file, conf_file + "-prev115~"])
     in_allmanagers = False
-    in_file = open(conf_file+"-prev115~")
+    in_file = open(conf_file + "-prev115~")
     out_file = open(conf_file, "w")
     for line in in_file:
         if line.startswith("[allmanagers]"):
             in_allmanagers = True
         elif in_allmanagers and (line.startswith("[") or not line.strip()):
             out_file.write(
-                "processscanpath = %s/scan-for-processes\n" % bin_path)
+                "processscanpath = %s/scan-for-processes\n" % bin_path
+            )
             in_allmanagers = False
         if not line.startswith("processscanpath = "):
             out_file.write(line)
@@ -162,27 +162,28 @@ def upgrade_to_115():
 
 def upgrade_to_120():
     print("Upgrading %s to version 120" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev120~"])
-    in_file = open(conf_file+"-prev120~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev120~"])
+    in_file = open(conf_file + "-prev120~")
     out_file = open(conf_file, "w")
     out_file.write(in_file.read())
     out_file.write(
         "\n[livefilesystemmanager]\n"
-        "buildlivefspath = %s/buildlivefs\n" % bin_path)
+        "buildlivefspath = %s/buildlivefs\n" % bin_path
+    )
     in_file.close()
     out_file.close()
 
 
 def upgrade_to_126():
     print("Upgrading %s to version 126" % conf_file)
-    subprocess.call(["mv", conf_file, conf_file+"-prev126~"])
-    in_file = open(conf_file+"-prev126~")
+    subprocess.call(["mv", conf_file, conf_file + "-prev126~"])
+    in_file = open(conf_file + "-prev126~")
     out_file = open(conf_file, "w")
     archive_ubuntu = " --archive=ubuntu"
     for line in in_file:
         line = line.rstrip("\n")
         if line.endswith(archive_ubuntu):
-            line = line[:-len(archive_ubuntu)]
+            line = line[: -len(archive_ubuntu)]
         out_file.write(line + "\n")
     in_file.close()
     out_file.close()
@@ -195,13 +196,25 @@ def upgrade_to_127():
     in_file = open(conf_file + "-prev127~")
     out_file = open(conf_file, "w")
     obsolete_prefixes = [
-        '[allmanagers]', '[debianmanager]', '[binarypackagemanager]',
-        '[sourcepackagerecipemanager]', '[livefilesystemmanager]',
-        'preppath ', 'unpackpath ', 'cleanpath ', 'mountpath ', 'umountpath ',
-        'processscanpath ', 'updatepath ', 'sourcespath ', 'sbuildpath ',
-        'sbuildargs ', 'buildrecipepath ', 'generatepath ',
-        'buildlivefspath ',
-        ]
+        "[allmanagers]",
+        "[debianmanager]",
+        "[binarypackagemanager]",
+        "[sourcepackagerecipemanager]",
+        "[livefilesystemmanager]",
+        "preppath ",
+        "unpackpath ",
+        "cleanpath ",
+        "mountpath ",
+        "umountpath ",
+        "processscanpath ",
+        "updatepath ",
+        "sourcespath ",
+        "sbuildpath ",
+        "sbuildargs ",
+        "buildrecipepath ",
+        "generatepath ",
+        "buildlivefspath ",
+    ]
     wrote_blank = False
     for line in in_file:
         # Remove obsolete paths and sections.
@@ -229,9 +242,7 @@ def upgrade_to_162():
     with open(conf_file + "-prev162~") as in_file:
         with open(conf_file, "w") as out_file:
             out_file.write(in_file.read())
-            out_file.write(
-                "\n[snapmanager]\n"
-                "proxyport = 8222\n")
+            out_file.write("\n[snapmanager]\n" "proxyport = 8222\n")
 
 
 def upgrade_to_190():
@@ -283,14 +294,16 @@ def upgrade_to_200():
                     out_file.write(line)
                     wrote_blank = False
 
-                if (in_snapmanager and
-                        not line.startswith("[snapmanager]") and
-                        (line.startswith("[") or not line.strip())):
+                if (
+                    in_snapmanager
+                    and not line.startswith("[snapmanager]")
+                    and (line.startswith("[") or not line.strip())
+                ):
                     in_snapmanager = False
 
 
 if __name__ == "__main__":
-    old_version = re.sub(r'[~-].*', '', old_version)
+    old_version = re.sub(r"[~-].*", "", old_version)
     if apt_pkg.version_compare(old_version, "12") < 0:
         upgrade_to_12()
     if apt_pkg.version_compare(old_version, "34") < 0:
diff --git a/docs/conf.py b/docs/conf.py
index b153225..6c74a9d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -17,12 +17,12 @@
 
 # -- Project information -----------------------------------------------------
 
-project = 'launchpad-buildd'
-copyright = '2009-2022, Canonical Ltd'
-author = 'Launchpad developers'
+project = "launchpad-buildd"
+copyright = "2009-2022, Canonical Ltd"
+author = "Launchpad developers"
 
 # The full version, including alpha/beta/rc tags
-release = '207'
+release = "207"
 
 
 # -- General configuration ---------------------------------------------------
@@ -30,16 +30,15 @@ release = '207'
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = [
-]
+extensions = []
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 
 # -- Options for HTML output -------------------------------------------------
@@ -47,7 +46,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'alabaster'
+html_theme = "alabaster"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
diff --git a/lpbuildd/binarypackage.py b/lpbuildd/binarypackage.py
index 44b1c6c..5b7bc54 100644
--- a/lpbuildd/binarypackage.py
+++ b/lpbuildd/binarypackage.py
@@ -1,29 +1,24 @@
 # Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from collections import defaultdict
 import os
 import re
 import subprocess
 import tempfile
-from textwrap import dedent
 import traceback
+from collections import defaultdict
+from textwrap import dedent
 
 import apt_pkg
-from debian.deb822 import (
-    Dsc,
-    PkgRelation,
-    )
+from debian.deb822 import Dsc, PkgRelation
 from debian.debian_support import Version
 
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-    )
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 
 
 class SBuildExitCodes:
     """SBUILD process result codes."""
+
     OK = 0
     FAILED = 1
     ATTEMPTED = 2
@@ -32,17 +27,17 @@ class SBuildExitCodes:
 
 
 APT_MISSING_DEP_PATTERNS = [
-    r'but [^ ]* is to be installed',
-    r'but [^ ]* is installed',
-    r'but it is not installable',
-    r'but it is a virtual package',
-    ]
+    r"but [^ ]* is to be installed",
+    r"but [^ ]* is installed",
+    r"but it is not installable",
+    r"but it is a virtual package",
+]
 
 
 APT_DUBIOUS_DEP_PATTERNS = [
-    r'but it is not installed',
-    r'but it is not going to be installed',
-    ]
+    r"but it is not installed",
+    r"but it is not going to be installed",
+]
 
 
 class BuildLogRegexes:
@@ -51,19 +46,20 @@ class BuildLogRegexes:
     These allow performing actions based on regexes, and extracting
     dependencies for auto dep-waits.
     """
+
     GIVENBACK = [
         (r"^E: There are problems and -y was used without --force-yes"),
-        ]
+    ]
     MAYBEDEPFAIL = [
-        r'The following packages have unmet dependencies:\n'
-        r'.* Depends: [^ ]*( \([^)]*\))? (%s)\n'
-        % r'|'.join(APT_DUBIOUS_DEP_PATTERNS),
-        ]
+        r"The following packages have unmet dependencies:\n"
+        r".* Depends: [^ ]*( \([^)]*\))? (%s)\n"
+        % r"|".join(APT_DUBIOUS_DEP_PATTERNS),
+    ]
     DEPFAIL = {
-        r'The following packages have unmet dependencies:\n'
-        r'.* Depends: (?P<p>[^ ]*( \([^)]*\))?) (%s)\n'
-        % r'|'.join(APT_MISSING_DEP_PATTERNS): r"\g<p>",
-        }
+        r"The following packages have unmet dependencies:\n"
+        r".* Depends: (?P<p>[^ ]*( \([^)]*\))?) (%s)\n"
+        % r"|".join(APT_MISSING_DEP_PATTERNS): r"\g<p>",
+    }
 
 
 class DpkgArchitectureCache:
@@ -77,7 +73,7 @@ class DpkgArchitectureCache:
             command = ["dpkg-architecture", "-a%s" % arch, "-i%s" % wildcard]
             env = dict(os.environ)
             env.pop("DEB_HOST_ARCH", None)
-            ret = (subprocess.call(command, env=env) == 0)
+            ret = subprocess.call(command, env=env) == 0
             self._matches[(arch, wildcard)] = ret
         return self._matches[(arch, wildcard)]
 
@@ -101,11 +97,12 @@ class BinaryPackageBuildManager(DebianBuildManager):
     @property
     def chroot_path(self):
         return os.path.join(
-            self.home, "build-" + self._buildid, 'chroot-autobuild')
+            self.home, "build-" + self._buildid, "chroot-autobuild"
+        )
 
     @property
     def schroot_config_path(self):
-        return os.path.join('/etc/schroot/chroot.d', 'build-' + self._buildid)
+        return os.path.join("/etc/schroot/chroot.d", "build-" + self._buildid)
 
     def initiate(self, files, chroot, extra_args):
         """Initiate a build with a given set of files and chroot."""
@@ -117,11 +114,11 @@ class BinaryPackageBuildManager(DebianBuildManager):
         if self._dscfile is None:
             raise ValueError(files)
 
-        self.archive_purpose = extra_args.get('archive_purpose')
-        self.suite = extra_args['suite']
-        self.component = extra_args['ogrecomponent']
-        self.arch_indep = extra_args.get('arch_indep', False)
-        self.build_debug_symbols = extra_args.get('build_debug_symbols', False)
+        self.archive_purpose = extra_args.get("archive_purpose")
+        self.suite = extra_args["suite"]
+        self.component = extra_args["ogrecomponent"]
+        self.arch_indep = extra_args.get("arch_indep", False)
+        self.build_debug_symbols = extra_args.get("build_debug_symbols", False)
 
         super().initiate(files, chroot, extra_args)
 
@@ -132,31 +129,51 @@ class BinaryPackageBuildManager(DebianBuildManager):
             # and teardown ourselves: it's easier to do this the same way
             # for all build types.
             print(
-                dedent(f'''\
+                dedent(
+                    f"""\
                     [build-{self._buildid}]
                     description=build-{self._buildid}
                     groups=sbuild,root
                     root-groups=sbuild,root
                     type=plain
                     directory={self.chroot_path}
-                    '''),
-                file=schroot_file, end='')
+                    """
+                ),
+                file=schroot_file,
+                end="",
+            )
             schroot_file.flush()
             subprocess.check_call(
-                ['sudo', 'install', '-o', 'root', '-g', 'root', '-m', '0644',
-                 schroot_file.name, self.schroot_config_path])
+                [
+                    "sudo",
+                    "install",
+                    "-o",
+                    "root",
+                    "-g",
+                    "root",
+                    "-m",
+                    "0644",
+                    schroot_file.name,
+                    self.schroot_config_path,
+                ]
+            )
 
         currently_building_contents = (
-            'Package: %s\n'
-            'Component: %s\n'
-            'Suite: %s\n'
-            'Purpose: %s\n'
-            % (self._dscfile.split('_')[0], self.component, self.suite,
-               self.archive_purpose))
+            "Package: %s\n"
+            "Component: %s\n"
+            "Suite: %s\n"
+            "Purpose: %s\n"
+            % (
+                self._dscfile.split("_")[0],
+                self.component,
+                self.suite,
+                self.archive_purpose,
+            )
+        )
         if self.build_debug_symbols:
-            currently_building_contents += 'Build-Debug-Symbols: yes\n'
+            currently_building_contents += "Build-Debug-Symbols: yes\n"
         with self.backend.open(
-            '/CurrentlyBuilding', mode='w+'
+            "/CurrentlyBuilding", mode="w+"
         ) as currently_building:
             currently_building.write(currently_building_contents)
             os.fchmod(currently_building.fileno(), 0o644)
@@ -184,10 +201,18 @@ class BinaryPackageBuildManager(DebianBuildManager):
         if os.path.exists(os.path.join(self.chroot_path, apt_helper[1:])):
             try:
                 paths = subprocess.check_output(
-                    ["sudo", "chroot", self.chroot_path,
-                     "apt-get", "indextargets", "--format", "$(FILENAME)",
-                     "Created-By: Packages"],
-                    universal_newlines=True).splitlines()
+                    [
+                        "sudo",
+                        "chroot",
+                        self.chroot_path,
+                        "apt-get",
+                        "indextargets",
+                        "--format",
+                        "$(FILENAME)",
+                        "Created-By: Packages",
+                    ],
+                    universal_newlines=True,
+                ).splitlines()
             except subprocess.CalledProcessError:
                 # This might be e.g. Ubuntu 14.04, where
                 # /usr/lib/apt/apt-helper exists but "apt-get indextargets"
@@ -196,9 +221,16 @@ class BinaryPackageBuildManager(DebianBuildManager):
         if paths is not None:
             for path in paths:
                 helper = subprocess.Popen(
-                    ["sudo", "chroot", self.chroot_path,
-                     apt_helper, "cat-file", path],
-                    stdout=subprocess.PIPE)
+                    [
+                        "sudo",
+                        "chroot",
+                        self.chroot_path,
+                        apt_helper,
+                        "cat-file",
+                        path,
+                    ],
+                    stdout=subprocess.PIPE,
+                )
                 try:
                     yield helper.stdout
                 finally:
@@ -206,7 +238,8 @@ class BinaryPackageBuildManager(DebianBuildManager):
                     helper.wait()
         else:
             apt_lists = os.path.join(
-                self.chroot_path, "var", "lib", "apt", "lists")
+                self.chroot_path, "var", "lib", "apt", "lists"
+            )
             for name in sorted(os.listdir(apt_lists)):
                 if name.endswith("_Packages"):
                     path = os.path.join(apt_lists, name)
@@ -234,7 +267,8 @@ class BinaryPackageBuildManager(DebianBuildManager):
                         if provide[0][1] and provide[0][2] != "=":
                             continue
                         available[provide[0][0]].add(
-                            provide[0][1] if provide[0][1] else None)
+                            provide[0][1] if provide[0][1] else None
+                        )
         return available
 
     def getBuildDepends(self, dscpath, arch_indep):
@@ -285,8 +319,10 @@ class BinaryPackageBuildManager(DebianBuildManager):
                 return True
         dep_restrictions = dep.get("restrictions")
         if dep_restrictions is not None:
-            if all(any(restriction.enabled for restriction in restrlist)
-                   for restrlist in dep_restrictions):
+            if all(
+                any(restriction.enabled for restriction in restrlist)
+                for restrlist in dep_restrictions
+            ):
                 # This dependency "matches" in the sense that it's ignored
                 # when no build profiles are enabled.
                 return True
@@ -301,12 +337,13 @@ class BinaryPackageBuildManager(DebianBuildManager):
             "=": (lambda a, b: a == b),
             ">=": (lambda a, b: a >= b),
             ">>": (lambda a, b: a > b),
-            }
+        }
         operator = operator_map[dep_version[0]]
         want_version = dep_version[1]
         for version in available[dep["name"]]:
-            if (version is not None and
-                    operator(Version(version), want_version)):
+            if version is not None and operator(
+                Version(version), want_version
+            ):
                 return True
         return False
 
@@ -410,19 +447,24 @@ class BinaryPackageBuildManager(DebianBuildManager):
             elif rx in BuildLogRegexes.MAYBEDEPFAIL:
                 # These matches need further analysis.
                 dscpath = os.path.join(
-                    self.home, "build-%s" % self._buildid, self._dscfile)
+                    self.home, "build-%s" % self._buildid, self._dscfile
+                )
                 missing_dep = self.analyseDepWait(
                     self.getBuildDepends(dscpath, self.arch_indep),
-                    self.getAvailablePackages())
+                    self.getAvailablePackages(),
+                )
                 if missing_dep is None:
                     success = SBuildExitCodes.FAILED
             elif rx in BuildLogRegexes.DEPFAIL:
                 # A depwait match forces depwait.
                 missing_dep = mo.expand(
-                    BuildLogRegexes.DEPFAIL[rx].encode("UTF-8"))
+                    BuildLogRegexes.DEPFAIL[rx].encode("UTF-8")
+                )
                 missing_dep = self.stripDependencies(
                     PkgRelation.parse_relations(
-                        missing_dep.decode("UTF-8", "replace")))
+                        missing_dep.decode("UTF-8", "replace")
+                    )
+                )
             else:
                 # Otherwise it was a givenback pattern, so leave it
                 # in givenback.
@@ -449,7 +491,7 @@ class BinaryPackageBuildManager(DebianBuildManager):
     def iterateReap_SBUILD(self, success):
         """Finished reaping after sbuild run."""
         # Ignore errors from tearing down schroot configuration.
-        subprocess.call(['sudo', 'rm', '-f', self.schroot_config_path])
+        subprocess.call(["sudo", "rm", "-f", self.schroot_config_path])
 
         self._state = DebianBuildState.UMOUNT
         self.doUnmounting()
diff --git a/lpbuildd/buildd.tac b/lpbuildd/buildd.tac
index d73f300..b2d756a 100644
--- a/lpbuildd/buildd.tac
+++ b/lpbuildd/buildd.tac
@@ -8,35 +8,28 @@ try:
     from configparser import ConfigParser as SafeConfigParser
 except ImportError:
     from ConfigParser import SafeConfigParser
+
 import os
 
-from twisted.application import (
-    service,
-    strports,
-    )
+from twisted.application import service, strports
 from twisted.scripts.twistd import ServerOptions
-from twisted.web import (
-    resource,
-    server,
-    static,
-    )
+from twisted.web import resource, server, static
 
 from lpbuildd.binarypackage import BinaryPackageBuildManager
 from lpbuildd.builder import XMLRPCBuilder
 from lpbuildd.charm import CharmBuildManager
 from lpbuildd.ci import CIBuildManager
-from lpbuildd.oci import OCIBuildManager
 from lpbuildd.livefs import LiveFilesystemBuildManager
 from lpbuildd.log import RotatableFileLogObserver
+from lpbuildd.oci import OCIBuildManager
 from lpbuildd.snap import SnapBuildManager
 from lpbuildd.sourcepackagerecipe import SourcePackageRecipeBuildManager
 from lpbuildd.translationtemplates import TranslationTemplatesBuildManager
 
-
 options = ServerOptions()
 options.parseOptions()
 
-conffile = os.environ.get('BUILDD_CONFIG', 'buildd-example.conf')
+conffile = os.environ.get("BUILDD_CONFIG", "buildd-example.conf")
 
 conf = SafeConfigParser()
 conf.read(conffile)
@@ -45,26 +38,29 @@ builder = XMLRPCBuilder(conf)
 builder.registerManager(BinaryPackageBuildManager, "binarypackage")
 builder.registerManager(SourcePackageRecipeBuildManager, "sourcepackagerecipe")
 builder.registerManager(
-    TranslationTemplatesBuildManager, 'translation-templates')
+    TranslationTemplatesBuildManager, "translation-templates"
+)
 builder.registerManager(LiveFilesystemBuildManager, "livefs")
 builder.registerManager(SnapBuildManager, "snap")
 builder.registerManager(OCIBuildManager, "oci")
 builder.registerManager(CharmBuildManager, "charm")
 builder.registerManager(CIBuildManager, "ci")
 
-application = service.Application('Builder')
+application = service.Application("Builder")
 application.addComponent(
-    RotatableFileLogObserver(options.get('logfile')), ignoreClass=1)
+    RotatableFileLogObserver(options.get("logfile")), ignoreClass=1
+)
 builderService = service.IServiceCollection(application)
 builder.builder.service = builderService
 
 root = resource.Resource()
-root.putChild(b'rpc', builder)
-root.putChild(b'filecache', static.File(conf.get('builder', 'filecache')))
+root.putChild(b"rpc", builder)
+root.putChild(b"filecache", static.File(conf.get("builder", "filecache")))
 buildersite = server.Site(root)
 
-strports.service("tcp:%s" % builder.builder._config.get("builder", "bindport"),
-                 buildersite).setServiceParent(builderService)
+strports.service(
+    "tcp:%s" % builder.builder._config.get("builder", "bindport"), buildersite
+).setServiceParent(builderService)
 
 # You can interact with a running builder like this:
 # (assuming the builder is on localhost:8221)
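
For reference, that interaction looks roughly like this sketch (assuming the builder is listening on localhost:8221 as in the comment above; the info and status method names correspond to the xmlrpc_info and xmlrpc_status handlers in lpbuildd/builder.py below):

    from xmlrpc.client import ServerProxy

    # The XML-RPC endpoint is published under /rpc (see root.putChild above).
    proxy = ServerProxy("http://localhost:8221/rpc")
    # Protocol version, builder architecture and registered manager tags.
    print(proxy.info())
    # Dictionary describing the current builder status.
    print(proxy.status())
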
diff --git a/lpbuildd/builder.py b/lpbuildd/builder.py
index c7a0f50..ed624b3 100644
--- a/lpbuildd/builder.py
+++ b/lpbuildd/builder.py
@@ -6,7 +6,6 @@
 
 # The basic builder implementation.
 
-from functools import partial
 import hashlib
 import json
 import os
@@ -14,25 +13,24 @@ import re
 import shutil
 import sys
 import tempfile
+from functools import partial
 from urllib.request import (
-    build_opener,
     HTTPBasicAuthHandler,
     HTTPPasswordMgrWithPriorAuth,
+    build_opener,
     urlopen,
-    )
+)
 from xmlrpc.client import Binary
 
 import apt
-from twisted.internet import protocol
+from twisted.internet import process, protocol
 from twisted.internet import reactor as default_reactor
-from twisted.internet import process
 from twisted.python import log
 from twisted.web import xmlrpc
 
 from lpbuildd.target.backend import make_backend
 from lpbuildd.util import shell_escape
 
-
 devnull = open("/dev/null")
 
 
@@ -47,13 +45,13 @@ def _sanitizeURLs(bytes_seq):
     """
     # This regular expression will be used to remove authentication
     # credentials from URLs.
-    password_re = re.compile(br'://([^:@/]*:[^:@/]+@)(\S+)')
+    password_re = re.compile(rb"://([^:@/]*:[^:@/]+@)(\S+)")
     # Builder proxy passwords are UUIDs.
-    proxy_auth_re = re.compile(br',proxyauth=[^:]+:[A-Za-z0-9-]+')
+    proxy_auth_re = re.compile(rb",proxyauth=[^:]+:[A-Za-z0-9-]+")
 
     for line in bytes_seq:
-        sanitized_line = password_re.sub(br'://\2', line)
-        sanitized_line = proxy_auth_re.sub(b'', sanitized_line)
+        sanitized_line = password_re.sub(rb"://\2", line)
+        sanitized_line = proxy_auth_re.sub(b"", sanitized_line)
         yield sanitized_line
 
 
@@ -144,7 +142,7 @@ class BuildManager:
         self._subprocess = None
         self._reaped_states = set()
         self.is_archive_private = False
-        self.home = os.environ['HOME']
+        self.home = os.environ["HOME"]
         self.abort_timeout = 120
         self.status_path = get_build_path(self.home, self._buildid, "status")
         self._final_extra_status = None
@@ -160,17 +158,23 @@ class BuildManager:
         self._subprocess = RunCapture(self._builder, iterate, stdin=stdin)
         text_args = [
             arg.decode("UTF-8", "replace") if isinstance(arg, bytes) else arg
-            for arg in args[1:]]
+            for arg in args[1:]
+        ]
         escaped_args = " ".join(shell_escape(arg) for arg in text_args)
         self._builder.log(f"RUN: {command} {escaped_args}\n")
         childfds = {
             0: devnull.fileno() if stdin is None else "w",
             1: "r",
             2: "r",
-            }
+        }
         self._reactor.spawnProcess(
-            self._subprocess, command, args, env=env,
-            path=self.home, childFDs=childfds)
+            self._subprocess,
+            command,
+            args,
+            env=env,
+            path=self.home,
+            childFDs=childfds,
+        )
 
     def runTargetSubProcess(self, command, *args, **kwargs):
         """Run a subprocess that operates on the target environment."""
@@ -180,18 +184,22 @@ class BuildManager:
             "--backend=%s" % self.backend_name,
             "--series=%s" % self.series,
             "--arch=%s" % self.arch_tag,
-            ]
+        ]
         for constraint in self.constraints:
             base_args.append("--constraint=%s" % constraint)
         base_args.append(self._buildid)
         self.runSubProcess(
-            self._intargetpath, base_args + list(args), **kwargs)
+            self._intargetpath, base_args + list(args), **kwargs
+        )
 
     def doUnpack(self):
         """Unpack the build chroot."""
         self.runTargetSubProcess(
-            "unpack-chroot", "--image-type", self.image_type,
-            self._chroottarfile)
+            "unpack-chroot",
+            "--image-type",
+            self.image_type,
+            self._chroottarfile,
+        )
 
     def doReapProcesses(self, state, notify=True):
         """Reap any processes left lying around in the chroot."""
@@ -207,8 +215,10 @@ class BuildManager:
             if notify:
                 iterate = partial(self.iterateReap, state)
             else:
+
                 def iterate(success):
                     pass
+
             self.runTargetSubProcess("scan-for-processes", iterate=iterate)
 
     def doCleanup(self):
@@ -245,32 +255,37 @@ class BuildManager:
         value keyed under the 'archive_private' string. If that value
         evaluates to True the build at hand is for a private archive.
         """
-        if 'build_url' in extra_args:
-            self._builder.log("%s\n" % extra_args['build_url'])
+        if "build_url" in extra_args:
+            self._builder.log("%s\n" % extra_args["build_url"])
 
         os.mkdir(get_build_path(self.home, self._buildid))
         for f in files:
-            os.symlink(self._builder.cachePath(files[f]),
-                       get_build_path(self.home, self._buildid, f))
+            os.symlink(
+                self._builder.cachePath(files[f]),
+                get_build_path(self.home, self._buildid, f),
+            )
         self._chroottarfile = self._builder.cachePath(chroot)
 
-        self.image_type = extra_args.get('image_type', 'chroot')
-        self.series = extra_args['series']
-        self.arch_tag = extra_args.get('arch_tag', self._builder.getArch())
-        self.fast_cleanup = extra_args.get('fast_cleanup', False)
-        self.constraints = extra_args.get('builder_constraints') or []
+        self.image_type = extra_args.get("image_type", "chroot")
+        self.series = extra_args["series"]
+        self.arch_tag = extra_args.get("arch_tag", self._builder.getArch())
+        self.fast_cleanup = extra_args.get("fast_cleanup", False)
+        self.constraints = extra_args.get("builder_constraints") or []
 
         # Check whether this is a build in a private archive and
         # whether the URLs in the buildlog file should be sanitized
         # so that they do not contain any embedded authentication
         # credentials.
-        if extra_args.get('archive_private'):
+        if extra_args.get("archive_private"):
             self.is_archive_private = True
 
         self.backend = make_backend(
-            self.backend_name, self._buildid,
-            series=self.series, arch=self.arch_tag,
-            constraints=self.constraints)
+            self.backend_name,
+            self._buildid,
+            series=self.series,
+            arch=self.arch_tag,
+            constraints=self.constraints,
+        )
 
         self.runSubProcess(self._preppath, ["builder-prep"])
 
@@ -290,7 +305,8 @@ class BuildManager:
         except Exception as e:
             print(
                 "Error deserialising extra status file: %s" % e,
-                file=sys.stderr)
+                file=sys.stderr,
+            )
         return {}
 
     def iterate(self, success, quiet=False):
@@ -301,8 +317,9 @@ class BuildManager:
         object created by runSubProcess to gather the results of the
         sub process.
         """
-        raise NotImplementedError("BuildManager should be subclassed to be "
-                                  "used")
+        raise NotImplementedError(
+            "BuildManager should be subclassed to be " "used"
+        )
 
     def iterateReap(self, state, success):
         """Perform an iteration of the builder following subprocess reaping.
@@ -312,8 +329,9 @@ class BuildManager:
         track of the state being reaped so that we can select the
         appropriate next state.
         """
-        raise NotImplementedError("BuildManager should be subclassed to be "
-                                  "used")
+        raise NotImplementedError(
+            "BuildManager should be subclassed to be " "used"
+        )
 
     def abortReap(self):
         """Abort by killing all processes in the chroot, as hard as we can.
@@ -339,8 +357,11 @@ class BuildManager:
         # forkbombing test suite, etc.).  In this case, fail the builder and
         # let an admin sort it out.
         self._subprocess.builderFailCall = self._reactor.callLater(
-            self.abort_timeout, self.builderFail,
-            "Failed to kill all processes.", primary_subprocess)
+            self.abort_timeout,
+            self.builderFail,
+            "Failed to kill all processes.",
+            primary_subprocess,
+        )
 
     def builderFail(self, reason, primary_subprocess):
         """Mark the builder as failed."""
@@ -353,7 +374,7 @@ class BuildManager:
         # doReapProcesses was called) may not have exited.  Kill it so that
         # we can proceed.
         try:
-            primary_subprocess.transport.signalProcess('KILL')
+            primary_subprocess.transport.signalProcess("KILL")
         except process.ProcessExitedAlready:
             self._builder.log("ABORTING: Process Exited Already\n")
         primary_subprocess.transport.loseConnection()
@@ -449,15 +470,16 @@ class Builder:
         the builder will fetch the file if it doesn't have it.
         Return a tuple containing: (<present>, <info>)
         """
-        extra_info = 'No URL'
+        extra_info = "No URL"
         cachefile = self.cachePath(sha1sum)
         if url is not None:
-            extra_info = 'Cache'
+            extra_info = "Cache"
             if not os.path.exists(cachefile):
-                self.log(f'Fetching {sha1sum} by url {url}')
+                self.log(f"Fetching {sha1sum} by url {url}")
                 if username or password:
                     opener = self.setupAuthHandler(
-                        url, username, password).open
+                        url, username, password
+                    ).open
                 else:
                     opener = urlopen
                 try:
@@ -467,23 +489,23 @@ class Builder:
                 # the PyLint warnings.
                 # pylint: disable-msg=W0703
                 except Exception as info:
-                    extra_info = 'Error accessing Librarian: %s' % info
+                    extra_info = "Error accessing Librarian: %s" % info
                     self.log(extra_info)
                 else:
-                    of = open(cachefile + '.tmp', "wb")
+                    of = open(cachefile + ".tmp", "wb")
                     # Upped for great justice to 256k
                     check_sum = hashlib.sha1()
-                    for chunk in iter(lambda: f.read(256*1024), b''):
+                    for chunk in iter(lambda: f.read(256 * 1024), b""):
                         of.write(chunk)
                         check_sum.update(chunk)
                     of.close()
                     f.close()
-                    extra_info = 'Download'
+                    extra_info = "Download"
                     if check_sum.hexdigest() != sha1sum:
-                        os.remove(cachefile + '.tmp')
+                        os.remove(cachefile + ".tmp")
                         extra_info = "Digests did not match, removing again!"
                     else:
-                        os.rename(cachefile + '.tmp', cachefile)
+                        os.rename(cachefile + ".tmp", cachefile)
                     self.log(extra_info)
         return (os.path.exists(cachefile), extra_info)
 
@@ -494,7 +516,7 @@ class Builder:
         of = open(tmppath, "wb")
         try:
             sha1 = hashlib.sha1()
-            for chunk in iter(lambda: f.read(256*1024), b''):
+            for chunk in iter(lambda: f.read(256 * 1024), b""):
                 sha1.update(chunk)
                 of.write(chunk)
             sha1sum = sha1.hexdigest()
@@ -534,7 +556,7 @@ class Builder:
     def clean(self):
         """Clean up pending files and reset the internal build state."""
         if self.builderstatus != BuilderStatus.WAITING:
-            raise ValueError('Builder is not WAITING when asked to clean')
+            raise ValueError("Builder is not WAITING when asked to clean")
         for f in set(self.waitingfiles.values()):
             os.remove(self.cachePath(f))
         self.builderstatus = BuilderStatus.IDLE
@@ -551,12 +573,13 @@ class Builder:
         """Write the provided data to the log."""
         if self._log is not None:
             data_bytes = (
-                data if isinstance(data, bytes) else data.encode("UTF-8"))
+                data if isinstance(data, bytes) else data.encode("UTF-8")
+            )
             self._log.write(data_bytes)
             self._log.flush()
         data_text = (
-            data if isinstance(data, str)
-            else data.decode("UTF-8", "replace"))
+            data if isinstance(data, str) else data.decode("UTF-8", "replace")
+        )
         if data_text.endswith("\n"):
             data_text = data_text[:-1]
         log.msg("Build log: " + data_text)
@@ -610,7 +633,7 @@ class Builder:
             # excerpt to be scrubbed) because it may be cut off thus
             # thwarting the detection of embedded passwords.
             clean_content_iter = _sanitizeURLs(log_lines[1:])
-            ret = b'\n'.join(clean_content_iter)
+            ret = b"\n".join(clean_content_iter)
 
         return ret
 
@@ -618,7 +641,8 @@ class Builder:
         """Start a build with the provided BuildManager instance."""
         if self.builderstatus != BuilderStatus.IDLE:
             raise ValueError(
-                "Builder is not IDLE when asked to start building")
+                "Builder is not IDLE when asked to start building"
+            )
         self.manager = manager
         self.builderstatus = BuilderStatus.BUILDING
         self.emptyLog()
@@ -631,10 +655,13 @@ class Builder:
 
     def builderFail(self):
         """Cease building because the builder has a problem."""
-        if self.builderstatus not in (BuilderStatus.BUILDING,
-                                      BuilderStatus.ABORTING):
+        if self.builderstatus not in (
+            BuilderStatus.BUILDING,
+            BuilderStatus.ABORTING,
+        ):
             raise ValueError(
-                "Builder is not BUILDING|ABORTING when set to BUILDERFAIL")
+                "Builder is not BUILDING|ABORTING when set to BUILDERFAIL"
+            )
         self.buildstatus = BuildStatus.BUILDERFAIL
 
     def chrootFail(self):
@@ -690,7 +717,8 @@ class Builder:
             self.builderstatus = BuilderStatus.WAITING
         else:
             raise ValueError(
-                "Builder is not BUILDING|ABORTING when told build is complete")
+                "Builder is not BUILDING|ABORTING when told build is complete"
+            )
 
     def sanitizeBuildlog(self, log_path):
         """Removes passwords from buildlog URLs.
@@ -706,18 +734,19 @@ class Builder:
         # First move the buildlog file that is to be sanitized out of
         # the way.
         unsanitized_path = self.cachePath(
-            os.path.basename(log_path) + '.unsanitized')
+            os.path.basename(log_path) + ".unsanitized"
+        )
         os.rename(log_path, unsanitized_path)
 
         # Open the unsanitized buildlog file for reading.
-        unsanitized_file = open(unsanitized_path, 'rb')
+        unsanitized_file = open(unsanitized_path, "rb")
 
         # Open the file that will hold the resulting, sanitized buildlog
         # content for writing.
         sanitized_file = None
 
         try:
-            sanitized_file = open(log_path, 'wb')
+            sanitized_file = open(log_path, "wb")
 
             # Scrub the buildlog file line by line
             clean_content_iter = _sanitizeURLs(unsanitized_file)
@@ -739,7 +768,7 @@ class XMLRPCBuilder(xmlrpc.XMLRPC):
         # versions of the form 'MAJOR.MINOR', the protocol is '1.0' for now
         # implying the presence of /filecache/ /filecache/buildlog and
         # the reduced and optimised XMLRPC interface.
-        self.protocolversion = '1.0'
+        self.protocolversion = "1.0"
         self.builder = Builder(config)
         self._managers = {}
         cache = apt.Cache()
@@ -759,8 +788,11 @@ class XMLRPCBuilder(xmlrpc.XMLRPC):
 
     def xmlrpc_info(self):
         """Return the protocol version and the manager methods supported."""
-        return (self.protocolversion, self.builder.getArch(),
-                list(self._managers))
+        return (
+            self.protocolversion,
+            self.builder.getArch(),
+            list(self._managers),
+        )
 
     def xmlrpc_status(self):
         """Return the status of the build daemon, as a dictionary.
@@ -769,7 +801,7 @@ class XMLRPCBuilder(xmlrpc.XMLRPC):
         but this always includes the builder status itself.
         """
         status = self.builder.builderstatus
-        statusname = status.split('.')[-1]
+        statusname = status.split(".")[-1]
         func = getattr(self, "status_" + statusname, None)
         if func is None:
             raise ValueError("Unknown status '%s'" % status)
@@ -803,9 +835,12 @@ class XMLRPCBuilder(xmlrpc.XMLRPC):
         ret = {
             "build_status": self.builder.buildstatus,
             "build_id": self.buildid,
-            }
+        }
         if self.builder.buildstatus in (
-                BuildStatus.OK, BuildStatus.PACKAGEFAIL, BuildStatus.DEPFAIL):
+            BuildStatus.OK,
+            BuildStatus.PACKAGEFAIL,
+            BuildStatus.DEPFAIL,
+        ):
             ret["filemap"] = self.builder.waitingfiles
             ret["dependencies"] = self.builder.builddependencies
         return ret
@@ -870,6 +905,7 @@ class XMLRPCBuilder(xmlrpc.XMLRPC):
         # filelist is consistent, chrootsum is available, let's initiate...
         self.buildid = buildid
         self.builder.startBuild(
-            self._managers[managertag](self.builder, buildid))
+            self._managers[managertag](self.builder, buildid)
+        )
         self.builder.manager.initiate(filemap, chrootsum, args)
         return (BuilderStatus.BUILDING, buildid)
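
As a quick illustration of the _sanitizeURLs helper above: its password regex strips embedded credentials from URLs in log lines. (The log line here is invented.)

    import re

    password_re = re.compile(rb"://([^:@/]*:[^:@/]+@)(\S+)")
    line = b"Get:1 https://user:secret@private-ppa.launchpad.net/... focal InRelease"
    print(password_re.sub(rb"://\2", line))
    # b'Get:1 https://private-ppa.launchpad.net/... focal InRelease'
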
diff --git a/lpbuildd/charm.py b/lpbuildd/charm.py
index c7eaae9..c3243bf 100644
--- a/lpbuildd/charm.py
+++ b/lpbuildd/charm.py
@@ -3,13 +3,9 @@
 
 import os
 
-from lpbuildd.debian import (
-    DebianBuildState,
-    DebianBuildManager,
-    )
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 from lpbuildd.proxy import BuildManagerProxyMixin
 
-
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
@@ -69,8 +65,10 @@ class CharmBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         if retcode == RETCODE_SUCCESS:
             print("Returning build status: OK")
             return self.deferGatherResults()
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD
+        ):
             if not self.alreadyfailed:
                 self._builder.buildFail()
                 print("Returning build status: Build failed.")
diff --git a/lpbuildd/check_implicit_pointer_functions.py b/lpbuildd/check_implicit_pointer_functions.py
index 67e3b45..e25b0ac 100755
--- a/lpbuildd/check_implicit_pointer_functions.py
+++ b/lpbuildd/check_implicit_pointer_functions.py
@@ -35,19 +35,21 @@
 import re
 
 implicit_pattern = re.compile(
-    br"([^:]*):(\d+):(\d+:)? warning: implicit declaration "
-    br"of function [`']([^']*)'")
+    rb"([^:]*):(\d+):(\d+:)? warning: implicit declaration "
+    rb"of function [`']([^']*)'"
+)
 pointer_pattern = re.compile(
-    br"([^:]*):(\d+):(\d+:)? warning: "
-    br"("
-    br"(assignment"
-    br"|initialization"
-    br"|return"
-    br"|passing arg \d+ of `[^']*'"
-    br"|passing arg \d+ of pointer to function"
-    br") makes pointer from integer without a cast"
-    br"|"
-    br"cast to pointer from integer of different size)")
+    rb"([^:]*):(\d+):(\d+:)? warning: "
+    rb"("
+    rb"(assignment"
+    rb"|initialization"
+    rb"|return"
+    rb"|passing arg \d+ of `[^']*'"
+    rb"|passing arg \d+ of pointer to function"
+    rb") makes pointer from integer without a cast"
+    rb"|"
+    rb"cast to pointer from integer of different size)"
+)
 
 
 def filter_log(in_file, out_file, in_line=False):
@@ -62,7 +64,7 @@ def filter_log(in_file, out_file, in_line=False):
         if in_line:
             out_file.write(line)
             out_file.flush()
-        if line == b'':
+        if line == b"":
             break
         m = implicit_pattern.match(line)
         if m:
@@ -74,20 +76,27 @@ def filter_log(in_file, out_file, in_line=False):
             if m:
                 pointer_filename = m.group(1)
                 pointer_linenum = int(m.group(2))
-                if (last_implicit_filename == pointer_filename
-                        and last_implicit_linenum == pointer_linenum):
+                if (
+                    last_implicit_filename == pointer_filename
+                    and last_implicit_linenum == pointer_linenum
+                ):
                     err = (
                         b"Function `%s' implicitly converted to pointer at "
-                        b"%s:%d" % (
-                            last_implicit_func, last_implicit_filename,
-                            last_implicit_linenum))
+                        b"%s:%d"
+                        % (
+                            last_implicit_func,
+                            last_implicit_filename,
+                            last_implicit_linenum,
+                        )
+                    )
                     errlist.append(err)
                     out_file.write(err + b"\n")
 
     if errlist:
         if in_line:
             out_file.write(b"\n".join(errlist) + b"\n\n")
-            out_file.write(b"""
+            out_file.write(
+                b"""
 
 Our automated build log filter detected the problem(s) above that will
 likely cause your package to segfault on architectures where the size of
@@ -101,5 +110,6 @@ on ia64, they are errors.  Please correct them for your next upload.
 More information can be found at:
 http://wiki.debian.org/ImplicitPointerConversions
 
-    """)
+    """
+            )
     return len(errlist)
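
As a rough sketch of what the filter's first regex above matches (the compiler warning below is invented):

    import re

    implicit_pattern = re.compile(
        rb"([^:]*):(\d+):(\d+:)? warning: implicit declaration "
        rb"of function [`']([^']*)'"
    )
    line = b"foo.c:10:5: warning: implicit declaration of function 'bar'"
    m = implicit_pattern.match(line)
    print(m.group(1), m.group(2), m.group(4))
    # b'foo.c' b'10' b'bar'
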
diff --git a/lpbuildd/ci.py b/lpbuildd/ci.py
index 601d8b4..7886f38 100644
--- a/lpbuildd/ci.py
+++ b/lpbuildd/ci.py
@@ -1,22 +1,15 @@
 # Copyright 2022 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from configparser import (
-    NoOptionError,
-    NoSectionError,
-    )
 import os
-import yaml
+from configparser import NoOptionError, NoSectionError
 
+import yaml
 from twisted.internet import defer
 
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-    )
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 from lpbuildd.proxy import BuildManagerProxyMixin
 
-
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
@@ -72,7 +65,8 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         self.proxy_args = self.startProxy()
         if self.revocation_endpoint:
             self.proxy_args.extend(
-                ["--revocation-endpoint", self.revocation_endpoint])
+                ["--revocation-endpoint", self.revocation_endpoint]
+            )
         args = list(self.proxy_args)
         for snap, channel in sorted(self.channels.items()):
             args.extend(["--channel", f"{snap}={channel}"])
@@ -91,13 +85,15 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             # in the default configuration is convenient for our production
             # deployments.
             clamav_database_url = self._builder._config.get(
-                "proxy", "clamavdatabase")
+                "proxy", "clamavdatabase"
+            )
             args.extend(["--clamav-database-url", clamav_database_url])
         except (NoSectionError, NoOptionError):
             pass
         try:
             snap_store_proxy_url = self._builder._config.get(
-                "proxy", "snapstore")
+                "proxy", "snapstore"
+            )
             args.extend(["--snap-store-proxy-url", snap_store_proxy_url])
         except (NoSectionError, NoOptionError):
             pass
@@ -121,8 +117,10 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         self.job_index = 0
         if retcode == RETCODE_SUCCESS:
             pass
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD
+        ):
             if not self.alreadyfailed:
                 self._builder.log("Preparation failed.")
                 self._builder.buildFail()
@@ -159,20 +157,17 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
                 args.extend(["--package-repository", repository])
         if self.environment_variables is not None:
             for key, value in self.environment_variables.items():
-                args.extend(
-                    ["--environment-variable", f"{key}={value}"])
+                args.extend(["--environment-variable", f"{key}={value}"])
         if self.plugin_settings is not None:
             for key, value in self.plugin_settings.items():
-                args.extend(
-                    ["--plugin-setting", f"{key}={value}"])
+                args.extend(["--plugin-setting", f"{key}={value}"])
         if self.secrets is not None:
             text = yaml.dump(self.secrets)
             with self.backend.open(
                 "/build/.launchpad-secrets.yaml", mode="w"
             ) as f:
                 f.write(text)
-            args.extend(
-                ["--secrets", "/build/.launchpad-secrets.yaml"])
+            args.extend(["--secrets", "/build/.launchpad-secrets.yaml"])
         if self.scan_malware:
             args.append("--scan-malware")
 
@@ -191,8 +186,10 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             result = RESULT_SUCCEEDED
         else:
             result = RESULT_FAILED
-            if (retcode >= RETCODE_FAILURE_INSTALL and
-                    retcode <= RETCODE_FAILURE_BUILD):
+            if (
+                retcode >= RETCODE_FAILURE_INSTALL
+                and retcode <= RETCODE_FAILURE_BUILD
+            ):
                 self._builder.log("Job %s failed." % self.current_job_id)
                 if len(self.jobs[self.stage_index]) == 1:
                     # Single-job stage, so fail straight away in order to
@@ -212,15 +209,17 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             # End of stage.  Fail if any job in this stage has failed.
             current_stage_job_ids = [
                 _make_job_id(job_name, job_index)
-                for job_name, job_index in self.jobs[self.stage_index]]
+                for job_name, job_index in self.jobs[self.stage_index]
+            ]
             if any(
                 self.job_status[job_id]["result"] != RESULT_SUCCEEDED
                 for job_id in current_stage_job_ids
             ):
                 if not self.alreadyfailed:
                     self._builder.log(
-                        "Some jobs in %s failed; stopping." %
-                        current_stage_job_ids)
+                        "Some jobs in %s failed; stopping."
+                        % current_stage_job_ids
+                    )
                     self._builder.buildFail()
                 self.alreadyfailed = True
             self.stage_index += 1
@@ -254,7 +253,8 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         job_status = {}
         job_name, job_index = self.current_job
         job_output_path = os.path.join(
-            "/build", "output", job_name, str(job_index))
+            "/build", "output", job_name, str(job_index)
+        )
         for item_name in ("log", "properties"):
             item_path = os.path.join(job_output_path, item_name)
             if self.backend.path_exists(item_path):
@@ -263,16 +263,18 @@ class CIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
                 job_status[item_name] = self._builder.waitingfiles[item_id]
         files_path = os.path.join(job_output_path, "files")
         if self.backend.path_exists(files_path):
-            for entry in sorted(self.backend.find(
-                    files_path, include_directories=False)):
+            for entry in sorted(
+                self.backend.find(files_path, include_directories=False)
+            ):
                 path = os.path.join(files_path, entry)
                 if self.backend.islink(path):
                     continue
                 entry_base = os.path.basename(entry)
                 name = os.path.join(self.current_job_id, entry_base)
                 self.addWaitingFileFromBackend(path, name=name)
-                job_status.setdefault("output", {})[entry_base] = (
-                    self._builder.waitingfiles[name])
+                job_status.setdefault("output", {})[
+                    entry_base
+                ] = self._builder.waitingfiles[name]
 
         # Save a file map for this job in the extra status file.  This
         # allows buildd-manager to fetch job logs/output incrementally
diff --git a/lpbuildd/debian.py b/lpbuildd/debian.py
index f66bf27..46563af 100644
--- a/lpbuildd/debian.py
+++ b/lpbuildd/debian.py
@@ -5,28 +5,20 @@
 #      and Adam Conrad <adam.conrad@xxxxxxxxxxxxx>
 
 import base64
-from configparser import (
-    NoOptionError,
-    NoSectionError,
-    )
 import os
 import re
 import signal
+from configparser import NoOptionError, NoSectionError
 
-from twisted.internet import (
-    defer,
-    threads,
-    )
+from twisted.internet import defer, threads
 from twisted.python import log
 
-from lpbuildd.builder import (
-    BuildManager,
-    get_build_path,
-    )
+from lpbuildd.builder import BuildManager, get_build_path
 
 
 class DebianBuildState:
     """States for the DebianBuildManager."""
+
     INIT = "INIT"
     UNPACK = "UNPACK"
     MOUNT = "MOUNT"
@@ -54,8 +46,8 @@ class DebianBuildManager(BuildManager):
 
     def initiate(self, files, chroot, extra_args):
         """Initiate a build with a given set of files and chroot."""
-        self.sources_list = extra_args.get('archives')
-        self.trusted_keys = extra_args.get('trusted_keys')
+        self.sources_list = extra_args.get("archives")
+        self.trusted_keys = extra_args.get("trusted_keys")
 
         BuildManager.initiate(self, files, chroot, extra_args)
 
@@ -76,7 +68,8 @@ class DebianBuildManager(BuildManager):
     def doTrustedKeys(self):
         """Add trusted keys."""
         trusted_keys = b"".join(
-            base64.b64decode(key) for key in self.trusted_keys)
+            base64.b64decode(key) for key in self.trusted_keys
+        )
         self.runTargetSubProcess("add-trusted-keys", stdin=trusted_keys)
 
     def doUpdateChroot(self):
@@ -103,9 +96,9 @@ class DebianBuildManager(BuildManager):
             if not seenfiles and line.startswith("Files:"):
                 seenfiles = True
             elif seenfiles:
-                if not line.startswith(' '):
+                if not line.startswith(" "):
                     break
-                filename = line.split(' ')[-1]
+                filename = line.split(" ")[-1]
                 yield filename
 
     def getChangesFilename(self):
@@ -123,7 +116,8 @@ class DebianBuildManager(BuildManager):
         with open(path, errors="replace") as chfile:
             for fn in self._parseChangesFile(chfile):
                 self._builder.addWaitingFile(
-                    get_build_path(self.home, self._buildid, fn))
+                    get_build_path(self.home, self._buildid, fn)
+                )
 
     def deferGatherResults(self, reap=True):
         """Gather the results of the build in a thread."""
@@ -136,7 +130,8 @@ class DebianBuildManager(BuildManager):
                     self._builder.buildFail()
             else:
                 self._builder.log(
-                    "Failed to gather results: %s\n" % failure.value)
+                    "Failed to gather results: %s\n" % failure.value
+                )
                 self._builder.buildFail()
             self.alreadyfailed = True
 
@@ -144,7 +139,8 @@ class DebianBuildManager(BuildManager):
             self.doReapProcesses(self._state)
 
         d = threads.deferToThread(self.gatherResults).addErrback(
-            failed_to_gather)
+            failed_to_gather
+        )
         if reap:
             d.addCallback(reap_processes)
         return d
@@ -159,8 +155,10 @@ class DebianBuildManager(BuildManager):
             # we were terminated by a signal, which is close enough.
             success = 128 + signal.SIGKILL
         if not quiet:
-            log.msg("Iterating with success flag %s against stage %s"
-                    % (success, self._state))
+            log.msg(
+                "Iterating with success flag %s against stage %s"
+                % (success, self._state)
+            )
         func = getattr(self, "iterate_" + self._state, None)
         if func is None:
             raise ValueError("Unknown internal state " + self._state)
@@ -169,9 +167,10 @@ class DebianBuildManager(BuildManager):
         self._iterator = None
 
     def iterateReap(self, state, success):
-        log.msg("Iterating with success flag %s against stage %s after "
-                "reaping processes"
-                % (success, state))
+        log.msg(
+            "Iterating with success flag %s against stage %s after "
+            "reaping processes" % (success, state)
+        )
         func = getattr(self, "iterateReap_" + state, None)
         if func is None:
             raise ValueError("Unknown internal post-reap state " + state)
@@ -222,8 +221,9 @@ class DebianBuildManager(BuildManager):
                 self._state = DebianBuildState.UPDATE
                 self.doUpdateChroot()
 
-    def searchLogContents(self, patterns_and_flags,
-                          stop_patterns_and_flags=[]):
+    def searchLogContents(
+        self, patterns_and_flags, stop_patterns_and_flags=[]
+    ):
         """Search for any of a list of regex patterns in the build log.
 
         The build log is matched using a sliding window, which avoids having
@@ -239,10 +239,12 @@ class DebianBuildManager(BuildManager):
         chunk_size = 256 * 1024
         regexes = [
             re.compile(pattern.encode("UTF-8"), flags)
-            for pattern, flags in patterns_and_flags]
+            for pattern, flags in patterns_and_flags
+        ]
         stop_regexes = [
             re.compile(pattern.encode("UTF-8"), flags)
-            for pattern, flags in stop_patterns_and_flags]
+            for pattern, flags in stop_patterns_and_flags
+        ]
         buildlog_path = os.path.join(self._cachepath, "buildlog")
         with open(buildlog_path, "rb") as buildlog:
             window = b""
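
The sliding-window search described in the searchLogContents docstring above amounts to something like this simplified sketch (single pattern, no stop patterns; the helper name is invented):

    import re

    def search_log(path, pattern, chunk_size=256 * 1024):
        regex = re.compile(pattern)
        with open(path, "rb") as buildlog:
            window = b""
            chunk = buildlog.read(chunk_size)
            while chunk:
                window += chunk
                match = regex.search(window)
                if match is not None:
                    return match
                # Keep only the tail of the window so a match straddling two
                # chunks can still be found without holding the whole log in
                # memory.
                window = window[-chunk_size:]
                chunk = buildlog.read(chunk_size)
        return None
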
diff --git a/lpbuildd/livefs.py b/lpbuildd/livefs.py
index 3cc2fb3..6e8da92 100644
--- a/lpbuildd/livefs.py
+++ b/lpbuildd/livefs.py
@@ -1,17 +1,10 @@
 # Copyright 2013-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from configparser import (
-    NoOptionError,
-    NoSectionError,
-    )
 import os
+from configparser import NoOptionError, NoSectionError
 
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-    )
-
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
@@ -77,7 +70,8 @@ class LiveFilesystemBuildManager(DebianBuildManager):
             args.extend(["--cohort-key", self.cohort_key])
         try:
             snap_store_proxy_url = self._builder._config.get(
-                "proxy", "snapstore")
+                "proxy", "snapstore"
+            )
             args.extend(["--snap-store-proxy-url", snap_store_proxy_url])
         except (NoSectionError, NoOptionError):
             pass
@@ -90,8 +84,10 @@ class LiveFilesystemBuildManager(DebianBuildManager):
         if retcode == RETCODE_SUCCESS:
             print("Returning build status: OK")
             return self.deferGatherResults()
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD
+        ):
             if not self.alreadyfailed:
                 self._builder.buildFail()
                 print("Returning build status: Build failed.")
@@ -112,6 +108,5 @@ class LiveFilesystemBuildManager(DebianBuildManager):
         """Gather the results of the build and add them to the file cache."""
         for entry in sorted(self.backend.listdir("/build")):
             path = os.path.join("/build", entry)
-            if (entry.startswith("livecd.") and
-                    not self.backend.islink(path)):
+            if entry.startswith("livecd.") and not self.backend.islink(path):
                 self.addWaitingFileFromBackend(path)
diff --git a/lpbuildd/log.py b/lpbuildd/log.py
index cba0721..81337f9 100644
--- a/lpbuildd/log.py
+++ b/lpbuildd/log.py
@@ -5,10 +5,7 @@ import signal
 import sys
 
 from twisted.internet import reactor
-from twisted.python import (
-    log,
-    logfile,
-    )
+from twisted.python import log, logfile
 from zope.interface import implementer
 
 
@@ -29,11 +26,14 @@ class RotatableFileLogObserver:
             logFile = sys.stdout
         else:
             logFile = logfile.LogFile.fromFullPath(
-                logfilepath, rotateLength=None)
+                logfilepath, rotateLength=None
+            )
             # Override if signal is set to None or SIG_DFL (0)
             if not signal.getsignal(signal.SIGHUP):
+
                 def signalHandler(signal, frame):
                     reactor.callFromThread(logFile.reopen)
+
                 signal.signal(signal.SIGHUP, signalHandler)
         self.observer = log.FileLogObserver(logFile)
 
diff --git a/lpbuildd/oci.py b/lpbuildd/oci.py
index e112caf..5ce2cd8 100644
--- a/lpbuildd/oci.py
+++ b/lpbuildd/oci.py
@@ -1,10 +1,6 @@
 # Copyright 2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from configparser import (
-    NoOptionError,
-    NoSectionError,
-    )
 import gzip
 import hashlib
 import json
@@ -12,14 +8,11 @@ import os
 import shutil
 import tarfile
 import tempfile
+from configparser import NoOptionError, NoSectionError
 
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-    )
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 from lpbuildd.proxy import BuildManagerProxyMixin
 
-
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
@@ -75,7 +68,8 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             args.extend(["--build-path", self.build_path])
         try:
             snap_store_proxy_url = self._builder._config.get(
-                "proxy", "snapstore")
+                "proxy", "snapstore"
+            )
             args.extend(["--snap-store-proxy-url", snap_store_proxy_url])
         except (NoSectionError, NoOptionError):
             pass
@@ -89,8 +83,10 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         if retcode == RETCODE_SUCCESS:
             print("Returning build status: OK")
             return self.deferGatherResults()
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD
+        ):
             if not self.alreadyfailed:
                 self._builder.buildFail()
                 print("Returning build status: Build failed.")
@@ -108,7 +104,7 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         self.doUnmounting()
 
     def _calculateLayerSha(self, layer_path):
-        with open(layer_path, 'rb') as layer_tar:
+        with open(layer_path, "rb") as layer_tar:
             sha256_hash = hashlib.sha256()
             for byte_block in iter(lambda: layer_tar.read(4096), b""):
                 sha256_hash.update(byte_block)
@@ -122,9 +118,9 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             config = json.load(config_fp)
         diff_ids = config["rootfs"]["diff_ids"]
         digest_diff_map = {}
-        for diff_id, layer_id in zip(diff_ids, section['Layers']):
-            layer_id = layer_id.split('/')[0]
-            diff_file = os.path.join(sha_directory, diff_id.split(':')[1])
+        for diff_id, layer_id in zip(diff_ids, section["Layers"]):
+            layer_id = layer_id.split("/")[0]
+            diff_file = os.path.join(sha_directory, diff_id.split(":")[1])
             layer_path = os.path.join(extract_path, f"{layer_id}.tar.gz")
             self._builder.addWaitingFile(layer_path)
             # If we have a mapping between diff and existing digest,
@@ -145,7 +141,7 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             digest_diff_map[diff_id] = {
                 "digest": digest,
                 "source": source,
-                "layer_id": layer_id
+                "layer_id": layer_id,
             }
 
         return digest_diff_map
@@ -155,14 +151,16 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         extract_path = tempfile.mkdtemp(prefix=self.name)
         try:
             proc = self.backend.run(
-                ['docker', 'save', self.name],
-                get_output=True, return_process=True)
+                ["docker", "save", self.name],
+                get_output=True,
+                return_process=True,
+            )
             tar = tarfile.open(fileobj=proc.stdout, mode="r|")
         except Exception as e:
             self._builder.log(f"Unable to save image: {e}")
             raise
 
-        current_dir = ''
+        current_dir = ""
         gzip_layer = None
         symlinks = []
         try:
@@ -183,10 +181,11 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
                     # we can deal with it later
                     self._builder.log(
                         f"Found symlink at {file.name} referencing "
-                        f"{file.linkpath}")
+                        f"{file.linkpath}"
+                    )
                     symlinks.append(file)
                     continue
-                if current_dir and file.name.endswith('layer.tar'):
+                if current_dir and file.name.endswith("layer.tar"):
                     # This is the actual layer data.
                     # Instead of adding the layer.tar to a gzip directory
                     # we add the contents of untarred layer.tar to a gzip.
@@ -197,8 +196,8 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
                     # (directory_name.tar.gz/contents) otherwise we will end up
                     # with multiple gzips with the same name "layer.tar.gz".
                     fileobj = tar.extractfile(file)
-                    name = os.path.join(extract_path, f'{current_dir}.tar.gz')
-                    with gzip.GzipFile(name, 'wb') as gzip_layer:
+                    name = os.path.join(extract_path, f"{current_dir}.tar.gz")
+                    with gzip.GzipFile(name, "wb") as gzip_layer:
                         byte = fileobj.read(1)
                         while len(byte) > 0:
                             gzip_layer.write(byte)
@@ -224,14 +223,15 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             # we want the directory name, which should always be
             # the second component
             source_name = os.path.join(
-                extract_path,
-                f"{symlink.linkpath.split('/')[-2]}.tar.gz")
+                extract_path, f"{symlink.linkpath.split('/')[-2]}.tar.gz"
+            )
             target_name = os.path.join(
-                extract_path,
-                f"{symlink.name.split('/')[-2]}.tar.gz")
+                extract_path, f"{symlink.name.split('/')[-2]}.tar.gz"
+            )
             # Do a copy to dereference the symlink
             self._builder.log(
-                f"Dereferencing symlink from {source_name} to {target_name}")
+                f"Dereferencing symlink from {source_name} to {target_name}"
+            )
             shutil.copy(source_name, target_name)
 
         # We need these mapping files
@@ -239,24 +239,29 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         # This can change depending on the kernel options / docker package
         # used. This is correct for bionic buildd image
         # with apt installed docker.
-        sha_path = ('/var/lib/docker/image/'
-                    'vfs/distribution/v2metadata-by-diffid/sha256')
+        sha_path = (
+            "/var/lib/docker/image/"
+            "vfs/distribution/v2metadata-by-diffid/sha256"
+        )
         # If there have been no images pulled in the build process
         # (FROM scratch), then this directory will not exist and
         # we will have no contents from it.
         if self.backend.path_exists(sha_path):
-            sha_files = [x for x in self.backend.listdir(sha_path)
-                         if not x.startswith('.')]
+            sha_files = [
+                x
+                for x in self.backend.listdir(sha_path)
+                if not x.startswith(".")
+            ]
             for file in sha_files:
                 self.backend.copy_out(
                     os.path.join(sha_path, file),
-                    os.path.join(sha_directory, file)
+                    os.path.join(sha_directory, file),
                 )
         else:
             self._builder.log(f"No metadata directory at {sha_path}")
 
         # Parse the manifest for the other files we need
-        manifest_path = os.path.join(extract_path, 'manifest.json')
+        manifest_path = os.path.join(extract_path, "manifest.json")
         self._builder.addWaitingFile(manifest_path)
         with open(manifest_path) as manifest_fp:
             manifest = json.load(manifest_fp)
@@ -265,10 +270,12 @@ class OCIBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         try:
             for section in manifest:
                 digest_maps.append(
-                    self._gatherManifestSection(section, extract_path,
-                                                sha_directory))
-            digest_map_file = os.path.join(extract_path, 'digests.json')
-            with open(digest_map_file, 'w') as digest_map_fp:
+                    self._gatherManifestSection(
+                        section, extract_path, sha_directory
+                    )
+                )
+            digest_map_file = os.path.join(extract_path, "digests.json")
+            with open(digest_map_file, "w") as digest_map_fp:
                 json.dump(digest_maps, digest_map_fp)
             self._builder.addWaitingFile(digest_map_file)
         except Exception as e:
diff --git a/lpbuildd/pottery/intltool.py b/lpbuildd/pottery/intltool.py
index 975afee..a52fd5f 100644
--- a/lpbuildd/pottery/intltool.py
+++ b/lpbuildd/pottery/intltool.py
@@ -4,13 +4,13 @@
 """Functions to build PO templates on the builder."""
 
 __all__ = [
-    'check_potfiles_in',
-    'generate_pot',
-    'generate_pots',
-    'get_translation_domain',
-    'find_intltool_dirs',
-    'find_potfiles_in',
-    ]
+    "check_potfiles_in",
+    "generate_pot",
+    "generate_pots",
+    "get_translation_domain",
+    "find_intltool_dirs",
+    "find_potfiles_in",
+]
 
 import os.path
 import re
@@ -27,7 +27,8 @@ def find_potfiles_in(backend, package_dir):
         POTFILES.in, relative to `package_dir`.
     """
     paths = backend.find(
-        package_dir, include_directories=False, name="POTFILES.in")
+        package_dir, include_directories=False, name="POTFILES.in"
+    )
     return [os.path.dirname(path) for path in paths]
 
 
@@ -56,13 +57,17 @@ def check_potfiles_in(backend, path):
         return False
     # Remove stale files from a previous run of intltool-update -m.
     backend.run(
-        ["rm", "-f"] +
-        [os.path.join(path, name) for name in ("missing", "notexist")])
+        ["rm", "-f"]
+        + [os.path.join(path, name) for name in ("missing", "notexist")]
+    )
     with open("/dev/null", "w") as devnull:
         try:
             backend.run(
                 ["/usr/bin/intltool-update", "-m"],
-                stdout=devnull, stderr=devnull, cwd=path)
+                stdout=devnull,
+                stderr=devnull,
+                cwd=path,
+            )
         except subprocess.CalledProcessError:
             return False
 
@@ -82,8 +87,10 @@ def find_intltool_dirs(backend, package_dir):
     :returns: A list of directory names, relative to `package_dir`.
     """
     return sorted(
-        podir for podir in find_potfiles_in(backend, package_dir)
-        if check_potfiles_in(backend, os.path.join(package_dir, podir)))
+        podir
+        for podir in find_potfiles_in(backend, package_dir)
+        if check_potfiles_in(backend, os.path.join(package_dir, podir))
+    )
 
 
 def _get_AC_PACKAGE_NAME(config_file):
@@ -143,10 +150,10 @@ def get_translation_domain(backend, dirname):
     substitutions or multi-level substitutions are not supported.
     """
     locations = [
-        ('../configure.ac', 'GETTEXT_PACKAGE', True),
-        ('../configure.in', 'GETTEXT_PACKAGE', True),
-        ('Makefile.in.in', 'GETTEXT_PACKAGE', False),
-        ('Makevars', 'DOMAIN', False),
+        ("../configure.ac", "GETTEXT_PACKAGE", True),
+        ("../configure.in", "GETTEXT_PACKAGE", True),
+        ("Makefile.in.in", "GETTEXT_PACKAGE", False),
+        ("Makevars", "DOMAIN", False),
     ]
     value = None
     substitution = None
@@ -170,7 +177,8 @@ def get_translation_domain(backend, dirname):
                 if substitution is not None:
                     # Try to substitute with value.
                     value = _try_substitution(
-                        config_files, varname, substitution)
+                        config_files, varname, substitution
+                    )
                     if value is None:
                         # No substitution found; the setup is broken.
                         break
@@ -201,7 +209,10 @@ def generate_pot(backend, podir, domain):
         try:
             backend.run(
                 ["/usr/bin/intltool-update", "-p", "-g", domain],
-                stdout=devnull, stderr=devnull, cwd=podir)
+                stdout=devnull,
+                stderr=devnull,
+                cwd=podir,
+            )
             return domain
         except subprocess.CalledProcessError:
             return None
@@ -247,7 +258,7 @@ class ConfigFile:
             ('"', '"'),
             ("'", "'"),
             ("[", "]"),
-            ]
+        ]
         for (left, right) in quote_pairs:
             if identifier.startswith(left) and identifier.endswith(right):
                 return identifier[1:-1]
@@ -257,14 +268,17 @@ class ConfigFile:
     def getVariable(self, name):
         """Search the file for a variable definition with this name."""
         pattern = re.compile(
-            r"^%s[ \t]*=[ \t]*([^\s]*)" % re.escape(name), re.M)
+            r"^%s[ \t]*=[ \t]*([^\s]*)" % re.escape(name), re.M
+        )
         result = pattern.search(self.content)
         if result is None:
             return None
         return self._stripQuotes(result.group(1))
 
     def getFunctionParams(self, name):
-        """Search file for a function call with this name, return parameters.
+        """Search file for a function call with this name.
+
+        Return its parameters.
         """
         pattern = re.compile(r"^%s\(([^)]*)\)" % re.escape(name), re.M)
         result = pattern.search(self.content)
@@ -273,8 +287,8 @@ class ConfigFile:
         else:
             return [
                 self._stripQuotes(param.strip())
-                for param in result.group(1).split(',')
-                ]
+                for param in result.group(1).split(",")
+            ]
 
 
 class Substitution:
@@ -327,7 +341,6 @@ class Substitution:
             self.name = result.group(1)
 
     def replace(self, value):
-        """Return a copy of the variable text with the substitution resolved.
-        """
+        """Return copy of the variable text with the substitution resolved."""
         self.replaced = True
         return self.text.replace(self._replacement, value)
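
The variable lookup in ConfigFile.getVariable above behaves roughly as follows (the Makevars snippet is invented; the real method additionally strips surrounding quotes via _stripQuotes):

    import re

    name = "DOMAIN"
    pattern = re.compile(r"^%s[ \t]*=[ \t]*([^\s]*)" % re.escape(name), re.M)
    content = "# Makevars for the po directory\nDOMAIN = translationdomain\n"
    print(pattern.search(content).group(1))
    # 'translationdomain'
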
diff --git a/lpbuildd/pottery/tests/test_intltool.py b/lpbuildd/pottery/tests/test_intltool.py
index 28ff021..0a01b51 100644
--- a/lpbuildd/pottery/tests/test_intltool.py
+++ b/lpbuildd/pottery/tests/test_intltool.py
@@ -2,31 +2,25 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 import errno
-from io import StringIO
 import os
 import tarfile
+from io import StringIO
 from textwrap import dedent
 
 from fixtures import TempDir
 from testtools import TestCase
-from testtools.matchers import (
-    Equals,
-    MatchesSetwise,
-    )
+from testtools.matchers import Equals, MatchesSetwise
 
 from lpbuildd.pottery.intltool import (
-    check_potfiles_in,
     ConfigFile,
+    check_potfiles_in,
     find_intltool_dirs,
     find_potfiles_in,
     generate_pot,
     generate_pots,
     get_translation_domain,
-    )
-from lpbuildd.tests.fakebuilder import (
-    FakeMethod,
-    UncontainedBackend,
-    )
+)
+from lpbuildd.tests.fakebuilder import FakeMethod, UncontainedBackend
 
 
 class SetupTestPackageMixin:
@@ -44,8 +38,10 @@ class SetupTestPackageMixin:
         """
         # First build the path for the package.
         tarpath = os.path.join(
-            os.path.dirname(__file__), self.test_data_dir,
-            packagename + ".tar.bz2")
+            os.path.dirname(__file__),
+            self.test_data_dir,
+            packagename + ".tar.bz2",
+        )
         # Then change into the temporary directory and unpack it.
         parent = self.useFixture(TempDir()).path
         with tarfile.open(tarpath, "r|bz2") as tar:
@@ -58,21 +54,20 @@ class SetupTestPackageMixin:
         # Add files as requested.
         for path, content in buildfiles.items():
             directory = os.path.dirname(path)
-            if directory != '':
+            if directory != "":
                 try:
                     os.makedirs(os.path.join(package_dir, directory))
                 except OSError as e:
                     # Doesn't matter if it already exists.
                     if e.errno != errno.EEXIST:
                         raise
-            with open(os.path.join(package_dir, path), 'w') as the_file:
+            with open(os.path.join(package_dir, path), "w") as the_file:
                 the_file.write(content)
 
         return package_dir
 
 
 class TestDetectIntltool(TestCase, SetupTestPackageMixin):
-
     def test_detect_potfiles_in(self):
         # Find POTFILES.in in a package with multiple dirs when only one has
         # POTFILES.in.
@@ -87,14 +82,16 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_POTFILES_in_2")
         dirs = find_potfiles_in(backend, package_dir)
         self.assertThat(
-            dirs, MatchesSetwise(Equals("po"), Equals("module1/po")))
+            dirs, MatchesSetwise(Equals("po"), Equals("module1/po"))
+        )
 
     def test_check_potfiles_in_content_ok(self):
         # Ideally all files listed in POTFILES.in exist in the source package.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package("intltool_single_ok")
         self.assertTrue(
-            check_potfiles_in(backend, os.path.join(package_dir, "po")))
+            check_potfiles_in(backend, os.path.join(package_dir, "po"))
+        )
 
     def test_check_potfiles_in_content_ok_file_added(self):
         # If a file is not listed in POTFILES.in, the file is still good for
@@ -105,7 +102,8 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         with open(added_path, "w") as added_file:
             added_file.write("/* Test file. */")
         self.assertTrue(
-            check_potfiles_in(backend, os.path.join(package_dir, "po")))
+            check_potfiles_in(backend, os.path.join(package_dir, "po"))
+        )
 
     def test_check_potfiles_in_content_not_ok_file_removed(self):
         # If a file is missing that is listed in POTFILES.in, the file
@@ -115,7 +113,8 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_single_ok")
         os.remove(os.path.join(package_dir, "src/sourcefile1.c"))
         self.assertFalse(
-            check_potfiles_in(backend, os.path.join(package_dir, "po")))
+            check_potfiles_in(backend, os.path.join(package_dir, "po"))
+        )
 
     def test_check_potfiles_in_wrong_directory(self):
         # Passing in the wrong directory will cause the check to fail
@@ -123,7 +122,8 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package("intltool_single_ok")
         self.assertFalse(
-            check_potfiles_in(backend, os.path.join(package_dir, "foo")))
+            check_potfiles_in(backend, os.path.join(package_dir, "foo"))
+        )
 
     def test_find_intltool_dirs(self):
         # Complete run: find all directories with intltool structure.
@@ -131,7 +131,8 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_full_ok")
         self.assertEqual(
             ["po-module1", "po-module2"],
-            find_intltool_dirs(backend, package_dir))
+            find_intltool_dirs(backend, package_dir),
+        )
 
     def test_find_intltool_dirs_broken(self):
         # Complete run: part of the intltool structure is broken.
@@ -139,18 +140,19 @@ class TestDetectIntltool(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_full_ok")
         os.remove(os.path.join(package_dir, "src/module1/sourcefile1.c"))
         self.assertEqual(
-            ["po-module2"], find_intltool_dirs(backend, package_dir))
+            ["po-module2"], find_intltool_dirs(backend, package_dir)
+        )
 
 
 class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
-
     def test_get_translation_domain_makevars(self):
         # Find a translation domain in Makevars.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package("intltool_domain_makevars")
         self.assertEqual(
             "translationdomain",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makevars_subst_1(self):
         # Find a translation domain in Makevars, substituted from
@@ -161,10 +163,12 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
             {
                 "po/Makefile.in.in": "PACKAGE=packagename-in-in\n",
                 "po/Makevars": "DOMAIN = $(PACKAGE)\n",
-            })
+            },
+        )
         self.assertEqual(
             "packagename-in-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makevars_subst_2(self):
         # Find a translation domain in Makevars, substituted from
@@ -176,10 +180,12 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
                 "configure.ac": "PACKAGE=packagename-ac\n",
                 "po/Makefile.in.in": "# No domain here.\n",
                 "po/Makevars": "DOMAIN = $(PACKAGE)\n",
-            })
+            },
+        )
         self.assertEqual(
             "packagename-ac",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makefile_in_in(self):
         # Find a translation domain in Makefile.in.in.
@@ -187,7 +193,8 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_domain_makefile_in_in")
         self.assertEqual(
             "packagename-in-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_ac(self):
         # Find a translation domain in configure.ac.
@@ -195,14 +202,20 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_domain_configure_ac")
         self.assertEqual(
             "packagename-ac",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def prepare_ac_init(self, parameters, extra_files=None):
         # Prepare test for various permutations of AC_INIT parameters
-        configure_ac_content = dedent("""
+        configure_ac_content = (
+            dedent(
+                """
             AC_INIT(%s)
             GETTEXT_PACKAGE=AC_PACKAGE_NAME
-            """) % parameters
+            """
+            )
+            % parameters
+        )
         files = {"configure.ac": configure_ac_content}
         if extra_files is not None:
             files.update(extra_files)
@@ -212,66 +225,79 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
         # Find a translation domain in configure.ac in AC_INIT.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
-            "packagename-ac-init, 1.0, http://bug.org";)
+            "packagename-ac-init, 1.0, http://bug.org";
+        )
         self.assertEqual(
             "packagename-ac-init",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_ac_init_single_param(self):
         # Find a translation domain in configure.ac in AC_INIT.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init("[Just 1 param]")
         self.assertIsNone(
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po"))
+        )
 
     def test_get_translation_domain_configure_ac_init_brackets(self):
         # Find a translation domain in configure.ac in AC_INIT with brackets.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
-            "[packagename-ac-init], 1.0, http://bug.org";)
+            "[packagename-ac-init], 1.0, http://bug.org";
+        )
         self.assertEqual(
             "packagename-ac-init",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_ac_init_tarname(self):
         # Find a translation domain in configure.ac in AC_INIT tar name
         # parameter.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
-            "[Package name], 1.0, http://bug.org, [package-tarname]")
+            "[Package name], 1.0, http://bug.org, [package-tarname]"
+        )
         self.assertEqual(
             "package-tarname",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_ac_init_multiline(self):
         # Find a translation domain in configure.ac in AC_INIT when it
         # spans multiple lines.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
-            "[packagename-ac-init],\n    1.0,\n    http://bug.org";)
+            "[packagename-ac-init],\n    1.0,\n    http://bug.org";
+        )
         self.assertEqual(
             "packagename-ac-init",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_ac_init_multiline_tarname(self):
         # Find a translation domain in configure.ac in AC_INIT tar name
         # parameter that is on a different line.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
-            "[Package name], 1.0,\n    http://bug.org, [package-tarname]")
+            "[Package name], 1.0,\n    http://bug.org, [package-tarname]"
+        )
         self.assertEqual(
             "package-tarname",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_substitute_package_from_ac_init(self):
         # PACKAGE is substituted from AC_INIT parameters as a fallback.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_ac_init(
             "[packagename-ac-init], 1.0, http://bug.org";,
-            {"po/Makevars": "DOMAIN = $(PACKAGE)\n"})
+            {"po/Makevars": "DOMAIN = $(PACKAGE)\n"},
+        )
         self.assertEqual(
             "packagename-ac-init",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_configure_in(self):
         # Find a translation domain in configure.in.
@@ -279,17 +305,20 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
         package_dir = self.prepare_package("intltool_domain_configure_in")
         self.assertEqual(
             "packagename-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makefile_in_in_substitute(self):
         # Find a translation domain in Makefile.in.in with substitution from
         # configure.ac.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package(
-            "intltool_domain_makefile_in_in_substitute")
+            "intltool_domain_makefile_in_in_substitute"
+        )
         self.assertEqual(
             "domainname-ac-in-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makefile_in_in_substitute_same_name(self):
         # Find a translation domain in Makefile.in.in with substitution from
@@ -297,43 +326,50 @@ class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
         # Makefile.in.in.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package(
-            "intltool_domain_makefile_in_in_substitute_same_name")
+            "intltool_domain_makefile_in_in_substitute_same_name"
+        )
         self.assertEqual(
             "packagename-ac-in-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makefile_in_in_substitute_same_file(self):
         # Find a translation domain in Makefile.in.in with substitution from
         # the same file.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package(
-            "intltool_domain_makefile_in_in_substitute_same_file")
+            "intltool_domain_makefile_in_in_substitute_same_file"
+        )
         self.assertEqual(
             "domain-in-in-in-in",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
     def test_get_translation_domain_makefile_in_in_substitute_broken(self):
         # Find no translation domain in Makefile.in.in when the substitution
         # cannot be fulfilled.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package(
-            "intltool_domain_makefile_in_in_substitute_broken")
+            "intltool_domain_makefile_in_in_substitute_broken"
+        )
         self.assertIsNone(
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po"))
+        )
 
     def test_get_translation_domain_configure_in_substitute_version(self):
         # Find a translation domain in configure.in with Makefile-style
         # substitution from the same file.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package(
-            "intltool_domain_configure_in_substitute_version")
+            "intltool_domain_configure_in_substitute_version"
+        )
         self.assertEqual(
             "domainname-in42",
-            get_translation_domain(backend, os.path.join(package_dir, "po")))
+            get_translation_domain(backend, os.path.join(package_dir, "po")),
+        )
 
 
 class TestGenerateTemplates(TestCase, SetupTestPackageMixin):
-
     def test_generate_pot(self):
         # Generate a given PO template.
         backend = UncontainedBackend("1")
@@ -341,12 +377,15 @@ class TestGenerateTemplates(TestCase, SetupTestPackageMixin):
         self.assertEqual(
             "module1",
             generate_pot(
-                backend, os.path.join(package_dir, "po-module1"), "module1"),
-            "PO template generation failed.")
+                backend, os.path.join(package_dir, "po-module1"), "module1"
+            ),
+            "PO template generation failed.",
+        )
         expected_path = "po-module1/module1.pot"
         self.assertTrue(
             os.access(os.path.join(package_dir, expected_path), os.F_OK),
-            "Generated PO template '%s' not found." % expected_path)
+            "Generated PO template '%s' not found." % expected_path,
+        )
 
     def test_generate_pot_no_domain(self):
         # Generate a generic PO template.
@@ -355,12 +394,15 @@ class TestGenerateTemplates(TestCase, SetupTestPackageMixin):
         self.assertEqual(
             "messages",
             generate_pot(
-                backend, os.path.join(package_dir, "po-module1"), None),
-            "PO template generation failed.")
+                backend, os.path.join(package_dir, "po-module1"), None
+            ),
+            "PO template generation failed.",
+        )
         expected_path = "po-module1/messages.pot"
         self.assertTrue(
             os.access(os.path.join(package_dir, expected_path), os.F_OK),
-            "Generated PO template '%s' not found." % expected_path)
+            "Generated PO template '%s' not found." % expected_path,
+        )
 
     def test_generate_pot_empty_domain(self):
         # Generate a generic PO template.
@@ -369,11 +411,13 @@ class TestGenerateTemplates(TestCase, SetupTestPackageMixin):
         self.assertEqual(
             "messages",
             generate_pot(backend, os.path.join(package_dir, "po-module1"), ""),
-            "PO template generation failed.")
+            "PO template generation failed.",
+        )
         expected_path = "po-module1/messages.pot"
         self.assertTrue(
             os.access(os.path.join(package_dir, expected_path), os.F_OK),
-            "Generated PO template '%s' not found." % expected_path)
+            "Generated PO template '%s' not found." % expected_path,
+        )
 
     def test_generate_pot_not_intltool(self):
         # Fail when not an intltool setup.
@@ -383,59 +427,67 @@ class TestGenerateTemplates(TestCase, SetupTestPackageMixin):
         os.remove(os.path.join(package_dir, "po-module1/POTFILES.in"))
         self.assertIsNone(
             generate_pot(
-                backend, os.path.join(package_dir, "po-module1"), "nothing"),
-            "PO template generation should have failed.")
+                backend, os.path.join(package_dir, "po-module1"), "nothing"
+            ),
+            "PO template generation should have failed.",
+        )
         not_expected_path = "po-module1/nothing.pot"
         self.assertFalse(
             os.access(os.path.join(package_dir, not_expected_path), os.F_OK),
-            "Not expected PO template '%s' generated." % not_expected_path)
+            "Not expected PO template '%s' generated." % not_expected_path,
+        )
 
     def test_generate_pots(self):
         # Generate all PO templates in the package.
         backend = UncontainedBackend("1")
         package_dir = self.prepare_package("intltool_full_ok")
         expected_paths = [
-            'po-module1/packagename-module1.pot',
-            'po-module2/packagename-module2.pot',
-            ]
+            "po-module1/packagename-module1.pot",
+            "po-module2/packagename-module2.pot",
+        ]
         pots_list = generate_pots(backend, package_dir)
         self.assertEqual(expected_paths, pots_list)
         for expected_path in expected_paths:
             self.assertTrue(
                 os.access(os.path.join(package_dir, expected_path), os.F_OK),
-                "Generated PO template '%s' not found." % expected_path)
+                "Generated PO template '%s' not found." % expected_path,
+            )
 
 
 class TestConfigFile(TestCase):
-
     def _makeConfigFile(self, text):
         """Create a `ConfigFile` containing `text`."""
         return ConfigFile(StringIO(dedent(text)))
 
     def test_getVariable_smoke(self):
-        configfile = self._makeConfigFile("""
+        configfile = self._makeConfigFile(
+            """
             A = 1
             B = 2
             C = 3
-            """)
-        self.assertEqual('1', configfile.getVariable('A'))
-        self.assertEqual('2', configfile.getVariable('B'))
-        self.assertEqual('3', configfile.getVariable('C'))
+            """
+        )
+        self.assertEqual("1", configfile.getVariable("A"))
+        self.assertEqual("2", configfile.getVariable("B"))
+        self.assertEqual("3", configfile.getVariable("C"))
 
     def test_getVariable_exists(self):
         configfile = self._makeConfigFile("DDD=dd.d")
-        self.assertEqual('dd.d', configfile.getVariable('DDD'))
+        self.assertEqual("dd.d", configfile.getVariable("DDD"))
 
     def test_getVariable_ignores_mere_mention(self):
-        configfile = self._makeConfigFile("""
+        configfile = self._makeConfigFile(
+            """
             CCC
             CCC = ccc # (this is the real definition)
             CCC
-            """)
-        self.assertEqual('ccc', configfile.getVariable('CCC'))
+            """
+        )
+        self.assertEqual("ccc", configfile.getVariable("CCC"))
 
     def test_getVariable_ignores_irrelevancies(self):
-        configfile = self._makeConfigFile("""
+        configfile = self._makeConfigFile(
+            """
             A = a
             ===
             blah
@@ -443,128 +495,136 @@ class TestConfigFile(TestCase):
             a = case-insensitive
 
             Z = z
-            """)
-        self.assertEqual('a', configfile.getVariable('A'))
-        self.assertEqual('z', configfile.getVariable('Z'))
+            """
+        )
+        self.assertEqual("a", configfile.getVariable("A"))
+        self.assertEqual("z", configfile.getVariable("Z"))
 
     def test_getVariable_exists_spaces_comment(self):
         configfile = self._makeConfigFile("CCC = ccc # comment")
-        self.assertEqual('ccc', configfile.getVariable('CCC'))
+        self.assertEqual("ccc", configfile.getVariable("CCC"))
 
     def test_getVariable_empty(self):
         configfile = self._makeConfigFile("AAA=")
-        self.assertEqual('', configfile.getVariable('AAA'))
+        self.assertEqual("", configfile.getVariable("AAA"))
 
     def test_getVariable_empty_spaces(self):
         configfile = self._makeConfigFile("BBB = ")
-        self.assertEqual('', configfile.getVariable('BBB'))
+        self.assertEqual("", configfile.getVariable("BBB"))
 
     def test_getVariable_nonexistent(self):
         configfile = self._makeConfigFile("X = y")
-        self.assertIsNone(configfile.getVariable('FFF'))
+        self.assertIsNone(configfile.getVariable("FFF"))
 
     def test_getVariable_broken(self):
         configfile = self._makeConfigFile("EEE \n= eee")
-        self.assertIsNone(configfile.getVariable('EEE'))
+        self.assertIsNone(configfile.getVariable("EEE"))
 
     def test_getVariable_strips_quotes(self):
         # Quotes get stripped off variables.
         configfile = self._makeConfigFile("QQQ = 'qqq'")
-        self.assertEqual('qqq', configfile.getVariable('QQQ'))
+        self.assertEqual("qqq", configfile.getVariable("QQQ"))
 
         # This is done by invoking _stripQuotes (tested separately).
-        configfile._stripQuotes = FakeMethod(result='foo')
-        self.assertEqual('foo', configfile.getVariable('QQQ'))
+        configfile._stripQuotes = FakeMethod(result="foo")
+        self.assertEqual("foo", configfile.getVariable("QQQ"))
         self.assertNotEqual(0, configfile._stripQuotes.call_count)
 
     def test_getFunctionParams_single(self):
         configfile = self._makeConfigFile("FUNC_1(param1)")
-        self.assertEqual(['param1'], configfile.getFunctionParams('FUNC_1'))
+        self.assertEqual(["param1"], configfile.getFunctionParams("FUNC_1"))
 
     def test_getFunctionParams_multiple(self):
         configfile = self._makeConfigFile("FUNC_2(param1, param2, param3 )")
         self.assertEqual(
-            ['param1', 'param2', 'param3'],
-            configfile.getFunctionParams('FUNC_2'))
+            ["param1", "param2", "param3"],
+            configfile.getFunctionParams("FUNC_2"),
+        )
 
     def test_getFunctionParams_multiline_indented(self):
-        configfile = self._makeConfigFile("""
+        configfile = self._makeConfigFile(
+            """
             ML_FUNC_1(param1,
                 param2, param3)
-            """)
+            """
+        )
         self.assertEqual(
-            ['param1', 'param2', 'param3'],
-            configfile.getFunctionParams('ML_FUNC_1'))
+            ["param1", "param2", "param3"],
+            configfile.getFunctionParams("ML_FUNC_1"),
+        )
 
     def test_getFunctionParams_multiline_not_indented(self):
-        configfile = self._makeConfigFile("""
+        configfile = self._makeConfigFile(
+            """
             ML_FUNC_2(
             param1,
             param2)
-            """)
+            """
+        )
         self.assertEqual(
-            ['param1', 'param2'], configfile.getFunctionParams('ML_FUNC_2'))
+            ["param1", "param2"], configfile.getFunctionParams("ML_FUNC_2")
+        )
 
     def test_getFunctionParams_strips_quotes(self):
         # Quotes get stripped off function parameters.
         configfile = self._makeConfigFile('FUNC("param")')
-        self.assertEqual(['param'], configfile.getFunctionParams('FUNC'))
+        self.assertEqual(["param"], configfile.getFunctionParams("FUNC"))
 
         # This is done by invoking _stripQuotes (tested separately).
-        configfile._stripQuotes = FakeMethod(result='arg')
-        self.assertEqual(['arg'], configfile.getFunctionParams('FUNC'))
+        configfile._stripQuotes = FakeMethod(result="arg")
+        self.assertEqual(["arg"], configfile.getFunctionParams("FUNC"))
         self.assertNotEqual(0, configfile._stripQuotes.call_count)
 
     def test_stripQuotes_unquoted(self):
         # _stripQuotes leaves unquoted identifiers intact.
-        configfile = self._makeConfigFile('')
-        self.assertEqual('hello', configfile._stripQuotes('hello'))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("hello", configfile._stripQuotes("hello"))
 
     def test_stripQuotes_empty(self):
-        configfile = self._makeConfigFile('')
-        self.assertEqual('', configfile._stripQuotes(''))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("", configfile._stripQuotes(""))
 
     def test_stripQuotes_single_quotes(self):
         # Single quotes are stripped.
-        configfile = self._makeConfigFile('')
-        self.assertEqual('x', configfile._stripQuotes("'x'"))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("x", configfile._stripQuotes("'x'"))
 
     def test_stripQuotes_double_quotes(self):
         # Double quotes are stripped.
-        configfile = self._makeConfigFile('')
-        self.assertEqual('y', configfile._stripQuotes('"y"'))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("y", configfile._stripQuotes('"y"'))
 
     def test_stripQuotes_bracket_quotes(self):
         # Brackets are stripped.
-        configfile = self._makeConfigFile('')
-        self.assertEqual('z', configfile._stripQuotes('[z]'))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("z", configfile._stripQuotes("[z]"))
 
     def test_stripQuotes_opening_brackets(self):
         # An opening bracket must be matched by a closing one.
-        configfile = self._makeConfigFile('')
-        self.assertEqual('[x[', configfile._stripQuotes('[x['))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("[x[", configfile._stripQuotes("[x["))
 
     def test_stripQuotes_closing_brackets(self):
         # A closing bracket is not accepted as an opening quote.
-        configfile = self._makeConfigFile('')
-        self.assertEqual(']x]', configfile._stripQuotes(']x]'))
+        configfile = self._makeConfigFile("")
+        self.assertEqual("]x]", configfile._stripQuotes("]x]"))
 
     def test_stripQuotes_multiple(self):
         # Only a single layer of quotes is stripped.
-        configfile = self._makeConfigFile('')
+        configfile = self._makeConfigFile("")
         self.assertEqual('"n"', configfile._stripQuotes("'\"n\"'"))
 
     def test_stripQuotes_single_quote(self):
         # A string consisting of just one quote is not stripped.
-        configfile = self._makeConfigFile('')
+        configfile = self._makeConfigFile("")
         self.assertEqual("'", configfile._stripQuotes("'"))
 
     def test_stripQuotes_mismatched(self):
         # Mismatched quotes are not stripped.
-        configfile = self._makeConfigFile('')
+        configfile = self._makeConfigFile("")
         self.assertEqual("'foo\"", configfile._stripQuotes("'foo\""))
 
     def test_stripQuotes_unilateral(self):
         # A quote that's only on one end doesn't get stripped.
-        configfile = self._makeConfigFile('')
+        configfile = self._makeConfigFile("")
         self.assertEqual('"foo', configfile._stripQuotes('"foo'))
diff --git a/lpbuildd/proxy.py b/lpbuildd/proxy.py
index 94a383f..6e3cdc8 100644
--- a/lpbuildd/proxy.py
+++ b/lpbuildd/proxy.py
@@ -3,32 +3,23 @@
 
 import base64
 import io
-from urllib.error import (
-    HTTPError,
-    URLError,
-    )
+from urllib.error import HTTPError, URLError
 from urllib.parse import urlparse
-from urllib.request import (
-    Request,
-    urlopen,
-    )
+from urllib.request import Request, urlopen
 
 from twisted.application import strports
 from twisted.internet import reactor
 from twisted.internet.interfaces import IHalfCloseableProtocol
 from twisted.python.compat import intToBytes
-from twisted.web import (
-    http,
-    proxy,
-    )
+from twisted.web import http, proxy
 from zope.interface import implementer
 
 
 class BuilderProxyClient(proxy.ProxyClient):
-
     def __init__(self, command, rest, version, headers, data, father):
         proxy.ProxyClient.__init__(
-            self, command, rest, version, headers, data, father)
+            self, command, rest, version, headers, data, father
+        )
         # Why doesn't ProxyClient at least store this?
         self.version = version
         # We must avoid calling self.father.finish in the event that its
@@ -46,7 +37,8 @@ class BuilderProxyClient(proxy.ProxyClient):
         # For some reason, HTTPClient.sendCommand doesn't preserve the
         # protocol version.
         self.transport.writeSequence(
-            [command, b' ', path, b' ', self.version, b'\r\n'])
+            [command, b" ", path, b" ", self.version, b"\r\n"]
+        )
 
     def handleEndHeaders(self):
         self.father.handleEndHeaders()
@@ -95,14 +87,17 @@ class BuilderProxyRequest(http.Request):
         if b"host" not in headers and request_parsed.netloc:
             headers[b"host"] = request_parsed.netloc
         if remote_parsed.username:
-            auth = (remote_parsed.username + ":" +
-                    remote_parsed.password).encode("ASCII")
+            auth = (
+                remote_parsed.username + ":" + remote_parsed.password
+            ).encode("ASCII")
             authHeader = b"Basic " + base64.b64encode(auth)
             headers[b"proxy-authorization"] = authHeader
         self.client_factory = BuilderProxyClientFactory(
-            command, path, version, headers, b"", self)
+            command, path, version, headers, b"", self
+        )
         reactor.connectTCP(
-            remote_parsed.hostname, remote_parsed.port, self.client_factory)
+            remote_parsed.hostname, remote_parsed.port, self.client_factory
+        )
 
     def requestReceived(self, command, path, version):
         # We do most of our work in `allHeadersReceived` instead.
@@ -126,8 +121,13 @@ class BuilderProxyRequest(http.Request):
         self.startedWriting = 1
         lines = []
         lines.append(
-            self.clientproto + b" " + intToBytes(self.code) + b" " +
-            self.code_message + b"\r\n")
+            self.clientproto
+            + b" "
+            + intToBytes(self.code)
+            + b" "
+            + self.code_message
+            + b"\r\n"
+        )
         for name, values in self.responseHeaders.getAllRawHeaders():
             for value in values:
                 lines.extend([name, b": ", value, b"\r\n"])
@@ -163,7 +163,8 @@ class BuilderProxy(http.HTTPChannel):
     def allHeadersReceived(self):
         http.HTTPChannel.allHeadersReceived(self)
         self.requests[-1].allHeadersReceived(
-            self._command, self._path, self._version)
+            self._command, self._path, self._version
+        )
         if self._command == b"CONNECT":
             # This is a lie, but we don't want HTTPChannel to decide that
             # the request is finished just because a CONNECT request
@@ -200,21 +201,22 @@ class BuilderProxyFactory(http.HTTPFactory):
         agent = http._escape(request.getHeader(b"user-agent") or b"-")
         line = (
             '%(timestamp)s "%(method)s %(uri)s %(protocol)s" '
-            '%(code)d %(length)s "%(referrer)s" "%(agent)s"\n' % {
-                'timestamp': self._logDateTime,
-                'method': http._escape(request.method),
-                'uri': http._escape(request.uri),
-                'protocol': http._escape(request.clientproto),
-                'code': request.code,
-                'length': request.sentLength or "-",
-                'referrer': referrer,
-                'agent': agent,
-                })
+            '%(code)d %(length)s "%(referrer)s" "%(agent)s"\n'
+            % {
+                "timestamp": self._logDateTime,
+                "method": http._escape(request.method),
+                "uri": http._escape(request.uri),
+                "protocol": http._escape(request.clientproto),
+                "code": request.code,
+                "length": request.sentLength or "-",
+                "referrer": referrer,
+                "agent": agent,
+            }
+        )
         self.manager._builder.log(line.encode("UTF-8"))
 
 
 class BuildManagerProxyMixin:
-
     def startProxy(self):
         """Start the local builder proxy, if necessary."""
         if not self.proxy_url:
@@ -222,7 +224,8 @@ class BuildManagerProxyMixin:
         proxy_port = self._builder._config.get("builder", "proxyport")
         proxy_factory = BuilderProxyFactory(self, self.proxy_url, timeout=60)
         self.proxy_service = strports.service(
-            "tcp:%s" % proxy_port, proxy_factory)
+            "tcp:%s" % proxy_port, proxy_factory
+        )
         self.proxy_service.setServiceParent(self._builder.service)
         if self.backend_name == "lxd":
             proxy_host = self.backend.ipv4_network.ip
@@ -252,4 +255,5 @@ class BuildManagerProxyMixin:
             urlopen(req, timeout=15)
         except (HTTPError, URLError) as e:
             self._builder.log(
-                f"Unable to revoke token for {url.username}: {e}")
+                f"Unable to revoke token for {url.username}: {e}"
+            )
diff --git a/lpbuildd/snap.py b/lpbuildd/snap.py
index 07d9f3e..bc35997 100644
--- a/lpbuildd/snap.py
+++ b/lpbuildd/snap.py
@@ -1,19 +1,12 @@
 # Copyright 2015-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from configparser import (
-    NoOptionError,
-    NoSectionError,
-    )
 import os
+from configparser import NoOptionError, NoSectionError
 
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-    )
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 from lpbuildd.proxy import BuildManagerProxyMixin
 
-
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
@@ -39,7 +32,8 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         self.channels = extra_args.get("channels", {})
         self.build_request_id = extra_args.get("build_request_id")
         self.build_request_timestamp = extra_args.get(
-            "build_request_timestamp")
+            "build_request_timestamp"
+        )
         self.build_url = extra_args.get("build_url")
         self.branch = extra_args.get("branch")
         self.git_repository = extra_args.get("git_repository")
@@ -47,7 +41,8 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         self.proxy_url = extra_args.get("proxy_url")
         self.revocation_endpoint = extra_args.get("revocation_endpoint")
         self.build_source_tarball = extra_args.get(
-            "build_source_tarball", False)
+            "build_source_tarball", False
+        )
         self.private = extra_args.get("private", False)
         self.proxy_service = None
         self.target_architectures = extra_args.get("target_architectures")
@@ -63,7 +58,8 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             args.extend(["--build-request-id", str(self.build_request_id)])
         if self.build_request_timestamp:
             args.extend(
-                ["--build-request-timestamp", self.build_request_timestamp])
+                ["--build-request-timestamp", self.build_request_timestamp]
+            )
         if self.build_url:
             args.extend(["--build-url", self.build_url])
         args.extend(self.startProxy())
@@ -81,7 +77,8 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
             args.append("--private")
         try:
             snap_store_proxy_url = self._builder._config.get(
-                "proxy", "snapstore")
+                "proxy", "snapstore"
+            )
             args.extend(["--snap-store-proxy-url", snap_store_proxy_url])
         except (NoSectionError, NoOptionError):
             pass
@@ -98,8 +95,10 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
         if retcode == RETCODE_SUCCESS:
             print("Returning build status: OK")
             return self.deferGatherResults()
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD
+        ):
             if not self.alreadyfailed:
                 self._builder.buildFail()
                 print("Returning build status: Build failed.")
@@ -125,10 +124,12 @@ class SnapBuildManager(BuildManagerProxyMixin, DebianBuildManager):
                 if self.backend.islink(path):
                     continue
                 if entry.endswith(
-                        (".snap", ".manifest", ".debug", ".dpkg.yaml")):
+                    (".snap", ".manifest", ".debug", ".dpkg.yaml")
+                ):
                     self.addWaitingFileFromBackend(path)
         if self.build_source_tarball:
             source_tarball_path = os.path.join(
-                "/build", "%s.tar.gz" % self.name)
+                "/build", "%s.tar.gz" % self.name
+            )
             if self.backend.path_exists(source_tarball_path):
                 self.addWaitingFileFromBackend(source_tarball_path)
diff --git a/lpbuildd/sourcepackagerecipe.py b/lpbuildd/sourcepackagerecipe.py
index 9b5520f..250c3b5 100644
--- a/lpbuildd/sourcepackagerecipe.py
+++ b/lpbuildd/sourcepackagerecipe.py
@@ -8,11 +8,7 @@ import os
 import re
 
 from lpbuildd.builder import get_build_path
-from lpbuildd.debian import (
-    DebianBuildManager,
-    DebianBuildState,
-)
-
+from lpbuildd.debian import DebianBuildManager, DebianBuildState
 
 RETCODE_SUCCESS = 0
 RETCODE_FAILURE_INSTALL = 200
@@ -27,7 +23,7 @@ def splat_file(path, contents):
     :param path: The path to store the string in.
     :param contents: The string to write to the file.
     """
-    file_obj = open(path, 'w')
+    file_obj = open(path, "w")
     try:
         file_obj.write(contents)
     finally:
@@ -42,11 +38,13 @@ def get_chroot_path(home, build_id, *extra):
     :param extra: Additional path elements.
     """
     return get_build_path(
-        home, build_id, 'chroot-autobuild', os.environ['HOME'][1:], *extra)
+        home, build_id, "chroot-autobuild", os.environ["HOME"][1:], *extra
+    )
 
 
 class SourcePackageRecipeBuildState(DebianBuildState):
     """The set of states that a recipe build can be in."""
+
     BUILD_RECIPE = "BUILD_RECIPE"
 
 
@@ -71,28 +69,35 @@ class SourcePackageRecipeBuildManager(DebianBuildManager):
         :param chroot: The sha1sum of the chroot to use.
         :param extra_args: A dict of extra arguments.
         """
-        self.recipe_text = extra_args['recipe_text']
-        self.suite = extra_args['suite']
-        self.component = extra_args['ogrecomponent']
-        self.author_name = extra_args['author_name']
-        self.author_email = extra_args['author_email']
-        self.archive_purpose = extra_args['archive_purpose']
-        self.git = extra_args.get('git', False)
+        self.recipe_text = extra_args["recipe_text"]
+        self.suite = extra_args["suite"]
+        self.component = extra_args["ogrecomponent"]
+        self.author_name = extra_args["author_name"]
+        self.author_email = extra_args["author_email"]
+        self.archive_purpose = extra_args["archive_purpose"]
+        self.git = extra_args.get("git", False)
 
         super().initiate(files, chroot, extra_args)
 
     def doRunBuild(self):
         """Run the build process to build the source package."""
-        os.makedirs(get_chroot_path(self.home, self._buildid, 'work'))
-        recipe_path = get_chroot_path(self.home, self._buildid, 'work/recipe')
+        os.makedirs(get_chroot_path(self.home, self._buildid, "work"))
+        recipe_path = get_chroot_path(self.home, self._buildid, "work/recipe")
         splat_file(recipe_path, self.recipe_text)
         args = ["buildrecipe"]
         if self.git:
             args.append("--git")
-        args.extend([
-            self._buildid, self.author_name.encode('utf-8'),
-            self.author_email, self.suite, self.series,
-            self.component, self.archive_purpose])
+        args.extend(
+            [
+                self._buildid,
+                self.author_name.encode("utf-8"),
+                self.author_email,
+                self.suite,
+                self.series,
+                self.component,
+                self.archive_purpose,
+            ]
+        )
         self.runSubProcess(self.build_recipe_path, args)
 
     def iterate_BUILD_RECIPE(self, retcode):
@@ -103,8 +108,9 @@ class SourcePackageRecipeBuildManager(DebianBuildManager):
         elif retcode == RETCODE_FAILURE_INSTALL_BUILD_DEPS:
             if not self.alreadyfailed:
                 rx = (
-                    r'The following packages have unmet dependencies:\n'
-                    r'.*: Depends: ([^ ]*( \([^)]*\))?)')
+                    r"The following packages have unmet dependencies:\n"
+                    r".*: Depends: ([^ ]*( \([^)]*\))?)"
+                )
                 _, mo = self.searchLogContents([[rx, re.M]])
                 if mo:
                     missing_dep = mo.group(1).decode("UTF-8", "replace")
@@ -115,8 +121,10 @@ class SourcePackageRecipeBuildManager(DebianBuildManager):
                     print("Returning build status: Build failed")
                     self._builder.buildFail()
             self.alreadyfailed = True
-        elif (retcode >= RETCODE_FAILURE_INSTALL and
-              retcode <= RETCODE_FAILURE_BUILD_SOURCE_PACKAGE):
+        elif (
+            retcode >= RETCODE_FAILURE_INSTALL
+            and retcode <= RETCODE_FAILURE_BUILD_SOURCE_PACKAGE
+        ):
             # XXX AaronBentley 2009-01-13: We should handle depwait separately
             if not self.alreadyfailed:
                 self._builder.buildFail()
@@ -138,7 +146,7 @@ class SourcePackageRecipeBuildManager(DebianBuildManager):
         """Return the path to the changes file."""
         work_path = get_build_path(self.home, self._buildid)
         for name in os.listdir(work_path):
-            if name.endswith('_source.changes'):
+            if name.endswith("_source.changes"):
                 return os.path.join(work_path, name)
 
     def gatherResults(self):
@@ -148,5 +156,6 @@ class SourcePackageRecipeBuildManager(DebianBuildManager):
         The manifest is also a useful record.
         """
         DebianBuildManager.gatherResults(self)
-        self._builder.addWaitingFile(get_build_path(
-            self.home, self._buildid, 'manifest'))
+        self._builder.addWaitingFile(
+            get_build_path(self.home, self._buildid, "manifest")
+        )
diff --git a/lpbuildd/target/apt.py b/lpbuildd/target/apt.py
index c1d30a7..c4f6b70 100644
--- a/lpbuildd/target/apt.py
+++ b/lpbuildd/target/apt.py
@@ -5,12 +5,11 @@ import logging
 import os
 import subprocess
 import sys
-from textwrap import dedent
 import time
+from textwrap import dedent
 
 from lpbuildd.target.operation import Operation
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -22,10 +21,11 @@ class OverrideSourcesList(Operation):
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--apt-proxy-url", metavar="URL", help="APT proxy URL")
+            "--apt-proxy-url", metavar="URL", help="APT proxy URL"
+        )
         parser.add_argument(
-            "archives", metavar="ARCHIVE", nargs="+",
-            help="sources.list lines")
+            "archives", metavar="ARCHIVE", nargs="+", help="sources.list lines"
+        )
 
     def run(self):
         logger.info("Overriding sources.list in build-%s", self.args.build_id)
@@ -46,8 +46,10 @@ class OverrideSourcesList(Operation):
         with self.backend.open(
             "/etc/apt/apt.conf.d/99phasing", mode="w+"
         ) as apt_phasing_conf:
-            print('APT::Get::Always-Include-Phased-Updates "true";',
-                  file=apt_phasing_conf)
+            print(
+                'APT::Get::Always-Include-Phased-Updates "true";',
+                file=apt_phasing_conf,
+            )
             os.fchmod(apt_phasing_conf.fileno(), 0o644)
         if self.args.apt_proxy_url is not None:
             with self.backend.open(
@@ -55,17 +57,24 @@ class OverrideSourcesList(Operation):
             ) as apt_proxy_conf:
                 print(
                     f'Acquire::http::Proxy "{self.args.apt_proxy_url}";',
-                    file=apt_proxy_conf)
+                    file=apt_proxy_conf,
+                )
                 os.fchmod(apt_proxy_conf.fileno(), 0o644)
         for pocket in ("proposed", "backports"):
             with self.backend.open(
                 f"/etc/apt/preferences.d/{pocket}.pref", mode="w+"
             ) as preferences:
-                print(dedent(f"""\
+                print(
+                    dedent(
+                        f"""\
                     Package: *
                     Pin: release a=*-{pocket}
                     Pin-Priority: 500
-                    """), file=preferences, end="")
+                    """
+                    ),
+                    file=preferences,
+                    end="",
+                )
                 os.fchmod(preferences.fileno(), 0o644)
         return 0
 
@@ -86,19 +95,27 @@ class AddTrustedKeys(Operation):
         # it may call `lxc exec` and that apparently drains stdin.
         input_data = self.input_file.read()
         gpg_cmd = [
-            "gpg", "--ignore-time-conflict", "--no-options", "--no-keyring",
-            ]
+            "gpg",
+            "--ignore-time-conflict",
+            "--no-options",
+            "--no-keyring",
+        ]
         with self.backend.open(
             "/etc/apt/trusted.gpg.d/launchpad-buildd.gpg", mode="wb+"
         ) as keyring:
             subprocess.run(
-                gpg_cmd + ["--dearmor"], input=input_data, stdout=keyring,
-                check=True)
+                gpg_cmd + ["--dearmor"],
+                input=input_data,
+                stdout=keyring,
+                check=True,
+            )
             keyring.seek(0)
             subprocess.check_call(
-                gpg_cmd +
-                ["--show-keys", "--keyid-format", "long", "--fingerprint"],
-                stdin=keyring, stdout=self.show_keys_file)
+                gpg_cmd
+                + ["--show-keys", "--keyid-format", "long", "--fingerprint"],
+                stdin=keyring,
+                stdout=self.show_keys_file,
+            )
             os.fchmod(keyring.fileno(), 0o644)
         return 0
 
@@ -114,7 +131,7 @@ class Update(Operation):
                 "LANG": "C",
                 "DEBIAN_FRONTEND": "noninteractive",
                 "TTY": "unknown",
-                }
+            }
             apt_get = "/usr/bin/apt-get"
             update_args = [apt_get, "-uy", "update"]
             try:
@@ -124,8 +141,12 @@ class Update(Operation):
                 time.sleep(15)
                 self.backend.run(update_args, env=env, stdin=devnull)
             upgrade_args = [
-                apt_get, "-o", "DPkg::Options::=--force-confold", "-uy",
-                "--purge", "dist-upgrade",
-                ]
+                apt_get,
+                "-o",
+                "DPkg::Options::=--force-confold",
+                "-uy",
+                "--purge",
+                "dist-upgrade",
+            ]
             self.backend.run(upgrade_args, env=env, stdin=devnull)
         return 0
diff --git a/lpbuildd/target/backend.py b/lpbuildd/target/backend.py
index 28e3ef6..371af8f 100644
--- a/lpbuildd/target/backend.py
+++ b/lpbuildd/target/backend.py
@@ -20,7 +20,8 @@ class InvalidBuildFilePath(Exception):
 def check_path_escape(buildd_path, path_to_check):
     """Check the build file path doesn't escape the build directory."""
     build_file_path = os.path.realpath(
-        os.path.join(buildd_path, path_to_check))
+        os.path.join(buildd_path, path_to_check)
+    )
     common_path = os.path.commonprefix((build_file_path, buildd_path))
     if common_path != buildd_path:
         raise InvalidBuildFilePath("Invalid build file path.")
@@ -52,8 +53,16 @@ class Backend:
         """
         raise NotImplementedError
 
-    def run(self, args, cwd=None, env=None, input_text=None, get_output=False,
-            echo=False, **kwargs):
+    def run(
+        self,
+        args,
+        cwd=None,
+        env=None,
+        input_text=None,
+        get_output=False,
+        echo=False,
+        **kwargs,
+    ):
         """Run a command in the target environment.
 
         :param args: the command and arguments to run.
@@ -171,7 +180,10 @@ class Backend:
             with open("/dev/null", "w") as devnull:
                 output = self.run(
                     ["apt-cache", "show", package],
-                    get_output=True, stderr=devnull, universal_newlines=True)
+                    get_output=True,
+                    stderr=devnull,
+                    universal_newlines=True,
+                )
             return ("Package: %s" % package) in output.splitlines()
         except subprocess.CalledProcessError:
             return False
@@ -219,17 +231,21 @@ class Backend:
 def make_backend(name, build_id, series=None, arch=None, constraints=None):
     if name == "chroot":
         from lpbuildd.target.chroot import Chroot
+
         backend_factory = Chroot
     elif name == "lxd":
         from lpbuildd.target.lxd import LXD
+
         backend_factory = LXD
     elif name == "fake":
         # Only for use in tests.
         from lpbuildd.tests.fakebuilder import FakeBackend
+
         backend_factory = FakeBackend
     elif name == "uncontained":
         # Only for use in tests.
         from lpbuildd.tests.fakebuilder import UncontainedBackend
+
         backend_factory = UncontainedBackend
     else:
         raise KeyError("Unknown backend: %s" % name)
diff --git a/lpbuildd/target/build_charm.py b/lpbuildd/target/build_charm.py
index c1707d3..51297c8 100644
--- a/lpbuildd/target/build_charm.py
+++ b/lpbuildd/target/build_charm.py
@@ -11,7 +11,6 @@ from lpbuildd.target.proxy import BuilderProxyOperationMixin
 from lpbuildd.target.snapstore import SnapStoreOperationMixin
 from lpbuildd.target.vcs import VCSOperationMixin
 
-
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
 
@@ -19,8 +18,12 @@ RETCODE_FAILURE_BUILD = 201
 logger = logging.getLogger(__name__)
 
 
-class BuildCharm(BuilderProxyOperationMixin, VCSOperationMixin,
-                 SnapStoreOperationMixin, Operation):
+class BuildCharm(
+    BuilderProxyOperationMixin,
+    VCSOperationMixin,
+    SnapStoreOperationMixin,
+    Operation,
+):
 
     description = "Build a charm."
 
@@ -30,13 +33,19 @@ class BuildCharm(BuilderProxyOperationMixin, VCSOperationMixin,
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--channel", action=SnapChannelsAction, metavar="SNAP=CHANNEL",
-            dest="channels", default={}, help=(
+            "--channel",
+            action=SnapChannelsAction,
+            metavar="SNAP=CHANNEL",
+            dest="channels",
+            default={},
+            help=(
                 f"install SNAP from CHANNEL (supported snaps: "
-                f"{', '.join(cls.core_snap_names)}, charmcraft)"))
+                f"{', '.join(cls.core_snap_names)}, charmcraft)"
+            ),
+        )
         parser.add_argument(
-            "--build-path", default=".",
-            help="location of charm to build.")
+            "--build-path", default=".", help="location of charm to build."
+        )
         parser.add_argument("name", help="name of charm to build")
 
     def __init__(self, args, parser):
@@ -57,24 +66,35 @@ class BuildCharm(BuilderProxyOperationMixin, VCSOperationMixin,
                     deps.append(dep)
         deps.extend(self.vcs_deps)
         # See charmcraft.provider.CharmcraftBuilddBaseConfiguration.setup.
-        deps.extend([
-            "python3-pip",
-            "python3-setuptools",
-            ])
+        deps.extend(
+            [
+                "python3-pip",
+                "python3-setuptools",
+            ]
+        )
         self.backend.run(["apt-get", "-y", "install"] + deps)
         if self.backend.supports_snapd:
             self.snap_store_set_proxy()
         for snap_name in self.core_snap_names:
             if snap_name in self.args.channels:
                 self.backend.run(
-                    ["snap", "install",
-                     "--channel=%s" % self.args.channels[snap_name],
-                     snap_name])
+                    [
+                        "snap",
+                        "install",
+                        "--channel=%s" % self.args.channels[snap_name],
+                        snap_name,
+                    ]
+                )
         if "charmcraft" in self.args.channels:
             self.backend.run(
-                ["snap", "install", "--classic",
-                 "--channel=%s" % self.args.channels["charmcraft"],
-                 "charmcraft"])
+                [
+                    "snap",
+                    "install",
+                    "--classic",
+                    "--channel=%s" % self.args.channels["charmcraft"],
+                    "charmcraft",
+                ]
+            )
         else:
             self.backend.run(["snap", "install", "--classic", "charmcraft"])
         # The charmcraft snap can't see /build, so we have to do our work under
@@ -91,9 +111,8 @@ class BuildCharm(BuilderProxyOperationMixin, VCSOperationMixin,
     def build(self):
         logger.info("Running build phase...")
         build_context_path = os.path.join(
-            "/home/buildd",
-            self.args.name,
-            self.args.build_path)
+            "/home/buildd", self.args.name, self.args.build_path
+        )
         check_path_escape(self.buildd_path, build_context_path)
         env = self.build_proxy_environment(proxy_url=self.args.proxy_url)
         args = ["charmcraft", "pack", "-v", "--destructive-mode"]
@@ -103,12 +122,12 @@ class BuildCharm(BuilderProxyOperationMixin, VCSOperationMixin,
         try:
             self.install()
         except Exception:
-            logger.exception('Install failed')
+            logger.exception("Install failed")
             return RETCODE_FAILURE_INSTALL
         try:
             self.repo()
             self.build()
         except Exception:
-            logger.exception('Build failed')
+            logger.exception("Build failed")
             return RETCODE_FAILURE_BUILD
         return 0
diff --git a/lpbuildd/target/build_livefs.py b/lpbuildd/target/build_livefs.py
index 1d576d0..abf70d9 100644
--- a/lpbuildd/target/build_livefs.py
+++ b/lpbuildd/target/build_livefs.py
@@ -1,14 +1,13 @@
 # Copyright 2013-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from collections import OrderedDict
 import logging
 import os
+from collections import OrderedDict
 
 from lpbuildd.target.operation import Operation
 from lpbuildd.target.snapstore import SnapStoreOperationMixin
 
-
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
 
@@ -34,48 +33,83 @@ class BuildLiveFS(SnapStoreOperationMixin, Operation):
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--subarch", metavar="SUBARCH",
-            help="build for subarchitecture SUBARCH")
+            "--subarch",
+            metavar="SUBARCH",
+            help="build for subarchitecture SUBARCH",
+        )
         parser.add_argument(
-            "--project", metavar="PROJECT", help="build for project PROJECT")
+            "--project", metavar="PROJECT", help="build for project PROJECT"
+        )
         parser.add_argument(
-            "--subproject", metavar="SUBPROJECT",
-            help="build for subproject SUBPROJECT")
+            "--subproject",
+            metavar="SUBPROJECT",
+            help="build for subproject SUBPROJECT",
+        )
         parser.add_argument("--datestamp", help="date stamp")
         parser.add_argument(
-            "--image-format", metavar="FORMAT",
-            help="produce an image in FORMAT")
+            "--image-format",
+            metavar="FORMAT",
+            help="produce an image in FORMAT",
+        )
         parser.add_argument(
-            "--image-target", dest="image_targets", default=[],
-            action="append", metavar="TARGET",
-            help="produce image for TARGET")
+            "--image-target",
+            dest="image_targets",
+            default=[],
+            action="append",
+            metavar="TARGET",
+            help="produce image for TARGET",
+        )
         parser.add_argument(
-            "--repo-snapshot-stamp", dest="repo_snapshot_stamp",
+            "--repo-snapshot-stamp",
+            dest="repo_snapshot_stamp",
             metavar="TIMESTAMP",
-            help="build against package repo state at TIMESTAMP")
+            help="build against package repo state at TIMESTAMP",
+        )
         parser.add_argument(
-            "--cohort-key", dest="cohort_key", metavar="COHORT_KEY",
-            help="use COHORT_KEY during snap downloads")
+            "--cohort-key",
+            dest="cohort_key",
+            metavar="COHORT_KEY",
+            help="use COHORT_KEY during snap downloads",
+        )
         parser.add_argument(
-            "--proposed", default=False, action="store_true",
-            help="enable use of -proposed pocket")
+            "--proposed",
+            default=False,
+            action="store_true",
+            help="enable use of -proposed pocket",
+        )
         parser.add_argument(
-            "--locale", metavar="LOCALE",
-            help="use ubuntu-defaults-image to build an image for LOCALE")
+            "--locale",
+            metavar="LOCALE",
+            help="use ubuntu-defaults-image to build an image for LOCALE",
+        )
         parser.add_argument(
-            "--extra-ppa", dest="extra_ppas", default=[], action="append",
-            help="use this additional PPA")
+            "--extra-ppa",
+            dest="extra_ppas",
+            default=[],
+            action="append",
+            help="use this additional PPA",
+        )
         parser.add_argument(
-            "--extra-snap", dest="extra_snaps", default=[], action="append",
-            help="use this additional snap")
+            "--extra-snap",
+            dest="extra_snaps",
+            default=[],
+            action="append",
+            help="use this additional snap",
+        )
         parser.add_argument(
-            "--channel", metavar="CHANNEL",
-            help="pull snaps from channel CHANNEL for ubuntu-core image")
+            "--channel",
+            metavar="CHANNEL",
+            help="pull snaps from channel CHANNEL for ubuntu-core image",
+        )
         parser.add_argument(
-            "--http-proxy", action="store", help="use this HTTP proxy for apt")
+            "--http-proxy", action="store", help="use this HTTP proxy for apt"
+        )
         parser.add_argument(
-            "--debug", default=False, action="store_true",
-            help="enable detailed live-build debugging")
+            "--debug",
+            default=False,
+            action="store_true",
+            help="enable detailed live-build debugging",
+        )
 
     def install(self):
         deps = ["livecd-rootfs"]
@@ -89,30 +123,42 @@ class BuildLiveFS(SnapStoreOperationMixin, Operation):
         if self.backend.supports_snapd:
             self.snap_store_set_proxy()
         if self.args.locale is not None:
-            self.backend.run([
-                "apt-get", "-y", "--install-recommends", "install",
-                "ubuntu-defaults-builder",
-                ])
+            self.backend.run(
+                [
+                    "apt-get",
+                    "-y",
+                    "--install-recommends",
+                    "install",
+                    "ubuntu-defaults-builder",
+                ]
+            )
 
     def build(self):
         if self.args.locale is not None:
-            self.run_build_command([
-                "ubuntu-defaults-image",
-                "--locale", self.args.locale,
-                "--arch", self.args.arch,
-                "--release", self.args.series,
-                ])
+            self.run_build_command(
+                [
+                    "ubuntu-defaults-image",
+                    "--locale",
+                    self.args.locale,
+                    "--arch",
+                    self.args.arch,
+                    "--release",
+                    self.args.series,
+                ]
+            )
         else:
             self.run_build_command(["rm", "-rf", "auto", "local"])
             self.run_build_command(["mkdir", "-p", "auto"])
             for lb_script in ("config", "build", "clean"):
                 lb_script_path = os.path.join(
-                    "/usr/share/livecd-rootfs/live-build/auto", lb_script)
+                    "/usr/share/livecd-rootfs/live-build/auto", lb_script
+                )
                 self.run_build_command(["ln", "-s", lb_script_path, "auto/"])
             if self.args.debug:
                 self.run_build_command(["mkdir", "-p", "local/functions"])
                 self.run_build_command(
-                    ["sh", "-c", "echo 'set -x' >local/functions/debug.sh"])
+                    ["sh", "-c", "echo 'set -x' >local/functions/debug.sh"]
+                )
             self.run_build_command(["lb", "clean", "--purge"])
 
             base_lb_env = OrderedDict()
@@ -126,10 +172,12 @@ class BuildLiveFS(SnapStoreOperationMixin, Operation):
                 base_lb_env["CHANNEL"] = self.args.channel
             if self.args.image_targets:
                 base_lb_env["IMAGE_TARGETS"] = " ".join(
-                    self.args.image_targets)
+                    self.args.image_targets
+                )
             if self.args.repo_snapshot_stamp:
-                base_lb_env["REPO_SNAPSHOT_STAMP"] = (
-                    self.args.repo_snapshot_stamp)
+                base_lb_env[
+                    "REPO_SNAPSHOT_STAMP"
+                ] = self.args.repo_snapshot_stamp
             if self.args.cohort_key:
                 base_lb_env["COHORT_KEY"] = self.args.cohort_key
             lb_env = base_lb_env.copy()
@@ -147,8 +195,8 @@ class BuildLiveFS(SnapStoreOperationMixin, Operation):
             if self.args.http_proxy:
                 proxy_dict = {
                     "http_proxy": self.args.http_proxy,
-                    "LB_APT_HTTP_PROXY": self.args.http_proxy
-                    }
+                    "LB_APT_HTTP_PROXY": self.args.http_proxy,
+                }
                 lb_env.update(proxy_dict)
                 base_lb_env.update(proxy_dict)
             self.run_build_command(["lb", "config"], env=lb_env)
@@ -158,11 +206,11 @@ class BuildLiveFS(SnapStoreOperationMixin, Operation):
         try:
             self.install()
         except Exception:
-            logger.exception('Install failed')
+            logger.exception("Install failed")
             return RETCODE_FAILURE_INSTALL
         try:
             self.build()
         except Exception:
-            logger.exception('Build failed')
+            logger.exception("Build failed")
             return RETCODE_FAILURE_BUILD
         return 0
diff --git a/lpbuildd/target/build_oci.py b/lpbuildd/target/build_oci.py
index ecbda0d..c9b4082 100644
--- a/lpbuildd/target/build_oci.py
+++ b/lpbuildd/target/build_oci.py
@@ -11,7 +11,6 @@ from lpbuildd.target.proxy import BuilderProxyOperationMixin
 from lpbuildd.target.snapstore import SnapStoreOperationMixin
 from lpbuildd.target.vcs import VCSOperationMixin
 
-
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
 
@@ -19,8 +18,12 @@ RETCODE_FAILURE_BUILD = 201
 logger = logging.getLogger(__name__)
 
 
-class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
-               SnapStoreOperationMixin, Operation):
+class BuildOCI(
+    BuilderProxyOperationMixin,
+    VCSOperationMixin,
+    SnapStoreOperationMixin,
+    Operation,
+):
 
     description = "Build an OCI image."
 
@@ -28,15 +31,21 @@ class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--build-file", help="path to Dockerfile in branch")
+            "--build-file", help="path to Dockerfile in branch"
+        )
         parser.add_argument(
-            "--build-path", default=".",
-            help="context directory for docker build")
+            "--build-path",
+            default=".",
+            help="context directory for docker build",
+        )
         parser.add_argument(
-            "--build-arg", default=[], action='append',
+            "--build-arg",
+            default=[],
+            action="append",
             help="A docker build ARG in the format of key=value. "
-                 "This option can be repeated many times. For example: "
-                 "--build-arg VAR1=A --build-arg VAR2=B")
+            "This option can be repeated many times. For example: "
+            "--build-arg VAR1=A --build-arg VAR2=B",
+        )
         parser.add_argument("name", help="name of image to build")
 
     def __init__(self, args, parser):
@@ -47,13 +56,16 @@ class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
         """Add systemd file for docker proxy settings."""
         # Create containing directory for systemd overrides
         self.backend.run(
-            ["mkdir", "-p", "/etc/systemd/system/docker.service.d"])
+            ["mkdir", "-p", "/etc/systemd/system/docker.service.d"]
+        )
         # we need both http_proxy and https_proxy. The contents of the files
         # are otherwise identical
-        for setting in ['http_proxy', 'https_proxy']:
-            contents = dedent(f"""[Service]
+        for setting in ["http_proxy", "https_proxy"]:
+            contents = dedent(
+                f"""[Service]
                 Environment="{setting.upper()}={self.args.proxy_url}"
-                """)
+                """
+            )
             file_path = f"/etc/systemd/system/docker.service.d/{setting}.conf"
             with self.backend.open(file_path, mode="w+") as systemd_file:
                 systemd_file.write(contents)
@@ -80,20 +92,21 @@ class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
         """Collect git or bzr branch."""
         logger.info("Running repo phase...")
         env = self.build_proxy_environment(proxy_url=self.args.proxy_url)
-        self.vcs_fetch(self.args.name, cwd="/home/buildd", env=env,
-                       git_shallow_clone=True)
+        self.vcs_fetch(
+            self.args.name, cwd="/home/buildd", env=env, git_shallow_clone=True
+        )
 
     def build(self):
         logger.info("Running build phase...")
         args = ["docker", "build", "--no-cache"]
         if self.args.proxy_url:
             for var in ("http_proxy", "https_proxy"):
-                args.extend(
-                    ["--build-arg", f"{var}={self.args.proxy_url}"])
+                args.extend(["--build-arg", f"{var}={self.args.proxy_url}"])
         args.extend(["--tag", self.args.name])
         if self.args.build_file is not None:
             build_file_path = os.path.join(
-                self.args.build_path, self.args.build_file)
+                self.args.build_path, self.args.build_file
+            )
             check_path_escape(self.buildd_path, build_file_path)
             args.extend(["--file", build_file_path])
 
@@ -103,7 +116,8 @@ class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
             args.extend(["--build-arg=%s" % arg])
 
         build_context_path = os.path.join(
-            self.buildd_path, self.args.build_path)
+            self.buildd_path, self.args.build_path
+        )
         check_path_escape(self.buildd_path, build_context_path)
         args.append(build_context_path)
         self.run_build_command(args)
@@ -112,12 +126,12 @@ class BuildOCI(BuilderProxyOperationMixin, VCSOperationMixin,
         try:
             self.install()
         except Exception:
-            logger.exception('Install failed')
+            logger.exception("Install failed")
             return RETCODE_FAILURE_INSTALL
         try:
             self.repo()
             self.build()
         except Exception:
-            logger.exception('Build failed')
+            logger.exception("Build failed")
             return RETCODE_FAILURE_BUILD
         return 0
diff --git a/lpbuildd/target/build_snap.py b/lpbuildd/target/build_snap.py
index 615c2d9..041b640 100644
--- a/lpbuildd/target/build_snap.py
+++ b/lpbuildd/target/build_snap.py
@@ -13,7 +13,6 @@ from lpbuildd.target.proxy import BuilderProxyOperationMixin
 from lpbuildd.target.snapstore import SnapStoreOperationMixin
 from lpbuildd.target.vcs import VCSOperationMixin
 
-
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
 
@@ -22,7 +21,6 @@ logger = logging.getLogger(__name__)
 
 
 class SnapChannelsAction(argparse.Action):
-
     def __init__(self, option_strings, dest, nargs=None, **kwargs):
         if nargs is not None:
             raise ValueError("nargs not allowed")
@@ -31,15 +29,20 @@ class SnapChannelsAction(argparse.Action):
     def __call__(self, parser, namespace, values, option_string=None):
         if "=" not in values:
             raise argparse.ArgumentError(
-                self, f"'{values}' is not of the form 'snap=channel'")
+                self, f"'{values}' is not of the form 'snap=channel'"
+            )
         snap, channel = values.split("=", 1)
         if getattr(namespace, self.dest, None) is None:
             setattr(namespace, self.dest, {})
         getattr(namespace, self.dest)[snap] = channel
 
 
-class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
-                SnapStoreOperationMixin, Operation):
+class BuildSnap(
+    BuilderProxyOperationMixin,
+    VCSOperationMixin,
+    SnapStoreOperationMixin,
+    Operation,
+):
 
     description = "Build a snap."
 
@@ -49,41 +52,59 @@ class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--channel", action=SnapChannelsAction, metavar="SNAP=CHANNEL",
-            dest="channels", default={}, help=(
+            "--channel",
+            action=SnapChannelsAction,
+            metavar="SNAP=CHANNEL",
+            dest="channels",
+            default={},
+            help=(
                 f"install SNAP from CHANNEL (supported snaps: "
-                f"{', '.join(cls.core_snap_names)}, snapcraft)"))
+                f"{', '.join(cls.core_snap_names)}, snapcraft)"
+            ),
+        )
         parser.add_argument(
             "--build-request-id",
-            help="ID of the request triggering this build on Launchpad")
+            help="ID of the request triggering this build on Launchpad",
+        )
         parser.add_argument(
             "--build-request-timestamp",
-            help="RFC3339 timestamp of the Launchpad build request")
+            help="RFC3339 timestamp of the Launchpad build request",
+        )
         parser.add_argument(
-            "--build-url", help="URL of this build on Launchpad")
+            "--build-url", help="URL of this build on Launchpad"
+        )
         parser.add_argument(
-            "--build-source-tarball", default=False, action="store_true",
+            "--build-source-tarball",
+            default=False,
+            action="store_true",
             help=(
                 "build a tarball containing all source code, including "
-                "external dependencies"))
+                "external dependencies"
+            ),
+        )
         parser.add_argument(
-            "--private", default=False, action="store_true",
-            help="build a private snap")
+            "--private",
+            default=False,
+            action="store_true",
+            help="build a private snap",
+        )
         parser.add_argument(
             "--target-arch",
             dest="target_architectures",
             action="append",
-            help="build for the specified architectures"
+            help="build for the specified architectures",
         )
         parser.add_argument("name", help="name of snap to build")
 
     def install_svn_servers(self):
         proxy = urlparse(self.args.proxy_url)
-        svn_servers = dedent(f"""\
+        svn_servers = dedent(
+            f"""\
             [global]
             http-proxy-host = {proxy.hostname}
             http-proxy-port = {proxy.port}
-            """)
+            """
+        )
         # We should never end up with an authenticated proxy here since
         # lpbuildd.snap deals with it, but it's almost as easy to just
         # handle it as to assert that we don't need to.
@@ -123,14 +144,23 @@ class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
         for snap_name in self.core_snap_names:
             if snap_name in self.args.channels:
                 self.backend.run(
-                    ["snap", "install",
-                     "--channel=%s" % self.args.channels[snap_name],
-                     snap_name])
+                    [
+                        "snap",
+                        "install",
+                        "--channel=%s" % self.args.channels[snap_name],
+                        snap_name,
+                    ]
+                )
         if "snapcraft" in self.args.channels:
             self.backend.run(
-                ["snap", "install", "--classic",
-                 "--channel=%s" % self.args.channels["snapcraft"],
-                 "snapcraft"])
+                [
+                    "snap",
+                    "install",
+                    "--classic",
+                    "--channel=%s" % self.args.channels["snapcraft"],
+                    "snapcraft",
+                ]
+            )
         if self.args.proxy_url:
             self.install_svn_servers()
 
@@ -145,7 +175,7 @@ class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
     def image_info(self):
         data = {}
         if self.args.build_request_id is not None:
-            data["build-request-id"] = f'lp-{self.args.build_request_id}'
+            data["build-request-id"] = f"lp-{self.args.build_request_id}"
         if self.args.build_request_timestamp is not None:
             data["build-request-timestamp"] = self.args.build_request_timestamp
         if self.args.build_url is not None:
@@ -165,14 +195,24 @@ class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
         self.run_build_command(
             ["snapcraft", "pull"],
             cwd=os.path.join("/build", self.args.name),
-            env=env)
+            env=env,
+        )
         if self.args.build_source_tarball:
             self.run_build_command(
-                ["tar", "-czf", "%s.tar.gz" % self.args.name,
-                 "--format=gnu", "--sort=name", "--exclude-vcs",
-                 "--numeric-owner", "--owner=0", "--group=0",
-                 self.args.name],
-                cwd="/build")
+                [
+                    "tar",
+                    "-czf",
+                    "%s.tar.gz" % self.args.name,
+                    "--format=gnu",
+                    "--sort=name",
+                    "--exclude-vcs",
+                    "--numeric-owner",
+                    "--owner=0",
+                    "--group=0",
+                    self.args.name,
+                ],
+                cwd="/build",
+            )
 
     def build(self):
         """Run all build, stage and snap phases."""
@@ -196,13 +236,13 @@ class BuildSnap(BuilderProxyOperationMixin, VCSOperationMixin,
         try:
             self.install()
         except Exception:
-            logger.exception('Install failed')
+            logger.exception("Install failed")
             return RETCODE_FAILURE_INSTALL
         try:
             self.repo()
             self.pull()
             self.build()
         except Exception:
-            logger.exception('Build failed')
+            logger.exception("Build failed")
             return RETCODE_FAILURE_BUILD
         return 0
diff --git a/lpbuildd/target/chroot.py b/lpbuildd/target/chroot.py
index 2dad812..d690ec1 100644
--- a/lpbuildd/target/chroot.py
+++ b/lpbuildd/target/chroot.py
@@ -7,14 +7,8 @@ import stat
 import subprocess
 import time
 
-from lpbuildd.target.backend import (
-    Backend,
-    BackendException,
-    )
-from lpbuildd.util import (
-    set_personality,
-    shell_escape,
-    )
+from lpbuildd.target.backend import Backend, BackendException
+from lpbuildd.util import set_personality, shell_escape
 
 
 class Chroot(Backend):
@@ -28,7 +22,8 @@ class Chroot(Backend):
         """See `Backend`."""
         if image_type == "chroot":
             subprocess.check_call(
-                ["sudo", "tar", "-C", self.build_path, "-xf", image_path])
+                ["sudo", "tar", "-C", self.build_path, "-xf", image_path]
+            )
         else:
             raise ValueError("Unhandled image type: %s" % image_type)
 
@@ -39,7 +34,7 @@ class Chroot(Backend):
             ("devpts", "gid=5,mode=620", "none", "dev/pts"),
             ("sysfs", None, "none", "sys"),
             ("tmpfs", None, "none", "dev/shm"),
-            )
+        )
         for mount in mounts:
             cmd = ["sudo", "mount", "-t", mount[0]]
             if mount[1]:
@@ -51,12 +46,23 @@ class Chroot(Backend):
         for path in ("/etc/hosts", "/etc/hostname", "/etc/resolv.conf"):
             self.copy_in(path, path)
 
-    def run(self, args, cwd=None, env=None, input_text=None, get_output=False,
-            echo=False, **kwargs):
+    def run(
+        self,
+        args,
+        cwd=None,
+        env=None,
+        input_text=None,
+        get_output=False,
+        echo=False,
+        **kwargs,
+    ):
         """See `Backend`."""
         if env:
-            args = ["env"] + [
-                f"{key}={value}" for key, value in env.items()] + args
+            args = (
+                ["env"]
+                + [f"{key}={value}" for key, value in env.items()]
+                + args
+            )
         if self.arch is not None:
             args = set_personality(args, self.arch, series=self.series)
         if cwd is not None:
@@ -66,11 +72,15 @@ class Chroot(Backend):
             # to use "env --chdir".
             escaped_args = " ".join(shell_escape(arg) for arg in args)
             args = [
-                "/bin/sh", "-c", f"cd {shell_escape(cwd)} && {escaped_args}",
-                ]
+                "/bin/sh",
+                "-c",
+                f"cd {shell_escape(cwd)} && {escaped_args}",
+            ]
         if echo:
-            print("Running in chroot: %s" % ' '.join(
-                shell_escape(arg) for arg in args))
+            print(
+                "Running in chroot: %s"
+                % " ".join(shell_escape(arg) for arg in args)
+            )
         cmd = ["sudo", "/usr/sbin/chroot", self.chroot_path] + args
         if input_text is None and not get_output:
             subprocess.check_call(cmd, **kwargs)
@@ -97,10 +107,22 @@ class Chroot(Backend):
         # in the target.
         mode = stat.S_IMODE(os.stat(source_path).st_mode)
         full_target_path = os.path.join(
-            self.chroot_path, target_path.lstrip("/"))
+            self.chroot_path, target_path.lstrip("/")
+        )
         subprocess.check_call(
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "%o" % mode,
-             source_path, full_target_path])
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "%o" % mode,
+                source_path,
+                full_target_path,
+            ]
+        )
 
     def copy_out(self, source_path, target_path):
         # Don't use install(1) here because running `os.stat` to get file mode
@@ -108,10 +130,17 @@ class Chroot(Backend):
         # to buildd (this is necessary so that buildd can read/write the copied
         # file).
         full_source_path = os.path.join(
-            self.chroot_path, source_path.lstrip("/"))
+            self.chroot_path, source_path.lstrip("/")
+        )
         subprocess.check_call(
-            ["sudo", "cp", "--preserve=timestamps",
-             full_source_path, target_path])
+            [
+                "sudo",
+                "cp",
+                "--preserve=timestamps",
+                full_source_path,
+                target_path,
+            ]
+        )
         uid, gid = os.getuid(), os.getgid()
         subprocess.check_call(["sudo", "chown", f"{uid}:{gid}", target_path])
 
@@ -151,12 +180,13 @@ class Chroot(Backend):
             if not mounts:
                 break
             retcodes = [
-                subprocess.call(["sudo", "umount", mount])
-                for mount in mounts]
+                subprocess.call(["sudo", "umount", mount]) for mount in mounts
+            ]
             if any(retcodes):
                 time.sleep(1)
         else:
             if list(self._get_chroot_mounts()):
                 subprocess.check_call(["lsof", self.chroot_path])
                 raise BackendException(
-                    "Failed to unmount %s" % self.chroot_path)
+                    "Failed to unmount %s" % self.chroot_path
+                )
diff --git a/lpbuildd/target/cli.py b/lpbuildd/target/cli.py
index 8ef534e..670cf28 100644
--- a/lpbuildd/target/cli.py
+++ b/lpbuildd/target/cli.py
@@ -1,33 +1,26 @@
 # Copyright 2017 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from argparse import ArgumentParser
 import logging
 import sys
+from argparse import ArgumentParser
 
-from lpbuildd.target.apt import (
-    AddTrustedKeys,
-    OverrideSourcesList,
-    Update,
-    )
+from lpbuildd.target.apt import AddTrustedKeys, OverrideSourcesList, Update
 from lpbuildd.target.build_charm import BuildCharm
-from lpbuildd.target.build_oci import BuildOCI
 from lpbuildd.target.build_livefs import BuildLiveFS
+from lpbuildd.target.build_oci import BuildOCI
 from lpbuildd.target.build_snap import BuildSnap
 from lpbuildd.target.generate_translation_templates import (
     GenerateTranslationTemplates,
-    )
+)
 from lpbuildd.target.lifecycle import (
     Create,
     KillProcesses,
     Remove,
     Start,
     Stop,
-    )
-from lpbuildd.target.run_ci import (
-    RunCI,
-    RunCIPrepare,
-    )
+)
+from lpbuildd.target.run_ci import RunCI, RunCIPrepare
 
 
 def configure_logging():
@@ -65,7 +58,7 @@ operations = {
     "umount-chroot": Stop,
     "unpack-chroot": Create,
     "update-debian-chroot": Update,
-    }
+}
 
 
 def parse_args(args=None):
@@ -73,7 +66,8 @@ def parse_args(args=None):
     subparsers = parser.add_subparsers(metavar="OPERATION")
     for name, factory in sorted(operations.items()):
         subparser = subparsers.add_parser(
-            name, description=factory.description, help=factory.description)
+            name, description=factory.description, help=factory.description
+        )
         factory.add_arguments(subparser)
         subparser.set_defaults(operation_factory=factory)
     args = parser.parse_args(args=args)
diff --git a/lpbuildd/target/generate_translation_templates.py b/lpbuildd/target/generate_translation_templates.py
index 190616b..2f8874d 100644
--- a/lpbuildd/target/generate_translation_templates.py
+++ b/lpbuildd/target/generate_translation_templates.py
@@ -8,7 +8,6 @@ from lpbuildd.pottery import intltool
 from lpbuildd.target.operation import Operation
 from lpbuildd.target.vcs import VCSOperationMixin
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -26,7 +25,8 @@ class GenerateTranslationTemplates(VCSOperationMixin, Operation):
         super().add_arguments(parser)
         parser.add_argument(
             "result_name",
-            help="the name of the result tarball; should end in '.tar.gz'")
+            help="the name of the result tarball; should end in '.tar.gz'",
+        )
 
     def __init__(self, args, parser):
         super().__init__(args, parser)
@@ -42,14 +42,15 @@ class GenerateTranslationTemplates(VCSOperationMixin, Operation):
     def fetch(self, quiet=False):
         logger.info("Fetching %s...", self.vcs_description)
         self.vcs_fetch(
-            os.path.basename(self.branch_dir), cwd=self.work_dir, quiet=quiet)
+            os.path.basename(self.branch_dir), cwd=self.work_dir, quiet=quiet
+        )
 
     def _makeTarball(self, files):
         """Put the given files into a tarball in the working directory."""
         tarname = os.path.join(self.work_dir, self.args.result_name)
         logger.info("Making tarball with templates in %s..." % tarname)
         cmd = ["tar", "-C", self.branch_dir, "-czf", tarname]
-        files = [name for name in files if not name.endswith('/')]
+        files = [name for name in files if not name.endswith("/")]
         for path in files:
             full_path = os.path.join(self.branch_dir, path)
             logger.info("Adding template %s..." % full_path)
diff --git a/lpbuildd/target/lifecycle.py b/lpbuildd/target/lifecycle.py
index b5d0e84..b1b9f57 100644
--- a/lpbuildd/target/lifecycle.py
+++ b/lpbuildd/target/lifecycle.py
@@ -8,7 +8,6 @@ import sys
 from lpbuildd.target.backend import BackendException
 from lpbuildd.target.operation import Operation
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -20,7 +19,8 @@ class Create(Operation):
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--image-type", default="chroot", help="base image type")
+            "--image-type", default="chroot", help="base image type"
+        )
         parser.add_argument("image_path", help="path to base image")
 
     def run(self):
@@ -57,7 +57,8 @@ class KillProcesses(Operation):
 
     def _run(self):
         logger.info(
-            "Scanning for processes to kill in build %s", self.args.build_id)
+            "Scanning for processes to kill in build %s", self.args.build_id
+        )
         self.backend.kill_processes()
         return 0
 
@@ -71,7 +72,7 @@ class Stop(Operation):
         try:
             self.backend.stop()
         except BackendException:
-            logger.exception('Failed to stop target')
+            logger.exception("Failed to stop target")
             return 1
         return 0
 
diff --git a/lpbuildd/target/lxd.py b/lpbuildd/target/lxd.py
index 333c0ff..a98d092 100644
--- a/lpbuildd/target/lxd.py
+++ b/lpbuildd/target/lxd.py
@@ -1,8 +1,6 @@
 # Copyright 2017 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from contextlib import closing
-from functools import cached_property
 import io
 import json
 import os
@@ -10,22 +8,17 @@ import re
 import stat
 import subprocess
 import tarfile
-from textwrap import dedent
 import time
+from contextlib import closing
+from functools import cached_property
+from textwrap import dedent
 
 import netaddr
 import pylxd
 from pylxd.exceptions import LXDAPIException
 
-from lpbuildd.target.backend import (
-    Backend,
-    BackendException,
-    )
-from lpbuildd.util import (
-    set_personality,
-    shell_escape,
-    )
-
+from lpbuildd.target.backend import Backend, BackendException
+from lpbuildd.util import set_personality, shell_escape
 
 LXD_RUNNING = 103
 
@@ -41,20 +34,24 @@ def get_device_mapper_major():
                 return int(line.split()[0])
         else:
             raise Exception(
-                "Cannot determine major device number for device-mapper")
+                "Cannot determine major device number for device-mapper"
+            )
 
 
-fallback_hosts = dedent("""\
+fallback_hosts = dedent(
+    """\
     127.0.0.1\tlocalhost
     ::1\tlocalhost ip6-localhost ip6-loopback
     fe00::0\tip6-localnet
     ff00::0\tip6-mcastprefix
     ff02::1\tip6-allnodes
     ff02::2\tip6-allrouters
-    """)
+    """
+)
 
 
-policy_rc_d = dedent("""\
+policy_rc_d = dedent(
+    """\
     #! /bin/sh
     while :; do
         case "$1" in
@@ -69,7 +66,8 @@ policy_rc_d = dedent("""\
                 ;;
         esac
     done
-    """)
+    """
+)
 
 
 class LXDException(Exception):
@@ -97,7 +95,7 @@ class LXD(Backend):
         "ppc64el": "ppc64le",
         "riscv64": "riscv64",
         "s390x": "s390x",
-        }
+    }
 
     profile_name = "lpbuildd"
     bridge_name = "lpbuilddbr0"
@@ -145,15 +143,23 @@ class LXD(Backend):
                 "os": "Ubuntu",
                 "series": self.series,
                 "architecture": self.arch,
-                "description":
-                    f"Launchpad chroot for Ubuntu {self.series} ({self.arch})",
-                },
-            }
+                "description": (
+                    f"Launchpad chroot for Ubuntu {self.series} ({self.arch})"
+                ),
+            },
+        }
         # Encoding this as JSON is good enough, and saves pulling in a YAML
         # library dependency.
-        metadata_yaml = json.dumps(
-            metadata, sort_keys=True, indent=4, separators=(",", ": "),
-            ensure_ascii=False).encode("UTF-8") + b"\n"
+        metadata_yaml = (
+            json.dumps(
+                metadata,
+                sort_keys=True,
+                indent=4,
+                separators=(",", ": "),
+                ensure_ascii=False,
+            ).encode("UTF-8")
+            + b"\n"
+        )
         metadata_file = tarfile.TarInfo(name="metadata.yaml")
         metadata_file.size = len(metadata_yaml)
         target_tarball.addfile(metadata_file, io.BytesIO(metadata_yaml))
@@ -175,8 +181,9 @@ class LXD(Backend):
                 elif entry.islnk():
                     # Update hardlinks to point to the right target
                     entry.linkname = (
-                        "rootfs" +
-                        entry.linkname.split("chroot-autobuild", 1)[-1])
+                        "rootfs"
+                        + entry.linkname.split("chroot-autobuild", 1)[-1]
+                    )
 
                 target_tarball.addfile(entry, fileobj=fileptr)
             finally:
@@ -206,11 +213,13 @@ class LXD(Backend):
             with io.BytesIO() as target_file:
                 with tarfile.open(name=image_path, mode="r") as source_tarball:
                     with tarfile.open(
-                            fileobj=target_file, mode="w") as target_tarball:
+                        fileobj=target_file, mode="w"
+                    ) as target_tarball:
                         self._convert(source_tarball, target_tarball)
 
                 image = self.client.images.create(
-                    target_file.getvalue(), wait=True)
+                    target_file.getvalue(), wait=True
+                )
         elif image_type == "lxd":
             with open(image_path, "rb") as image_file:
                 image = self.client.images.create(image_file.read(), wait=True)
@@ -230,52 +239,138 @@ class LXD(Backend):
     def iptables(self, args, check=True):
         call = subprocess.check_call if check else subprocess.call
         call(
-            ["sudo", "iptables", "-w"] + args +
-            ["-m", "comment", "--comment", "managed by launchpad-buildd"])
+            ["sudo", "iptables", "-w"]
+            + args
+            + ["-m", "comment", "--comment", "managed by launchpad-buildd"]
+        )
 
     def start_bridge(self):
         if not os.path.isdir(self.run_dir):
             os.makedirs(self.run_dir)
         subprocess.check_call(
-            ["sudo", "ip", "link", "add", "dev", self.bridge_name,
-             "type", "bridge"])
+            [
+                "sudo",
+                "ip",
+                "link",
+                "add",
+                "dev",
+                self.bridge_name,
+                "type",
+                "bridge",
+            ]
+        )
         subprocess.check_call(
-            ["sudo", "ip", "addr", "add", str(self.ipv4_network),
-             "dev", self.bridge_name])
+            [
+                "sudo",
+                "ip",
+                "addr",
+                "add",
+                str(self.ipv4_network),
+                "dev",
+                self.bridge_name,
+            ]
+        )
         subprocess.check_call(
-            ["sudo", "ip", "link", "set", "dev", self.bridge_name, "up"])
+            ["sudo", "ip", "link", "set", "dev", self.bridge_name, "up"]
+        )
         subprocess.check_call(
-            ["sudo", "sysctl", "-q", "-w", "net.ipv4.ip_forward=1"])
+            ["sudo", "sysctl", "-q", "-w", "net.ipv4.ip_forward=1"]
+        )
         self.iptables(
-            ["-t", "mangle", "-A", "FORWARD", "-i", self.bridge_name,
-             "-p", "tcp", "--tcp-flags", "SYN,RST", "SYN",
-             "-j", "TCPMSS", "--clamp-mss-to-pmtu"])
+            [
+                "-t",
+                "mangle",
+                "-A",
+                "FORWARD",
+                "-i",
+                self.bridge_name,
+                "-p",
+                "tcp",
+                "--tcp-flags",
+                "SYN,RST",
+                "SYN",
+                "-j",
+                "TCPMSS",
+                "--clamp-mss-to-pmtu",
+            ]
+        )
         self.iptables(
-            ["-t", "nat", "-A", "POSTROUTING",
-             "-s", str(self.ipv4_network), "!", "-d", str(self.ipv4_network),
-             "-j", "MASQUERADE"])
+            [
+                "-t",
+                "nat",
+                "-A",
+                "POSTROUTING",
+                "-s",
+                str(self.ipv4_network),
+                "!",
+                "-d",
+                str(self.ipv4_network),
+                "-j",
+                "MASQUERADE",
+            ]
+        )
         subprocess.check_call(
-            ["sudo", "/usr/sbin/dnsmasq", "-s", "lpbuildd", "-S", "/lpbuildd/",
-             "-u", "buildd", "--strict-order", "--bind-interfaces",
-             "--pid-file=%s" % self.dnsmasq_pid_file,
-             "--except-interface=lo", "--interface=%s" % self.bridge_name,
-             "--listen-address=%s" % str(self.ipv4_network.ip)])
+            [
+                "sudo",
+                "/usr/sbin/dnsmasq",
+                "-s",
+                "lpbuildd",
+                "-S",
+                "/lpbuildd/",
+                "-u",
+                "buildd",
+                "--strict-order",
+                "--bind-interfaces",
+                "--pid-file=%s" % self.dnsmasq_pid_file,
+                "--except-interface=lo",
+                "--interface=%s" % self.bridge_name,
+                "--listen-address=%s" % str(self.ipv4_network.ip),
+            ]
+        )
 
     def stop_bridge(self):
         if not os.path.isdir(self.sys_dir):
             return
         subprocess.call(
-            ["sudo", "ip", "addr", "flush", "dev", self.bridge_name])
+            ["sudo", "ip", "addr", "flush", "dev", self.bridge_name]
+        )
         subprocess.call(
-            ["sudo", "ip", "link", "set", "dev", self.bridge_name, "down"])
+            ["sudo", "ip", "link", "set", "dev", self.bridge_name, "down"]
+        )
         self.iptables(
-            ["-t", "mangle", "-D", "FORWARD", "-i", self.bridge_name,
-             "-p", "tcp", "--tcp-flags", "SYN,RST", "SYN",
-             "-j", "TCPMSS", "--clamp-mss-to-pmtu"])
+            [
+                "-t",
+                "mangle",
+                "-D",
+                "FORWARD",
+                "-i",
+                self.bridge_name,
+                "-p",
+                "tcp",
+                "--tcp-flags",
+                "SYN,RST",
+                "SYN",
+                "-j",
+                "TCPMSS",
+                "--clamp-mss-to-pmtu",
+            ]
+        )
         self.iptables(
-            ["-t", "nat", "-D", "POSTROUTING",
-             "-s", str(self.ipv4_network), "!", "-d", str(self.ipv4_network),
-             "-j", "MASQUERADE"], check=False)
+            [
+                "-t",
+                "nat",
+                "-D",
+                "POSTROUTING",
+                "-s",
+                str(self.ipv4_network),
+                "!",
+                "-d",
+                str(self.ipv4_network),
+                "-j",
+                "MASQUERADE",
+            ],
+            check=False,
+        )
         if os.path.exists(self.dnsmasq_pid_file):
             with open(self.dnsmasq_pid_file) as f:
                 try:
@@ -309,14 +404,18 @@ class LXD(Backend):
     def create_profile(self):
         for addr in self.ipv4_network:
             if addr not in (
-                    self.ipv4_network.network, self.ipv4_network.ip,
-                    self.ipv4_network.broadcast):
+                self.ipv4_network.network,
+                self.ipv4_network.ip,
+                self.ipv4_network.broadcast,
+            ):
                 ipv4_address = netaddr.IPNetwork(
-                    (int(addr), self.ipv4_network.prefixlen))
+                    (int(addr), self.ipv4_network.prefixlen)
+                )
                 break
         else:
             raise BackendException(
-                "%s has no usable IP addresses" % self.ipv4_network)
+                "%s has no usable IP addresses" % self.ipv4_network
+            )
 
         try:
             old_profile = self.client.profiles.get(self.profile_name)
@@ -332,23 +431,27 @@ class LXD(Backend):
             ("lxc.cgroup.devices.allow", ""),
             ("lxc.mount.auto", ""),
             ("lxc.mount.auto", "proc:rw sys:rw"),
-            ]
+        ]
 
         lxc_version = self._client.host_info["environment"]["driver_version"]
         major, minor = (int(v) for v in lxc_version.split(".")[0:2])
 
         if major >= 3:
-            raw_lxc_config.extend([
-                ("lxc.apparmor.profile", "unconfined"),
-                ("lxc.net.0.ipv4.address", ipv4_address),
-                ("lxc.net.0.ipv4.gateway", self.ipv4_network.ip),
-                ])
+            raw_lxc_config.extend(
+                [
+                    ("lxc.apparmor.profile", "unconfined"),
+                    ("lxc.net.0.ipv4.address", ipv4_address),
+                    ("lxc.net.0.ipv4.gateway", self.ipv4_network.ip),
+                ]
+            )
         else:
-            raw_lxc_config.extend([
-                ("lxc.aa_profile", "unconfined"),
-                ("lxc.network.0.ipv4", ipv4_address),
-                ("lxc.network.0.ipv4.gateway", self.ipv4_network.ip),
-                ])
+            raw_lxc_config.extend(
+                [
+                    ("lxc.aa_profile", "unconfined"),
+                    ("lxc.network.0.ipv4", ipv4_address),
+                    ("lxc.network.0.ipv4.gateway", self.ipv4_network.ip),
+                ]
+            )
 
         # Linux 4.4 on powerpc doesn't support all the seccomp bits that LXD
         # needs.
@@ -358,22 +461,23 @@ class LXD(Backend):
             "security.privileged": "true",
             "security.nesting": "true",
             "raw.lxc": "".join(
-                f"{key}={value}\n" for key, value in sorted(raw_lxc_config)),
-            }
+                f"{key}={value}\n" for key, value in sorted(raw_lxc_config)
+            ),
+        }
         devices = {
             "eth0": {
                 "name": "eth0",
                 "nictype": "bridged",
                 "parent": self.bridge_name,
                 "type": "nic",
-                },
-            }
+            },
+        }
         if major >= 3:
             devices["root"] = {
                 "path": "/",
                 "pool": "default",
                 "type": "disk",
-                }
+            }
         if "gpu-nvidia" in self.constraints:
             for i, path in enumerate(self._nvidia_container_paths):
                 # Skip devices here, because bind-mounted devices aren't
@@ -386,7 +490,7 @@ class LXD(Backend):
                         "path": path,
                         "source": path,
                         "type": "disk",
-                        }
+                    }
         self.client.profiles.create(self.profile_name, config, devices)
 
     def start(self):
@@ -396,16 +500,21 @@ class LXD(Backend):
         self.create_profile()
         self.start_bridge()
 
-        container = self.client.containers.create({
-            "name": self.name,
-            "profiles": [self.profile_name],
-            "source": {"type": "image", "alias": self.alias},
-            }, wait=True)
+        container = self.client.containers.create(
+            {
+                "name": self.name,
+                "profiles": [self.profile_name],
+                "source": {"type": "image", "alias": self.alias},
+            },
+            wait=True,
+        )
 
         hostname = subprocess.check_output(
-            ["hostname"], universal_newlines=True).rstrip("\n")
+            ["hostname"], universal_newlines=True
+        ).rstrip("\n")
         fqdn = subprocess.check_output(
-            ["hostname", "--fqdn"], universal_newlines=True).rstrip("\n")
+            ["hostname", "--fqdn"], universal_newlines=True
+        ).rstrip("\n")
         with self.open("/etc/hosts", mode="a") as hosts_file:
             hosts_file.seek(0, os.SEEK_END)
             if not hosts_file.tell():
@@ -421,8 +530,10 @@ class LXD(Backend):
 
         if os.path.islink(resolv_conf):
             resolv_conf = os.path.realpath(resolv_conf)
-            if (resolv_conf == "/run/systemd/resolve/stub-resolv.conf" and
-                    os.path.isfile("/run/systemd/resolve/resolv.conf")):
+            if (
+                resolv_conf == "/run/systemd/resolve/stub-resolv.conf"
+                and os.path.isfile("/run/systemd/resolve/resolv.conf")
+            ):
                 resolv_conf = "/run/systemd/resolve/resolv.conf"
 
         self.copy_in(resolv_conf, "/etc/resolv.conf")
@@ -443,7 +554,8 @@ class LXD(Backend):
                 for line in mounted_dev_file:
                     if in_script:
                         script += re.sub(
-                            r"^(\s*)(.*MAKEDEV)", r"\1: # \2", line)
+                            r"^(\s*)(.*MAKEDEV)", r"\1: # \2", line
+                        )
                         if line.strip() == "end script":
                             in_script = False
                     elif line.strip() == "script":
@@ -472,26 +584,44 @@ class LXD(Backend):
             time.sleep(1)
         if container is None or container.status_code != LXD_RUNNING:
             raise BackendException(
-                "Container failed to start within %d seconds" % timeout)
+                "Container failed to start within %d seconds" % timeout
+            )
 
         # Create loop devices.  We do this by hand rather than via the LXD
         # profile, as the latter approach creates lots of independent mounts
         # under /dev/, and that can cause confusion when building live
         # filesystems.
         self.run(
-            ["mknod", "-m", "0660", "/dev/loop-control", "c", "10", "237"])
+            ["mknod", "-m", "0660", "/dev/loop-control", "c", "10", "237"]
+        )
         for minor in range(256):
             self.run(
-                ["mknod", "-m", "0660", "/dev/loop%d" % minor,
-                 "b", "7", str(minor)])
+                [
+                    "mknod",
+                    "-m",
+                    "0660",
+                    "/dev/loop%d" % minor,
+                    "b",
+                    "7",
+                    str(minor),
+                ]
+            )
 
         # Create dm-# devices.  On focal kpartx looks for dm devices and hangs
         # in their absence.
         major = get_device_mapper_major()
         for minor in range(8):
             self.run(
-                ["mknod", "-m", "0660", "/dev/dm-%d" % minor,
-                 "b", str(major), str(minor)])
+                [
+                    "mknod",
+                    "-m",
+                    "0660",
+                    "/dev/dm-%d" % minor,
+                    "b",
+                    str(major),
+                    str(minor),
+                ]
+            )
 
         if "gpu-nvidia" in self.constraints:
             # Create nvidia* devices.  We have to do this here rather than
@@ -503,10 +633,16 @@ class LXD(Backend):
                     st = os.stat(path)
                     if stat.S_ISCHR(st.st_mode):
                         self.run(
-                            ["mknod", "-m", "0%o" % stat.S_IMODE(st.st_mode),
-                             path, "c",
-                             str(os.major(st.st_rdev)),
-                             str(os.minor(st.st_rdev))])
+                            [
+                                "mknod",
+                                "-m",
+                                "0%o" % stat.S_IMODE(st.st_mode),
+                                path,
+                                "c",
+                                str(os.major(st.st_rdev)),
+                                str(os.minor(st.st_rdev)),
+                            ]
+                        )
 
             # We bind-mounted several libraries into the container, so run
             # ldconfig to update the dynamic linker's cache.
@@ -519,10 +655,16 @@ class LXD(Backend):
         with self.open(
             "/etc/systemd/system/snapd.service.d/no-cdn.conf", mode="w+"
         ) as no_cdn_file:
-            print(dedent("""\
+            print(
+                dedent(
+                    """\
                 [Service]
                 Environment=SNAPPY_STORE_NO_CDN=1
-                """), file=no_cdn_file, end="")
+                """
+                ),
+                file=no_cdn_file,
+                end="",
+            )
             os.fchmod(no_cdn_file.fileno(), 0o644)
 
         # Refreshing snaps from a timer unit during a build isn't
@@ -530,8 +672,13 @@ class LXD(Backend):
         # systemctl existing.  This relies on /etc/systemd/system/ having
         # been created above.
         self.run(
-            ["ln", "-s", "/dev/null",
-             "/etc/systemd/system/snapd.refresh.timer"])
+            [
+                "ln",
+                "-s",
+                "/dev/null",
+                "/etc/systemd/system/snapd.refresh.timer",
+            ]
+        )
 
         if self.arch == "armhf":
             # Work around https://github.com/lxc/lxcfs/issues/553.  In
@@ -543,8 +690,17 @@ class LXD(Backend):
             except subprocess.CalledProcessError:
                 pass
 
-    def run(self, args, cwd=None, env=None, input_text=None, get_output=False,
-            echo=False, return_process=False, **kwargs):
+    def run(
+        self,
+        args,
+        cwd=None,
+        env=None,
+        input_text=None,
+        get_output=False,
+        echo=False,
+        return_process=False,
+        **kwargs,
+    ):
         """See `Backend`."""
         env_params = []
         if env:
@@ -559,11 +715,15 @@ class LXD(Backend):
             # to use "env --chdir".
             escaped_args = " ".join(shell_escape(arg) for arg in args)
             args = [
-                "/bin/sh", "-c", f"cd {shell_escape(cwd)} && {escaped_args}",
-                ]
+                "/bin/sh",
+                "-c",
+                f"cd {shell_escape(cwd)} && {escaped_args}",
+            ]
         if echo:
-            print("Running in container: %s" % ' '.join(
-                shell_escape(arg) for arg in args))
+            print(
+                "Running in container: %s"
+                % " ".join(shell_escape(arg) for arg in args)
+            )
         # pylxd's Container.execute doesn't support sending stdin, and it's
         # tedious to implement ourselves.
         cmd = ["lxc", "exec", self.name] + env_params + ["--"] + args
@@ -602,20 +762,23 @@ class LXD(Backend):
                 # numbers as of Go 1.13, and it's not clear that we can
                 # assume this.  Use plain 0 prefixes instead.
                 "X-LXD-mode": "0%o" % mode if mode else "0",
-                }
+            }
             try:
                 container.api.files.post(
-                    params=params, data=data, headers=headers)
+                    params=params, data=data, headers=headers
+                )
             except LXDAPIException as e:
                 raise LXDException(
-                    f"Failed to push {self.name}:{target_path}", e)
+                    f"Failed to push {self.name}:{target_path}", e
+                )
 
     def _get_file(self, container, *args, **kwargs):
         # pylxd < 2.1.1 tries to validate the response as JSON in streaming
         # mode and ends up running out of memory on large files.  Work
         # around this.
         response = container.api.files.session.get(
-            container.api.files._api_endpoint, *args, **kwargs)
+            container.api.files._api_endpoint, *args, **kwargs
+        )
         if response.status_code != 200:
             raise LXDAPIException(response)
         return response
@@ -629,14 +792,14 @@ class LXD(Backend):
             params = {"path": source_path}
             try:
                 with closing(
-                        self._get_file(
-                            container, params=params,
-                            stream=True)) as response:
+                    self._get_file(container, params=params, stream=True)
+                ) as response:
                     for chunk in response.iter_content(chunk_size=65536):
                         target_file.write(chunk)
             except LXDAPIException as e:
                 raise LXDException(
-                    f"Failed to pull {self.name}:{source_path}", e)
+                    f"Failed to pull {self.name}:{source_path}", e
+                )
 
     def stop(self):
         """See `Backend`."""
diff --git a/lpbuildd/target/operation.py b/lpbuildd/target/operation.py
index 724f8c0..1590271 100644
--- a/lpbuildd/target/operation.py
+++ b/lpbuildd/target/operation.py
@@ -15,25 +15,36 @@ class Operation:
     @classmethod
     def add_arguments(cls, parser):
         parser.add_argument(
-            "--backend", choices=["chroot", "lxd", "fake", "uncontained"],
-            help="use this type of backend")
+            "--backend",
+            choices=["chroot", "lxd", "fake", "uncontained"],
+            help="use this type of backend",
+        )
         parser.add_argument(
-            "--series", metavar="SERIES", help="operate on series SERIES")
+            "--series", metavar="SERIES", help="operate on series SERIES"
+        )
         parser.add_argument(
-            "--arch", metavar="ARCH", help="operate on architecture ARCH")
+            "--arch", metavar="ARCH", help="operate on architecture ARCH"
+        )
         parser.add_argument(
-            "--constraint", metavar="CONSTRAINT", action="append",
+            "--constraint",
+            metavar="CONSTRAINT",
+            action="append",
             dest="constraints",
-            help="add builder resource tag for this build")
+            help="add builder resource tag for this build",
+        )
         parser.add_argument(
-            "build_id", metavar="ID", help="operate on build ID")
+            "build_id", metavar="ID", help="operate on build ID"
+        )
 
     def __init__(self, args, parser):
         self.args = args
         self.backend = make_backend(
-            self.args.backend, self.args.build_id,
-            series=self.args.series, arch=self.args.arch,
-            constraints=self.args.constraints)
+            self.args.backend,
+            self.args.build_id,
+            series=self.args.series,
+            arch=self.args.arch,
+            constraints=self.args.constraints,
+        )
 
     def run_build_command(self, args, env=None, **kwargs):
         """Run a build command in the target.
diff --git a/lpbuildd/target/proxy.py b/lpbuildd/target/proxy.py
index b952d4e..2f264c1 100644
--- a/lpbuildd/target/proxy.py
+++ b/lpbuildd/target/proxy.py
@@ -1,9 +1,9 @@
 # Copyright 2019-2020 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from collections import OrderedDict
 import os
 import sys
+from collections import OrderedDict
 
 
 class BuilderProxyOperationMixin:
@@ -19,7 +19,8 @@ class BuilderProxyOperationMixin:
         parser.add_argument("--proxy-url", help="builder proxy url")
         parser.add_argument(
             "--revocation-endpoint",
-            help="builder proxy token revocation endpoint")
+            help="builder proxy token revocation endpoint",
+        )
 
     @property
     def proxy_deps(self):
@@ -28,7 +29,8 @@ class BuilderProxyOperationMixin:
     def install_git_proxy(self):
         self.backend.copy_in(
             os.path.join(self.bin, "lpbuildd-git-proxy"),
-            "/usr/local/bin/lpbuildd-git-proxy")
+            "/usr/local/bin/lpbuildd-git-proxy",
+        )
 
     def build_proxy_environment(self, proxy_url=None, env=None):
         """Extend a command environment to include http proxy variables."""
diff --git a/lpbuildd/target/run_ci.py b/lpbuildd/target/run_ci.py
index c0e8703..dd3a0fd 100644
--- a/lpbuildd/target/run_ci.py
+++ b/lpbuildd/target/run_ci.py
@@ -11,7 +11,6 @@ from lpbuildd.target.snapstore import SnapStoreOperationMixin
 from lpbuildd.target.vcs import VCSOperationMixin
 from lpbuildd.util import shell_escape
 
-
 RETCODE_FAILURE_INSTALL = 200
 RETCODE_FAILURE_BUILD = 201
 
@@ -19,8 +18,12 @@ RETCODE_FAILURE_BUILD = 201
 logger = logging.getLogger(__name__)
 
 
-class RunCIPrepare(BuilderProxyOperationMixin, VCSOperationMixin,
-                   SnapStoreOperationMixin, Operation):
+class RunCIPrepare(
+    BuilderProxyOperationMixin,
+    VCSOperationMixin,
+    SnapStoreOperationMixin,
+    Operation,
+):
 
     description = "Prepare for running CI jobs."
     buildd_path = "/build/tree"
@@ -29,8 +32,13 @@ class RunCIPrepare(BuilderProxyOperationMixin, VCSOperationMixin,
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--channel", action=SnapChannelsAction, metavar="SNAP=CHANNEL",
-            dest="channels", default={}, help="install SNAP from CHANNEL")
+            "--channel",
+            action=SnapChannelsAction,
+            metavar="SNAP=CHANNEL",
+            dest="channels",
+            default={},
+            help="install SNAP from CHANNEL",
+        )
         parser.add_argument(
             "--scan-malware",
             action="store_true",
@@ -61,7 +69,8 @@ class RunCIPrepare(BuilderProxyOperationMixin, VCSOperationMixin,
         for snap_name, channel in sorted(self.args.channels.items()):
             if snap_name not in ("lxd", "lpcraft"):
                 self.backend.run(
-                    ["snap", "install", "--channel=%s" % channel, snap_name])
+                    ["snap", "install", "--channel=%s" % channel, snap_name]
+                )
         for snap_name, classic in (("lxd", False), ("lpcraft", True)):
             cmd = ["snap", "install"]
             if classic:
@@ -121,7 +130,8 @@ class RunCI(BuilderProxyOperationMixin, Operation):
         super().add_arguments(parser)
         parser.add_argument("job_name", help="job name to run")
         parser.add_argument(
-            "job_index", type=int, help="index within job name to run")
+            "job_index", type=int, help="index within job name to run"
+        )
         parser.add_argument(
             "--environment-variable",
             dest="environment_variables",
@@ -176,7 +186,8 @@ class RunCI(BuilderProxyOperationMixin, Operation):
         output_path = os.path.join("/build", "output")
         # This matches the per-job output path used by lpcraft.
         job_output_path = os.path.join(
-            output_path, self.args.job_name, str(self.args.job_index))
+            output_path, self.args.job_name, str(self.args.job_index)
+        )
         self.backend.run(["mkdir", "-p", job_output_path])
         self.backend.run(["chown", "-R", "buildd:buildd", output_path])
         lpcraft_args = [
@@ -199,27 +210,29 @@ class RunCI(BuilderProxyOperationMixin, Operation):
             lpcraft_args.extend(["--set-env", f"{key}={value}"])
 
         plugin_settings = dict(
-            pair.split("=", maxsplit=1)
-            for pair in self.args.plugin_settings
+            pair.split("=", maxsplit=1) for pair in self.args.plugin_settings
         )
         for key, value in plugin_settings.items():
             lpcraft_args.extend(["--plugin-setting", f"{key}={value}"])
 
         if self.args.secrets:
-            lpcraft_args.extend(
-                ["--secrets", self.args.secrets])
+            lpcraft_args.extend(["--secrets", self.args.secrets])
 
         if "gpu-nvidia" in self.backend.constraints:
             lpcraft_args.append("--gpu-nvidia")
 
-        escaped_lpcraft_args = (
-            " ".join(shell_escape(arg) for arg in lpcraft_args))
+        escaped_lpcraft_args = " ".join(
+            shell_escape(arg) for arg in lpcraft_args
+        )
         tee_args = ["tee", os.path.join(job_output_path, "log")]
         escaped_tee_args = " ".join(shell_escape(arg) for arg in tee_args)
         args = [
-            "/bin/bash", "-o", "pipefail", "-c",
+            "/bin/bash",
+            "-o",
+            "pipefail",
+            "-c",
             f"{escaped_lpcraft_args} 2>&1 | {escaped_tee_args}",
-            ]
+        ]
         self.run_build_command(args, env=env)
 
         if self.args.scan_malware:
diff --git a/lpbuildd/target/snapstore.py b/lpbuildd/target/snapstore.py
index e087d7e..348e6d5 100644
--- a/lpbuildd/target/snapstore.py
+++ b/lpbuildd/target/snapstore.py
@@ -1,11 +1,7 @@
 # Copyright 2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from urllib.parse import (
-    urljoin,
-    urlparse,
-    urlunparse,
-    )
+from urllib.parse import urljoin, urlparse, urlunparse
 
 import requests
 
@@ -17,8 +13,10 @@ class SnapStoreOperationMixin:
     def add_arguments(cls, parser):
         super().add_arguments(parser)
         parser.add_argument(
-            "--snap-store-proxy-url", metavar="URL",
-            help="snap store proxy URL")
+            "--snap-store-proxy-url",
+            metavar="URL",
+            help="snap store proxy URL",
+        )
 
     def snap_store_set_proxy(self):
         if self.args.snap_store_proxy_url is None:
@@ -27,13 +25,17 @@ class SnapStoreOperationMixin:
         # domain.
         parsed_url = urlparse(self.args.snap_store_proxy_url)
         canonical_url = urlunparse(
-            [parsed_url.scheme, parsed_url.netloc, "", "", "", ""])
+            [parsed_url.scheme, parsed_url.netloc, "", "", "", ""]
+        )
         assertions_response = requests.get(
-            urljoin(canonical_url, "v2/auth/store/assertions"))
+            urljoin(canonical_url, "v2/auth/store/assertions")
+        )
         assertions_response.raise_for_status()
         self.backend.run(
-            ["snap", "ack", "/dev/stdin"], input_text=assertions_response.text)
+            ["snap", "ack", "/dev/stdin"], input_text=assertions_response.text
+        )
         store_id = assertions_response.headers.get("X-Assertion-Store-Id")
         if store_id is not None:
             self.backend.run(
-                ["snap", "set", "core", f"proxy.store={store_id}"])
+                ["snap", "set", "core", f"proxy.store={store_id}"]
+            )
diff --git a/lpbuildd/target/tests/matchers.py b/lpbuildd/target/tests/matchers.py
index 388c2ef..dfbc74e 100644
--- a/lpbuildd/target/tests/matchers.py
+++ b/lpbuildd/target/tests/matchers.py
@@ -1,19 +1,22 @@
 # Copyright 2021 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from testtools.matchers import (
-    Equals,
-    Is,
-    MatchesDict,
-    MatchesListwise,
-    )
+from testtools.matchers import Equals, Is, MatchesDict, MatchesListwise
 
 
 class RanCommand(MatchesListwise):
-
-    def __init__(self, args, echo=None, cwd=None, input_text=None,
-                 stdout=None, stderr=None, get_output=None,
-                 universal_newlines=None, **env):
+    def __init__(
+        self,
+        args,
+        echo=None,
+        cwd=None,
+        input_text=None,
+        stdout=None,
+        stderr=None,
+        get_output=None,
+        universal_newlines=None,
+        **env,
+    ):
         kwargs_matcher = {}
         if echo is not None:
             kwargs_matcher["echo"] = Is(echo)
@@ -31,24 +34,22 @@ class RanCommand(MatchesListwise):
             kwargs_matcher["universal_newlines"] = Is(universal_newlines)
         if env:
             kwargs_matcher["env"] = MatchesDict(
-                {key: Equals(value) for key, value in env.items()})
+                {key: Equals(value) for key, value in env.items()}
+            )
         super().__init__([Equals((args,)), MatchesDict(kwargs_matcher)])
 
 
 class RanAptGet(RanCommand):
-
     def __init__(self, *args):
         super().__init__(["apt-get", "-y"] + list(args))
 
 
 class RanSnap(RanCommand):
-
     def __init__(self, *args, **kwargs):
         super().__init__(["snap"] + list(args), **kwargs)
 
 
 class RanBuildCommand(RanCommand):
-
     def __init__(self, args, **kwargs):
         kwargs.setdefault("cwd", "/build")
         kwargs.setdefault("LANG", "C.UTF-8")
diff --git a/lpbuildd/target/tests/test_apt.py b/lpbuildd/target/tests/test_apt.py
index 380f735..26eac33 100644
--- a/lpbuildd/target/tests/test_apt.py
+++ b/lpbuildd/target/tests/test_apt.py
@@ -4,8 +4,8 @@
 import stat
 import subprocess
 import tempfile
-from textwrap import dedent
 import time
+from textwrap import dedent
 
 from fixtures import FakeLogger
 from systemfixtures import FakeTime
@@ -15,14 +15,13 @@ from testtools.matchers import (
     Equals,
     MatchesDict,
     MatchesListwise,
-    )
+)
 
 from lpbuildd.target.cli import parse_args
 from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class MockCopyIn(FakeMethod):
-
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.source_bytes = None
@@ -34,71 +33,116 @@ class MockCopyIn(FakeMethod):
 
 
 class TestOverrideSourcesList(TestCase):
-
     def test_succeeds(self):
         args = [
             "override-sources-list",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "deb http://archive.ubuntu.com/ubuntu xenial main",
             "deb http://ppa.launchpad.net/launchpad/ppa/ubuntu xenial main",
-            ]
+        ]
         override_sources_list = parse_args(args=args).operation
         self.assertEqual(0, override_sources_list.run())
         self.assertEqual(
-            (dedent("""\
+            (
+                dedent(
+                    """\
                 deb http://archive.ubuntu.com/ubuntu xenial main
                 deb http://ppa.launchpad.net/launchpad/ppa/ubuntu xenial main
-                """).encode("UTF-8"), stat.S_IFREG | 0o644),
-            override_sources_list.backend.backend_fs["/etc/apt/sources.list"])
+                """
+                ).encode("UTF-8"),
+                stat.S_IFREG | 0o644,
+            ),
+            override_sources_list.backend.backend_fs["/etc/apt/sources.list"],
+        )
         self.assertEqual(
             (b'Acquire::Retries "3";\n', stat.S_IFREG | 0o644),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/apt.conf.d/99retries"])
+                "/etc/apt/apt.conf.d/99retries"
+            ],
+        )
         self.assertEqual(
-            (b'APT::Get::Always-Include-Phased-Updates "true";\n',
-             stat.S_IFREG | 0o644),
+            (
+                b'APT::Get::Always-Include-Phased-Updates "true";\n',
+                stat.S_IFREG | 0o644,
+            ),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/apt.conf.d/99phasing"])
+                "/etc/apt/apt.conf.d/99phasing"
+            ],
+        )
         self.assertEqual(
-            (b"Package: *\nPin: release a=*-proposed\nPin-Priority: 500\n",
-             stat.S_IFREG | 0o644),
+            (
+                b"Package: *\nPin: release a=*-proposed\nPin-Priority: 500\n",
+                stat.S_IFREG | 0o644,
+            ),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/preferences.d/proposed.pref"])
+                "/etc/apt/preferences.d/proposed.pref"
+            ],
+        )
         self.assertEqual(
-            (b"Package: *\nPin: release a=*-backports\nPin-Priority: 500\n",
-             stat.S_IFREG | 0o644),
+            (
+                b"Package: *\nPin: release a=*-backports\nPin-Priority: 500\n",
+                stat.S_IFREG | 0o644,
+            ),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/preferences.d/backports.pref"])
+                "/etc/apt/preferences.d/backports.pref"
+            ],
+        )
 
     def test_apt_proxy(self):
         args = [
             "override-sources-list",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--apt-proxy-url", "http://apt-proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--apt-proxy-url",
+            "http://apt-proxy.example:3128/";,
             "deb http://archive.ubuntu.com/ubuntu xenial main",
-            ]
+        ]
         override_sources_list = parse_args(args=args).operation
         self.assertEqual(0, override_sources_list.run())
         self.assertEqual(
-            (dedent("""\
+            (
+                dedent(
+                    """\
                 deb http://archive.ubuntu.com/ubuntu xenial main
-                """).encode("UTF-8"), stat.S_IFREG | 0o644),
-            override_sources_list.backend.backend_fs["/etc/apt/sources.list"])
+                """
+                ).encode("UTF-8"),
+                stat.S_IFREG | 0o644,
+            ),
+            override_sources_list.backend.backend_fs["/etc/apt/sources.list"],
+        )
         self.assertEqual(
             (b'Acquire::Retries "3";\n', stat.S_IFREG | 0o644),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/apt.conf.d/99retries"])
+                "/etc/apt/apt.conf.d/99retries"
+            ],
+        )
         self.assertEqual(
-            (b'APT::Get::Always-Include-Phased-Updates "true";\n',
-             stat.S_IFREG | 0o644),
+            (
+                b'APT::Get::Always-Include-Phased-Updates "true";\n',
+                stat.S_IFREG | 0o644,
+            ),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/apt.conf.d/99phasing"])
+                "/etc/apt/apt.conf.d/99phasing"
+            ],
+        )
         self.assertEqual(
-            (dedent("""\
+            (
+                dedent(
+                    """\
                 Acquire::http::Proxy "http://apt-proxy.example:3128/";
-                """).encode("UTF-8"), stat.S_IFREG | 0o644),
+                """
+                ).encode("UTF-8"),
+                stat.S_IFREG | 0o644,
+            ),
             override_sources_list.backend.backend_fs[
-                "/etc/apt/apt.conf.d/99proxy"])
+                "/etc/apt/apt.conf.d/99proxy"
+            ],
+        )
 
 
 # Output of:
@@ -108,7 +152,8 @@ class TestOverrideSourcesList(TestCase):
 #         F6ECB3762474EDA9D21B7022871920D1991BC93C
 # (For test purposes, the exact key ID isn't particularly important.  This
 # just needs to be some kind of valid GPG public key.)
-TEST_GPG_KEY = dedent("""\
+TEST_GPG_KEY = dedent(
+    """\
     -----BEGIN PGP PUBLIC KEY BLOCK-----
 
     mQINBFufwdoBEADv/Gxytx/LcSXYuM0MwKojbBye81s0G1nEx+lz6VAUpIUZnbkq
@@ -138,16 +183,19 @@ TEST_GPG_KEY = dedent("""\
     uOgcXny1UlwtCUzlrSaP
     =9AdM
     -----END PGP PUBLIC KEY BLOCK-----
-    """)
+    """
+)
 
 
 class TestAddTrustedKeys(TestCase):
-
     def test_add_trusted_keys(self):
         args = [
             "add-trusted-keys",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         add_trusted_keys = parse_args(args=args).operation
         with tempfile.NamedTemporaryFile(mode="wb+") as keys_file:
             keys_file.write(TEST_GPG_KEY.encode())
@@ -157,54 +205,81 @@ class TestAddTrustedKeys(TestCase):
                 add_trusted_keys.show_keys_file = show_keys_file
                 self.assertEqual(0, add_trusted_keys.run())
                 expected_dearmored_key = subprocess.run(
-                    ["gpg", "--ignore-time-conflict", "--no-options",
-                     "--no-keyring", "--dearmor"],
-                    input=TEST_GPG_KEY.encode(), capture_output=True).stdout
+                    [
+                        "gpg",
+                        "--ignore-time-conflict",
+                        "--no-options",
+                        "--no-keyring",
+                        "--dearmor",
+                    ],
+                    input=TEST_GPG_KEY.encode(),
+                    capture_output=True,
+                ).stdout
                 self.assertEqual(
                     (expected_dearmored_key, stat.S_IFREG | 0o644),
                     add_trusted_keys.backend.backend_fs[
-                        "/etc/apt/trusted.gpg.d/launchpad-buildd.gpg"])
+                        "/etc/apt/trusted.gpg.d/launchpad-buildd.gpg"
+                    ],
+                )
                 show_keys_file.seek(0)
                 self.assertIn(
                     "Key fingerprint = F6EC B376 2474 EDA9 D21B  "
                     "7022 8719 20D1 991B C93C",
-                    show_keys_file.read().decode())
+                    show_keys_file.read().decode(),
+                )
 
 
 class RanAptGet(MatchesListwise):
-
     def __init__(self, args_list):
-        super().__init__([
-            MatchesListwise([
-                Equals((["/usr/bin/apt-get"] + args,)),
-                ContainsDict({
-                    "env": MatchesDict({
-                        "LANG": Equals("C"),
-                        "DEBIAN_FRONTEND": Equals("noninteractive"),
-                        "TTY": Equals("unknown"),
-                        }),
-                    }),
-                ]) for args in args_list
-            ])
+        super().__init__(
+            [
+                MatchesListwise(
+                    [
+                        Equals((["/usr/bin/apt-get"] + args,)),
+                        ContainsDict(
+                            {
+                                "env": MatchesDict(
+                                    {
+                                        "LANG": Equals("C"),
+                                        "DEBIAN_FRONTEND": Equals(
+                                            "noninteractive"
+                                        ),
+                                        "TTY": Equals("unknown"),
+                                    }
+                                ),
+                            }
+                        ),
+                    ]
+                )
+                for args in args_list
+            ]
+        )
 
 
 class TestUpdate(TestCase):
-
     def test_succeeds(self):
         self.useFixture(FakeTime())
         start_time = time.time()
         args = [
             "update-debian-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         update = parse_args(args=args).operation
         self.assertEqual(0, update.run())
 
         expected_args = [
             ["-uy", "update"],
-            ["-o", "DPkg::Options::=--force-confold", "-uy", "--purge",
-             "dist-upgrade"],
-            ]
+            [
+                "-o",
+                "DPkg::Options::=--force-confold",
+                "-uy",
+                "--purge",
+                "dist-upgrade",
+            ],
+        ]
         self.assertThat(update.backend.run.calls, RanAptGet(expected_args))
         self.assertEqual(start_time, time.time())
 
@@ -220,8 +295,11 @@ class TestUpdate(TestCase):
         start_time = time.time()
         args = [
             "update-debian-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         update = parse_args(args=args).operation
         update.backend.run = FailFirstTime()
         self.assertEqual(0, update.run())
@@ -229,12 +307,18 @@ class TestUpdate(TestCase):
         expected_args = [
             ["-uy", "update"],
             ["-uy", "update"],
-            ["-o", "DPkg::Options::=--force-confold", "-uy", "--purge",
-             "dist-upgrade"],
-            ]
+            [
+                "-o",
+                "DPkg::Options::=--force-confold",
+                "-uy",
+                "--purge",
+                "dist-upgrade",
+            ],
+        ]
         self.assertThat(update.backend.run.calls, RanAptGet(expected_args))
         self.assertEqual(
             "Updating target for build 1\n"
             "Waiting 15 seconds and trying again ...\n",
-            logger.output)
+            logger.output,
+        )
         self.assertEqual(start_time + 15, time.time())
diff --git a/lpbuildd/target/tests/test_backend.py b/lpbuildd/target/tests/test_backend.py
index ff617e3..05d91fa 100644
--- a/lpbuildd/target/tests/test_backend.py
+++ b/lpbuildd/target/tests/test_backend.py
@@ -1,15 +1,14 @@
 # Copyright 2022 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
-from unittest.mock import patch, ANY
+from unittest.mock import ANY, patch
 
-from testtools import TestCase
 from fixtures import TempDir
+from testtools import TestCase
 
 from lpbuildd.tests.fakebuilder import UncontainedBackend
 
 
 class TestBackend(TestCase):
-
     def test_open(self):
         backend = UncontainedBackend("1")
         backend_root = self.useFixture(TempDir())
diff --git a/lpbuildd/target/tests/test_build_charm.py b/lpbuildd/target/tests/test_build_charm.py
index b110591..9f98fa0 100644
--- a/lpbuildd/target/tests/test_build_charm.py
+++ b/lpbuildd/target/tests/test_build_charm.py
@@ -7,157 +7,232 @@ import stat
 import subprocess
 from textwrap import dedent
 
-from fixtures import (
-    FakeLogger,
-    TempDir,
-    )
 import responses
+from fixtures import FakeLogger, TempDir
 from systemfixtures import FakeFilesystem
-from testtools.matchers import (
-    AnyMatch,
-    MatchesAll,
-    MatchesListwise,
-    )
+from testtools.matchers import AnyMatch, MatchesAll, MatchesListwise
 from testtools.testcase import TestCase
 
 from lpbuildd.target.backend import InvalidBuildFilePath
 from lpbuildd.target.build_charm import (
     RETCODE_FAILURE_BUILD,
     RETCODE_FAILURE_INSTALL,
-    )
-from lpbuildd.tests.fakebuilder import FakeMethod
-from lpbuildd.target.tests.test_build_snap import (
-    FakeRevisionID,
-    RanSnap,
-    )
+)
 from lpbuildd.target.cli import parse_args
 from lpbuildd.target.tests.matchers import (
     RanAptGet,
     RanBuildCommand,
     RanCommand,
-    )
+)
+from lpbuildd.target.tests.test_build_snap import FakeRevisionID, RanSnap
+from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class TestBuildCharm(TestCase):
-
     def test_run_build_command_no_env(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.run_build_command(["echo", "hello world"])
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["echo", "hello world"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["echo", "hello world"], cwd="/home/buildd/test-image"
+                    ),
+                ]
+            ),
+        )
 
     def test_run_build_command_env(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.run_build_command(
-            ["echo", "hello world"], env={"FOO": "bar baz"})
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["echo", "hello world"],
-                FOO="bar baz",
-                cwd="/home/buildd/test-image")
-            ]))
+            ["echo", "hello world"], env={"FOO": "bar baz"}
+        )
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["echo", "hello world"],
+                        FOO="bar baz",
+                        cwd="/home/buildd/test-image",
+                    )
+                ]
+            ),
+        )
 
     def test_install_channels(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--channel=core=candidate", "--channel=core18=beta",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--channel=core=candidate",
+            "--channel=core18=beta",
             "--channel=charmcraft=edge",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "bzr", "python3-pip", "python3-setuptools"),
-            RanSnap("install", "--channel=candidate", "core"),
-            RanSnap("install", "--channel=beta", "core18"),
-            RanSnap("install", "--classic", "--channel=edge", "charmcraft"),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install", "bzr", "python3-pip", "python3-setuptools"
+                    ),
+                    RanSnap("install", "--channel=candidate", "core"),
+                    RanSnap("install", "--channel=beta", "core18"),
+                    RanSnap(
+                        "install", "--classic", "--channel=edge", "charmcraft"
+                    ),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     def test_install_bzr(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.install()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "bzr", "python3-pip", "python3-setuptools"),
-            RanSnap("install", "--classic", "charmcraft"),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install", "bzr", "python3-pip", "python3-setuptools"
+                    ),
+                    RanSnap("install", "--classic", "charmcraft"),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     def test_install_git(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-image"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.install()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "python3-pip", "python3-setuptools"),
-            RanSnap("install", "--classic", "charmcraft"),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install", "git", "python3-pip", "python3-setuptools"
+                    ),
+                    RanSnap("install", "--classic", "charmcraft"),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     @responses.activate
     def test_install_snap_store_proxy(self):
-        store_assertion = dedent("""\
+        store_assertion = dedent(
+            """\
             type: store
             store: store-id
             url: http://snap-store-proxy.example
 
             body
-            """)
+            """
+        )
 
         def respond(request):
             return 200, {"X-Assertion-Store-Id": "store-id"}, store_assertion
 
         responses.add_callback(
-            "GET", "http://snap-store-proxy.example/v2/auth/store/assertions";,
-            callback=respond)
+            "GET",
+            "http://snap-store-proxy.example/v2/auth/store/assertions";,
+            callback=respond,
+        )
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--snap-store-proxy-url", "http://snap-store-proxy.example/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--snap-store-proxy-url",
+            "http://snap-store-proxy.example/";,
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.install()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "python3-pip", "python3-setuptools"),
-            RanCommand(
-                ["snap", "ack", "/dev/stdin"], input_text=store_assertion),
-            RanCommand(["snap", "set", "core", "proxy.store=store-id"]),
-            RanSnap("install", "--classic", "charmcraft"),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install", "git", "python3-pip", "python3-setuptools"
+                    ),
+                    RanCommand(
+                        ["snap", "ack", "/dev/stdin"],
+                        input_text=store_assertion,
+                    ),
+                    RanCommand(
+                        ["snap", "set", "core", "proxy.store=store-id"]
+                    ),
+                    RanSnap("install", "--classic", "charmcraft"),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     def test_install_proxy(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.bin = "/builderbin"
         self.useFixture(FakeFilesystem()).add("/builderbin")
@@ -166,36 +241,62 @@ class TestBuildCharm(TestCase):
             proxy_script.write("proxy script\n")
             os.fchmod(proxy_script.fileno(), 0o755)
         build_charm.install()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanAptGet(
-                "install", "python3", "socat", "git",
-                "python3-pip", "python3-setuptools"),
-            RanSnap("install", "--classic", "charmcraft"),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install",
+                        "python3",
+                        "socat",
+                        "git",
+                        "python3-pip",
+                        "python3-setuptools",
+                    ),
+                    RanSnap("install", "--classic", "charmcraft"),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
         self.assertEqual(
             (b"proxy script\n", stat.S_IFREG | 0o755),
             build_charm.backend.backend_fs[
-                "/usr/local/bin/lpbuildd-git-proxy"])
+                "/usr/local/bin/lpbuildd-git-proxy"
+            ],
+        )
 
     def test_repo_bzr(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("42")
         build_charm.repo()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-image"], cwd="/home/buildd"),
-            RanBuildCommand(
-                ["bzr", "revno"],
-                cwd="/home/buildd/test-image", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["bzr", "revno"],
+                        cwd="/home/buildd/test-image",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_charm.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "42"}, json.load(status))
@@ -203,28 +304,49 @@ class TestBuildCharm(TestCase):
     def test_repo_git(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("0" * 40)
         build_charm.repo()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/home/buildd/test-image",
-                get_output=True, universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/home/buildd/test-image",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_charm.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -232,28 +354,51 @@ class TestBuildCharm(TestCase):
     def test_repo_git_with_path(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "next", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "next",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("0" * 40)
         build_charm.repo()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "next"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "rev-parse", "next^{}"],
-                cwd="/home/buildd/test-image", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "next"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "next^{}"],
+                        cwd="/home/buildd/test-image",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_charm.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -261,29 +406,51 @@ class TestBuildCharm(TestCase):
     def test_repo_git_with_tag_path(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "refs/tags/1.0",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "refs/tags/1.0",
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("0" * 40)
         build_charm.repo()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "refs/tags/1.0"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "rev-parse", "refs/tags/1.0^{}"],
-                cwd="/home/buildd/test-image", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "refs/tags/1.0"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "refs/tags/1.0^{}"],
+                        cwd="/home/buildd/test-image",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_charm.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -291,11 +458,16 @@ class TestBuildCharm(TestCase):
     def test_repo_proxy(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("0" * 40)
@@ -305,22 +477,41 @@ class TestBuildCharm(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-image"],
-                cwd="/home/buildd", **env),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd="/home/buildd/test-image", **env),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image", **env),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/home/buildd/test-image", get_output=True,
-                universal_newlines=True),
-            ]))
+        }
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/home/buildd/test-image",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/home/buildd/test-image",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_charm.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -328,41 +519,70 @@ class TestBuildCharm(TestCase):
     def test_build(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
-        build_charm.backend.add_dir('/build/test-directory')
+        build_charm.backend.add_dir("/build/test-directory")
         build_charm.build()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["charmcraft", "pack", "-v", "--destructive-mode"],
-                cwd="/home/buildd/test-image/."),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["charmcraft", "pack", "-v", "--destructive-mode"],
+                        cwd="/home/buildd/test-image/.",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_path(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "build-aux/",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "build-aux/",
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
-        build_charm.backend.add_dir('/build/test-directory')
+        build_charm.backend.add_dir("/build/test-directory")
         build_charm.build()
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["charmcraft", "pack", "-v", "--destructive-mode"],
-                cwd="/home/buildd/test-image/build-aux/"),
-            ]))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["charmcraft", "pack", "-v", "--destructive-mode"],
+                        cwd="/home/buildd/test-image/build-aux/",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_proxy(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.build()
         env = {
@@ -370,33 +590,57 @@ class TestBuildCharm(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_charm.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["charmcraft", "pack", "-v", "--destructive-mode"],
-                cwd="/home/buildd/test-image/.", **env),
-            ]))
+        }
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["charmcraft", "pack", "-v", "--destructive-mode"],
+                        cwd="/home/buildd/test-image/.",
+                        **env,
+                    ),
+                ]
+            ),
+        )
 
     def test_run_succeeds(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FakeRevisionID("42")
         self.assertEqual(0, build_charm.run())
-        self.assertThat(build_charm.backend.run.calls, MatchesAll(
-            AnyMatch(RanAptGet(
-                "install", "bzr", "python3-pip", "python3-setuptools"),),
-            AnyMatch(RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-image"],
-                cwd="/home/buildd")),
-            AnyMatch(RanBuildCommand(
-                ["charmcraft", "pack", "-v", "--destructive-mode"],
-                cwd="/home/buildd/test-image/.")),
-            ))
+        self.assertThat(
+            build_charm.backend.run.calls,
+            MatchesAll(
+                AnyMatch(
+                    RanAptGet(
+                        "install", "bzr", "python3-pip", "python3-setuptools"
+                    ),
+                ),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    )
+                ),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["charmcraft", "pack", "-v", "--destructive-mode"],
+                        cwd="/home/buildd/test-image/.",
+                    )
+                ),
+            ),
+        )
 
     def test_run_install_fails(self):
         class FailInstall(FakeMethod):
@@ -408,9 +652,14 @@ class TestBuildCharm(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.run = FailInstall()
         self.assertEqual(RETCODE_FAILURE_INSTALL, build_charm.run())
@@ -425,9 +674,14 @@ class TestBuildCharm(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.run = FailRepo()
         self.assertEqual(RETCODE_FAILURE_BUILD, build_charm.run())
@@ -442,9 +696,14 @@ class TestBuildCharm(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.backend.build_path = self.useFixture(TempDir()).path
         build_charm.backend.run = FailBuild()
@@ -453,35 +712,53 @@ class TestBuildCharm(TestCase):
     def test_build_with_invalid_build_path_parent(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "../",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "../",
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
-        build_charm.backend.add_dir('/build/test-directory')
+        build_charm.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_charm.build)
 
     def test_build_with_invalid_build_path_absolute(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "/etc",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "/etc",
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
-        build_charm.backend.add_dir('/build/test-directory')
+        build_charm.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_charm.build)
 
     def test_build_with_invalid_build_path_symlink(self):
         args = [
             "build-charm",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "build/",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "build/",
             "test-image",
-            ]
+        ]
         build_charm = parse_args(args=args).operation
         build_charm.buildd_path = self.useFixture(TempDir()).path
         os.symlink(
-            '/etc/hosts',
-            os.path.join(build_charm.buildd_path, 'build'))
+            "/etc/hosts", os.path.join(build_charm.buildd_path, "build")
+        )
         self.assertRaises(InvalidBuildFilePath, build_charm.build)
diff --git a/lpbuildd/target/tests/test_build_livefs.py b/lpbuildd/target/tests/test_build_livefs.py
index e8fa92a..ad220c3 100644
--- a/lpbuildd/target/tests/test_build_livefs.py
+++ b/lpbuildd/target/tests/test_build_livefs.py
@@ -4,187 +4,321 @@
 import subprocess
 from textwrap import dedent
 
-from fixtures import FakeLogger
 import responses
+from fixtures import FakeLogger
 from testtools import TestCase
-from testtools.matchers import (
-    AnyMatch,
-    MatchesAll,
-    MatchesListwise,
-    )
+from testtools.matchers import AnyMatch, MatchesAll, MatchesListwise
 
 from lpbuildd.target.build_livefs import (
     RETCODE_FAILURE_BUILD,
     RETCODE_FAILURE_INSTALL,
-    )
+)
 from lpbuildd.target.cli import parse_args
 from lpbuildd.target.tests.matchers import (
     RanAptGet,
     RanBuildCommand,
     RanCommand,
-    )
+)
 from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class TestBuildLiveFS(TestCase):
-
     def test_install(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.install()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "livecd-rootfs"),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "livecd-rootfs"),
+                ]
+            ),
+        )
 
     def test_install_locale(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--locale=zh_CN",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.install()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "livecd-rootfs"),
-            RanAptGet(
-                "--install-recommends", "install", "ubuntu-defaults-builder"),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "livecd-rootfs"),
+                    RanAptGet(
+                        "--install-recommends",
+                        "install",
+                        "ubuntu-defaults-builder",
+                    ),
+                ]
+            ),
+        )
 
     @responses.activate
     def test_install_snap_store_proxy(self):
-        store_assertion = dedent("""\
+        store_assertion = dedent(
+            """\
             type: store
             store: store-id
             url: http://snap-store-proxy.example
 
             body
-            """)
+            """
+        )
 
         def respond(request):
             return 200, {"X-Assertion-Store-Id": "store-id"}, store_assertion
 
         responses.add_callback(
-            "GET", "http://snap-store-proxy.example/v2/auth/store/assertions";,
-            callback=respond)
+            "GET",
+            "http://snap-store-proxy.example/v2/auth/store/assertions";,
+            callback=respond,
+        )
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--snap-store-proxy-url", "http://snap-store-proxy.example/";,
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--snap-store-proxy-url",
+            "http://snap-store-proxy.example/";,
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.install()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "livecd-rootfs"),
-            RanCommand(
-                ["snap", "ack", "/dev/stdin"], input_text=store_assertion),
-            RanCommand(["snap", "set", "core", "proxy.store=store-id"]),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "livecd-rootfs"),
+                    RanCommand(
+                        ["snap", "ack", "/dev/stdin"],
+                        input_text=store_assertion,
+                    ),
+                    RanCommand(
+                        ["snap", "set", "core", "proxy.store=store-id"]
+                    ),
+                ]
+            ),
+        )
 
     def test_build(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.build()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["rm", "-rf", "auto", "local"]),
-            RanBuildCommand(["mkdir", "-p", "auto"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/config", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/build", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/clean", "auto/"]),
-            RanBuildCommand(["lb", "clean", "--purge"]),
-            RanBuildCommand(
-                ["lb", "config"],
-                PROJECT="ubuntu", ARCH="amd64", SUITE="xenial"),
-            RanBuildCommand(["lb", "build"], PROJECT="ubuntu", ARCH="amd64"),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["rm", "-rf", "auto", "local"]),
+                    RanBuildCommand(["mkdir", "-p", "auto"]),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/config",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/build",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/clean",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(["lb", "clean", "--purge"]),
+                    RanBuildCommand(
+                        ["lb", "config"],
+                        PROJECT="ubuntu",
+                        ARCH="amd64",
+                        SUITE="xenial",
+                    ),
+                    RanBuildCommand(
+                        ["lb", "build"], PROJECT="ubuntu", ARCH="amd64"
+                    ),
+                ]
+            ),
+        )
 
     def test_build_locale(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--locale=zh_CN",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.build()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["ubuntu-defaults-image", "--locale", "zh_CN",
-                 "--arch", "amd64", "--release", "xenial"]),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "ubuntu-defaults-image",
+                            "--locale",
+                            "zh_CN",
+                            "--arch",
+                            "amd64",
+                            "--release",
+                            "xenial",
+                        ]
+                    ),
+                ]
+            ),
+        )
 
     def test_build_extra_ppas_and_snaps(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu-core",
-            "--extra-ppa=owner1/name1", "--extra-ppa=owner2/name2",
-            "--extra-snap=snap1", "--extra-snap=snap2",
-            ]
+            "--extra-ppa=owner1/name1",
+            "--extra-ppa=owner2/name2",
+            "--extra-snap=snap1",
+            "--extra-snap=snap2",
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.build()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["rm", "-rf", "auto", "local"]),
-            RanBuildCommand(["mkdir", "-p", "auto"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/config", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/build", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/clean", "auto/"]),
-            RanBuildCommand(["lb", "clean", "--purge"]),
-            RanBuildCommand(
-                ["lb", "config"],
-                PROJECT="ubuntu-core", ARCH="amd64", SUITE="xenial",
-                EXTRA_PPAS="owner1/name1 owner2/name2",
-                EXTRA_SNAPS="snap1 snap2"),
-            RanBuildCommand(
-                ["lb", "build"], PROJECT="ubuntu-core", ARCH="amd64"),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["rm", "-rf", "auto", "local"]),
+                    RanBuildCommand(["mkdir", "-p", "auto"]),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/config",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/build",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/clean",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(["lb", "clean", "--purge"]),
+                    RanBuildCommand(
+                        ["lb", "config"],
+                        PROJECT="ubuntu-core",
+                        ARCH="amd64",
+                        SUITE="xenial",
+                        EXTRA_PPAS="owner1/name1 owner2/name2",
+                        EXTRA_SNAPS="snap1 snap2",
+                    ),
+                    RanBuildCommand(
+                        ["lb", "build"], PROJECT="ubuntu-core", ARCH="amd64"
+                    ),
+                ]
+            ),
+        )
 
     def test_build_debug(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--project=ubuntu", "--debug",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--project=ubuntu",
+            "--debug",
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.build()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["rm", "-rf", "auto", "local"]),
-            RanBuildCommand(["mkdir", "-p", "auto"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/config", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/build", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/clean", "auto/"]),
-            RanBuildCommand(["mkdir", "-p", "local/functions"]),
-            RanBuildCommand(
-                ["sh", "-c", "echo 'set -x' >local/functions/debug.sh"]),
-            RanBuildCommand(["lb", "clean", "--purge"]),
-            RanBuildCommand(
-                ["lb", "config"],
-                PROJECT="ubuntu", ARCH="amd64", SUITE="xenial"),
-            RanBuildCommand(["lb", "build"], PROJECT="ubuntu", ARCH="amd64"),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["rm", "-rf", "auto", "local"]),
+                    RanBuildCommand(["mkdir", "-p", "auto"]),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/config",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/build",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/clean",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(["mkdir", "-p", "local/functions"]),
+                    RanBuildCommand(
+                        ["sh", "-c", "echo 'set -x' >local/functions/debug.sh"]
+                    ),
+                    RanBuildCommand(["lb", "clean", "--purge"]),
+                    RanBuildCommand(
+                        ["lb", "config"],
+                        PROJECT="ubuntu",
+                        ARCH="amd64",
+                        SUITE="xenial",
+                    ),
+                    RanBuildCommand(
+                        ["lb", "build"], PROJECT="ubuntu", ARCH="amd64"
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_http_proxy(self):
         proxy = "http://example.com:8000";
@@ -193,44 +327,79 @@ class TestBuildLiveFS(TestCase):
             "ARCH": "amd64",
             "http_proxy": proxy,
             "LB_APT_HTTP_PROXY": proxy,
-            }
+        }
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu-cpc",
             f"--http-proxy={proxy}",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.build()
-        self.assertThat(build_livefs.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["rm", "-rf", "auto", "local"]),
-            RanBuildCommand(["mkdir", "-p", "auto"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/config", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/build", "auto/"]),
-            RanBuildCommand(
-                ["ln", "-s",
-                 "/usr/share/livecd-rootfs/live-build/auto/clean", "auto/"]),
-            RanBuildCommand(["lb", "clean", "--purge"]),
-            RanBuildCommand(["lb", "config"], SUITE="xenial", **expected_env),
-            RanBuildCommand(["lb", "build"], **expected_env),
-            ]))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["rm", "-rf", "auto", "local"]),
+                    RanBuildCommand(["mkdir", "-p", "auto"]),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/config",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/build",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(
+                        [
+                            "ln",
+                            "-s",
+                            "/usr/share/livecd-rootfs/live-build/auto/clean",
+                            "auto/",
+                        ]
+                    ),
+                    RanBuildCommand(["lb", "clean", "--purge"]),
+                    RanBuildCommand(
+                        ["lb", "config"], SUITE="xenial", **expected_env
+                    ),
+                    RanBuildCommand(["lb", "build"], **expected_env),
+                ]
+            ),
+        )
 
     def test_run_succeeds(self):
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         self.assertEqual(0, build_livefs.run())
-        self.assertThat(build_livefs.backend.run.calls, MatchesAll(
-            AnyMatch(RanAptGet("install", "livecd-rootfs")),
-            AnyMatch(RanBuildCommand(
-                ["lb", "build"], PROJECT="ubuntu", ARCH="amd64"))))
+        self.assertThat(
+            build_livefs.backend.run.calls,
+            MatchesAll(
+                AnyMatch(RanAptGet("install", "livecd-rootfs")),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["lb", "build"], PROJECT="ubuntu", ARCH="amd64"
+                    )
+                ),
+            ),
+        )
 
     def test_run_install_fails(self):
         class FailInstall(FakeMethod):
@@ -242,9 +411,12 @@ class TestBuildLiveFS(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.backend.run = FailInstall()
         self.assertEqual(RETCODE_FAILURE_INSTALL, build_livefs.run())
@@ -259,9 +431,12 @@ class TestBuildLiveFS(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildlivefs",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "--project=ubuntu",
-            ]
+        ]
         build_livefs = parse_args(args=args).operation
         build_livefs.backend.run = FailBuild()
         self.assertEqual(RETCODE_FAILURE_BUILD, build_livefs.run())
diff --git a/lpbuildd/target/tests/test_build_oci.py b/lpbuildd/target/tests/test_build_oci.py
index 7e9c916..116d343 100644
--- a/lpbuildd/target/tests/test_build_oci.py
+++ b/lpbuildd/target/tests/test_build_oci.py
@@ -6,133 +6,190 @@ import stat
 import subprocess
 from textwrap import dedent
 
-from fixtures import (
-    FakeLogger,
-    TempDir,
-    )
 import responses
+from fixtures import FakeLogger, TempDir
 from systemfixtures import FakeFilesystem
 from testtools import TestCase
-from testtools.matchers import (
-    AnyMatch,
-    MatchesAll,
-    MatchesListwise,
-    )
+from testtools.matchers import AnyMatch, MatchesAll, MatchesListwise
 
 from lpbuildd.target.backend import InvalidBuildFilePath
 from lpbuildd.target.build_oci import (
     RETCODE_FAILURE_BUILD,
     RETCODE_FAILURE_INSTALL,
-    )
+)
 from lpbuildd.target.cli import parse_args
 from lpbuildd.target.tests.matchers import (
     RanAptGet,
     RanBuildCommand,
     RanCommand,
-    )
+)
 from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class TestBuildOCI(TestCase):
-
     def test_run_build_command_no_env(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.run_build_command(["echo", "hello world"])
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["echo", "hello world"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["echo", "hello world"], cwd="/home/buildd/test-image"
+                    ),
+                ]
+            ),
+        )
 
     def test_run_build_command_env(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.run_build_command(
-            ["echo", "hello world"], env={"FOO": "bar baz"})
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["echo", "hello world"],
-                FOO="bar baz",
-                cwd="/home/buildd/test-image")
-            ]))
+            ["echo", "hello world"], env={"FOO": "bar baz"}
+        )
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["echo", "hello world"],
+                        FOO="bar baz",
+                        cwd="/home/buildd/test-image",
+                    )
+                ]
+            ),
+        )
 
     def test_install_bzr(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.install()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "bzr", "docker.io"),
-            RanCommand(["systemctl", "restart", "docker"]),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "bzr", "docker.io"),
+                    RanCommand(["systemctl", "restart", "docker"]),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     def test_install_git(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-image"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.install()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "docker.io"),
-            RanCommand(["systemctl", "restart", "docker"]),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "docker.io"),
+                    RanCommand(["systemctl", "restart", "docker"]),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
 
     @responses.activate
     def test_install_snap_store_proxy(self):
-        store_assertion = dedent("""\
+        store_assertion = dedent(
+            """\
             type: store
             store: store-id
             url: http://snap-store-proxy.example
 
             body
-            """)
+            """
+        )
 
         def respond(request):
             return 200, {"X-Assertion-Store-Id": "store-id"}, store_assertion
 
         responses.add_callback(
-            "GET", "http://snap-store-proxy.example/v2/auth/store/assertions";,
-            callback=respond)
+            "GET",
+            "http://snap-store-proxy.example/v2/auth/store/assertions";,
+            callback=respond,
+        )
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--snap-store-proxy-url", "http://snap-store-proxy.example/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--snap-store-proxy-url",
+            "http://snap-store-proxy.example/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "snapcraft"),
-            RanCommand(
-                ["snap", "ack", "/dev/stdin"], input_text=store_assertion),
-            RanCommand(["snap", "set", "core", "proxy.store=store-id"]),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "snapcraft"),
+                    RanCommand(
+                        ["snap", "ack", "/dev/stdin"],
+                        input_text=store_assertion,
+                    ),
+                    RanCommand(
+                        ["snap", "set", "core", "proxy.store=store-id"]
+                    ),
+                ]
+            ),
+        )
 
     def test_install_proxy(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.bin = "/builderbin"
         self.useFixture(FakeFilesystem()).add("/builderbin")
@@ -141,110 +198,220 @@ class TestBuildOCI(TestCase):
             proxy_script.write("proxy script\n")
             os.fchmod(proxy_script.fileno(), 0o755)
         build_oci.install()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanCommand(
-                ["mkdir", "-p", "/etc/systemd/system/docker.service.d"]),
-            RanAptGet("install", "python3", "socat", "git", "docker.io"),
-            RanCommand(["systemctl", "restart", "docker"]),
-            RanCommand(["mkdir", "-p", "/home/buildd"]),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanCommand(
+                        ["mkdir", "-p", "/etc/systemd/system/docker.service.d"]
+                    ),
+                    RanAptGet(
+                        "install", "python3", "socat", "git", "docker.io"
+                    ),
+                    RanCommand(["systemctl", "restart", "docker"]),
+                    RanCommand(["mkdir", "-p", "/home/buildd"]),
+                ]
+            ),
+        )
         self.assertEqual(
             (b"proxy script\n", stat.S_IFREG | 0o755),
-            build_oci.backend.backend_fs["/usr/local/bin/lpbuildd-git-proxy"])
+            build_oci.backend.backend_fs["/usr/local/bin/lpbuildd-git-proxy"],
+        )
 
     def test_repo_bzr(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
         build_oci.repo()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-image"], cwd="/home/buildd"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    ),
+                ]
+            ),
+        )
 
     def test_repo_git(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
         build_oci.repo()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "--depth", "1", "--no-single-branch",
-                 "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "clone",
+                            "-n",
+                            "--depth",
+                            "1",
+                            "--no-single-branch",
+                            "lp:foo",
+                            "test-image",
+                        ],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_repo_git_with_path(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "next", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "next",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
         build_oci.repo()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "--depth", "1", "--no-single-branch",
-                 "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "next"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "clone",
+                            "-n",
+                            "--depth",
+                            "1",
+                            "--no-single-branch",
+                            "lp:foo",
+                            "test-image",
+                        ],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "next"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_repo_git_with_tag_path(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "refs/tags/1.0",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "refs/tags/1.0",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
         build_oci.repo()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "--depth", "1", "--no-single-branch",
-                 "lp:foo", "test-image"],
-                cwd="/home/buildd"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "refs/tags/1.0"],
-                cwd="/home/buildd/test-image"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "clone",
+                            "-n",
+                            "--depth",
+                            "1",
+                            "--no-single-branch",
+                            "lp:foo",
+                            "test-image",
+                        ],
+                        cwd="/home/buildd",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "refs/tags/1.0"],
+                        cwd="/home/buildd/test-image",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_repo_proxy(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
@@ -254,150 +421,311 @@ class TestBuildOCI(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "--depth", "1", "--no-single-branch",
-                 "lp:foo", "test-image"],
-                cwd="/home/buildd", **env),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd="/home/buildd/test-image", **env),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/home/buildd/test-image", **env),
-            ]))
+        }
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "clone",
+                            "-n",
+                            "--depth",
+                            "1",
+                            "--no-single-branch",
+                            "lp:foo",
+                            "test-image",
+                        ],
+                        cwd="/home/buildd",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/home/buildd/test-image",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/home/buildd/test-image",
+                        **env,
+                    ),
+                ]
+            ),
+        )
 
     def test_build(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "/home/buildd/test-image/."],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "/home/buildd/test-image/.",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_file(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "build-aux/Dockerfile",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "build-aux/Dockerfile",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "--file", "./build-aux/Dockerfile",
-                 "/home/buildd/test-image/."],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "--file",
+                            "./build-aux/Dockerfile",
+                            "/home/buildd/test-image/.",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_path(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "a-sub-directory/",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "a-sub-directory/",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "/home/buildd/test-image/a-sub-directory/"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "/home/buildd/test-image/a-sub-directory/",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_file_and_path(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "build-aux/Dockerfile",
-            "--build-path", "test-build-path",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "build-aux/Dockerfile",
+            "--build-path",
+            "test-build-path",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "--file", "test-build-path/build-aux/Dockerfile",
-                 "/home/buildd/test-image/test-build-path"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "--file",
+                            "test-build-path/build-aux/Dockerfile",
+                            "/home/buildd/test-image/test-build-path",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_with_args(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "build-aux/Dockerfile",
-            "--build-path", "test-build-path",
-            "--build-arg=VAR1=xxx", "--build-arg=VAR2=yyy",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "build-aux/Dockerfile",
+            "--build-path",
+            "test-build-path",
+            "--build-arg=VAR1=xxx",
+            "--build-arg=VAR2=yyy",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "--file", "test-build-path/build-aux/Dockerfile",
-                 "--build-arg=VAR1=xxx", "--build-arg=VAR2=yyy",
-                 "/home/buildd/test-image/test-build-path"],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "--file",
+                            "test-build-path/build-aux/Dockerfile",
+                            "--build-arg=VAR1=xxx",
+                            "--build-arg=VAR2=yyy",
+                            "/home/buildd/test-image/test-build-path",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_proxy(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         build_oci.build()
-        self.assertThat(build_oci.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["docker", "build", "--no-cache",
-                 "--build-arg", "http_proxy=http://proxy.example:3128/",
-                 "--build-arg", "https_proxy=http://proxy.example:3128/",
-                 "--tag", "test-image", "/home/buildd/test-image/."],
-                cwd="/home/buildd/test-image"),
-            ]))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--build-arg",
+                            "http_proxy=http://proxy.example:3128/",
+                            "--build-arg",
+                            "https_proxy=http://proxy.example:3128/",
+                            "--tag",
+                            "test-image",
+                            "/home/buildd/test-image/.",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    ),
+                ]
+            ),
+        )
 
     def test_run_succeeds(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FakeMethod()
         self.assertEqual(0, build_oci.run())
-        self.assertThat(build_oci.backend.run.calls, MatchesAll(
-            AnyMatch(RanAptGet("install", "bzr", "docker.io")),
-            AnyMatch(RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-image"],
-                cwd="/home/buildd")),
-            AnyMatch(RanBuildCommand(
-                ["docker", "build", "--no-cache", "--tag", "test-image",
-                 "/home/buildd/test-image/."],
-                cwd="/home/buildd/test-image")),
-            ))
+        self.assertThat(
+            build_oci.backend.run.calls,
+            MatchesAll(
+                AnyMatch(RanAptGet("install", "bzr", "docker.io")),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-image"],
+                        cwd="/home/buildd",
+                    )
+                ),
+                AnyMatch(
+                    RanBuildCommand(
+                        [
+                            "docker",
+                            "build",
+                            "--no-cache",
+                            "--tag",
+                            "test-image",
+                            "/home/buildd/test-image/.",
+                        ],
+                        cwd="/home/buildd/test-image",
+                    )
+                ),
+            ),
+        )
 
     def test_run_install_fails(self):
         class FailInstall(FakeMethod):
@@ -409,9 +737,14 @@ class TestBuildOCI(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.run = FailInstall()
         self.assertEqual(RETCODE_FAILURE_INSTALL, build_oci.run())
@@ -426,9 +759,14 @@ class TestBuildOCI(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.run = FailRepo()
         self.assertEqual(RETCODE_FAILURE_BUILD, build_oci.run())
@@ -443,9 +781,14 @@ class TestBuildOCI(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-image",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-image",
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.backend.build_path = self.useFixture(TempDir()).path
         build_oci.backend.run = FailBuild()
@@ -454,71 +797,105 @@ class TestBuildOCI(TestCase):
     def test_build_with_invalid_file_path_parent(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "../build-aux/Dockerfile",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "../build-aux/Dockerfile",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
 
     def test_build_with_invalid_file_path_absolute(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "/etc/Dockerfile",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "/etc/Dockerfile",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
 
     def test_build_with_invalid_file_path_symlink(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-file", "Dockerfile",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-file",
+            "Dockerfile",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.buildd_path = self.useFixture(TempDir()).path
         os.symlink(
-            '/etc/hosts',
-            os.path.join(build_oci.buildd_path, 'Dockerfile'))
+            "/etc/hosts", os.path.join(build_oci.buildd_path, "Dockerfile")
+        )
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
 
     def test_build_with_invalid_build_path_parent(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "../",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "../",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
 
     def test_build_with_invalid_build_path_absolute(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "/etc",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "/etc",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
-        build_oci.backend.add_dir('/build/test-directory')
+        build_oci.backend.add_dir("/build/test-directory")
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
 
     def test_build_with_invalid_build_path_symlink(self):
         args = [
             "build-oci",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-path", "build/",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-path",
+            "build/",
             "test-image",
-            ]
+        ]
         build_oci = parse_args(args=args).operation
         build_oci.buildd_path = self.useFixture(TempDir()).path
-        os.symlink(
-            '/etc/hosts',
-            os.path.join(build_oci.buildd_path, 'build'))
+        os.symlink("/etc/hosts", os.path.join(build_oci.buildd_path, "build"))
         self.assertRaises(InvalidBuildFilePath, build_oci.build)
diff --git a/lpbuildd/target/tests/test_build_snap.py b/lpbuildd/target/tests/test_build_snap.py
index 5d94d8d..1eb4f05 100644
--- a/lpbuildd/target/tests/test_build_snap.py
+++ b/lpbuildd/target/tests/test_build_snap.py
@@ -7,48 +7,40 @@ import stat
 import subprocess
 from textwrap import dedent
 
-from fixtures import (
-    FakeLogger,
-    TempDir,
-    )
 import responses
+from fixtures import FakeLogger, TempDir
 from systemfixtures import FakeFilesystem
 from testtools import TestCase
-from testtools.matchers import (
-    AnyMatch,
-    MatchesAll,
-    MatchesListwise,
-    )
+from testtools.matchers import AnyMatch, MatchesAll, MatchesListwise
 
 from lpbuildd.target.build_snap import (
     RETCODE_FAILURE_BUILD,
     RETCODE_FAILURE_INSTALL,
-    )
+)
 from lpbuildd.target.cli import parse_args
 from lpbuildd.target.tests.matchers import (
     RanAptGet,
     RanBuildCommand,
     RanCommand,
     RanSnap,
-    )
+)
 from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class FakeRevisionID(FakeMethod):
-
     def __init__(self, revision_id):
         super().__init__()
         self.revision_id = revision_id
 
     def __call__(self, run_args, *args, **kwargs):
         super().__call__(run_args, *args, **kwargs)
-        if (run_args[:2] == ["bzr", "revno"] or
-                (run_args[0] == "git" and "rev-parse" in run_args)):
+        if run_args[:2] == ["bzr", "revno"] or (
+            run_args[0] == "git" and "rev-parse" in run_args
+        ):
             return "%s\n" % self.revision_id
 
 
 class FakeSnapcraft(FakeMethod):
-
     def __init__(self, backend, name):
         super().__init__()
         self.backend = backend
@@ -61,71 +53,113 @@ class FakeSnapcraft(FakeMethod):
 
 
 class TestBuildSnap(TestCase):
-
     def test_install_bzr(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "bzr", "snapcraft"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "bzr", "snapcraft"),
+                ]
+            ),
+        )
 
     def test_install_git(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-snap"
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "snapcraft"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "snapcraft"),
+                ]
+            ),
+        )
 
     @responses.activate
     def test_install_snap_store_proxy(self):
-        store_assertion = dedent("""\
+        store_assertion = dedent(
+            """\
             type: store
             store: store-id
             url: http://snap-store-proxy.example
 
             body
-            """)
+            """
+        )
 
         def respond(request):
             return 200, {"X-Assertion-Store-Id": "store-id"}, store_assertion
 
         responses.add_callback(
-            "GET", "http://snap-store-proxy.example/v2/auth/store/assertions";,
-            callback=respond)
+            "GET",
+            "http://snap-store-proxy.example/v2/auth/store/assertions";,
+            callback=respond,
+        )
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--snap-store-proxy-url", "http://snap-store-proxy.example/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--snap-store-proxy-url",
+            "http://snap-store-proxy.example/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "snapcraft"),
-            RanCommand(
-                ["snap", "ack", "/dev/stdin"], input_text=store_assertion),
-            RanCommand(["snap", "set", "core", "proxy.store=store-id"]),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "snapcraft"),
+                    RanCommand(
+                        ["snap", "ack", "/dev/stdin"],
+                        input_text=store_assertion,
+                    ),
+                    RanCommand(
+                        ["snap", "set", "core", "proxy.store=store-id"]
+                    ),
+                ]
+            ),
+        )
 
     def test_install_proxy(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.bin = "/builderbin"
         self.useFixture(FakeFilesystem()).add("/builderbin")
@@ -134,55 +168,92 @@ class TestBuildSnap(TestCase):
             proxy_script.write("proxy script\n")
             os.fchmod(proxy_script.fileno(), 0o755)
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "python3", "socat", "git", "snapcraft"),
-            RanCommand(["mkdir", "-p", "/root/.subversion"]),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet(
+                        "install", "python3", "socat", "git", "snapcraft"
+                    ),
+                    RanCommand(["mkdir", "-p", "/root/.subversion"]),
+                ]
+            ),
+        )
         self.assertEqual(
             (b"proxy script\n", stat.S_IFREG | 0o755),
-            build_snap.backend.backend_fs["/usr/local/bin/lpbuildd-git-proxy"])
+            build_snap.backend.backend_fs["/usr/local/bin/lpbuildd-git-proxy"],
+        )
         self.assertEqual(
-            (b"[global]\n"
-             b"http-proxy-host = proxy.example\n"
-             b"http-proxy-port = 3128\n",
-             stat.S_IFREG | 0o644),
-            build_snap.backend.backend_fs["/root/.subversion/servers"])
+            (
+                b"[global]\n"
+                b"http-proxy-host = proxy.example\n"
+                b"http-proxy-port = 3128\n",
+                stat.S_IFREG | 0o644,
+            ),
+            build_snap.backend.backend_fs["/root/.subversion/servers"],
+        )
 
     def test_install_channels(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--channel=core=candidate", "--channel=core18=beta",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--channel=core=candidate",
+            "--channel=core18=beta",
             "--channel=snapcraft=edge",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.install()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "bzr", "sudo"),
-            RanSnap("install", "--channel=candidate", "core"),
-            RanSnap("install", "--channel=beta", "core18"),
-            RanSnap("install", "--classic", "--channel=edge", "snapcraft"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "bzr", "sudo"),
+                    RanSnap("install", "--channel=candidate", "core"),
+                    RanSnap("install", "--channel=beta", "core18"),
+                    RanSnap(
+                        "install", "--classic", "--channel=edge", "snapcraft"
+                    ),
+                ]
+            ),
+        )
 
     def test_repo_bzr(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("42")
         build_snap.repo()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-snap"], cwd="/build"),
-            RanBuildCommand(
-                ["bzr", "revno"],
-                cwd="/build/test-snap", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-snap"], cwd="/build"
+                    ),
+                    RanBuildCommand(
+                        ["bzr", "revno"],
+                        cwd="/build/test-snap",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_snap.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "42"}, json.load(status))
@@ -190,26 +261,49 @@ class TestBuildSnap(TestCase):
     def test_repo_git(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("0" * 40)
         build_snap.repo()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-snap"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"], cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/build/test-snap",
-                get_output=True, universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-snap"],
+                        cwd="/build",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/build/test-snap",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_snap.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -217,26 +311,51 @@ class TestBuildSnap(TestCase):
     def test_repo_git_with_path(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "next", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "next",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("0" * 40)
         build_snap.repo()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-snap"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "next"], cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "rev-parse", "next^{}"],
-                cwd="/build/test-snap", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-snap"],
+                        cwd="/build",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "next"],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "next^{}"],
+                        cwd="/build/test-snap",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_snap.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -244,28 +363,51 @@ class TestBuildSnap(TestCase):
     def test_repo_git_with_tag_path(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "refs/tags/1.0",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "refs/tags/1.0",
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("0" * 40)
         build_snap.repo()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-snap"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "refs/tags/1.0"],
-                cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/test-snap"),
-            RanBuildCommand(
-                ["git", "rev-parse", "refs/tags/1.0^{}"],
-                cwd="/build/test-snap", get_output=True,
-                universal_newlines=True),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-snap"],
+                        cwd="/build",
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "refs/tags/1.0"],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/test-snap",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "refs/tags/1.0^{}"],
+                        cwd="/build/test-snap",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_snap.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -273,11 +415,16 @@ class TestBuildSnap(TestCase):
     def test_repo_proxy(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("0" * 40)
@@ -287,22 +434,41 @@ class TestBuildSnap(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "test-snap"],
-                cwd="/build", **env),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd="/build/test-snap", **env),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/test-snap", **env),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/build/test-snap", get_output=True,
-                universal_newlines=True),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "test-snap"],
+                        cwd="/build",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/build/test-snap",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/test-snap",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/build/test-snap",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(build_snap.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -310,9 +476,14 @@ class TestBuildSnap(TestCase):
     def test_pull(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.pull()
         env = {
@@ -321,20 +492,33 @@ class TestBuildSnap(TestCase):
             "SNAPCRAFT_BUILD_INFO": "1",
             "SNAPCRAFT_IMAGE_INFO": "{}",
             "SNAPCRAFT_BUILD_ENVIRONMENT": "host",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft", "pull"], cwd="/build/test-snap", **env),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft", "pull"], cwd="/build/test-snap", **env
+                    ),
+                ]
+            ),
+        )
 
     def test_pull_proxy(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--build-url", "https://launchpad.example/build";,
-            "--branch", "lp:foo", "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--build-url",
+            "https://launchpad.example/build";,
+            "--branch",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.pull()
         env = {
@@ -342,24 +526,37 @@ class TestBuildSnap(TestCase):
             "SNAPCRAFT_SETUP_CORE": "1",
             "SNAPCRAFT_BUILD_INFO": "1",
             "SNAPCRAFT_IMAGE_INFO": (
-                '{"build_url": "https://launchpad.example/build"}'),
+                '{"build_url": "https://launchpad.example/build"}'
+            ),
             "SNAPCRAFT_BUILD_ENVIRONMENT": "host",
             "http_proxy": "http://proxy.example:3128/";,
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft", "pull"], cwd="/build/test-snap", **env),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft", "pull"], cwd="/build/test-snap", **env
+                    ),
+                ]
+            ),
+        )
 
     def test_pull_build_source_tarball(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--build-source-tarball", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--build-source-tarball",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.pull()
         env = {
@@ -368,24 +565,45 @@ class TestBuildSnap(TestCase):
             "SNAPCRAFT_BUILD_INFO": "1",
             "SNAPCRAFT_IMAGE_INFO": "{}",
             "SNAPCRAFT_BUILD_ENVIRONMENT": "host",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft", "pull"], cwd="/build/test-snap", **env),
-            RanBuildCommand(
-                ["tar", "-czf", "test-snap.tar.gz",
-                 "--format=gnu", "--sort=name", "--exclude-vcs",
-                 "--numeric-owner", "--owner=0", "--group=0",
-                 "test-snap"],
-                cwd="/build"),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft", "pull"], cwd="/build/test-snap", **env
+                    ),
+                    RanBuildCommand(
+                        [
+                            "tar",
+                            "-czf",
+                            "test-snap.tar.gz",
+                            "--format=gnu",
+                            "--sort=name",
+                            "--exclude-vcs",
+                            "--numeric-owner",
+                            "--owner=0",
+                            "--group=0",
+                            "test-snap",
+                        ],
+                        cwd="/build",
+                    ),
+                ]
+            ),
+        )
 
     def test_pull_private(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--private", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--private",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.pull()
         env = {
@@ -393,136 +611,243 @@ class TestBuildSnap(TestCase):
             "SNAPCRAFT_SETUP_CORE": "1",
             "SNAPCRAFT_IMAGE_INFO": "{}",
             "SNAPCRAFT_BUILD_ENVIRONMENT": "host",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft", "pull"], cwd="/build/test-snap", **env),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft", "pull"], cwd="/build/test-snap", **env
+                    ),
+                ]
+            ),
+        )
 
     def test_build(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FakeSnapcraft(
-            build_snap.backend, "test-snap_1.snap")
+            build_snap.backend, "test-snap_1.snap"
+        )
         build_snap.build()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_BUILD_INFO="1", SNAPCRAFT_IMAGE_INFO="{}",
-                SNAPCRAFT_BUILD_ENVIRONMENT="host"),
-            RanBuildCommand(
-                ["sha512sum", "test-snap_1.snap"], cwd="/build/test-snap"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO="{}",
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    ),
+                    RanBuildCommand(
+                        ["sha512sum", "test-snap_1.snap"],
+                        cwd="/build/test-snap",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_proxy(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--build-url", "https://launchpad.example/build";,
-            "--branch", "lp:foo", "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--build-url",
+            "https://launchpad.example/build";,
+            "--branch",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FakeSnapcraft(
-            build_snap.backend, "test-snap_1.snap")
+            build_snap.backend, "test-snap_1.snap"
+        )
         build_snap.build()
         env = {
             "SNAPCRAFT_BUILD_INFO": "1",
             "SNAPCRAFT_IMAGE_INFO": (
-                '{"build_url": "https://launchpad.example/build"}'),
+                '{"build_url": "https://launchpad.example/build"}'
+            ),
             "SNAPCRAFT_BUILD_ENVIRONMENT": "host",
             "http_proxy": "http://proxy.example:3128/";,
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["snapcraft"], cwd="/build/test-snap", **env),
-            RanBuildCommand(
-                ["sha512sum", "test-snap_1.snap"], cwd="/build/test-snap"),
-            ]))
+        }
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"], cwd="/build/test-snap", **env
+                    ),
+                    RanBuildCommand(
+                        ["sha512sum", "test-snap_1.snap"],
+                        cwd="/build/test-snap",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_private(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "--private", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--private",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FakeSnapcraft(
-            build_snap.backend, "test-snap_1.snap")
+            build_snap.backend, "test-snap_1.snap"
+        )
         build_snap.build()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_IMAGE_INFO="{}", SNAPCRAFT_BUILD_ENVIRONMENT="host"),
-            RanBuildCommand(
-                ["sha512sum", "test-snap_1.snap"], cwd="/build/test-snap"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_IMAGE_INFO="{}",
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    ),
+                    RanBuildCommand(
+                        ["sha512sum", "test-snap_1.snap"],
+                        cwd="/build/test-snap",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_including_build_request_id(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--build-request-id", "13", "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--build-request-id",
+            "13",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FakeSnapcraft(
-            build_snap.backend, "test-snap_1.snap")
+            build_snap.backend, "test-snap_1.snap"
+        )
         build_snap.build()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_BUILD_INFO="1",
-                SNAPCRAFT_IMAGE_INFO='{"build-request-id": "lp-13"}',
-                SNAPCRAFT_BUILD_ENVIRONMENT="host"),
-            RanBuildCommand(
-                ["sha512sum", "test-snap_1.snap"], cwd="/build/test-snap"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO='{"build-request-id": "lp-13"}',
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    ),
+                    RanBuildCommand(
+                        ["sha512sum", "test-snap_1.snap"],
+                        cwd="/build/test-snap",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_including_build_request_timestamp(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--build-request-timestamp", "2018-04-13T14:50:02Z",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--build-request-timestamp",
+            "2018-04-13T14:50:02Z",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FakeSnapcraft(
-            build_snap.backend, "test-snap_1.snap")
+            build_snap.backend, "test-snap_1.snap"
+        )
         build_snap.build()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_BUILD_INFO="1",
-                SNAPCRAFT_IMAGE_INFO=(
-                    '{"build-request-timestamp": "2018-04-13T14:50:02Z"}'),
-                SNAPCRAFT_BUILD_ENVIRONMENT="host"),
-            RanBuildCommand(
-                ["sha512sum", "test-snap_1.snap"], cwd="/build/test-snap"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO=(
+                            '{"build-request-timestamp": '
+                            '"2018-04-13T14:50:02Z"}'
+                        ),
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    ),
+                    RanBuildCommand(
+                        ["sha512sum", "test-snap_1.snap"],
+                        cwd="/build/test-snap",
+                    ),
+                ]
+            ),
+        )
 
     def test_build_target_architectures(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo",
-            "--target-arch", "i386",
-            "--target-arch", "amd64",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "--target-arch",
+            "i386",
+            "--target-arch",
+            "amd64",
             "test-snap",
-            ]
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.build()
-        self.assertThat(build_snap.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_BUILD_INFO="1", SNAPCRAFT_IMAGE_INFO="{}",
-                SNAPCRAFT_BUILD_ENVIRONMENT="host", SNAPCRAFT_BUILD_FOR="i386"),
-            ]))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO="{}",
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                        SNAPCRAFT_BUILD_FOR="i386",
+                    ),
+                ]
+            ),
+        )
 
     # XXX cjwatson 2017-08-07: Test revoke_token.  It may be easiest to
     # convert it to requests first.
@@ -530,35 +855,59 @@ class TestBuildSnap(TestCase):
     def test_run_succeeds(self):
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--build-request-id", "13",
-            "--build-url", "https://launchpad.example/build";,
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--build-request-id",
+            "13",
+            "--build-url",
+            "https://launchpad.example/build";,
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FakeRevisionID("42")
         self.assertEqual(0, build_snap.run())
-        self.assertThat(build_snap.backend.run.calls, MatchesAll(
-            AnyMatch(RanAptGet("install", "bzr", "snapcraft")),
-            AnyMatch(RanBuildCommand(
-                ["bzr", "branch", "lp:foo", "test-snap"], cwd="/build")),
-            AnyMatch(RanBuildCommand(
-                ["snapcraft", "pull"], cwd="/build/test-snap",
-                SNAPCRAFT_LOCAL_SOURCES="1", SNAPCRAFT_SETUP_CORE="1",
-                SNAPCRAFT_BUILD_INFO="1",
-                SNAPCRAFT_IMAGE_INFO=(
-                    '{"build-request-id": "lp-13",'
-                    ' "build_url": "https://launchpad.example/build"}'),
-                SNAPCRAFT_BUILD_ENVIRONMENT="host")),
-            AnyMatch(RanBuildCommand(
-                ["snapcraft"], cwd="/build/test-snap",
-                SNAPCRAFT_BUILD_INFO="1",
-                SNAPCRAFT_IMAGE_INFO=(
-                    '{"build-request-id": "lp-13",'
-                    ' "build_url": "https://launchpad.example/build"}'),
-                SNAPCRAFT_BUILD_ENVIRONMENT="host")),
-            ))
+        self.assertThat(
+            build_snap.backend.run.calls,
+            MatchesAll(
+                AnyMatch(RanAptGet("install", "bzr", "snapcraft")),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["bzr", "branch", "lp:foo", "test-snap"], cwd="/build"
+                    )
+                ),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["snapcraft", "pull"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_LOCAL_SOURCES="1",
+                        SNAPCRAFT_SETUP_CORE="1",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO=(
+                            '{"build-request-id": "lp-13",'
+                            ' "build_url": "https://launchpad.example/build"}'
+                        ),
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    )
+                ),
+                AnyMatch(
+                    RanBuildCommand(
+                        ["snapcraft"],
+                        cwd="/build/test-snap",
+                        SNAPCRAFT_BUILD_INFO="1",
+                        SNAPCRAFT_IMAGE_INFO=(
+                            '{"build-request-id": "lp-13",'
+                            ' "build_url": "https://launchpad.example/build"}'
+                        ),
+                        SNAPCRAFT_BUILD_ENVIRONMENT="host",
+                    )
+                ),
+            ),
+        )
 
     def test_run_install_fails(self):
         class FailInstall(FakeMethod):
@@ -570,9 +919,14 @@ class TestBuildSnap(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FailInstall()
         self.assertEqual(RETCODE_FAILURE_INSTALL, build_snap.run())
@@ -587,9 +941,14 @@ class TestBuildSnap(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.run = FailRepo()
         self.assertEqual(RETCODE_FAILURE_BUILD, build_snap.run())
@@ -606,9 +965,14 @@ class TestBuildSnap(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FailPull()
@@ -626,9 +990,14 @@ class TestBuildSnap(TestCase):
         self.useFixture(FakeLogger())
         args = [
             "buildsnap",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", "test-snap",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            "test-snap",
+        ]
         build_snap = parse_args(args=args).operation
         build_snap.backend.build_path = self.useFixture(TempDir()).path
         build_snap.backend.run = FailBuild()
diff --git a/lpbuildd/target/tests/test_chroot.py b/lpbuildd/target/tests/test_chroot.py
index 8f2fd38..e252268 100644
--- a/lpbuildd/target/tests/test_chroot.py
+++ b/lpbuildd/target/tests/test_chroot.py
@@ -4,17 +4,11 @@
 import io
 import os.path
 import signal
-from textwrap import dedent
 import time
+from textwrap import dedent
 
-from fixtures import (
-    EnvironmentVariable,
-    TempDir,
-    )
-from systemfixtures import (
-    FakeProcesses,
-    FakeTime,
-    )
+from fixtures import EnvironmentVariable, TempDir
+from systemfixtures import FakeProcesses, FakeTime
 from testtools import TestCase
 from testtools.matchers import DirContains
 
@@ -25,11 +19,10 @@ from lpbuildd.target.tests.testfixtures import (
     FakeFilesystem,
     KillFixture,
     SudoUmount,
-    )
+)
 
 
 class TestChroot(TestCase):
-
     def setUp(self):
         super().setUp()
         self.useFixture(CarefulFakeProcessFixture())
@@ -41,12 +34,19 @@ class TestChroot(TestCase):
         Chroot("1", "xenial", "amd64").create("/path/to/tarball", "chroot")
 
         expected_args = [
-            ["sudo", "tar", "-C", "/expected/home/build-1",
-             "-xf", "/path/to/tarball"],
-            ]
+            [
+                "sudo",
+                "tar",
+                "-C",
+                "/expected/home/build-1",
+                "-xf",
+                "/path/to/tarball",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_start(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -63,62 +63,134 @@ class TestChroot(TestCase):
         Chroot("1", "xenial", "amd64").start()
 
         expected_args = [
-            ["sudo", "mount", "-t", "proc", "none",
-             "/expected/home/build-1/chroot-autobuild/proc"],
-            ["sudo", "mount", "-t", "devpts", "-o", "gid=5,mode=620", "none",
-             "/expected/home/build-1/chroot-autobuild/dev/pts"],
-            ["sudo", "mount", "-t", "sysfs", "none",
-             "/expected/home/build-1/chroot-autobuild/sys"],
-            ["sudo", "mount", "-t", "tmpfs", "none",
-             "/expected/home/build-1/chroot-autobuild/dev/shm"],
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "644",
-             "/etc/hosts",
-             "/expected/home/build-1/chroot-autobuild/etc/hosts"],
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "644",
-             "/etc/hostname",
-             "/expected/home/build-1/chroot-autobuild/etc/hostname"],
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "644",
-             "/etc/resolv.conf",
-             "/expected/home/build-1/chroot-autobuild/etc/resolv.conf"],
-            ]
+            [
+                "sudo",
+                "mount",
+                "-t",
+                "proc",
+                "none",
+                "/expected/home/build-1/chroot-autobuild/proc",
+            ],
+            [
+                "sudo",
+                "mount",
+                "-t",
+                "devpts",
+                "-o",
+                "gid=5,mode=620",
+                "none",
+                "/expected/home/build-1/chroot-autobuild/dev/pts",
+            ],
+            [
+                "sudo",
+                "mount",
+                "-t",
+                "sysfs",
+                "none",
+                "/expected/home/build-1/chroot-autobuild/sys",
+            ],
+            [
+                "sudo",
+                "mount",
+                "-t",
+                "tmpfs",
+                "none",
+                "/expected/home/build-1/chroot-autobuild/dev/shm",
+            ],
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "644",
+                "/etc/hosts",
+                "/expected/home/build-1/chroot-autobuild/etc/hosts",
+            ],
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "644",
+                "/etc/hostname",
+                "/expected/home/build-1/chroot-autobuild/etc/hostname",
+            ],
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "644",
+                "/etc/resolv.conf",
+                "/expected/home/build-1/chroot-autobuild/etc/resolv.conf",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="sudo")
         Chroot("1", "xenial", "amd64").run(
-            ["apt-get", "update"], env={"LANG": "C"})
+            ["apt-get", "update"], env={"LANG": "C"}
+        )
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "env", "LANG=C", "apt-get", "update"],
-            ]
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "env",
+                "LANG=C",
+                "apt-get",
+                "update",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_get_output(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(
-            lambda _: {"stdout": io.BytesIO(b"hello\n")}, name="sudo")
+            lambda _: {"stdout": io.BytesIO(b"hello\n")}, name="sudo"
+        )
         self.assertEqual(
             b"hello\n",
             Chroot("1", "xenial", "amd64").run(
-                ["echo", "hello"], get_output=True))
+                ["echo", "hello"], get_output=True
+            ),
+        )
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "echo", "hello"],
-            ]
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "echo",
+                "hello",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_non_ascii_arguments(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -128,29 +200,44 @@ class TestChroot(TestCase):
         Chroot("1", "xenial", "amd64").run(["echo", arg])
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "echo", arg],
-            ]
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "echo",
+                arg,
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_env_shell_metacharacters(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="sudo")
         Chroot("1", "xenial", "amd64").run(
-            ["echo", "hello"], env={"OBJECT": "{'foo': 'bar'}"})
+            ["echo", "hello"], env={"OBJECT": "{'foo': 'bar'}"}
+        )
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "env", "OBJECT={'foo': 'bar'}", "echo", "hello"],
-            ]
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "env",
+                "OBJECT={'foo': 'bar'}",
+                "echo",
+                "hello",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_copy_in(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -165,32 +252,50 @@ class TestChroot(TestCase):
         Chroot("1", "xenial", "amd64").copy_in(source_path, target_path)
 
         expected_target_path = (
-            "/expected/home/build-1/chroot-autobuild/path/to/target")
+            "/expected/home/build-1/chroot-autobuild/path/to/target"
+        )
         expected_args = [
-            ["sudo", "install", "-o", "root", "-g", "root", "-m", "644",
-             source_path, expected_target_path],
-            ]
+            [
+                "sudo",
+                "install",
+                "-o",
+                "root",
+                "-g",
+                "root",
+                "-m",
+                "644",
+                source_path,
+                expected_target_path,
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_copy_out(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="sudo")
         Chroot("1", "xenial", "amd64").copy_out(
-            "/path/to/source", "/path/to/target")
+            "/path/to/source", "/path/to/target"
+        )
 
         uid, gid = os.getuid(), os.getgid()
         expected_args = [
-            ["sudo", "cp", "--preserve=timestamps",
-             "/expected/home/build-1/chroot-autobuild/path/to/source",
-             "/path/to/target"],
+            [
+                "sudo",
+                "cp",
+                "--preserve=timestamps",
+                "/expected/home/build-1/chroot-autobuild/path/to/source",
+                "/path/to/target",
+            ],
             ["sudo", "chown", f"{uid}:{gid}", "/path/to/target"],
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_path_exists(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -201,14 +306,21 @@ class TestChroot(TestCase):
         self.assertFalse(Chroot("1", "xenial", "amd64").path_exists("/absent"))
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "test", "-e", path]
-            for path in ("/present", "/absent")
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "test",
+                "-e",
+                path,
             ]
+            for path in ("/present", "/absent")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_isdir(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -219,14 +331,21 @@ class TestChroot(TestCase):
         self.assertFalse(Chroot("1", "xenial", "amd64").isdir("/file"))
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "test", "-d", path]
-            for path in ("/dir", "/file")
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "test",
+                "-d",
+                path,
             ]
+            for path in ("/dir", "/file")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_islink(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -237,48 +356,68 @@ class TestChroot(TestCase):
         self.assertFalse(Chroot("1", "xenial", "amd64").islink("/file"))
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "test", "-h", path]
-            for path in ("/link", "/file")
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "test",
+                "-h",
+                path,
             ]
+            for path in ("/link", "/file")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_find(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
-        test_proc_infos = iter([
-            {"stdout": io.BytesIO(b"foo\0bar\0bar/bar\0bar/baz\0")},
-            {"stdout": io.BytesIO(b"foo\0bar\0")},
-            {"stdout": io.BytesIO(b"foo\0bar/bar\0bar/baz\0")},
-            {"stdout": io.BytesIO(b"bar\0bar/bar\0")},
-            {"stdout": io.BytesIO(b"")},
-            ])
+        test_proc_infos = iter(
+            [
+                {"stdout": io.BytesIO(b"foo\0bar\0bar/bar\0bar/baz\0")},
+                {"stdout": io.BytesIO(b"foo\0bar\0")},
+                {"stdout": io.BytesIO(b"foo\0bar/bar\0bar/baz\0")},
+                {"stdout": io.BytesIO(b"bar\0bar/bar\0")},
+                {"stdout": io.BytesIO(b"")},
+            ]
+        )
         processes_fixture.add(lambda _: next(test_proc_infos), name="sudo")
         self.assertEqual(
             ["foo", "bar", "bar/bar", "bar/baz"],
-            Chroot("1", "xenial", "amd64").find("/path"))
+            Chroot("1", "xenial", "amd64").find("/path"),
+        )
         self.assertEqual(
             ["foo", "bar"],
-            Chroot("1", "xenial", "amd64").find("/path", max_depth=1))
+            Chroot("1", "xenial", "amd64").find("/path", max_depth=1),
+        )
         self.assertEqual(
             ["foo", "bar/bar", "bar/baz"],
             Chroot("1", "xenial", "amd64").find(
-                "/path", include_directories=False))
+                "/path", include_directories=False
+            ),
+        )
         self.assertEqual(
             ["bar", "bar/bar"],
-            Chroot("1", "xenial", "amd64").find("/path", name="bar"))
+            Chroot("1", "xenial", "amd64").find("/path", name="bar"),
+        )
         self.assertEqual(
             [],
-            Chroot("1", "xenial", "amd64").find("/path", name="nonexistent"))
+            Chroot("1", "xenial", "amd64").find("/path", name="nonexistent"),
+        )
 
         find_prefix = [
-            "sudo", "/usr/sbin/chroot",
+            "sudo",
+            "/usr/sbin/chroot",
             "/expected/home/build-1/chroot-autobuild",
-            "linux64", "find", "/path", "-mindepth", "1",
-            ]
+            "linux64",
+            "find",
+            "/path",
+            "-mindepth",
+            "1",
+        ]
         find_suffix = ["-printf", "%P\\0"]
         expected_args = [
             find_prefix + find_suffix,
@@ -286,55 +425,81 @@ class TestChroot(TestCase):
             find_prefix + ["!", "-type", "d"] + find_suffix,
             find_prefix + ["-name", "bar"] + find_suffix,
             find_prefix + ["-name", "nonexistent"] + find_suffix,
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_listdir(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(
-            lambda _: {"stdout": io.BytesIO(b"foo\0bar\0baz\0")}, name="sudo")
+            lambda _: {"stdout": io.BytesIO(b"foo\0bar\0baz\0")}, name="sudo"
+        )
         self.assertEqual(
             ["foo", "bar", "baz"],
-            Chroot("1", "xenial", "amd64").listdir("/path"))
+            Chroot("1", "xenial", "amd64").listdir("/path"),
+        )
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "find", "/path", "-mindepth", "1", "-maxdepth", "1",
-             "-printf", "%P\\0"],
-            ]
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "find",
+                "/path",
+                "-mindepth",
+                "1",
+                "-maxdepth",
+                "1",
+                "-printf",
+                "%P\\0",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_is_package_available(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
-        test_proc_infos = iter([
-            {"stdout": io.StringIO("Package: snapd\n")},
-            {"returncode": 100},
-            {"stderr": io.StringIO("N: No packages found\n")},
-            ])
+        test_proc_infos = iter(
+            [
+                {"stdout": io.StringIO("Package: snapd\n")},
+                {"returncode": 100},
+                {"stderr": io.StringIO("N: No packages found\n")},
+            ]
+        )
         processes_fixture.add(lambda _: next(test_proc_infos), name="sudo")
         self.assertTrue(
-            Chroot("1", "xenial", "amd64").is_package_available("snapd"))
+            Chroot("1", "xenial", "amd64").is_package_available("snapd")
+        )
         self.assertFalse(
-            Chroot("1", "xenial", "amd64").is_package_available("nonexistent"))
+            Chroot("1", "xenial", "amd64").is_package_available("nonexistent")
+        )
         self.assertFalse(
-            Chroot("1", "xenial", "amd64").is_package_available("virtual"))
+            Chroot("1", "xenial", "amd64").is_package_available("virtual")
+        )
 
         expected_args = [
-            ["sudo", "/usr/sbin/chroot",
-             "/expected/home/build-1/chroot-autobuild",
-             "linux64", "apt-cache", "show", package]
-            for package in ("snapd", "nonexistent", "virtual")
+            [
+                "sudo",
+                "/usr/sbin/chroot",
+                "/expected/home/build-1/chroot-autobuild",
+                "linux64",
+                "apt-cache",
+                "show",
+                package,
             ]
+            for package in ("snapd", "nonexistent", "virtual")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_kill_processes(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -352,20 +517,20 @@ class TestChroot(TestCase):
         os.symlink("/expected/home/build-1/chroot-autobuild", "/proc/11/root")
         os.mkdir("/proc/12")
         os.symlink(
-            "/expected/home/build-1/chroot-autobuild/submount",
-            "/proc/12/root")
+            "/expected/home/build-1/chroot-autobuild/submount", "/proc/12/root"
+        )
         os.mkdir("/proc/13")
         os.symlink(
-            "/expected/home/build-1/chroot-autobuildsomething",
-            "/proc/13/root")
+            "/expected/home/build-1/chroot-autobuildsomething", "/proc/13/root"
+        )
         with open("/proc/version", "w"):
             pass
         kill_fixture = self.useFixture(KillFixture(delays={10: 1}))
         Chroot("1", "xenial", "amd64").kill_processes()
 
         self.assertEqual(
-            [(pid, signal.SIGKILL) for pid in (11, 12, 10)],
-            kill_fixture.kills)
+            [(pid, signal.SIGKILL) for pid in (11, 12, 10)], kill_fixture.kills
+        )
         self.assertThat("/proc", DirContains(["1", "13", "version"]))
 
     def _make_initial_proc_mounts(self):
@@ -375,14 +540,18 @@ class TestChroot(TestCase):
         os.mkdir("/proc")
         with open("/proc/mounts", "w") as mounts_file:
             chroot = "/expected/home/build-1/chroot-autobuild"
-            mounts_file.write(dedent(f"""\
+            mounts_file.write(
+                dedent(
+                    f"""\
                 sysfs /sys sysfs rw,nosuid,nodev,noexec,relatime 0 0
                 proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
                 none {chroot}/proc proc rw,relatime 0 0
                 none {chroot}/dev/pts devpts rw,relative,gid=5,mode=620 0 0
                 none {chroot}/sys sysfs rw,relatime 0 0
                 none {chroot}/dev/shm tmpfs rw,relatime 0 0
-                """))
+                """
+                )
+            )
 
     def test_stop(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -399,10 +568,11 @@ class TestChroot(TestCase):
             ["sudo", "umount", expected_chroot_path + "/sys"],
             ["sudo", "umount", expected_chroot_path + "/dev/pts"],
             ["sudo", "umount", expected_chroot_path + "/proc"],
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
         self.assertEqual(start_time, time.time())
 
     def test_stop_retries(self):
@@ -422,10 +592,11 @@ class TestChroot(TestCase):
             ["sudo", "umount", expected_chroot_path + "/dev/pts"],
             ["sudo", "umount", expected_chroot_path + "/proc"],
             ["sudo", "umount", expected_chroot_path + "/sys"],
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
         self.assertEqual(start_time + 1, time.time())
 
     def test_stop_too_many_retries(self):
@@ -438,7 +609,8 @@ class TestChroot(TestCase):
         self.useFixture(FakeTime())
         start_time = time.time()
         self.assertRaises(
-            BackendException, Chroot("1", "xenial", "amd64").stop)
+            BackendException, Chroot("1", "xenial", "amd64").stop
+        )
 
         expected_chroot_path = "/expected/home/build-1/chroot-autobuild"
         expected_args = [
@@ -446,13 +618,15 @@ class TestChroot(TestCase):
             ["sudo", "umount", expected_chroot_path + "/sys"],
             ["sudo", "umount", expected_chroot_path + "/dev/pts"],
             ["sudo", "umount", expected_chroot_path + "/proc"],
-            ]
+        ]
         expected_args.extend(
-            [["sudo", "umount", expected_chroot_path + "/sys"]] * 19)
+            [["sudo", "umount", expected_chroot_path + "/sys"]] * 19
+        )
         expected_args.append(["lsof", expected_chroot_path])
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
         self.assertEqual(start_time + 20, time.time())
 
     def test_remove(self):
@@ -464,4 +638,5 @@ class TestChroot(TestCase):
         expected_args = [["sudo", "rm", "-rf", "/expected/home/build-1"]]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
diff --git a/lpbuildd/target/tests/test_generate_translation_templates.py b/lpbuildd/target/tests/test_generate_translation_templates.py
index 0ce26c8..147bc1c 100644
--- a/lpbuildd/target/tests/test_generate_translation_templates.py
+++ b/lpbuildd/target/tests/test_generate_translation_templates.py
@@ -6,23 +6,12 @@ import subprocess
 import tarfile
 from unittest import mock
 
-from fixtures import (
-    EnvironmentVariable,
-    FakeLogger,
-    TempDir,
-    )
+from fixtures import EnvironmentVariable, FakeLogger, TempDir
 from testtools import TestCase
-from testtools.matchers import (
-    Equals,
-    MatchesListwise,
-    MatchesSetwise,
-    )
+from testtools.matchers import Equals, MatchesListwise, MatchesSetwise
 
 from lpbuildd.target.cli import parse_args
-from lpbuildd.target.tests.matchers import (
-    RanAptGet,
-    RanCommand,
-    )
+from lpbuildd.target.tests.matchers import RanAptGet, RanCommand
 
 
 class TestGenerateTranslationTemplates(TestCase):
@@ -46,7 +35,7 @@ class TestGenerateTranslationTemplates(TestCase):
         """
         branch_path = self.useFixture(TempDir()).path
         for name, contents in content_map.items():
-            with open(os.path.join(branch_path, name), 'wb') as f:
+            with open(os.path.join(branch_path, name), "wb") as f:
                 f.write(contents)
         return branch_path
 
@@ -66,78 +55,118 @@ class TestGenerateTranslationTemplates(TestCase):
             with EnvironmentVariable("BRZ_EMAIL", committer_id):
                 subprocess.check_call(
                     ["bzr", "commit", "-q", "-m", "Populating branch."],
-                    cwd=branch_path)
+                    cwd=branch_path,
+                )
 
     def make_git_branch(self, branch_path):
         subprocess.check_call(["git", "init", "-q"], cwd=branch_path)
         subprocess.check_call(
-            ["git", "config", "user.name", "Committer"], cwd=branch_path)
+            ["git", "config", "user.name", "Committer"], cwd=branch_path
+        )
         subprocess.check_call(
             ["git", "config", "user.email", "committer@xxxxxxxxxxx"],
-            cwd=branch_path)
+            cwd=branch_path,
+        )
         subprocess.check_call(["git", "add", "."], cwd=branch_path)
         subprocess.check_call(
-            ["git", "commit", "-q", "--allow-empty",
-             "-m", "Populating branch"],
-            cwd=branch_path)
+            [
+                "git",
+                "commit",
+                "-q",
+                "--allow-empty",
+                "-m",
+                "Populating branch",
+            ],
+            cwd=branch_path,
+        )
 
     def test_install_bzr(self):
         args = [
             "generate-translation-templates",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", "lp:foo", self.result_name,
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            "lp:foo",
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.install()
-        self.assertThat(generator.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "intltool", "bzr"),
-            ]))
+        self.assertThat(
+            generator.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "intltool", "bzr"),
+                ]
+            ),
+        )
 
     def test_install_git(self):
         args = [
             "generate-translation-templates",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", self.result_name,
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.install()
-        self.assertThat(generator.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "intltool", "git"),
-            ]))
+        self.assertThat(
+            generator.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "intltool", "git"),
+                ]
+            ),
+        )
 
     def test_fetch_bzr(self):
         # fetch can retrieve branch contents from a Bazaar branch.
         marker_text = b"Ceci n'est pas cet branch."
-        branch_path = self.make_branch_contents({'marker.txt': marker_text})
+        branch_path = self.make_branch_contents({"marker.txt": marker_text})
         self.make_bzr_branch(branch_path)
 
         args = [
             "generate-translation-templates",
-            "--backend=uncontained", "--series=xenial", "--arch=amd64", "1",
-            "--branch", branch_path, self.result_name,
-            ]
+            "--backend=uncontained",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            branch_path,
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.fetch(quiet=True)
 
-        marker_path = os.path.join(generator.branch_dir, 'marker.txt')
+        marker_path = os.path.join(generator.branch_dir, "marker.txt")
         with open(marker_path, "rb") as marker_file:
             self.assertEqual(marker_text, marker_file.read())
 
     def test_fetch_git(self):
         # fetch can retrieve branch contents from a Git repository.
         marker_text = b"Ceci n'est pas cet branch."
-        branch_path = self.make_branch_contents({'marker.txt': marker_text})
+        branch_path = self.make_branch_contents({"marker.txt": marker_text})
         self.make_git_branch(branch_path)
 
         args = [
             "generate-translation-templates",
-            "--backend=uncontained", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", branch_path, self.result_name,
-            ]
+            "--backend=uncontained",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            branch_path,
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.fetch(quiet=True)
 
-        marker_path = os.path.join(generator.branch_dir, 'marker.txt')
+        marker_path = os.path.join(generator.branch_dir, "marker.txt")
         with open(marker_path, "rb") as marker_file:
             self.assertEqual(marker_text, marker_file.read())
 
@@ -145,46 +174,61 @@ class TestGenerateTranslationTemplates(TestCase):
         # fetch can retrieve branch contents from a Git repository and
         # branch name.
         marker_text = b"Ceci n'est pas cet branch."
-        branch_path = self.make_branch_contents({'marker.txt': marker_text})
+        branch_path = self.make_branch_contents({"marker.txt": marker_text})
         self.make_git_branch(branch_path)
         subprocess.call(
-            ["git", "branch", "-m", "master", "next"], cwd=branch_path)
+            ["git", "branch", "-m", "master", "next"], cwd=branch_path
+        )
 
         args = [
             "generate-translation-templates",
-            "--backend=uncontained", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", branch_path, "--git-path", "next",
+            "--backend=uncontained",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            branch_path,
+            "--git-path",
+            "next",
             self.result_name,
-            ]
+        ]
         generator = parse_args(args=args).operation
         generator.fetch(quiet=True)
 
-        marker_path = os.path.join(generator.branch_dir, 'marker.txt')
+        marker_path = os.path.join(generator.branch_dir, "marker.txt")
         with open(marker_path, "rb") as marker_file:
             self.assertEqual(marker_text, marker_file.read())
 
     def test_templates_tarball(self):
         # Create a tarball from pot files.
-        branchdir = os.path.join(self.home_dir, 'branchdir')
+        branchdir = os.path.join(self.home_dir, "branchdir")
         dummy_tar = os.path.join(
-            os.path.dirname(__file__), 'dummy_templates.tar.gz')
-        with tarfile.open(dummy_tar, 'r|*') as tar:
+            os.path.dirname(__file__), "dummy_templates.tar.gz"
+        )
+        with tarfile.open(dummy_tar, "r|*") as tar:
             tar.extractall(branchdir)
             potnames = [
                 member.name
-                for member in tar.getmembers() if not member.isdir()]
+                for member in tar.getmembers()
+                if not member.isdir()
+            ]
         self.make_bzr_branch(branchdir)
 
         args = [
             "generate-translation-templates",
-            "--backend=uncontained", "--series=xenial", "--arch=amd64", "1",
-            "--branch", branchdir, self.result_name,
-            ]
+            "--backend=uncontained",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            branchdir,
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.fetch(quiet=True)
         generator._makeTarball(potnames)
         result_path = os.path.join(self.home_dir, self.result_name)
-        with tarfile.open(result_path, 'r|*') as tar:
+        with tarfile.open(result_path, "r|*") as tar:
             tarnames = tar.getnames()
         self.assertThat(tarnames, MatchesSetwise(*(map(Equals, potnames))))
 
@@ -197,32 +241,64 @@ class TestGenerateTranslationTemplates(TestCase):
 
         args = [
             "generate-translation-templates",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--branch", branch_url, self.result_name,
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--branch",
+            branch_url,
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.backend.add_file(os.path.join(po_dir, "POTFILES.in"), b"")
         generator.backend.add_file(
-            os.path.join(po_dir, "Makevars"), b"DOMAIN = test\n")
+            os.path.join(po_dir, "Makevars"), b"DOMAIN = test\n"
+        )
         generator.run()
-        self.assertThat(generator.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "intltool", "bzr"),
-            RanCommand(
-                ["bzr", "branch", "lp:~my/branch", "source-tree"],
-                cwd=self.home_dir, LANG="C.UTF-8", SHELL="/bin/sh"),
-            RanCommand(
-                ["rm", "-f",
-                 os.path.join(po_dir, "missing"),
-                 os.path.join(po_dir, "notexist")]),
-            RanCommand(
-                ["/usr/bin/intltool-update", "-m"],
-                stdout=mock.ANY, stderr=mock.ANY, cwd=po_dir),
-            RanCommand(
-                ["/usr/bin/intltool-update", "-p", "-g", "test"],
-                stdout=mock.ANY, stderr=mock.ANY, cwd=po_dir),
-            RanCommand(
-                ["tar", "-C", branch_dir, "-czf", result_path, "po/test.pot"]),
-            ]))
+        self.assertThat(
+            generator.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "intltool", "bzr"),
+                    RanCommand(
+                        ["bzr", "branch", "lp:~my/branch", "source-tree"],
+                        cwd=self.home_dir,
+                        LANG="C.UTF-8",
+                        SHELL="/bin/sh",
+                    ),
+                    RanCommand(
+                        [
+                            "rm",
+                            "-f",
+                            os.path.join(po_dir, "missing"),
+                            os.path.join(po_dir, "notexist"),
+                        ]
+                    ),
+                    RanCommand(
+                        ["/usr/bin/intltool-update", "-m"],
+                        stdout=mock.ANY,
+                        stderr=mock.ANY,
+                        cwd=po_dir,
+                    ),
+                    RanCommand(
+                        ["/usr/bin/intltool-update", "-p", "-g", "test"],
+                        stdout=mock.ANY,
+                        stderr=mock.ANY,
+                        cwd=po_dir,
+                    ),
+                    RanCommand(
+                        [
+                            "tar",
+                            "-C",
+                            branch_dir,
+                            "-czf",
+                            result_path,
+                            "po/test.pot",
+                        ]
+                    ),
+                ]
+            ),
+        )
 
     def test_run_git(self):
         # Install dependencies and generate a templates tarball from Git.
@@ -233,35 +309,85 @@ class TestGenerateTranslationTemplates(TestCase):
 
         args = [
             "generate-translation-templates",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--git-repository", repository_url, self.result_name,
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            repository_url,
+            self.result_name,
+        ]
         generator = parse_args(args=args).operation
         generator.backend.add_file(os.path.join(po_dir, "POTFILES.in"), b"")
         generator.backend.add_file(
-            os.path.join(po_dir, "Makevars"), b"DOMAIN = test\n")
+            os.path.join(po_dir, "Makevars"), b"DOMAIN = test\n"
+        )
         generator.run()
-        self.assertThat(generator.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "intltool", "git"),
-            RanCommand(
-                ["git", "clone", "-n", "lp:~my/repository", "source-tree"],
-                cwd=self.home_dir, LANG="C.UTF-8", SHELL="/bin/sh"),
-            RanCommand(
-                ["git", "checkout", "-q", "HEAD"],
-                cwd=branch_dir, LANG="C.UTF-8", SHELL="/bin/sh"),
-            RanCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd=branch_dir, LANG="C.UTF-8", SHELL="/bin/sh"),
-            RanCommand(
-                ["rm", "-f",
-                 os.path.join(po_dir, "missing"),
-                 os.path.join(po_dir, "notexist")]),
-            RanCommand(
-                ["/usr/bin/intltool-update", "-m"],
-                stdout=mock.ANY, stderr=mock.ANY, cwd=po_dir),
-            RanCommand(
-                ["/usr/bin/intltool-update", "-p", "-g", "test"],
-                stdout=mock.ANY, stderr=mock.ANY, cwd=po_dir),
-            RanCommand(
-                ["tar", "-C", branch_dir, "-czf", result_path, "po/test.pot"]),
-            ]))
+        self.assertThat(
+            generator.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "intltool", "git"),
+                    RanCommand(
+                        [
+                            "git",
+                            "clone",
+                            "-n",
+                            "lp:~my/repository",
+                            "source-tree",
+                        ],
+                        cwd=self.home_dir,
+                        LANG="C.UTF-8",
+                        SHELL="/bin/sh",
+                    ),
+                    RanCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd=branch_dir,
+                        LANG="C.UTF-8",
+                        SHELL="/bin/sh",
+                    ),
+                    RanCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd=branch_dir,
+                        LANG="C.UTF-8",
+                        SHELL="/bin/sh",
+                    ),
+                    RanCommand(
+                        [
+                            "rm",
+                            "-f",
+                            os.path.join(po_dir, "missing"),
+                            os.path.join(po_dir, "notexist"),
+                        ]
+                    ),
+                    RanCommand(
+                        ["/usr/bin/intltool-update", "-m"],
+                        stdout=mock.ANY,
+                        stderr=mock.ANY,
+                        cwd=po_dir,
+                    ),
+                    RanCommand(
+                        ["/usr/bin/intltool-update", "-p", "-g", "test"],
+                        stdout=mock.ANY,
+                        stderr=mock.ANY,
+                        cwd=po_dir,
+                    ),
+                    RanCommand(
+                        [
+                            "tar",
+                            "-C",
+                            branch_dir,
+                            "-czf",
+                            result_path,
+                            "po/test.pot",
+                        ]
+                    ),
+                ]
+            ),
+        )
diff --git a/lpbuildd/target/tests/test_lifecycle.py b/lpbuildd/target/tests/test_lifecycle.py
index 89e03c7..a96e6b8 100644
--- a/lpbuildd/target/tests/test_lifecycle.py
+++ b/lpbuildd/target/tests/test_lifecycle.py
@@ -12,64 +12,78 @@ from lpbuildd.target.cli import parse_args
 
 
 class TestCreate(TestCase):
-
     def test_succeeds(self):
         args = [
             "unpack-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
             "/path/to/tarball",
-            ]
+        ]
         create = parse_args(args=args).operation
         self.assertEqual(0, create.run())
         self.assertEqual(
-            [(("/path/to/tarball", "chroot"), {})],
-            create.backend.create.calls)
+            [(("/path/to/tarball", "chroot"), {})], create.backend.create.calls
+        )
 
     def test_image_type(self):
         args = [
             "unpack-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            "--image-type", "lxd", "/path/to/tarball",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+            "--image-type",
+            "lxd",
+            "/path/to/tarball",
+        ]
         create = parse_args(args=args).operation
         self.assertEqual(0, create.run())
         self.assertEqual(
-            [(("/path/to/tarball", "lxd"), {})],
-            create.backend.create.calls)
+            [(("/path/to/tarball", "lxd"), {})], create.backend.create.calls
+        )
 
 
 class TestStart(TestCase):
-
     def test_succeeds(self):
         args = [
             "mount-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         start = parse_args(args=args).operation
         self.assertEqual(0, start.run())
         self.assertEqual([((), {})], start.backend.start.calls)
 
 
 class TestKillProcesses(TestCase):
-
     def test_succeeds(self):
         args = [
             "scan-for-processes",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         kill_processes = parse_args(args=args).operation
         self.assertEqual(0, kill_processes._run())
         self.assertEqual(
-            [((), {})], kill_processes.backend.kill_processes.calls)
+            [((), {})], kill_processes.backend.kill_processes.calls
+        )
 
 
 class TestStop(TestCase):
-
     def test_succeeds(self):
         args = [
             "umount-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         stop = parse_args(args=args).operation
         self.assertEqual(0, stop.run())
         self.assertEqual([((), {})], stop.backend.stop.calls)
@@ -78,26 +92,38 @@ class TestStop(TestCase):
         logger = self.useFixture(FakeLogger())
         args = [
             "umount-chroot",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         stop = parse_args(args=args).operation
         stop.backend.stop.failure = BackendException
         self.assertEqual(1, stop.run())
         self.assertEqual([((), {})], stop.backend.stop.calls)
-        self.assertThat(logger.output, StartsWith(dedent("""\
+        self.assertThat(
+            logger.output,
+            StartsWith(
+                dedent(
+                    """\
             Stopping target for build 1
             Failed to stop target
             Traceback (most recent call last):
-            """)))
+            """
+                )
+            ),
+        )
 
 
 class TestRemove(TestCase):
-
     def test_succeeds(self):
         args = [
             "remove-build",
-            "--backend=fake", "--series=xenial", "--arch=amd64", "1",
-            ]
+            "--backend=fake",
+            "--series=xenial",
+            "--arch=amd64",
+            "1",
+        ]
         remove = parse_args(args=args).operation
         self.assertEqual(0, remove.run())
         self.assertEqual([((), {})], remove.backend.remove.calls)
diff --git a/lpbuildd/target/tests/test_lxd.py b/lpbuildd/target/tests/test_lxd.py
index 9d10c09..04078fa 100644
--- a/lpbuildd/target/tests/test_lxd.py
+++ b/lpbuildd/target/tests/test_lxd.py
@@ -2,28 +2,22 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 import argparse
-from contextlib import closing
 import io
 import json
 import os
 import random
 import stat
 import tarfile
-from textwrap import dedent
 import time
+from contextlib import closing
+from textwrap import dedent
 from unittest import mock
 
-from fixtures import (
-    EnvironmentVariable,
-    MockPatch,
-    TempDir,
-    )
 import pylxd
+from fixtures import EnvironmentVariable, MockPatch, TempDir
 from pylxd.exceptions import LXDAPIException
-from systemfixtures import (
-    FakeFilesystem as _FakeFilesystem,
-    FakeProcesses,
-    )
+from systemfixtures import FakeFilesystem as _FakeFilesystem
+from systemfixtures import FakeProcesses
 from systemfixtures._overlay import Overlay
 from testtools import TestCase
 from testtools.matchers import (
@@ -33,23 +27,16 @@ from testtools.matchers import (
     HasPermissions,
     MatchesDict,
     MatchesListwise,
-    )
-
-from lpbuildd.target.lxd import (
-    LXD,
-    LXDException,
-    fallback_hosts,
-    policy_rc_d,
-    )
+)
+
+from lpbuildd.target.lxd import LXD, LXDException, fallback_hosts, policy_rc_d
 from lpbuildd.target.tests.testfixtures import CarefulFakeProcessFixture
 from lpbuildd.util import get_arch_bits
 
-
 LXD_RUNNING = 103
 
 
 class FakeLXDAPIException(LXDAPIException):
-
     def __init__(self):
         super().__init__(None)
 
@@ -58,7 +45,6 @@ class FakeLXDAPIException(LXDAPIException):
 
 
 class FakeSessionGet:
-
     def __init__(self, file_contents):
         self.file_contents = file_contents
 
@@ -68,14 +54,14 @@ class FakeSessionGet:
         if params["path"] in self.file_contents:
             response.status_code = 200
             response.iter_content.return_value = iter(
-                self.file_contents[params["path"]])
+                self.file_contents[params["path"]]
+            )
         else:
             response.json.return_value = {"error": "not found"}
         return response
 
 
 class FakeHostname:
-
     def __init__(self, hostname, fqdn):
         self.hostname = hostname
         self.fqdn = fqdn
@@ -94,8 +80,7 @@ class FakeFilesystem(_FakeFilesystem):
     def _setUp(self):
         super()._setUp()
         self._devices = {}
-        self.useFixture(
-            Overlay("os.mknod", self._mknod, self._is_fake_path))
+        self.useFixture(Overlay("os.mknod", self._mknod, self._is_fake_path))
 
     def _stat(self, real, path, *args, **kwargs):
         r = super()._stat(real, path, *args, **kwargs)
@@ -119,7 +104,6 @@ class FakeFilesystem(_FakeFilesystem):
 
 
 class TestLXD(TestCase):
-
     def setUp(self):
         super().setUp()
         self.useFixture(CarefulFakeProcessFixture())
@@ -149,11 +133,18 @@ class TestLXD(TestCase):
                 "series": "xenial",
                 "architecture": "amd64",
                 "description": "Launchpad chroot for Ubuntu xenial (amd64)",
-                },
-            }
-        metadata_yaml = json.dumps(
-            metadata, sort_keys=True, indent=4, separators=(",", ": "),
-            ensure_ascii=False).encode("UTF-8") + b"\n"
+            },
+        }
+        metadata_yaml = (
+            json.dumps(
+                metadata,
+                sort_keys=True,
+                indent=4,
+                separators=(",", ": "),
+                ensure_ascii=False,
+            ).encode("UTF-8")
+            + b"\n"
+        )
         with tarfile.open(output_path, "w:gz") as tar:
             metadata_file = tarfile.TarInfo(name="metadata.yaml")
             metadata_file.size = len(metadata_yaml)
@@ -169,7 +160,8 @@ class TestLXD(TestCase):
             creation_time = source_tarball.getmember("chroot-autobuild").mtime
             with tarfile.open(target_tarball_path, "w:gz") as target_tarball:
                 LXD("1", "xenial", "amd64")._convert(
-                    source_tarball, target_tarball)
+                    source_tarball, target_tarball
+                )
 
         target = os.path.join(tmp, "target")
         with tarfile.open(target_tarball_path, "r") as target_tarball:
@@ -177,17 +169,25 @@ class TestLXD(TestCase):
         self.assertThat(target, DirContains(["metadata.yaml", "rootfs"]))
         with open(os.path.join(target, "metadata.yaml")) as metadata_file:
             metadata = json.load(metadata_file)
-        self.assertThat(metadata, MatchesDict({
-            "architecture": Equals("x86_64"),
-            "creation_date": Equals(creation_time),
-            "properties": MatchesDict({
-                "os": Equals("Ubuntu"),
-                "series": Equals("xenial"),
-                "architecture": Equals("amd64"),
-                "description": Equals(
-                    "Launchpad chroot for Ubuntu xenial (amd64)"),
-                }),
-            }))
+        self.assertThat(
+            metadata,
+            MatchesDict(
+                {
+                    "architecture": Equals("x86_64"),
+                    "creation_date": Equals(creation_time),
+                    "properties": MatchesDict(
+                        {
+                            "os": Equals("Ubuntu"),
+                            "series": Equals("xenial"),
+                            "architecture": Equals("amd64"),
+                            "description": Equals(
+                                "Launchpad chroot for Ubuntu xenial (amd64)"
+                            ),
+                        }
+                    ),
+                }
+            ),
+        )
         rootfs = os.path.join(target, "rootfs")
         self.assertThat(rootfs, DirContains(["bin"]))
         self.assertThat(os.path.join(rootfs, "bin"), DirContains(["hello"]))
@@ -213,17 +213,21 @@ class TestLXD(TestCase):
 
         self.assertThat(
             [proc._args["args"] for proc in processes_fixture.procs],
-            MatchesListwise([
-                Equals(["sudo", "lxd", "init", "--auto"]),
-                Equals(["lxc", "list"]),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(["sudo", "lxd", "init", "--auto"]),
+                    Equals(["lxc", "list"]),
+                ]
+            ),
+        )
         client.images.create.assert_called_once_with(mock.ANY, wait=True)
         with io.BytesIO(client.images.create.call_args[0][0]) as f:
             with tarfile.open(fileobj=f) as tar:
                 with closing(tar.extractfile("rootfs/bin/hello")) as hello:
                     self.assertEqual(b"hello\n", hello.read())
         image.add_alias.assert_called_once_with(
-            "lp-xenial-amd64", "lp-xenial-amd64")
+            "lp-xenial-amd64", "lp-xenial-amd64"
+        )
 
     def test_create_from_lxd(self):
         fs_fixture = self.useFixture(FakeFilesystem())
@@ -243,17 +247,21 @@ class TestLXD(TestCase):
 
         self.assertThat(
             [proc._args["args"] for proc in processes_fixture.procs],
-            MatchesListwise([
-                Equals(["sudo", "lxd", "init", "--auto"]),
-                Equals(["lxc", "list"]),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(["sudo", "lxd", "init", "--auto"]),
+                    Equals(["lxc", "list"]),
+                ]
+            ),
+        )
         client.images.create.assert_called_once_with(mock.ANY, wait=True)
         with io.BytesIO(client.images.create.call_args[0][0]) as f:
             with tarfile.open(fileobj=f) as tar:
                 with closing(tar.extractfile("rootfs/bin/hello")) as hello:
                     self.assertEqual(b"hello\n", hello.read())
         image.add_alias.assert_called_once_with(
-            "lp-xenial-amd64", "lp-xenial-amd64")
+            "lp-xenial-amd64", "lp-xenial-amd64"
+        )
 
     def test_create_with_already_initialized_lxd(self):
         fs_fixture = self.useFixture(FakeFilesystem())
@@ -279,10 +287,15 @@ class TestLXD(TestCase):
                 with closing(tar.extractfile("rootfs/bin/hello")) as hello:
                     self.assertEqual(b"hello\n", hello.read())
         image.add_alias.assert_called_once_with(
-            "lp-xenial-amd64", "lp-xenial-amd64")
-
-    def assert_correct_profile(self, extra_raw_lxc_config=None,
-                               driver_version="2.0", gpu_nvidia_paths=False):
+            "lp-xenial-amd64", "lp-xenial-amd64"
+        )
+
+    def assert_correct_profile(
+        self,
+        extra_raw_lxc_config=None,
+        driver_version="2.0",
+        gpu_nvidia_paths=False,
+    ):
         if extra_raw_lxc_config is None:
             extra_raw_lxc_config = []
 
@@ -296,46 +309,51 @@ class TestLXD(TestCase):
             ("lxc.cgroup.devices.allow", ""),
             ("lxc.mount.auto", ""),
             ("lxc.mount.auto", "proc:rw sys:rw"),
-            ]
+        ]
 
         major, minor = (int(v) for v in driver_version.split(".")[0:2])
 
         if major >= 3:
-            raw_lxc_config.extend([
-                ("lxc.apparmor.profile", "unconfined"),
-                ("lxc.net.0.ipv4.address", "10.10.10.2/24"),
-                ("lxc.net.0.ipv4.gateway", "10.10.10.1"),
-                ])
+            raw_lxc_config.extend(
+                [
+                    ("lxc.apparmor.profile", "unconfined"),
+                    ("lxc.net.0.ipv4.address", "10.10.10.2/24"),
+                    ("lxc.net.0.ipv4.gateway", "10.10.10.1"),
+                ]
+            )
         else:
-            raw_lxc_config.extend([
-                ("lxc.aa_profile", "unconfined"),
-                ("lxc.network.0.ipv4", "10.10.10.2/24"),
-                ("lxc.network.0.ipv4.gateway", "10.10.10.1"),
-                ])
+            raw_lxc_config.extend(
+                [
+                    ("lxc.aa_profile", "unconfined"),
+                    ("lxc.network.0.ipv4", "10.10.10.2/24"),
+                    ("lxc.network.0.ipv4.gateway", "10.10.10.1"),
+                ]
+            )
 
         raw_lxc_config = "".join(
             f"{key}={val}\n"
-            for key, val in sorted(raw_lxc_config + extra_raw_lxc_config))
+            for key, val in sorted(raw_lxc_config + extra_raw_lxc_config)
+        )
 
         expected_config = {
             "security.privileged": "true",
             "security.nesting": "true",
             "raw.lxc": raw_lxc_config,
-            }
+        }
         expected_devices = {
             "eth0": {
                 "name": "eth0",
                 "nictype": "bridged",
                 "parent": "lpbuilddbr0",
                 "type": "nic",
-                },
-            }
+            },
+        }
         if driver_version == "3.0":
             expected_devices["root"] = {
                 "path": "/",
                 "pool": "default",
                 "type": "disk",
-                }
+            }
         if gpu_nvidia_paths:
             for i, path in enumerate(gpu_nvidia_paths):
                 if not path.startswith("/dev/"):
@@ -343,9 +361,10 @@ class TestLXD(TestCase):
                         "path": path,
                         "source": path,
                         "type": "disk",
-                        }
+                    }
         client.profiles.create.assert_called_once_with(
-            "lpbuildd", expected_config, expected_devices)
+            "lpbuildd", expected_config, expected_devices
+        )
 
     def test_create_profile_amd64(self):
         with MockPatch("pylxd.Client"):
@@ -355,10 +374,11 @@ class TestLXD(TestCase):
                 client.profiles.get.side_effect = FakeLXDAPIException
                 client.host_info = {
                     "environment": {"driver_version": driver_version}
-                    }
+                }
                 LXD("1", "xenial", "amd64").create_profile()
                 self.assert_correct_profile(
-                        driver_version=driver_version or "3.0")
+                    driver_version=driver_version or "3.0"
+                )
 
     def test_create_profile_powerpc(self):
         with MockPatch("pylxd.Client"):
@@ -368,12 +388,14 @@ class TestLXD(TestCase):
                 client.profiles.get.side_effect = FakeLXDAPIException
                 client.host_info = {
                     "environment": {"driver_version": driver_version}
-                    }
+                }
                 LXD("1", "xenial", "powerpc").create_profile()
                 self.assert_correct_profile(
-                        extra_raw_lxc_config=[("lxc.seccomp", ""), ],
-                        driver_version=driver_version or "3.0"
-                        )
+                    extra_raw_lxc_config=[
+                        ("lxc.seccomp", ""),
+                    ],
+                    driver_version=driver_version or "3.0",
+                )
 
     def test_create_profile_gpu_nvidia(self):
         with MockPatch("pylxd.Client"):
@@ -385,18 +407,21 @@ class TestLXD(TestCase):
                 "/dev/nvidiactl",
                 "/usr/bin/nvidia-smi",
                 "/usr/bin/nvidia-persistenced",
-                ]
+            ]
             processes_fixture = self.useFixture(FakeProcesses())
             processes_fixture.add(
                 lambda _: {
                     "stdout": io.StringIO(
-                        "".join(f"{path}\n" for path in gpu_nvidia_paths)),
-                    },
-                name="/snap/lxd/current/bin/nvidia-container-cli.real")
+                        "".join(f"{path}\n" for path in gpu_nvidia_paths)
+                    ),
+                },
+                name="/snap/lxd/current/bin/nvidia-container-cli.real",
+            )
             backend = LXD("1", "xenial", "amd64", constraints=["gpu-nvidia"])
             backend.create_profile()
             self.assert_correct_profile(
-                driver_version="3.0", gpu_nvidia_paths=gpu_nvidia_paths)
+                driver_version="3.0", gpu_nvidia_paths=gpu_nvidia_paths
+            )
 
     def fakeFS(self):
         fs_fixture = self.useFixture(FakeFilesystem())
@@ -418,8 +443,9 @@ class TestLXD(TestCase):
 
     # XXX cjwatson 2022-08-25: Refactor this to use some more sensible kind
     # of test parameterization.
-    def test_start(self, arch="amd64", unmounts_cpuinfo=False,
-                   gpu_nvidia=False):
+    def test_start(
+        self, arch="amd64", unmounts_cpuinfo=False, gpu_nvidia=False
+    ):
         self.fakeFS()
         DM_BLOCK_MAJOR = random.randrange(128, 255)
         with open("/proc/devices", "w") as f:
@@ -431,8 +457,9 @@ class TestLXD(TestCase):
         container = client.containers.create.return_value
         client.containers.get.return_value = container
         client.host_info = {"environment": {"driver_version": "2.0"}}
-        container.start.side_effect = (
-            lambda wait=False: setattr(container, "status_code", LXD_RUNNING))
+        container.start.side_effect = lambda wait=False: setattr(
+            container, "status_code", LXD_RUNNING
+        )
         files_api = container.api.files
         files_api._api_endpoint = f"/1.0/containers/lp-xenial-{arch}/files"
         existing_files = {
@@ -443,31 +470,32 @@ class TestLXD(TestCase):
         processes_fixture.add(lambda _: {}, name="sudo")
         processes_fixture.add(lambda _: {}, name="lxc")
         processes_fixture.add(
-            FakeHostname("example", "example.buildd"), name="hostname")
+            FakeHostname("example", "example.buildd"), name="hostname"
+        )
         if gpu_nvidia:
+            os.mknod("/dev/nvidia0", stat.S_IFCHR | 0o666, os.makedev(195, 0))
             os.mknod(
-                "/dev/nvidia0", stat.S_IFCHR | 0o666, os.makedev(195, 0))
-            os.mknod(
-                "/dev/nvidiactl", stat.S_IFCHR | 0o666, os.makedev(195, 255))
+                "/dev/nvidiactl", stat.S_IFCHR | 0o666, os.makedev(195, 255)
+            )
             gpu_nvidia_paths = [
                 "/dev/nvidia0",
                 "/dev/nvidiactl",
                 "/usr/bin/nvidia-smi",
                 "/usr/bin/nvidia-persistenced",
-                ]
+            ]
             processes_fixture.add(
                 lambda _: {
                     "stdout": io.StringIO(
-                        "".join(f"{path}\n" for path in gpu_nvidia_paths)),
-                    },
-                name="/snap/lxd/current/bin/nvidia-container-cli.real")
+                        "".join(f"{path}\n" for path in gpu_nvidia_paths)
+                    ),
+                },
+                name="/snap/lxd/current/bin/nvidia-container-cli.real",
+            )
         else:
             gpu_nvidia_paths = None
 
         with mock.patch.object(
-            LXD,
-            "path_exists",
-            side_effect=lambda path: path in existing_files
+            LXD, "path_exists", side_effect=lambda path: path in existing_files
         ):
             constraints = ["gpu-nvidia"] if gpu_nvidia else []
             LXD("1", "xenial", arch, constraints=constraints).start()
@@ -477,120 +505,236 @@ class TestLXD(TestCase):
         ip = ["sudo", "ip"]
         iptables = ["sudo", "iptables", "-w"]
         iptables_comment = [
-            "-m", "comment", "--comment", "managed by launchpad-buildd"]
+            "-m",
+            "comment",
+            "--comment",
+            "managed by launchpad-buildd",
+        ]
         setarch_cmd = "linux64" if get_arch_bits(arch) == 64 else "linux32"
         lxc = ["lxc", "exec", f"lp-xenial-{arch}", "--", setarch_cmd]
         expected_args = []
         if gpu_nvidia:
             expected_args.append(
                 Equals(
-                    ["/snap/lxd/current/bin/nvidia-container-cli.real",
-                     "list"]))
-        expected_args.extend([
-            Equals(ip + ["link", "add", "dev", "lpbuilddbr0",
-                         "type", "bridge"]),
-            Equals(ip + ["addr", "add", "10.10.10.1/24",
-                         "dev", "lpbuilddbr0"]),
-            Equals(ip + ["link", "set", "dev", "lpbuilddbr0", "up"]),
-            Equals(["sudo", "sysctl", "-q", "-w", "net.ipv4.ip_forward=1"]),
-            Equals(
-                iptables +
-                ["-t", "mangle", "-A", "FORWARD", "-i", "lpbuilddbr0",
-                 "-p", "tcp", "--tcp-flags", "SYN,RST", "SYN",
-                 "-j", "TCPMSS", "--clamp-mss-to-pmtu"] +
-                iptables_comment),
-            Equals(
-                iptables +
-                ["-t", "nat", "-A", "POSTROUTING",
-                 "-s", "10.10.10.1/24", "!", "-d", "10.10.10.1/24",
-                 "-j", "MASQUERADE"] +
-                iptables_comment),
-            Equals(
-                ["sudo", "/usr/sbin/dnsmasq", "-s", "lpbuildd",
-                 "-S", "/lpbuildd/", "-u", "buildd", "--strict-order",
-                 "--bind-interfaces",
-                 "--pid-file=/run/launchpad-buildd/dnsmasq.pid",
-                 "--except-interface=lo", "--interface=lpbuilddbr0",
-                 "--listen-address=10.10.10.1"]),
-            Equals(["hostname"]),
-            Equals(["hostname", "--fqdn"]),
-            Equals(
-                lxc +
-                ["mknod", "-m", "0660", "/dev/loop-control",
-                 "c", "10", "237"]),
-            ])
+                    ["/snap/lxd/current/bin/nvidia-container-cli.real", "list"]
+                )
+            )
+        expected_args.extend(
+            [
+                Equals(
+                    ip
+                    + ["link", "add", "dev", "lpbuilddbr0", "type", "bridge"]
+                ),
+                Equals(
+                    ip + ["addr", "add", "10.10.10.1/24", "dev", "lpbuilddbr0"]
+                ),
+                Equals(ip + ["link", "set", "dev", "lpbuilddbr0", "up"]),
+                Equals(
+                    ["sudo", "sysctl", "-q", "-w", "net.ipv4.ip_forward=1"]
+                ),
+                Equals(
+                    iptables
+                    + [
+                        "-t",
+                        "mangle",
+                        "-A",
+                        "FORWARD",
+                        "-i",
+                        "lpbuilddbr0",
+                        "-p",
+                        "tcp",
+                        "--tcp-flags",
+                        "SYN,RST",
+                        "SYN",
+                        "-j",
+                        "TCPMSS",
+                        "--clamp-mss-to-pmtu",
+                    ]
+                    + iptables_comment
+                ),
+                Equals(
+                    iptables
+                    + [
+                        "-t",
+                        "nat",
+                        "-A",
+                        "POSTROUTING",
+                        "-s",
+                        "10.10.10.1/24",
+                        "!",
+                        "-d",
+                        "10.10.10.1/24",
+                        "-j",
+                        "MASQUERADE",
+                    ]
+                    + iptables_comment
+                ),
+                Equals(
+                    [
+                        "sudo",
+                        "/usr/sbin/dnsmasq",
+                        "-s",
+                        "lpbuildd",
+                        "-S",
+                        "/lpbuildd/",
+                        "-u",
+                        "buildd",
+                        "--strict-order",
+                        "--bind-interfaces",
+                        "--pid-file=/run/launchpad-buildd/dnsmasq.pid",
+                        "--except-interface=lo",
+                        "--interface=lpbuilddbr0",
+                        "--listen-address=10.10.10.1",
+                    ]
+                ),
+                Equals(["hostname"]),
+                Equals(["hostname", "--fqdn"]),
+                Equals(
+                    lxc
+                    + [
+                        "mknod",
+                        "-m",
+                        "0660",
+                        "/dev/loop-control",
+                        "c",
+                        "10",
+                        "237",
+                    ]
+                ),
+            ]
+        )
         for minor in range(256):
             expected_args.append(
                 Equals(
-                    lxc +
-                    ["mknod", "-m", "0660", "/dev/loop%d" % minor,
-                     "b", "7", str(minor)]))
+                    lxc
+                    + [
+                        "mknod",
+                        "-m",
+                        "0660",
+                        "/dev/loop%d" % minor,
+                        "b",
+                        "7",
+                        str(minor),
+                    ]
+                )
+            )
         for minor in range(8):
             expected_args.append(
                 Equals(
-                    lxc +
-                    ["mknod", "-m", "0660", "/dev/dm-%d" % minor,
-                     "b", str(DM_BLOCK_MAJOR), str(minor)]))
+                    lxc
+                    + [
+                        "mknod",
+                        "-m",
+                        "0660",
+                        "/dev/dm-%d" % minor,
+                        "b",
+                        str(DM_BLOCK_MAJOR),
+                        str(minor),
+                    ]
+                )
+            )
         if gpu_nvidia:
-            expected_args.extend([
+            expected_args.extend(
+                [
+                    Equals(
+                        lxc
+                        + [
+                            "mknod",
+                            "-m",
+                            "0666",
+                            "/dev/nvidia0",
+                            "c",
+                            "195",
+                            "0",
+                        ]
+                    ),
+                    Equals(
+                        lxc
+                        + [
+                            "mknod",
+                            "-m",
+                            "0666",
+                            "/dev/nvidiactl",
+                            "c",
+                            "195",
+                            "255",
+                        ]
+                    ),
+                    Equals(lxc + ["/sbin/ldconfig"]),
+                ]
+            )
+        expected_args.extend(
+            [
                 Equals(
-                    lxc +
-                    ["mknod", "-m", "0666", "/dev/nvidia0",
-                     "c", "195", "0"]),
+                    lxc
+                    + ["mkdir", "-p", "/etc/systemd/system/snapd.service.d"]
+                ),
                 Equals(
-                    lxc +
-                    ["mknod", "-m", "0666", "/dev/nvidiactl",
-                     "c", "195", "255"]),
-                Equals(lxc + ["/sbin/ldconfig"]),
-                ])
-        expected_args.extend([
-            Equals(
-                lxc + ["mkdir", "-p", "/etc/systemd/system/snapd.service.d"]),
-            Equals(
-                lxc +
-                ["ln", "-s", "/dev/null",
-                 "/etc/systemd/system/snapd.refresh.timer"]),
-            ])
+                    lxc
+                    + [
+                        "ln",
+                        "-s",
+                        "/dev/null",
+                        "/etc/systemd/system/snapd.refresh.timer",
+                    ]
+                ),
+            ]
+        )
         if unmounts_cpuinfo:
             expected_args.append(Equals(lxc + ["umount", "/proc/cpuinfo"]))
         self.assertThat(
             [proc._args["args"] for proc in processes_fixture.procs],
-            MatchesListwise(expected_args))
-
-        client.containers.create.assert_called_once_with({
-            "name": f"lp-xenial-{arch}",
-            "profiles": ["lpbuildd"],
-            "source": {"type": "image", "alias": f"lp-xenial-{arch}"},
-            }, wait=True)
+            MatchesListwise(expected_args),
+        )
+
+        client.containers.create.assert_called_once_with(
+            {
+                "name": f"lp-xenial-{arch}",
+                "profiles": ["lpbuildd"],
+                "source": {"type": "image", "alias": f"lp-xenial-{arch}"},
+            },
+            wait=True,
+        )
         files_api.session.get.assert_any_call(
             f"/1.0/containers/lp-xenial-{arch}/files",
-            params={"path": "/etc/hosts"}, stream=True)
+            params={"path": "/etc/hosts"},
+            stream=True,
+        )
         files_api.post.assert_any_call(
             params={"path": "/etc/hosts"},
             data=(
                 b"127.0.0.1\tlocalhost\n\n"
-                b"127.0.1.1\texample.buildd example\n"),
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+                b"127.0.1.1\texample.buildd example\n"
+            ),
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
         files_api.post.assert_any_call(
             params={"path": "/etc/hostname"},
             data=b"example\n",
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
         files_api.post.assert_any_call(
             params={"path": "/etc/resolv.conf"},
             data=b"host resolv.conf\n",
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
         files_api.post.assert_any_call(
             params={"path": "/usr/local/sbin/policy-rc.d"},
             data=policy_rc_d.encode("UTF-8"),
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0755"})
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0755"},
+        )
         self.assertNotIn(
             "/etc/init/mounted-dev.override",
-            [kwargs["params"]["path"]
-             for _, kwargs in files_api.post.call_args_list])
+            [
+                kwargs["params"]["path"]
+                for _, kwargs in files_api.post.call_args_list
+            ],
+        )
         files_api.post.assert_any_call(
             params={"path": "/etc/systemd/system/snapd.service.d/no-cdn.conf"},
             data=b"[Service]\nEnvironment=SNAPPY_STORE_NO_CDN=1\n",
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
         container.start.assert_called_once_with(wait=True)
         self.assertEqual(LXD_RUNNING, container.status_code)
 
@@ -602,8 +746,9 @@ class TestLXD(TestCase):
         container = client.containers.create.return_value
         client.containers.get.return_value = container
         client.host_info = {"environment": {"driver_version": "2.0"}}
-        container.start.side_effect = (
-            lambda wait=False: setattr(container, "status_code", LXD_RUNNING))
+        container.start.side_effect = lambda wait=False: setattr(
+            container, "status_code", LXD_RUNNING
+        )
         files_api = container.api.files
         files_api._api_endpoint = "/1.0/containers/lp-xenial-amd64/files"
         files_api.session.get.side_effect = FakeSessionGet({})
@@ -611,7 +756,8 @@ class TestLXD(TestCase):
         processes_fixture.add(lambda _: {}, name="sudo")
         processes_fixture.add(lambda _: {}, name="lxc")
         processes_fixture.add(
-            FakeHostname("example", "example.buildd"), name="hostname")
+            FakeHostname("example", "example.buildd"), name="hostname"
+        )
 
         with mock.patch.object(LXD, "path_exists", return_value=False):
             LXD("1", "xenial", "amd64").start()
@@ -619,9 +765,10 @@ class TestLXD(TestCase):
         files_api.post.assert_any_call(
             params={"path": "/etc/hosts"},
             data=(
-                fallback_hosts +
-                "\n127.0.1.1\texample.buildd example\n").encode("UTF-8"),
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+                fallback_hosts + "\n127.0.1.1\texample.buildd example\n"
+            ).encode("UTF-8"),
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
 
     def test_start_with_mounted_dev_conf(self):
         self.fakeFS()
@@ -631,43 +778,52 @@ class TestLXD(TestCase):
         client.host_info = {"environment": {"driver_version": "2.0"}}
         container = client.containers.create.return_value
         client.containers.get.return_value = container
-        container.start.side_effect = (
-            lambda wait=False: setattr(container, "status_code", LXD_RUNNING))
+        container.start.side_effect = lambda wait=False: setattr(
+            container, "status_code", LXD_RUNNING
+        )
         files_api = container.api.files
         files_api._api_endpoint = "/1.0/containers/lp-trusty-amd64/files"
         existing_files = {
-            "/etc/init/mounted-dev.conf": [dedent("""\
+            "/etc/init/mounted-dev.conf": [
+                dedent(
+                    """\
                 start on mounted MOUNTPOINT=/dev
                 script
                     [ -e /dev/shm ] || ln -s /run/shm /dev/shm
                     /sbin/MAKEDEV std fd ppp tun
                 end script
                 task
-                """).encode("UTF-8")]}
+                """
+                ).encode("UTF-8")
+            ]
+        }
         files_api.session.get.side_effect = FakeSessionGet(existing_files)
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="sudo")
         processes_fixture.add(lambda _: {}, name="lxc")
 
         with mock.patch.object(
-            LXD,
-            "path_exists",
-            side_effect=lambda path: path in existing_files
+            LXD, "path_exists", side_effect=lambda path: path in existing_files
         ):
             LXD("1", "trusty", "amd64").start()
 
         files_api.session.get.assert_any_call(
             "/1.0/containers/lp-trusty-amd64/files",
-            params={"path": "/etc/init/mounted-dev.conf"}, stream=True)
+            params={"path": "/etc/init/mounted-dev.conf"},
+            stream=True,
+        )
         files_api.post.assert_any_call(
             params={"path": "/etc/init/mounted-dev.override"},
-            data=dedent("""\
+            data=dedent(
+                """\
                 script
                     [ -e /dev/shm ] || ln -s /run/shm /dev/shm
                     : # /sbin/MAKEDEV std fd ppp tun
                 end script
-                """).encode("UTF-8"),
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+                """
+            ).encode("UTF-8"),
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
 
     def test_start_armhf_unmounts_cpuinfo(self):
         self.test_start(arch="armhf", unmounts_cpuinfo=True)
@@ -679,32 +835,54 @@ class TestLXD(TestCase):
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="lxc")
         LXD("1", "xenial", "amd64").run(
-            ["apt-get", "update"], env={"LANG": "C"})
+            ["apt-get", "update"], env={"LANG": "C"}
+        )
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--env", "LANG=C", "--",
-             "linux64", "apt-get", "update"],
-            ]
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--env",
+                "LANG=C",
+                "--",
+                "linux64",
+                "apt-get",
+                "update",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_get_output(self):
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(
-            lambda _: {"stdout": io.BytesIO(b"hello\n")}, name="lxc")
+            lambda _: {"stdout": io.BytesIO(b"hello\n")}, name="lxc"
+        )
         self.assertEqual(
             b"hello\n",
             LXD("1", "xenial", "amd64").run(
-                ["echo", "hello"], get_output=True))
+                ["echo", "hello"], get_output=True
+            ),
+        )
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "echo", "hello"],
-            ]
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "echo",
+                "hello",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_non_ascii_arguments(self):
         processes_fixture = self.useFixture(FakeProcesses())
@@ -714,25 +892,36 @@ class TestLXD(TestCase):
 
         expected_args = [
             ["lxc", "exec", "lp-xenial-amd64", "--", "linux64", "echo", arg],
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_run_env_shell_metacharacters(self):
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="lxc")
         LXD("1", "xenial", "amd64").run(
-            ["echo", "hello"], env={"OBJECT": "{'foo': 'bar'}"})
+            ["echo", "hello"], env={"OBJECT": "{'foo': 'bar'}"}
+        )
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64",
-             "--env", "OBJECT={'foo': 'bar'}", "--",
-             "linux64", "echo", "hello"],
-            ]
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--env",
+                "OBJECT={'foo': 'bar'}",
+                "--",
+                "linux64",
+                "echo",
+                "hello",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_copy_in(self):
         source_dir = self.useFixture(TempDir()).path
@@ -751,7 +940,8 @@ class TestLXD(TestCase):
         container.api.files.post.assert_called_once_with(
             params={"path": target_path},
             data=b"hello\n",
-            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"})
+            headers={"X-LXD-uid": "0", "X-LXD-gid": "0", "X-LXD-mode": "0644"},
+        )
 
     def test_copy_in_error(self):
         source_dir = self.useFixture(TempDir()).path
@@ -765,11 +955,16 @@ class TestLXD(TestCase):
             pass
         target_path = "/path/to/target"
         e = self.assertRaises(
-            LXDException, LXD("1", "xenial", "amd64").copy_in,
-            source_path, target_path)
+            LXDException,
+            LXD("1", "xenial", "amd64").copy_in,
+            source_path,
+            target_path,
+        )
         self.assertEqual(
             "Failed to push lp-xenial-amd64:%s: "
-            "Fake LXD exception" % target_path, str(e))
+            "Fake LXD exception" % target_path,
+            str(e),
+        )
 
     def test_copy_out(self):
         target_dir = self.useFixture(TempDir()).path
@@ -781,15 +976,19 @@ class TestLXD(TestCase):
         target_path = os.path.join(target_dir, "target")
         files_api = container.api.files
         files_api._api_endpoint = "/1.0/containers/lp-xenial-amd64/files"
-        files_api.session.get.side_effect = FakeSessionGet({
-            source_path: [b"hello\n", b"world\n"],
-            })
+        files_api.session.get.side_effect = FakeSessionGet(
+            {
+                source_path: [b"hello\n", b"world\n"],
+            }
+        )
         LXD("1", "xenial", "amd64").copy_out(source_path, target_path)
 
         client.containers.get.assert_called_once_with("lp-xenial-amd64")
         files_api.session.get.assert_called_once_with(
             "/1.0/containers/lp-xenial-amd64/files",
-            params={"path": source_path}, stream=True)
+            params={"path": source_path},
+            stream=True,
+        )
         self.assertThat(target_path, FileContains("hello\nworld\n"))
 
     def test_copy_out_error(self):
@@ -804,11 +1003,15 @@ class TestLXD(TestCase):
         files_api._api_endpoint = "/1.0/containers/lp-xenial-amd64/files"
         files_api.session.get.side_effect = FakeSessionGet({})
         e = self.assertRaises(
-            LXDException, LXD("1", "xenial", "amd64").copy_out,
-            source_path, target_path)
+            LXDException,
+            LXD("1", "xenial", "amd64").copy_out,
+            source_path,
+            target_path,
+        )
         self.assertEqual(
             "Failed to pull lp-xenial-amd64:%s: not found" % source_path,
-            str(e))
+            str(e),
+        )
 
     def test_path_exists(self):
         processes_fixture = self.useFixture(FakeProcesses())
@@ -818,13 +1021,22 @@ class TestLXD(TestCase):
         self.assertFalse(LXD("1", "xenial", "amd64").path_exists("/absent"))
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "test", "-e", path]
-            for path in ("/present", "/absent")
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "test",
+                "-e",
+                path,
             ]
+            for path in ("/present", "/absent")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_isdir(self):
         processes_fixture = self.useFixture(FakeProcesses())
@@ -834,13 +1046,22 @@ class TestLXD(TestCase):
         self.assertFalse(LXD("1", "xenial", "amd64").isdir("/file"))
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "test", "-d", path]
-            for path in ("/dir", "/file")
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "test",
+                "-d",
+                path,
             ]
+            for path in ("/dir", "/file")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_islink(self):
         processes_fixture = self.useFixture(FakeProcesses())
@@ -850,45 +1071,69 @@ class TestLXD(TestCase):
         self.assertFalse(LXD("1", "xenial", "amd64").islink("/file"))
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "test", "-h", path]
-            for path in ("/link", "/file")
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "test",
+                "-h",
+                path,
             ]
+            for path in ("/link", "/file")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_find(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
         processes_fixture = self.useFixture(FakeProcesses())
-        test_proc_infos = iter([
-            {"stdout": io.BytesIO(b"foo\0bar\0bar/bar\0bar/baz\0")},
-            {"stdout": io.BytesIO(b"foo\0bar\0")},
-            {"stdout": io.BytesIO(b"foo\0bar/bar\0bar/baz\0")},
-            {"stdout": io.BytesIO(b"bar\0bar/bar\0")},
-            {"stdout": io.BytesIO(b"")},
-            ])
+        test_proc_infos = iter(
+            [
+                {"stdout": io.BytesIO(b"foo\0bar\0bar/bar\0bar/baz\0")},
+                {"stdout": io.BytesIO(b"foo\0bar\0")},
+                {"stdout": io.BytesIO(b"foo\0bar/bar\0bar/baz\0")},
+                {"stdout": io.BytesIO(b"bar\0bar/bar\0")},
+                {"stdout": io.BytesIO(b"")},
+            ]
+        )
         processes_fixture.add(lambda _: next(test_proc_infos), name="lxc")
         self.assertEqual(
             ["foo", "bar", "bar/bar", "bar/baz"],
-            LXD("1", "xenial", "amd64").find("/path"))
+            LXD("1", "xenial", "amd64").find("/path"),
+        )
         self.assertEqual(
             ["foo", "bar"],
-            LXD("1", "xenial", "amd64").find("/path", max_depth=1))
+            LXD("1", "xenial", "amd64").find("/path", max_depth=1),
+        )
         self.assertEqual(
             ["foo", "bar/bar", "bar/baz"],
             LXD("1", "xenial", "amd64").find(
-                "/path", include_directories=False))
+                "/path", include_directories=False
+            ),
+        )
         self.assertEqual(
             ["bar", "bar/bar"],
-            LXD("1", "xenial", "amd64").find("/path", name="bar"))
+            LXD("1", "xenial", "amd64").find("/path", name="bar"),
+        )
         self.assertEqual(
-            [], LXD("1", "xenial", "amd64").find("/path", name="nonexistent"))
+            [], LXD("1", "xenial", "amd64").find("/path", name="nonexistent")
+        )
 
         find_prefix = [
-            "lxc", "exec", "lp-xenial-amd64", "--",
-            "linux64", "find", "/path", "-mindepth", "1",
-            ]
+            "lxc",
+            "exec",
+            "lp-xenial-amd64",
+            "--",
+            "linux64",
+            "find",
+            "/path",
+            "-mindepth",
+            "1",
+        ]
         find_suffix = ["-printf", "%P\\0"]
         expected_args = [
             find_prefix + find_suffix,
@@ -896,51 +1141,80 @@ class TestLXD(TestCase):
             find_prefix + ["!", "-type", "d"] + find_suffix,
             find_prefix + ["-name", "bar"] + find_suffix,
             find_prefix + ["-name", "nonexistent"] + find_suffix,
-            ]
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_listdir(self):
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(
-            lambda _: {"stdout": io.BytesIO(b"foo\0bar\0baz\0")}, name="lxc")
+            lambda _: {"stdout": io.BytesIO(b"foo\0bar\0baz\0")}, name="lxc"
+        )
         self.assertEqual(
-            ["foo", "bar", "baz"],
-            LXD("1", "xenial", "amd64").listdir("/path"))
+            ["foo", "bar", "baz"], LXD("1", "xenial", "amd64").listdir("/path")
+        )
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "find", "/path", "-mindepth", "1", "-maxdepth", "1",
-             "-printf", "%P\\0"],
-            ]
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "find",
+                "/path",
+                "-mindepth",
+                "1",
+                "-maxdepth",
+                "1",
+                "-printf",
+                "%P\\0",
+            ],
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_is_package_available(self):
         processes_fixture = self.useFixture(FakeProcesses())
-        test_proc_infos = iter([
-            {"stdout": io.StringIO("Package: snapd\n")},
-            {"returncode": 100},
-            {"stderr": io.StringIO("N: No packages found\n")},
-            ])
+        test_proc_infos = iter(
+            [
+                {"stdout": io.StringIO("Package: snapd\n")},
+                {"returncode": 100},
+                {"stderr": io.StringIO("N: No packages found\n")},
+            ]
+        )
         processes_fixture.add(lambda _: next(test_proc_infos), name="lxc")
         self.assertTrue(
-            LXD("1", "xenial", "amd64").is_package_available("snapd"))
+            LXD("1", "xenial", "amd64").is_package_available("snapd")
+        )
         self.assertFalse(
-            LXD("1", "xenial", "amd64").is_package_available("nonexistent"))
+            LXD("1", "xenial", "amd64").is_package_available("nonexistent")
+        )
         self.assertFalse(
-            LXD("1", "xenial", "amd64").is_package_available("virtual"))
+            LXD("1", "xenial", "amd64").is_package_available("virtual")
+        )
 
         expected_args = [
-            ["lxc", "exec", "lp-xenial-amd64", "--",
-             "linux64", "apt-cache", "show", package]
-            for package in ("snapd", "nonexistent", "virtual")
+            [
+                "lxc",
+                "exec",
+                "lp-xenial-amd64",
+                "--",
+                "linux64",
+                "apt-cache",
+                "show",
+                package,
             ]
+            for package in ("snapd", "nonexistent", "virtual")
+        ]
         self.assertEqual(
             expected_args,
-            [proc._args["args"] for proc in processes_fixture.procs])
+            [proc._args["args"] for proc in processes_fixture.procs],
+        )
 
     def test_stop(self):
         fs_fixture = self.useFixture(FakeFilesystem())
@@ -952,7 +1226,7 @@ class TestLXD(TestCase):
             f.write("42\n")
         self.useFixture(MockPatch("pylxd.Client"))
         client = pylxd.Client()
-        container = client.containers.get('lp-xenial-amd64')
+        container = client.containers.get("lp-xenial-amd64")
         container.status_code = LXD_RUNNING
         processes_fixture = self.useFixture(FakeProcesses())
         processes_fixture.add(lambda _: {}, name="sudo")
@@ -963,27 +1237,59 @@ class TestLXD(TestCase):
         ip = ["sudo", "ip"]
         iptables = ["sudo", "iptables", "-w"]
         iptables_comment = [
-            "-m", "comment", "--comment", "managed by launchpad-buildd"]
+            "-m",
+            "comment",
+            "--comment",
+            "managed by launchpad-buildd",
+        ]
         self.assertThat(
             [proc._args["args"] for proc in processes_fixture.procs],
-            MatchesListwise([
-                Equals(ip + ["addr", "flush", "dev", "lpbuilddbr0"]),
-                Equals(ip + ["link", "set", "dev", "lpbuilddbr0", "down"]),
-                Equals(
-                    iptables +
-                    ["-t", "mangle", "-D", "FORWARD", "-i", "lpbuilddbr0",
-                     "-p", "tcp", "--tcp-flags", "SYN,RST", "SYN",
-                     "-j", "TCPMSS", "--clamp-mss-to-pmtu"] +
-                    iptables_comment),
-                Equals(
-                    iptables +
-                    ["-t", "nat", "-D", "POSTROUTING",
-                     "-s", "10.10.10.1/24", "!", "-d", "10.10.10.1/24",
-                     "-j", "MASQUERADE"] +
-                    iptables_comment),
-                Equals(["sudo", "kill", "-9", "42"]),
-                Equals(ip + ["link", "delete", "lpbuilddbr0"]),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(ip + ["addr", "flush", "dev", "lpbuilddbr0"]),
+                    Equals(ip + ["link", "set", "dev", "lpbuilddbr0", "down"]),
+                    Equals(
+                        iptables
+                        + [
+                            "-t",
+                            "mangle",
+                            "-D",
+                            "FORWARD",
+                            "-i",
+                            "lpbuilddbr0",
+                            "-p",
+                            "tcp",
+                            "--tcp-flags",
+                            "SYN,RST",
+                            "SYN",
+                            "-j",
+                            "TCPMSS",
+                            "--clamp-mss-to-pmtu",
+                        ]
+                        + iptables_comment
+                    ),
+                    Equals(
+                        iptables
+                        + [
+                            "-t",
+                            "nat",
+                            "-D",
+                            "POSTROUTING",
+                            "-s",
+                            "10.10.10.1/24",
+                            "!",
+                            "-d",
+                            "10.10.10.1/24",
+                            "-j",
+                            "MASQUERADE",
+                        ]
+                        + iptables_comment
+                    ),
+                    Equals(["sudo", "kill", "-9", "42"]),
+                    Equals(ip + ["link", "delete", "lpbuilddbr0"]),
+                ]
+            ),
+        )
 
     def test_remove(self):
         self.useFixture(EnvironmentVariable("HOME", "/expected/home"))
@@ -1002,6 +1308,9 @@ class TestLXD(TestCase):
         image.delete.assert_called_once_with(wait=True)
         self.assertThat(
             [proc._args["args"] for proc in processes_fixture.procs],
-            MatchesListwise([
-                Equals(["sudo", "rm", "-rf", "/expected/home/build-1"]),
-                ]))
+            MatchesListwise(
+                [
+                    Equals(["sudo", "rm", "-rf", "/expected/home/build-1"]),
+                ]
+            ),
+        )
diff --git a/lpbuildd/target/tests/test_operation.py b/lpbuildd/target/tests/test_operation.py
index 2ee909a..3264e8a 100644
--- a/lpbuildd/target/tests/test_operation.py
+++ b/lpbuildd/target/tests/test_operation.py
@@ -11,16 +11,20 @@ from lpbuildd.target.tests.matchers import RanBuildCommand
 
 
 class TestOperation(TestCase):
-
     def test_run_build_command_no_env(self):
         parser = ArgumentParser()
         Operation.add_arguments(parser)
         args = ["--backend=fake", "--series=xenial", "--arch=amd64", "1"]
         operation = Operation(parser.parse_args(args=args), parser)
         operation.run_build_command(["echo", "hello world"])
-        self.assertThat(operation.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["echo", "hello world"]),
-            ]))
+        self.assertThat(
+            operation.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["echo", "hello world"]),
+                ]
+            ),
+        )
 
     def test_run_build_command_env(self):
         parser = ArgumentParser()
@@ -28,7 +32,13 @@ class TestOperation(TestCase):
         args = ["--backend=fake", "--series=xenial", "--arch=amd64", "1"]
         operation = Operation(parser.parse_args(args=args), parser)
         operation.run_build_command(
-            ["echo", "hello world"], env={"FOO": "bar baz"})
-        self.assertThat(operation.backend.run.calls, MatchesListwise([
-            RanBuildCommand(["echo", "hello world"], FOO="bar baz"),
-            ]))
+            ["echo", "hello world"], env={"FOO": "bar baz"}
+        )
+        self.assertThat(
+            operation.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(["echo", "hello world"], FOO="bar baz"),
+                ]
+            ),
+        )
diff --git a/lpbuildd/target/tests/test_run_ci.py b/lpbuildd/target/tests/test_run_ci.py
index 08c4d95..bf61e99 100644
--- a/lpbuildd/target/tests/test_run_ci.py
+++ b/lpbuildd/target/tests/test_run_ci.py
@@ -7,35 +7,27 @@ import stat
 import subprocess
 from textwrap import dedent
 
-from fixtures import (
-    FakeLogger,
-    TempDir,
-    )
 import responses
+from fixtures import FakeLogger, TempDir
 from systemfixtures import FakeFilesystem
 from testtools import TestCase
-from testtools.matchers import (
-    AnyMatch,
-    MatchesAll,
-    MatchesListwise,
-    )
+from testtools.matchers import AnyMatch, MatchesAll, MatchesListwise
 
 from lpbuildd.target.cli import parse_args
 from lpbuildd.target.run_ci import (
     RETCODE_FAILURE_BUILD,
     RETCODE_FAILURE_INSTALL,
-    )
+)
 from lpbuildd.target.tests.matchers import (
     RanAptGet,
     RanBuildCommand,
     RanCommand,
     RanSnap,
-    )
+)
 from lpbuildd.tests.fakebuilder import FakeMethod
 
 
 class FakeRevisionID(FakeMethod):
-
     def __init__(self, revision_id):
         super().__init__()
         self.revision_id = revision_id
@@ -47,62 +39,89 @@ class FakeRevisionID(FakeMethod):
 
 
 class TestRunCIPrepare(TestCase):
-
     def test_install_git(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                ]
+            ),
+        )
 
     @responses.activate
     def test_install_snap_store_proxy(self):
-        store_assertion = dedent("""\
+        store_assertion = dedent(
+            """\
             type: store
             store: store-id
             url: http://snap-store-proxy.example
 
             body
-            """)
+            """
+        )
 
         def respond(request):
             return 200, {"X-Assertion-Store-Id": "store-id"}, store_assertion
 
         responses.add_callback(
-            "GET", "http://snap-store-proxy.example/v2/auth/store/assertions";,
-            callback=respond)
+            "GET",
+            "http://snap-store-proxy.example/v2/auth/store/assertions";,
+            callback=respond,
+        )
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--snap-store-proxy-url", "http://snap-store-proxy.example/";,
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--snap-store-proxy-url",
+            "http://snap-store-proxy.example/";,
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git"),
-            RanSnap("ack", "/dev/stdin", input_text=store_assertion),
-            RanSnap("set", "core", "proxy.store=store-id"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git"),
+                    RanSnap("ack", "/dev/stdin", input_text=store_assertion),
+                    RanSnap("set", "core", "proxy.store=store-id"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                ]
+            ),
+        )
 
     def test_install_proxy(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.bin = "/builderbin"
         self.useFixture(FakeFilesystem()).add("/builderbin")
@@ -111,61 +130,95 @@ class TestRunCIPrepare(TestCase):
             proxy_script.write("proxy script\n")
             os.fchmod(proxy_script.fileno(), 0o755)
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "python3", "socat", "git"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "python3", "socat", "git"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                ]
+            ),
+        )
         self.assertEqual(
             (b"proxy script\n", stat.S_IFREG | 0o755),
             run_ci_prepare.backend.backend_fs[
-                "/usr/local/bin/lpbuildd-git-proxy"])
+                "/usr/local/bin/lpbuildd-git-proxy"
+            ],
+        )
 
     def test_install_channels(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--channel=core=candidate", "--channel=core20=beta",
-            "--channel=lxd=beta", "--channel=lpcraft=edge",
-            "--git-repository", "lp:foo",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--channel=core=candidate",
+            "--channel=core20=beta",
+            "--channel=lxd=beta",
+            "--channel=lpcraft=edge",
+            "--git-repository",
+            "lp:foo",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git"),
-            RanSnap("install", "--channel=candidate", "core"),
-            RanSnap("install", "--channel=beta", "core20"),
-            RanSnap("install", "--channel=beta", "lxd"),
-            RanSnap("install", "--classic", "--channel=edge", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git"),
+                    RanSnap("install", "--channel=candidate", "core"),
+                    RanSnap("install", "--channel=beta", "core20"),
+                    RanSnap("install", "--channel=beta", "lxd"),
+                    RanSnap(
+                        "install", "--classic", "--channel=edge", "lpcraft"
+                    ),
+                    RanCommand(["lxd", "init", "--auto"]),
+                ]
+            ),
+        )
 
     def test_install_scan_malware(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
             "--scan-malware",
-            ]
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "clamav"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            RanCommand(["freshclam", "--quiet"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "clamav"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                    RanCommand(["freshclam", "--quiet"]),
+                ]
+            ),
+        )
 
     def test_install_scan_malware_proxy(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
             "--scan-malware",
-            ]
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.bin = "/builderbin"
         self.useFixture(FakeFilesystem()).add("/builderbin")
@@ -179,62 +232,104 @@ class TestRunCIPrepare(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "python3", "socat", "git", "clamav"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            RanCommand(["freshclam", "--quiet"], **env),
-            ]))
+        }
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "python3", "socat", "git", "clamav"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                    RanCommand(["freshclam", "--quiet"], **env),
+                ]
+            ),
+        )
 
     def test_install_scan_malware_with_clamav_database_url(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
             "--scan-malware",
-            "--clamav-database-url", "http://clamav.example/";,
-            ]
+            "--clamav-database-url",
+            "http://clamav.example/";,
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.add_file(
-            "/etc/clamav/freshclam.conf", b"Test line\n")
+            "/etc/clamav/freshclam.conf", b"Test line\n"
+        )
         run_ci_prepare.install()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanAptGet("install", "git", "clamav"),
-            RanSnap("install", "lxd"),
-            RanSnap("install", "--classic", "lpcraft"),
-            RanCommand(["lxd", "init", "--auto"]),
-            RanCommand(["freshclam", "--quiet"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanAptGet("install", "git", "clamav"),
+                    RanSnap("install", "lxd"),
+                    RanSnap("install", "--classic", "lpcraft"),
+                    RanCommand(["lxd", "init", "--auto"]),
+                    RanCommand(["freshclam", "--quiet"]),
+                ]
+            ),
+        )
         self.assertEqual(
-            (b"Test line\nPrivateMirror http://clamav.example/\n";,
-             stat.S_IFREG | 0o644),
-            run_ci_prepare.backend.backend_fs["/etc/clamav/freshclam.conf"])
+            (
+                b"Test line\nPrivateMirror http://clamav.example/\n";,
+                stat.S_IFREG | 0o644,
+            ),
+            run_ci_prepare.backend.backend_fs["/etc/clamav/freshclam.conf"],
+        )
 
     def test_repo_git(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.build_path = self.useFixture(TempDir()).path
         run_ci_prepare.backend.run = FakeRevisionID("0" * 40)
         run_ci_prepare.repo()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"], cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/build/tree", get_output=True, universal_newlines=True),
-            RanCommand(["chown", "-R", "buildd:buildd", "/build/tree"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"], cwd="/build/tree"
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/tree",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/build/tree",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                    RanCommand(
+                        ["chown", "-R", "buildd:buildd", "/build/tree"]
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(run_ci_prepare.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -242,26 +337,51 @@ class TestRunCIPrepare(TestCase):
     def test_repo_git_with_path(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "next",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "next",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.build_path = self.useFixture(TempDir()).path
         run_ci_prepare.backend.run = FakeRevisionID("0" * 40)
         run_ci_prepare.repo()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "next"], cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "rev-parse", "next^{}"],
-                cwd="/build/tree", get_output=True, universal_newlines=True),
-            RanCommand(["chown", "-R", "buildd:buildd", "/build/tree"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "next"], cwd="/build/tree"
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/tree",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "next^{}"],
+                        cwd="/build/tree",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                    RanCommand(
+                        ["chown", "-R", "buildd:buildd", "/build/tree"]
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(run_ci_prepare.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -269,26 +389,52 @@ class TestRunCIPrepare(TestCase):
     def test_repo_git_with_tag_path(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo", "--git-path", "refs/tags/1.0",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--git-path",
+            "refs/tags/1.0",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.build_path = self.useFixture(TempDir()).path
         run_ci_prepare.backend.run = FakeRevisionID("0" * 40)
         run_ci_prepare.repo()
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "refs/tags/1.0"], cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/tree"),
-            RanBuildCommand(
-                ["git", "rev-parse", "refs/tags/1.0^{}"],
-                cwd="/build/tree", get_output=True, universal_newlines=True),
-            RanCommand(["chown", "-R", "buildd:buildd", "/build/tree"]),
-            ]))
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build"
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "refs/tags/1.0"],
+                        cwd="/build/tree",
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/tree",
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "refs/tags/1.0^{}"],
+                        cwd="/build/tree",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                    RanCommand(
+                        ["chown", "-R", "buildd:buildd", "/build/tree"]
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(run_ci_prepare.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -296,10 +442,15 @@ class TestRunCIPrepare(TestCase):
     def test_repo_proxy(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            "--proxy-url", "http://proxy.example:3128/";,
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+            "--proxy-url",
+            "http://proxy.example:3128/";,
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.build_path = self.useFixture(TempDir()).path
         run_ci_prepare.backend.run = FakeRevisionID("0" * 40)
@@ -309,20 +460,44 @@ class TestRunCIPrepare(TestCase):
             "https_proxy": "http://proxy.example:3128/";,
             "GIT_PROXY_COMMAND": "/usr/local/bin/lpbuildd-git-proxy",
             "SNAPPY_STORE_NO_CDN": "1",
-            }
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesListwise([
-            RanBuildCommand(
-                ["git", "clone", "-n", "lp:foo", "tree"], cwd="/build", **env),
-            RanBuildCommand(
-                ["git", "checkout", "-q", "HEAD"], cwd="/build/tree", **env),
-            RanBuildCommand(
-                ["git", "submodule", "update", "--init", "--recursive"],
-                cwd="/build/tree", **env),
-            RanBuildCommand(
-                ["git", "rev-parse", "HEAD^{}"],
-                cwd="/build/tree", get_output=True, universal_newlines=True),
-            RanCommand(["chown", "-R", "buildd:buildd", "/build/tree"]),
-            ]))
+        }
+        self.assertThat(
+            run_ci_prepare.backend.run.calls,
+            MatchesListwise(
+                [
+                    RanBuildCommand(
+                        ["git", "clone", "-n", "lp:foo", "tree"],
+                        cwd="/build",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "checkout", "-q", "HEAD"],
+                        cwd="/build/tree",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        [
+                            "git",
+                            "submodule",
+                            "update",
+                            "--init",
+                            "--recursive",
+                        ],
+                        cwd="/build/tree",
+                        **env,
+                    ),
+                    RanBuildCommand(
+                        ["git", "rev-parse", "HEAD^{}"],
+                        cwd="/build/tree",
+                        get_output=True,
+                        universal_newlines=True,
+                    ),
+                    RanCommand(
+                        ["chown", "-R", "buildd:buildd", "/build/tree"]
+                    ),
+                ]
+            ),
+        )
         status_path = os.path.join(run_ci_prepare.backend.build_path, "status")
         with open(status_path) as status:
             self.assertEqual({"revision_id": "0" * 40}, json.load(status))
@@ -330,20 +505,29 @@ class TestRunCIPrepare(TestCase):
     def test_run_succeeds(self):
         args = [
             "run-ci-prepare",
-            "--backend=fake", "--series=focal", "--arch=amd64", "1",
-            "--git-repository", "lp:foo",
-            ]
+            "--backend=fake",
+            "--series=focal",
+            "--arch=amd64",
+            "1",
+            "--git-repository",
+            "lp:foo",
+        ]
         run_ci_prepare = parse_args(args=args).operation
         run_ci_prepare.backend.build_path = self.useFixture(TempDir()).path
         run_ci_prepare.backend.run = FakeRevisionID("0" * 40)
         self.assertEqual(0, run_ci_prepare.run())
         # Just check that it did something in each step, not every detail.
-        self.assertThat(run_ci_prepare.backend.run.calls, MatchesAll(
-            AnyMatch(RanSnap("install", "--classic", "lpcraft")),
-            AnyMatch(
-                RanBuildCommand(
-                    ["git", "clone", "-n", "
