← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:black-buildmaster into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:black-buildmaster into launchpad:master.

Commit message:
lp.buildmaster: Apply black

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/425897
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-buildmaster into launchpad:master.
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index accdfac..1ab33b5 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -66,3 +66,5 @@ c606443bdb2f342593c9a7c9437cb70c01f85f29
 6178b1869f90f9bf1eb9ed050154888b523c53c5
 # apply black to lp.bugs
 2edd6d52841e03ba11ad431e40828f2f0b6a7e17
+# apply black to lp.buildmaster
+95cf83968d59453397dfbdcbe30556fc8004479a
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1297eef..db91bfe 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -47,6 +47,7 @@ repos:
             |archiveuploader
             |blueprints
             |bugs
+            |buildmaster
           )/
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
@@ -70,6 +71,7 @@ repos:
             |archiveuploader
             |blueprints
             |bugs
+            |buildmaster
           )/
     -   id: isort
         alias: isort-black
@@ -83,6 +85,7 @@ repos:
             |archiveuploader
             |blueprints
             |bugs
+            |buildmaster
           )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
diff --git a/lib/lp/buildmaster/browser/builder.py b/lib/lp/buildmaster/browser/builder.py
index 4e65faa..2c3a768 100644
--- a/lib/lp/buildmaster/browser/builder.py
+++ b/lib/lp/buildmaster/browser/builder.py
@@ -4,25 +4,22 @@
 """Browser views for builders."""
 
 __all__ = [
-    'BuilderOverviewMenu',
-    'BuilderNavigation',
-    'BuilderSetAddView',
-    'BuilderSetBreadcrumb',
-    'BuilderSetOverviewMenu',
-    'BuilderSetNavigation',
-    'BuilderSetView',
-    'BuilderView',
-    ]
+    "BuilderOverviewMenu",
+    "BuilderNavigation",
+    "BuilderSetAddView",
+    "BuilderSetBreadcrumb",
+    "BuilderSetOverviewMenu",
+    "BuilderSetNavigation",
+    "BuilderSetView",
+    "BuilderView",
+]
 
-from datetime import (
-    datetime,
-    timedelta,
-    )
-from itertools import groupby
 import operator
+from datetime import datetime, timedelta
+from itertools import groupby
 
-from lazr.restful.utils import smartquote
 import pytz
+from lazr.restful.utils import smartquote
 from zope.component import getUtility
 from zope.event import notify
 from zope.formlib.widget import CustomWidgetFactory
@@ -31,85 +28,80 @@ from zope.lifecycleevent import ObjectCreatedEvent
 
 from lp import _
 from lp.app.browser.launchpadform import (
-    action,
     LaunchpadEditFormView,
     LaunchpadFormView,
-    )
+    action,
+)
 from lp.app.browser.tales import DurationFormatterAPI
 from lp.app.widgets.itemswidgets import LabeledMultiCheckBoxWidget
 from lp.app.widgets.owner import HiddenUserWidget
-from lp.buildmaster.interfaces.builder import (
-    IBuilder,
-    IBuilderSet,
-    )
+from lp.buildmaster.interfaces.builder import IBuilder, IBuilderSet
 from lp.code.interfaces.cibuild import ICIBuildSet
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
-    )
+)
 from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuildSet
 from lp.services.helpers import english_list
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp import (
     ApplicationMenu,
-    canonical_url,
-    enabled_with_permission,
     GetitemNavigation,
     LaunchpadView,
     Link,
     Navigation,
+    canonical_url,
+    enabled_with_permission,
     stepthrough,
-    )
+)
 from lp.services.webapp.batching import StormRangeFactory
 from lp.services.webapp.breadcrumb import Breadcrumb
 from lp.snappy.interfaces.snapbuild import ISnapBuildSet
-from lp.soyuz.browser.build import (
-    BuildRecordsView,
-    get_build_by_id_str,
-    )
+from lp.soyuz.browser.build import BuildRecordsView, get_build_by_id_str
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.livefsbuild import ILiveFSBuildSet
 
 
 class BuilderSetNavigation(GetitemNavigation):
     """Navigation methods for IBuilderSet."""
+
     usedfor = IBuilderSet
 
-    @stepthrough('+build')
+    @stepthrough("+build")
     def traverse_build(self, name):
         build = get_build_by_id_str(IBinaryPackageBuildSet, name)
         if build is None:
             return None
         return self.redirectSubTree(canonical_url(build, request=self.request))
 
-    @stepthrough('+recipebuild')
+    @stepthrough("+recipebuild")
     def traverse_recipebuild(self, name):
         build = get_build_by_id_str(ISourcePackageRecipeBuildSource, name)
         if build is None:
             return None
         return self.redirectSubTree(canonical_url(build, request=self.request))
 
-    @stepthrough('+livefsbuild')
+    @stepthrough("+livefsbuild")
     def traverse_livefsbuild(self, name):
         build = get_build_by_id_str(ILiveFSBuildSet, name)
         if build is None:
             return None
         return self.redirectSubTree(canonical_url(build, request=self.request))
 
-    @stepthrough('+snapbuild')
+    @stepthrough("+snapbuild")
     def traverse_snapbuild(self, name):
         build = get_build_by_id_str(ISnapBuildSet, name)
         if build is None:
             return None
         return self.redirectSubTree(canonical_url(build, request=self.request))
 
-    @stepthrough('+ocirecipebuild')
+    @stepthrough("+ocirecipebuild")
     def traverse_ocirecipebuild(self, name):
         build = get_build_by_id_str(IOCIRecipeBuildSet, name)
         if build is None:
             return None
         return self.redirectSubTree(canonical_url(build, request=self.request))
 
-    @stepthrough('+cibuild')
+    @stepthrough("+cibuild")
     def traverse_cibuild(self, name):
         build = get_build_by_id_str(ICIBuildSet, name)
         if build is None:
@@ -119,45 +111,49 @@ class BuilderSetNavigation(GetitemNavigation):
 
 class BuilderSetBreadcrumb(Breadcrumb):
     """Builds a breadcrumb for an `IBuilderSet`."""
-    text = 'Build Farm'
+
+    text = "Build Farm"
 
 
 class BuilderNavigation(Navigation):
     """Navigation methods for IBuilder."""
+
     usedfor = IBuilder
 
 
 class BuilderSetOverviewMenu(ApplicationMenu):
     """Overview Menu for IBuilderSet."""
+
     usedfor = IBuilderSet
-    facet = 'overview'
-    links = ['add']
+    facet = "overview"
+    links = ["add"]
 
-    @enabled_with_permission('launchpad.Admin')
+    @enabled_with_permission("launchpad.Admin")
     def add(self):
-        text = 'Register a new build machine'
-        return Link('+new', text, icon='add')
+        text = "Register a new build machine"
+        return Link("+new", text, icon="add")
 
 
 class BuilderOverviewMenu(ApplicationMenu):
     """Overview Menu for IBuilder."""
+
     usedfor = IBuilder
-    facet = 'overview'
-    links = ['history', 'edit', 'mode']
+    facet = "overview"
+    links = ["history", "edit", "mode"]
 
     def history(self):
-        text = 'View full history'
-        return Link('+history', text, icon='info')
+        text = "View full history"
+        return Link("+history", text, icon="info")
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def edit(self):
-        text = 'Change details'
-        return Link('+edit', text, icon='edit')
+        text = "Change details"
+        return Link("+edit", text, icon="edit")
 
-    @enabled_with_permission('launchpad.Edit')
+    @enabled_with_permission("launchpad.Edit")
     def mode(self):
-        text = 'Change mode'
-        return Link('+mode', text, icon='edit')
+        text = "Change mode"
+        return Link("+mode", text, icon="edit")
 
 
 class CleanInfoMixin:
@@ -177,7 +173,8 @@ class CleanInfoMixin:
         # little suspicious.
         if duration > timedelta(minutes=10):
             return "Cleaning for {}".format(
-                DurationFormatterAPI(duration).approximateduration())
+                DurationFormatterAPI(duration).approximateduration()
+            )
         else:
             return "Cleaning"
 
@@ -198,7 +195,8 @@ class BuilderSetView(CleanInfoMixin, LaunchpadView):
         return (
             not builder.virtualized,
             tuple(p.name for p in builder.processors),
-            builder.name)
+            builder.name,
+        )
 
     @cachedproperty
     def builders(self):
@@ -212,7 +210,9 @@ class BuilderSetView(CleanInfoMixin, LaunchpadView):
         return [
             BuilderClump(list(group))
             for _, group in groupby(
-                self.builders, lambda b: self.getBuilderSortKey(b)[:-1])]
+                self.builders, lambda b: self.getBuilderSortKey(b)[:-1]
+            )
+        ]
 
     @property
     def number_of_registered_builders(self):
@@ -240,7 +240,8 @@ class BuilderSetView(CleanInfoMixin, LaunchpadView):
     def virt_builders(self):
         """Return a BuilderCategory object for virtual builders."""
         builder_category = BuilderCategory(
-            'Virtual build status', virtualized=True)
+            "Virtual build status", virtualized=True
+        )
         builder_category.groupBuilders(self.builders, self.build_queue_sizes)
         return builder_category
 
@@ -248,7 +249,8 @@ class BuilderSetView(CleanInfoMixin, LaunchpadView):
     def nonvirt_builders(self):
         """Return a BuilderCategory object for non-virtual builders."""
         builder_category = BuilderCategory(
-            'Non-virtual build status', virtualized=False)
+            "Non-virtual build status", virtualized=False
+        )
         builder_category.groupBuilders(self.builders, self.build_queue_sizes)
         return builder_category
 
@@ -260,6 +262,7 @@ class BuilderClump:
     BuilderCategory are already in use here for slightly different kinds of
     grouping.
     """
+
     def __init__(self, builders):
         self.virtualized = builders[0].virtualized
         self.processors = builders[0].processors
@@ -272,11 +275,13 @@ class BuilderGroup:
     Also stores the corresponding 'queue_size', the number of pending jobs
     in this context.
     """
+
     def __init__(self, processor_name, queue_size, duration, builders):
         self.processor_name = processor_name
         self.queue_size = queue_size
         self.number_of_available_builders = len(
-            [b for b in builders if b.builderok])
+            [b for b in builders if b.builderok]
+        )
         if duration and self.number_of_available_builders:
             self.duration = duration / self.number_of_available_builders
         else:
@@ -288,6 +293,7 @@ class BuilderCategory:
 
     A collection of BuilderGroups as 'PPA builders' and 'Other builders'.
     """
+
     def __init__(self, title, virtualized):
         self.title = title
         self.virtualized = virtualized
@@ -296,16 +302,20 @@ class BuilderCategory:
     @property
     def groups(self):
         """Return a list of BuilderGroups ordered by 'processor_name'."""
-        return sorted(self._builder_groups,
-                      key=operator.attrgetter('processor_name'))
+        return sorted(
+            self._builder_groups, key=operator.attrgetter("processor_name")
+        )
 
     def groupBuilders(self, all_builders, build_queue_sizes):
         """Group the given builders as a collection of Buildergroups.
 
         A BuilderGroup will be initialized for each processor.
         """
-        builders = [builder for builder in all_builders
-                    if builder.virtualized is self.virtualized]
+        builders = [
+            builder
+            for builder in all_builders
+            if builder.virtualized is self.virtualized
+        ]
 
         grouped_builders = {}
         for builder in builders:
@@ -316,13 +326,17 @@ class BuilderCategory:
                     grouped_builders[processor] = [builder]
 
         for processor, builders in grouped_builders.items():
-            virt_str = 'virt' if self.virtualized else 'nonvirt'
+            virt_str = "virt" if self.virtualized else "nonvirt"
             processor_name = processor.name if processor else None
             queue_size, duration = build_queue_sizes[virt_str].get(
-                processor_name, (0, None))
+                processor_name, (0, None)
+            )
             builder_group = BuilderGroup(
-                processor_name, queue_size, duration,
-                sorted(builders, key=operator.attrgetter('title')))
+                processor_name,
+                queue_size,
+                duration,
+                sorted(builders, key=operator.attrgetter("title")),
+            )
             self._builder_groups.append(builder_group)
 
 
@@ -346,8 +360,7 @@ class BuilderView(CleanInfoMixin, LaunchpadView):
     @property
     def page_title(self):
         """Return a relevant page title for this view."""
-        return smartquote(
-            'Builder "%s"' % self.context.title)
+        return smartquote('Builder "%s"' % self.context.title)
 
     @property
     def toggle_mode_text(self):
@@ -361,14 +374,13 @@ class BuilderView(CleanInfoMixin, LaunchpadView):
 class BuilderHistoryView(BuildRecordsView):
     """This class exists only to override the page_title."""
 
-    page_title = 'Build history'
+    page_title = "Build history"
     binary_only = False
     range_factory = StormRangeFactory
 
     @property
     def label(self):
-        return smartquote(
-            'Build history for "%s"' % self.context.title)
+        return smartquote('Build history for "%s"' % self.context.title)
 
     @property
     def default_build_state(self):
@@ -389,28 +401,35 @@ class BuilderSetAddView(LaunchpadFormView):
     label = "Register a new build machine"
 
     field_names = [
-        'name', 'title', 'processors', 'url', 'active', 'virtualized',
-        'vm_host', 'vm_reset_protocol', 'owner'
-        ]
+        "name",
+        "title",
+        "processors",
+        "url",
+        "active",
+        "virtualized",
+        "vm_host",
+        "vm_reset_protocol",
+        "owner",
+    ]
     custom_widget_owner = HiddenUserWidget
     custom_widget_url = CustomWidgetFactory(TextWidget, displayWidth=30)
     custom_widget_vm_host = CustomWidgetFactory(TextWidget, displayWidth=30)
     custom_widget_processors = LabeledMultiCheckBoxWidget
 
-    @action(_('Register builder'), name='register')
+    @action(_("Register builder"), name="register")
     def register_action(self, action, data):
         """Register a new builder."""
         builder = getUtility(IBuilderSet).new(
-            processors=data.get('processors'),
-            url=data.get('url'),
-            name=data.get('name'),
-            title=data.get('title'),
-            owner=data.get('owner'),
-            active=data.get('active'),
-            virtualized=data.get('virtualized'),
-            vm_host=data.get('vm_host'),
-            vm_reset_protocol=data.get('vm_reset_protocol'),
-            )
+            processors=data.get("processors"),
+            url=data.get("url"),
+            name=data.get("name"),
+            title=data.get("title"),
+            owner=data.get("owner"),
+            active=data.get("active"),
+            virtualized=data.get("virtualized"),
+            vm_host=data.get("vm_host"),
+            vm_reset_protocol=data.get("vm_reset_protocol"),
+        )
         notify(ObjectCreatedEvent(builder))
         self.next_url = canonical_url(builder)
 
@@ -431,13 +450,22 @@ class BuilderEditView(LaunchpadEditFormView):
     schema = IBuilder
 
     field_names = [
-        'name', 'title', 'processors', 'url', 'manual', 'owner',
-        'virtualized', 'builderok', 'failnotes', 'vm_host',
-        'vm_reset_protocol', 'active',
-        ]
+        "name",
+        "title",
+        "processors",
+        "url",
+        "manual",
+        "owner",
+        "virtualized",
+        "builderok",
+        "failnotes",
+        "vm_host",
+        "vm_reset_protocol",
+        "active",
+    ]
     custom_widget_processors = LabeledMultiCheckBoxWidget
 
-    @action(_('Change'), name='update')
+    @action(_("Change"), name="update")
     def change_details(self, action, data):
         """Update the builder with the data from the form."""
         # notify_modified is set False here because it uses
@@ -450,11 +478,13 @@ class BuilderEditView(LaunchpadEditFormView):
         # interface properties with doNotSnapshot() but this doesn't
         # guard against future properties being created.
         builder_was_modified = self.updateContextFromData(
-            data, notify_modified=False)
+            data, notify_modified=False
+        )
 
         if builder_was_modified:
             notification = 'The builder "%s" was updated successfully.' % (
-                self.context.title)
+                self.context.title
+            )
             self.request.response.addNotification(notification)
 
         return builder_was_modified
@@ -473,7 +503,8 @@ class BuilderEditView(LaunchpadEditFormView):
     def page_title(self):
         """Return a relevant page title for this view."""
         return smartquote(
-            'Change details for builder "%s"' % self.context.title)
+            'Change details for builder "%s"' % self.context.title
+        )
 
     @property
     def label(self):
diff --git a/lib/lp/buildmaster/browser/processor.py b/lib/lp/buildmaster/browser/processor.py
index 145f1f2..b335c8b 100644
--- a/lib/lp/buildmaster/browser/processor.py
+++ b/lib/lp/buildmaster/browser/processor.py
@@ -4,8 +4,8 @@
 """Navigation views for processors."""
 
 __all__ = [
-    'ProcessorSetNavigation',
-    ]
+    "ProcessorSetNavigation",
+]
 
 
 from lp.buildmaster.interfaces.processor import IProcessorSet
@@ -14,6 +14,7 @@ from lp.services.webapp import Navigation
 
 class ProcessorSetNavigation(Navigation):
     """IProcessorSet navigation."""
+
     usedfor = IProcessorSet
 
     def traverse(self, name):
diff --git a/lib/lp/buildmaster/browser/tests/test_builder.py b/lib/lp/buildmaster/browser/tests/test_builder.py
index 4037d31..e74b3ed 100644
--- a/lib/lp/buildmaster/browser/tests/test_builder.py
+++ b/lib/lp/buildmaster/browser/tests/test_builder.py
@@ -10,10 +10,7 @@ from zope.security.proxy import removeSecurityProxy
 
 from lp.app.browser.tales import DurationFormatterAPI
 from lp.buildmaster.browser.tests.test_builder_views import BuildCreationMixin
-from lp.buildmaster.enums import (
-    BuilderCleanStatus,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuilderCleanStatus, BuildStatus
 from lp.buildmaster.interfaces.builder import IBuilderSet
 from lp.buildmaster.model.builder import Builder
 from lp.oci.interfaces.ocirecipe import OCI_RECIPE_ALLOW_CREATE
@@ -24,21 +21,14 @@ from lp.services.job.model.job import Job
 from lp.services.webapp.publisher import canonical_url
 from lp.soyuz.interfaces.livefs import LIVEFS_FEATURE_FLAG
 from lp.testing import (
+    TestCaseWithFactory,
     admin_logged_in,
     logout,
     record_two_runs,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    LaunchpadFunctionalLayer,
-    )
+)
+from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadFunctionalLayer
 from lp.testing.matchers import HasQueryCount
-from lp.testing.pages import (
-    extract_text,
-    find_tags_by_class,
-    setupBrowser,
-    )
+from lp.testing.pages import extract_text, find_tags_by_class, setupBrowser
 from lp.testing.views import create_initialized_view
 
 
@@ -54,9 +44,9 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
     def test_binary_package_build_api_redirects(self):
         build = self.factory.makeBinaryPackageBuild()
         url = "http://api.launchpad.test/devel/builders/+build/%s" % build.id
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -65,11 +55,12 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
     def test_source_package_recipe_build_api_redirects(self):
         build = self.factory.makeSourcePackageRecipeBuild()
         url = (
-            "http://api.launchpad.test/devel/builders/+recipebuild/%s" %
-            build.id)
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+            "http://api.launchpad.test/devel/builders/+recipebuild/%s"
+            % build.id
+        )
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -79,11 +70,12 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
         self.useFixture(FeatureFixture({LIVEFS_FEATURE_FLAG: "on"}))
         build = self.factory.makeLiveFSBuild()
         url = (
-            "http://api.launchpad.test/devel/builders/+livefsbuild/%s" %
-            build.id)
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+            "http://api.launchpad.test/devel/builders/+livefsbuild/%s"
+            % build.id
+        )
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -92,11 +84,11 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
     def test_snap_build_api_redirects(self):
         build = self.factory.makeSnapBuild()
         url = (
-            "http://api.launchpad.test/devel/builders/+snapbuild/%s" %
-            build.id)
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+            "http://api.launchpad.test/devel/builders/+snapbuild/%s" % build.id
+        )
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -106,11 +98,12 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
         self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
         build = self.factory.makeOCIRecipeBuild()
         url = (
-            "http://api.launchpad.test/devel/builders/+ocirecipebuild/%s" %
-            build.id)
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+            "http://api.launchpad.test/devel/builders/+ocirecipebuild/%s"
+            % build.id
+        )
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -118,11 +111,10 @@ class TestBuilderSetNavigation(TestCaseWithFactory):
 
     def test_ci_build_api_redirects(self):
         build = self.factory.makeCIBuild()
-        url = (
-            "http://api.launchpad.test/devel/builders/+cibuild/%s" % build.id)
-        expected_url = (
-            "http://api.launchpad.test/devel" +
-            canonical_url(build, path_only_if_possible=True))
+        url = "http://api.launchpad.test/devel/builders/+cibuild/%s" % build.id
+        expected_url = "http://api.launchpad.test/devel" + canonical_url(
+            build, path_only_if_possible=True
+        )
         logout()
         browser = setupBrowser()
         browser.open(url)
@@ -148,26 +140,30 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
         def create_build():
             build = self.createBinaryPackageBuild()
             build.updateStatus(
-                BuildStatus.NEEDSBUILD, force_invalid_transition=True)
+                BuildStatus.NEEDSBUILD, force_invalid_transition=True
+            )
             queue = build.queueBuild()
             queue.markAsBuilding(build.builder)
 
         nb_objects = 2
         recorder1, recorder2 = record_two_runs(
-            builders_homepage_render, create_build, nb_objects)
+            builders_homepage_render, create_build, nb_objects
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_builders_recipe_build_query_count(self):
         def create_build():
             build = self.createRecipeBuildWithBuilder()
             build.updateStatus(
-                BuildStatus.NEEDSBUILD, force_invalid_transition=True)
+                BuildStatus.NEEDSBUILD, force_invalid_transition=True
+            )
             queue = build.queueBuild()
             queue.markAsBuilding(build.builder)
 
         nb_objects = 2
         recorder1, recorder2 = record_two_runs(
-            builders_homepage_render, create_build, nb_objects)
+            builders_homepage_render, create_build, nb_objects
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_builders_translation_template_build_query_count(self):
@@ -177,7 +173,8 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
 
         nb_objects = 2
         recorder1, recorder2 = record_two_runs(
-            builders_homepage_render, create_build, nb_objects)
+            builders_homepage_render, create_build, nb_objects
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_builders_variety_query_count(self):
@@ -186,13 +183,14 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
                 self.factory.makeBinaryPackageBuild().queueBuild(),
                 self.factory.makeSourcePackageRecipeBuild().queueBuild(),
                 self.factory.makeTranslationTemplatesBuild().queueBuild(),
-                ]
+            ]
             for bq in bqs:
                 bq.markAsBuilding(self.factory.makeBuilder())
 
         nb_objects = 2
         recorder1, recorder2 = record_two_runs(
-            builders_homepage_render, create_builds, nb_objects)
+            builders_homepage_render, create_builds, nb_objects
+        )
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_category_portlet_not_shown_if_empty(self):
@@ -201,20 +199,20 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
         self.assertIn("Non-virtual build status", content)
 
         with admin_logged_in():
-            getUtility(IBuilderSet).getByName('frog').active = False
+            getUtility(IBuilderSet).getByName("frog").active = False
         content = builders_homepage_render()
         self.assertNotIn("Virtual build status", content)
         self.assertIn("Non-virtual build status", content)
 
         with admin_logged_in():
-            getUtility(IBuilderSet).getByName('bob').active = False
-            getUtility(IBuilderSet).getByName('frog').active = True
+            getUtility(IBuilderSet).getByName("bob").active = False
+            getUtility(IBuilderSet).getByName("frog").active = True
         content = builders_homepage_render()
         self.assertIn("Virtual build status", content)
         self.assertNotIn("Non-virtual build status", content)
 
         with admin_logged_in():
-            getUtility(IBuilderSet).getByName('frog').active = False
+            getUtility(IBuilderSet).getByName("frog").active = False
         content = builders_homepage_render()
         self.assertNotIn("Virtual build status", content)
         self.assertNotIn("Non-virtual build status", content)
@@ -223,12 +221,16 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
         now = get_transaction_timestamp(IStore(Builder))
         durations = [
             timedelta(minutes=5),
-            timedelta(minutes=11), timedelta(hours=1), timedelta(hours=2)]
+            timedelta(minutes=11),
+            timedelta(hours=1),
+            timedelta(hours=2),
+        ]
         with admin_logged_in():
             for builder in getUtility(IBuilderSet):
                 builder.active = False
             builders = [
-                self.factory.makeBuilder() for _ in range(len(durations))]
+                self.factory.makeBuilder() for _ in range(len(durations))
+            ]
             for builder, duration in zip(builders, durations):
                 naked_builder = removeSecurityProxy(builder)
                 naked_builder.clean_status = BuilderCleanStatus.CLEANING
@@ -239,12 +241,19 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
         expected_text = ["{}\nCleaning".format(builders[0].name)]
         # We show durations for builders that have been cleaning for more
         # than ten minutes.
-        expected_text.extend([
-            "{}\nCleaning for {}".format(
-                builder.name,
-                DurationFormatterAPI(duration).approximateduration())
-            for builder, duration in zip(builders[1:], durations[1:])])
+        expected_text.extend(
+            [
+                "{}\nCleaning for {}".format(
+                    builder.name,
+                    DurationFormatterAPI(duration).approximateduration(),
+                )
+                for builder, duration in zip(builders[1:], durations[1:])
+            ]
+        )
         self.assertEqual(
             expected_text,
-            [extract_text(row)
-             for row in find_tags_by_class(content, "builder-row")])
+            [
+                extract_text(row)
+                for row in find_tags_by_class(content, "builder-row")
+            ],
+        )
diff --git a/lib/lp/buildmaster/browser/tests/test_builder_views.py b/lib/lp/buildmaster/browser/tests/test_builder_views.py
index 2233d21..8bfb548 100644
--- a/lib/lp/buildmaster/browser/tests/test_builder_views.py
+++ b/lib/lp/buildmaster/browser/tests/test_builder_views.py
@@ -4,36 +4,33 @@
 from functools import partial
 
 import soupmatchers
+import transaction
 from storm.locals import Store
 from testtools.matchers import MatchesAll
-import transaction
 from zope.component import getUtility
 
-from lp.buildmaster.enums import (
-    BuildFarmJobType,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildFarmJobType, BuildStatus
 from lp.buildmaster.interfaces.buildfarmjob import (
     IBuildFarmJobSource,
     InconsistentBuildFarmJobError,
-    )
+)
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.database.sqlbase import flush_database_updates
 from lp.services.webapp.authorization import clear_cache
 from lp.soyuz.browser.build import getSpecificJobs
 from lp.testing import (
-    celebrity_logged_in,
-    record_two_runs,
     StormStatementRecorder,
     TestCaseWithFactory,
-    )
+    celebrity_logged_in,
+    record_two_runs,
+)
 from lp.testing.layers import LaunchpadFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 from lp.testing.sampledata import ADMIN_EMAIL
 from lp.testing.views import create_initialized_view
 from lp.translations.interfaces.translationtemplatesbuild import (
     ITranslationTemplatesBuildSource,
-    )
+)
 
 
 class TestgetSpecificJobs(TestCaseWithFactory):
@@ -64,23 +61,26 @@ class TestgetSpecificJobs(TestCaseWithFactory):
     def test_getSpecificJobs(self):
         builds = self.createBuilds()
         specific_jobs = getSpecificJobs(
-            [build.build_farm_job for build in builds])
-        self.assertContentEqual(
-            builds, specific_jobs)
+            [build.build_farm_job for build in builds]
+        )
+        self.assertContentEqual(builds, specific_jobs)
 
     def test_getSpecificJobs_preserves_order(self):
         builds = self.createBuilds()
         specific_jobs = getSpecificJobs(
-            [build.build_farm_job for build in builds])
+            [build.build_farm_job for build in builds]
+        )
         self.assertEqual(
             [(build.id, build.__class__) for build in builds],
-            [(job.id, job.__class__) for job in specific_jobs])
+            [(job.id, job.__class__) for job in specific_jobs],
+        )
 
     def test_getSpecificJobs_duplicated_builds(self):
         builds = self.createBuilds()
         duplicated_builds = builds + builds
         specific_jobs = getSpecificJobs(
-            [build.build_farm_job for build in duplicated_builds])
+            [build.build_farm_job for build in duplicated_builds]
+        )
         self.assertEqual(len(duplicated_builds), len(specific_jobs))
 
     def test_getSpecificJobs_empty(self):
@@ -103,15 +103,15 @@ class TestgetSpecificJobs(TestCaseWithFactory):
     def test_getSpecificJobs_no_specific_job(self):
         build_farm_job_source = getUtility(IBuildFarmJobSource)
         build_farm_job = build_farm_job_source.new(
-            BuildFarmJobType.TRANSLATIONTEMPLATESBUILD)
+            BuildFarmJobType.TRANSLATIONTEMPLATESBUILD
+        )
         flush_database_updates()
         self.assertRaises(
-            InconsistentBuildFarmJobError,
-            getSpecificJobs, [build_farm_job])
+            InconsistentBuildFarmJobError, getSpecificJobs, [build_farm_job]
+        )
 
 
 class BuildCreationMixin:
-
     def markAsBuilt(self, build, builder):
         lfa = self.factory.makeLibraryFileAlias()
         build.updateStatus(BuildStatus.BUILDING, builder=builder)
@@ -127,19 +127,21 @@ class BuildCreationMixin:
         self.markAsBuilt(build, builder)
         return build
 
-    def createRecipeBuildWithBuilder(self, private_branch=False,
-                                     builder=None):
+    def createRecipeBuildWithBuilder(self, private_branch=False, builder=None):
         if builder is None:
             builder = self.factory.makeBuilder()
         branch2 = self.factory.makeAnyBranch()
         branch1 = self.factory.makeAnyBranch()
         build = self.factory.makeSourcePackageRecipeBuild(
             recipe=self.factory.makeSourcePackageRecipe(
-                branches=[branch1, branch2]))
+                branches=[branch1, branch2]
+            )
+        )
         if private_branch:
-            with celebrity_logged_in('admin'):
+            with celebrity_logged_in("admin"):
                 branch1.setPrivate(
-                    True, getUtility(IPersonSet).getByEmail(ADMIN_EMAIL))
+                    True, getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
+                )
         Store.of(build).flush()
         self.markAsBuilt(build, builder)
         return build
@@ -170,11 +172,13 @@ class TestBuilderHistoryView(TestCaseWithFactory, BuildCreationMixin):
         # view.setupBuildList) issues a constant number of queries
         # when recipe builds are displayed.
         def builder_history_render():
-            create_initialized_view(self.builder, '+history').render()
+            create_initialized_view(self.builder, "+history").render()
+
         recorder1, recorder2 = record_two_runs(
             builder_history_render,
             partial(self.createRecipeBuildWithBuilder, builder=self.builder),
-            self.nb_objects)
+            self.nb_objects,
+        )
 
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
@@ -182,11 +186,13 @@ class TestBuilderHistoryView(TestCaseWithFactory, BuildCreationMixin):
         # Rendering to builder's history issues a constant number of queries
         # when binary builds are displayed.
         def builder_history_render():
-            create_initialized_view(self.builder, '+history').render()
+            create_initialized_view(self.builder, "+history").render()
+
         recorder1, recorder2 = record_two_runs(
             builder_history_render,
             partial(self.createBinaryPackageBuild, builder=self.builder),
-            self.nb_objects)
+            self.nb_objects,
+        )
 
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
@@ -194,12 +200,16 @@ class TestBuilderHistoryView(TestCaseWithFactory, BuildCreationMixin):
         # Rendering to builder's history issues a constant number of queries
         # when ppa binary builds are displayed.
         def builder_history_render():
-            create_initialized_view(self.builder, '+history').render()
+            create_initialized_view(self.builder, "+history").render()
+
         createBinaryPackageBuildInPPA = partial(
-            self.createBinaryPackageBuild, in_ppa=True, builder=self.builder)
+            self.createBinaryPackageBuild, in_ppa=True, builder=self.builder
+        )
         recorder1, recorder2 = record_two_runs(
-            builder_history_render, createBinaryPackageBuildInPPA,
-            self.nb_objects)
+            builder_history_render,
+            createBinaryPackageBuildInPPA,
+            self.nb_objects,
+        )
 
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
@@ -207,22 +217,26 @@ class TestBuilderHistoryView(TestCaseWithFactory, BuildCreationMixin):
         # Rendering to builder's history issues a constant number of queries
         # when translation template builds are displayed.
         def builder_history_render():
-            create_initialized_view(self.builder, '+history').render()
+            create_initialized_view(self.builder, "+history").render()
+
         recorder1, recorder2 = record_two_runs(
             builder_history_render,
             partial(
                 self.createTranslationTemplateBuildWithBuilder,
-                builder=self.builder),
-            self.nb_objects)
+                builder=self.builder,
+            ),
+            self.nb_objects,
+        )
 
         self.assertThat(recorder2, HasQueryCount.byEquality(recorder1))
 
     def test_build_history_private_build_view(self):
         self.createRecipeBuildWithBuilder(builder=self.builder)
         self.createRecipeBuildWithBuilder(
-            private_branch=True, builder=self.builder)
+            private_branch=True, builder=self.builder
+        )
         clear_cache()
-        view = create_initialized_view(self.builder, '+history')
+        view = create_initialized_view(self.builder, "+history")
         view.setupBuildList()
 
         self.assertIn(None, view.complete_builds)
@@ -230,15 +244,20 @@ class TestBuilderHistoryView(TestCaseWithFactory, BuildCreationMixin):
     def test_build_history_private_build_display(self):
         self.createRecipeBuildWithBuilder(builder=self.builder)
         self.createRecipeBuildWithBuilder(
-            private_branch=True, builder=self.builder)
+            private_branch=True, builder=self.builder
+        )
         clear_cache()
-        view = create_initialized_view(self.builder, '+history')
+        view = create_initialized_view(self.builder, "+history")
         private_build_icon_matcher = soupmatchers.HTMLContains(
             soupmatchers.Tag(
-                'Private build icon', 'img', attrs={'src': '/@@/private'}))
+                "Private build icon", "img", attrs={"src": "/@@/private"}
+            )
+        )
         private_build_matcher = soupmatchers.HTMLContains(
-            soupmatchers.Tag('Private build', 'td', text='Private job'))
+            soupmatchers.Tag("Private build", "td", text="Private job")
+        )
 
         self.assertThat(
             view.render(),
-            MatchesAll(private_build_matcher, private_build_icon_matcher))
+            MatchesAll(private_build_matcher, private_build_icon_matcher),
+        )
diff --git a/lib/lp/buildmaster/browser/tests/test_processor.py b/lib/lp/buildmaster/browser/tests/test_processor.py
index 3743881..364a206 100644
--- a/lib/lp/buildmaster/browser/tests/test_processor.py
+++ b/lib/lp/buildmaster/browser/tests/test_processor.py
@@ -13,13 +13,15 @@ class TestProcessorNavigation(TestCaseWithFactory):
     layer = DatabaseFunctionalLayer
 
     def test_processor_url(self):
-        quantum = self.factory.makeProcessor('quantum')
+        quantum = self.factory.makeProcessor("quantum")
         self.assertEqual(
-            '/+processors/quantum',
-            canonical_url(quantum, force_local_path=True))
+            "/+processors/quantum",
+            canonical_url(quantum, force_local_path=True),
+        )
 
     def test_processor_navigation(self):
-        quantum = self.factory.makeProcessor('quantum')
+        quantum = self.factory.makeProcessor("quantum")
         obj, view, request = test_traverse(
-            'http://api.launchpad.test/devel/+processors/quantum')
+            "http://api.launchpad.test/devel/+processors/quantum"
+        )
         self.assertEqual(quantum, obj)
diff --git a/lib/lp/buildmaster/builderproxy.py b/lib/lp/buildmaster/builderproxy.py
index a22ca85..a49a251 100644
--- a/lib/lp/buildmaster/builderproxy.py
+++ b/lib/lp/buildmaster/builderproxy.py
@@ -12,7 +12,7 @@ token if and only if they are allowed general internet access.
 
 __all__ = [
     "BuilderProxyMixin",
-    ]
+]
 
 import base64
 import time
@@ -36,42 +36,49 @@ class BuilderProxyMixin:
     def addProxyArgs(self, args, allow_internet=True):
         if _get_proxy_config("builder_proxy_host") and allow_internet:
             token = yield self._requestProxyToken()
-            args["proxy_url"] = (
-                "http://{username}:{password}@{host}:{port}".format(
-                    username=token['username'],
-                    password=token['secret'],
-                    host=_get_proxy_config("builder_proxy_host"),
-                    port=_get_proxy_config("builder_proxy_port")))
-            args["revocation_endpoint"] = (
-                "{endpoint}/{token}".format(
-                    endpoint=_get_proxy_config(
-                        "builder_proxy_auth_api_endpoint"),
-                    token=token['username']))
+            args[
+                "proxy_url"
+            ] = "http://{username}:{password}@{host}:{port}".format(
+                username=token["username"],
+                password=token["secret"],
+                host=_get_proxy_config("builder_proxy_host"),
+                port=_get_proxy_config("builder_proxy_port"),
+            )
+            args["revocation_endpoint"] = "{endpoint}/{token}".format(
+                endpoint=_get_proxy_config("builder_proxy_auth_api_endpoint"),
+                token=token["username"],
+            )
 
     @defer.inlineCallbacks
     def _requestProxyToken(self):
         admin_username = _get_proxy_config(
-            "builder_proxy_auth_api_admin_username")
+            "builder_proxy_auth_api_admin_username"
+        )
         if not admin_username:
             raise CannotBuild(
-                "builder_proxy_auth_api_admin_username is not configured.")
+                "builder_proxy_auth_api_admin_username is not configured."
+            )
         secret = _get_proxy_config("builder_proxy_auth_api_admin_secret")
         if not secret:
             raise CannotBuild(
-                "builder_proxy_auth_api_admin_secret is not configured.")
+                "builder_proxy_auth_api_admin_secret is not configured."
+            )
         url = _get_proxy_config("builder_proxy_auth_api_endpoint")
         if not secret:
             raise CannotBuild(
-                "builder_proxy_auth_api_endpoint is not configured.")
+                "builder_proxy_auth_api_endpoint is not configured."
+            )
         timestamp = int(time.time())
-        proxy_username = '{build_id}-{timestamp}'.format(
-            build_id=self.build.build_cookie,
-            timestamp=timestamp)
-        auth_string = '{}:{}'.format(admin_username, secret).strip()
-        auth_header = b'Basic ' + base64.b64encode(auth_string.encode('ASCII'))
+        proxy_username = "{build_id}-{timestamp}".format(
+            build_id=self.build.build_cookie, timestamp=timestamp
+        )
+        auth_string = "{}:{}".format(admin_username, secret).strip()
+        auth_header = b"Basic " + base64.b64encode(auth_string.encode("ASCII"))
 
         token = yield self._worker.process_pool.doWork(
             RequestProxyTokenCommand,
-            url=url, auth_header=auth_header,
-            proxy_username=proxy_username)
+            url=url,
+            auth_header=auth_header,
+            proxy_username=proxy_username,
+        )
         return token
diff --git a/lib/lp/buildmaster/downloader.py b/lib/lp/buildmaster/downloader.py
index 1498b03..d5b60ac 100644
--- a/lib/lp/buildmaster/downloader.py
+++ b/lib/lp/buildmaster/downloader.py
@@ -8,19 +8,16 @@ anything from the rest of Launchpad.
 """
 
 __all__ = [
-    'DownloadCommand',
-    'RequestProcess',
-    'RequestProxyTokenCommand',
-    ]
+    "DownloadCommand",
+    "RequestProcess",
+    "RequestProxyTokenCommand",
+]
 
 import os.path
 import tempfile
 
 from ampoule.child import AMPChild
-from requests import (
-    RequestException,
-    Session,
-    )
+from requests import RequestException, Session
 from requests_toolbelt.downloadutils import stream
 from requests_toolbelt.exceptions import StreamingError
 from twisted.protocols import amp
@@ -32,12 +29,12 @@ class DownloadCommand(amp.Command):
         (b"file_url", amp.Unicode()),
         (b"path_to_write", amp.Unicode()),
         (b"timeout", amp.Integer()),
-        ]
+    ]
     response = []
     errors = {
         RequestException: b"REQUEST_ERROR",
         StreamingError: b"STREAMING_ERROR",
-        }
+    }
 
 
 class RequestProxyTokenCommand(amp.Command):
@@ -46,15 +43,15 @@ class RequestProxyTokenCommand(amp.Command):
         (b"url", amp.Unicode()),
         (b"auth_header", amp.String()),
         (b"proxy_username", amp.Unicode()),
-        ]
+    ]
     response = [
         (b"username", amp.Unicode()),
         (b"secret", amp.Unicode()),
         (b"timestamp", amp.Unicode()),
-        ]
+    ]
     errors = {
         RequestException: b"REQUEST_ERROR",
-        }
+    }
 
 
 class RequestProcess(AMPChild):
@@ -71,8 +68,11 @@ class RequestProcess(AMPChild):
         except FileExistsError:
             pass
         f = tempfile.NamedTemporaryFile(
-            mode="wb", prefix=os.path.basename(path_to_write) + "_",
-            dir=os.path.dirname(path_to_write), delete=False)
+            mode="wb",
+            prefix=os.path.basename(path_to_write) + "_",
+            dir=os.path.dirname(path_to_write),
+            delete=False,
+        )
         try:
             stream.stream_response_to_file(response, path=f)
         except Exception:
@@ -89,7 +89,9 @@ class RequestProcess(AMPChild):
         session = Session()
         session.trust_env = False
         response = session.post(
-            url, headers={"Authorization": auth_header},
-            json={"username": proxy_username})
+            url,
+            headers={"Authorization": auth_header},
+            json={"username": proxy_username},
+        )
         response.raise_for_status()
         return response.json()
diff --git a/lib/lp/buildmaster/enums.py b/lib/lp/buildmaster/enums.py
index 8417ed1..d71a650 100644
--- a/lib/lp/buildmaster/enums.py
+++ b/lib/lp/buildmaster/enums.py
@@ -4,18 +4,15 @@
 """Common build interfaces."""
 
 __all__ = [
-    'BuildBaseImageType',
-    'BuilderCleanStatus',
-    'BuilderResetProtocol',
-    'BuildStatus',
-    'BuildQueueStatus',
-    'BuildFarmJobType',
-    ]
-
-from lazr.enum import (
-    DBEnumeratedType,
-    DBItem,
-    )
+    "BuildBaseImageType",
+    "BuilderCleanStatus",
+    "BuilderResetProtocol",
+    "BuildStatus",
+    "BuildQueueStatus",
+    "BuildFarmJobType",
+]
+
+from lazr.enum import DBEnumeratedType, DBItem
 
 
 class BuildStatus(DBEnumeratedType):
@@ -26,66 +23,89 @@ class BuildStatus(DBEnumeratedType):
     order to correctly manage the autobuilder queues in the BuildQueue table.
     """
 
-    NEEDSBUILD = DBItem(0, """
+    NEEDSBUILD = DBItem(
+        0,
+        """
         Needs building
 
         Build record is fresh and needs building. Nothing is yet known to
         block this build and it is a candidate for building on any free
         builder of the relevant architecture
-        """)
+        """,
+    )
 
-    FULLYBUILT = DBItem(1, """
+    FULLYBUILT = DBItem(
+        1,
+        """
         Successfully built
 
         Build record is an historic account of the build. The build is
         complete and needs no further work to complete it. The build log etc
         are all in place if available.
-        """)
+        """,
+    )
 
-    FAILEDTOBUILD = DBItem(2, """
+    FAILEDTOBUILD = DBItem(
+        2,
+        """
         Failed to build
 
         Build record is an historic account of the build. The build failed and
         cannot be automatically retried. Either a new upload will be needed
         or the build will have to be manually reset into 'NEEDSBUILD' when
         the issue is corrected
-        """)
+        """,
+    )
 
-    MANUALDEPWAIT = DBItem(3, """
+    MANUALDEPWAIT = DBItem(
+        3,
+        """
         Dependency wait
 
         Build record represents a package whose build dependencies cannot
         currently be satisfied within the relevant DistroArchSeries. This
         build will have to be manually given back (put into 'NEEDSBUILD') when
         the dependency issue is resolved.
-        """)
+        """,
+    )
 
-    CHROOTWAIT = DBItem(4, """
+    CHROOTWAIT = DBItem(
+        4,
+        """
         Chroot problem
 
         Build record represents a build which needs a chroot currently known
         to be damaged or bad in some way. The buildd maintainer will have to
         reset all relevant CHROOTWAIT builds to NEEDSBUILD after the chroot
         has been fixed.
-        """)
+        """,
+    )
 
-    SUPERSEDED = DBItem(5, """
+    SUPERSEDED = DBItem(
+        5,
+        """
         Build for superseded Source
 
         Build record represents a build which never got to happen because the
         source package release for the build was superseded before the job
         was scheduled to be run on a builder. Builds which reach this state
         will rarely if ever be reset to any other state.
-        """)
+        """,
+    )
 
-    BUILDING = DBItem(6, """
+    BUILDING = DBItem(
+        6,
+        """
         Currently building
 
         Build record represents a build which is being build by one of the
         available builders.
-        """)
+        """,
+    )
 
-    FAILEDTOUPLOAD = DBItem(7, """
+    FAILEDTOUPLOAD = DBItem(
+        7,
+        """
         Failed to upload
 
         Build record is an historic account of a build that could not be
@@ -95,28 +115,38 @@ class BuildStatus(DBEnumeratedType):
         In those cases all the build historic information will be stored (
         buildlog, datebuilt, duration, builder, etc) and the buildd admins
         will be notified via process-upload about the reason of the rejection.
-        """)
+        """,
+    )
 
-    UPLOADING = DBItem(8, """
+    UPLOADING = DBItem(
+        8,
+        """
         Uploading build
 
         The build has completed and is waiting to be processed by the
         upload processor.
-        """)
+        """,
+    )
 
-    CANCELLING = DBItem(9, """
+    CANCELLING = DBItem(
+        9,
+        """
         Cancelling build
 
         A cancellation request was made for the build. It cannot be cancelled
         immediately because a request is made in the webapp but we need to
         wait for the buildd-manager to actually cancel it.
-        """)
+        """,
+    )
 
-    CANCELLED = DBItem(10, """
+    CANCELLED = DBItem(
+        10,
+        """
         Cancelled build
 
         A build was cancelled. This is a terminal state.
-        """)
+        """,
+    )
 
 
 class BuildFarmJobType(DBEnumeratedType):
@@ -126,59 +156,86 @@ class BuildFarmJobType(DBEnumeratedType):
     farm.
     """
 
-    PACKAGEBUILD = DBItem(1, """
+    PACKAGEBUILD = DBItem(
+        1,
+        """
         Binary package build
 
         Build a source package.
-        """)
+        """,
+    )
 
-    BRANCHBUILD = DBItem(2, """
+    BRANCHBUILD = DBItem(
+        2,
+        """
         Branch build
 
         Build a package from a bazaar branch.
-        """)
+        """,
+    )
 
-    RECIPEBRANCHBUILD = DBItem(3, """
+    RECIPEBRANCHBUILD = DBItem(
+        3,
+        """
         Recipe branch build
 
         Build a package from a bazaar branch and a recipe.
-        """)
+        """,
+    )
 
-    TRANSLATIONTEMPLATESBUILD = DBItem(4, """
+    TRANSLATIONTEMPLATESBUILD = DBItem(
+        4,
+        """
         Translation template build
 
         Generate translation templates from a bazaar branch.
-        """)
+        """,
+    )
 
-    LIVEFSBUILD = DBItem(5, """
+    LIVEFSBUILD = DBItem(
+        5,
+        """
         Live filesystem build
 
         Build a live filesystem from an archive.
-        """)
+        """,
+    )
 
-    SNAPBUILD = DBItem(6, """
+    SNAPBUILD = DBItem(
+        6,
+        """
         Snap package build
 
         Build a snap package from a recipe.
-        """)
+        """,
+    )
 
-    OCIRECIPEBUILD = DBItem(7, """
+    OCIRECIPEBUILD = DBItem(
+        7,
+        """
         OCI image build
 
         Build an OCI image from a recipe.
-        """)
+        """,
+    )
 
-    CHARMRECIPEBUILD = DBItem(8, """
+    CHARMRECIPEBUILD = DBItem(
+        8,
+        """
         Charm recipe build
 
         Build a charm from a recipe.
-        """)
+        """,
+    )
 
-    CIBUILD = DBItem(9, """
+    CIBUILD = DBItem(
+        9,
+        """
         CI build
 
         Run a continuous integration job on a code revision.
-        """)
+        """,
+    )
 
 
 class BuildQueueStatus(DBEnumeratedType):
@@ -191,70 +248,97 @@ class BuildQueueStatus(DBEnumeratedType):
     includes values to represent the result of a completed job.
     """
 
-    WAITING = DBItem(0, """
+    WAITING = DBItem(
+        0,
+        """
         Waiting
 
         The job is waiting to be run.
-        """)
+        """,
+    )
 
-    RUNNING = DBItem(1, """
+    RUNNING = DBItem(
+        1,
+        """
         Running
 
         The job is currently running.
-        """)
+        """,
+    )
 
-    CANCELLING = DBItem(2, """
+    CANCELLING = DBItem(
+        2,
+        """
         Cancelling
 
         The job has been cancelled, so should be terminated.
-        """)
+        """,
+    )
 
-    SUSPENDED = DBItem(3, """
+    SUSPENDED = DBItem(
+        3,
+        """
         Suspended
 
         The job is suspended, so should not be run.
-        """)
+        """,
+    )
 
 
 class BuilderCleanStatus(DBEnumeratedType):
 
-    CLEAN = DBItem(0, """
+    CLEAN = DBItem(
+        0,
+        """
         Clean
 
         The builder is ready for use.
-        """)
+        """,
+    )
 
-    DIRTY = DBItem(1, """
+    DIRTY = DBItem(
+        1,
+        """
         Dirty
 
         The builder is dirty and needs to be cleaned before use.
-        """)
+        """,
+    )
 
-    CLEANING = DBItem(2, """
+    CLEANING = DBItem(
+        2,
+        """
         Cleaning
 
         The builder is being cleaned.
-        """)
+        """,
+    )
 
 
 class BuilderResetProtocol(DBEnumeratedType):
 
-    PROTO_1_1 = DBItem(11, """
+    PROTO_1_1 = DBItem(
+        11,
+        """
         1.1
 
         Original synchronous protocol with vm_host and buildd_name. The
         reset trigger must exit cleanly once the worker is reset and
         accepting requests.
-        """)
+        """,
+    )
 
-    PROTO_2_0 = DBItem(20, """
+    PROTO_2_0 = DBItem(
+        20,
+        """
         2.0
 
         Asynchronous protocol with vm_host and buildd_name. The reset
         trigger must exit cleanly once the request is accepted, and use
         the webservice to set Builder.clean_status back to 'Clean' when
         the worker is reset and accepting requests.
-        """)
+        """,
+    )
 
 
 class BuildBaseImageType(DBEnumeratedType):
diff --git a/lib/lp/buildmaster/interactor.py b/lib/lp/buildmaster/interactor.py
index eed2b23..70b738f 100644
--- a/lib/lp/buildmaster/interactor.py
+++ b/lib/lp/buildmaster/interactor.py
@@ -2,57 +2,41 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'BuilderInteractor',
-    'extract_vitals_from_db',
-    ]
+    "BuilderInteractor",
+    "extract_vitals_from_db",
+]
 
-from collections import (
-    namedtuple,
-    OrderedDict,
-    )
 import logging
 import os.path
 import sys
 import traceback
+from collections import OrderedDict, namedtuple
 from urllib.parse import urlparse
 
-from ampoule.pool import ProcessPool
 import six
 import transaction
-from twisted.internet import (
-    defer,
-    reactor as default_reactor,
-    )
+from ampoule.pool import ProcessPool
+from twisted.internet import defer
+from twisted.internet import reactor as default_reactor
 from twisted.internet.interfaces import IReactorCore
 from twisted.web import xmlrpc
 from twisted.web.client import HTTPConnectionPool
-from zope.security.proxy import (
-    isinstance as zope_isinstance,
-    removeSecurityProxy,
-    )
+from zope.security.proxy import isinstance as zope_isinstance
+from zope.security.proxy import removeSecurityProxy
 
-from lp.buildmaster.downloader import (
-    DownloadCommand,
-    RequestProcess,
-    )
-from lp.buildmaster.enums import (
-    BuilderCleanStatus,
-    BuilderResetProtocol,
-    )
+from lp.buildmaster.downloader import DownloadCommand, RequestProcess
+from lp.buildmaster.enums import BuilderCleanStatus, BuilderResetProtocol
 from lp.buildmaster.interfaces.builder import (
     BuildDaemonError,
     BuildDaemonIsolationError,
     CannotFetchFile,
     CannotResumeHost,
-    )
+)
 from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
     IBuildFarmJobBehaviour,
-    )
+)
 from lp.services.config import config
-from lp.services.job.runner import (
-    QuietAMPConnector,
-    VirtualEnvProcessStarter,
-    )
+from lp.services.job.runner import QuietAMPConnector, VirtualEnvProcessStarter
 from lp.services.twistedsupport import cancel_on_timeout
 from lp.services.twistedsupport.processmonitor import ProcessWithTimeout
 from lp.services.webapp import urlappend
@@ -60,6 +44,7 @@ from lp.services.webapp import urlappend
 
 class QuietQueryFactory(xmlrpc._QueryFactory):
     """XMLRPC client factory that doesn't splatter the log with junk."""
+
     noisy = False
 
 
@@ -105,7 +90,8 @@ def default_process_pool(reactor=None):
         _default_process_pool.start()
         if IReactorCore.providedBy(reactor):
             shutdown_id = reactor.addSystemEventTrigger(
-                "during", "shutdown", _default_process_pool.stop)
+                "during", "shutdown", _default_process_pool.stop
+            )
             _default_process_pool_shutdown = (reactor, shutdown_id)
     return _default_process_pool
 
@@ -135,8 +121,16 @@ class BuilderWorker:
     # many false positives in your test run and will most likely break
     # production.
 
-    def __init__(self, proxy, builder_url, vm_host, timeout, reactor,
-                 pool=None, process_pool=None):
+    def __init__(
+        self,
+        proxy,
+        builder_url,
+        vm_host,
+        timeout,
+        reactor,
+        pool=None,
+        process_pool=None,
+    ):
         """Initialize a BuilderWorker.
 
         :param proxy: An XML-RPC proxy, implementing 'callRemote'. It must
@@ -146,7 +140,7 @@ class BuilderWorker:
         """
         self.url = builder_url
         self._vm_host = vm_host
-        self._file_cache_url = urlappend(builder_url, 'filecache')
+        self._file_cache_url = urlappend(builder_url, "filecache")
         self._server = proxy
         self.timeout = timeout
         if reactor is None:
@@ -160,8 +154,16 @@ class BuilderWorker:
         self.process_pool = process_pool
 
     @classmethod
-    def makeBuilderWorker(cls, builder_url, vm_host, timeout, reactor=None,
-                          proxy=None, pool=None, process_pool=None):
+    def makeBuilderWorker(
+        cls,
+        builder_url,
+        vm_host,
+        timeout,
+        reactor=None,
+        proxy=None,
+        pool=None,
+        process_pool=None,
+    ):
         """Create and return a `BuilderWorker`.
 
         :param builder_url: The URL of the worker buildd machine,
@@ -173,40 +175,46 @@ class BuilderWorker:
         :param pool: Used by tests to override the HTTPConnectionPool.
         :param process_pool: Used by tests to override the ProcessPool.
         """
-        rpc_url = urlappend(builder_url, 'rpc')
+        rpc_url = urlappend(builder_url, "rpc")
         if proxy is None:
             server_proxy = xmlrpc.Proxy(
-                rpc_url.encode('UTF-8'), allowNone=True,
-                connectTimeout=timeout)
+                rpc_url.encode("UTF-8"), allowNone=True, connectTimeout=timeout
+            )
             server_proxy.queryFactory = QuietQueryFactory
         else:
             server_proxy = proxy
         return cls(
-            server_proxy, builder_url, vm_host, timeout, reactor,
-            pool=pool, process_pool=process_pool)
+            server_proxy,
+            builder_url,
+            vm_host,
+            timeout,
+            reactor,
+            pool=pool,
+            process_pool=process_pool,
+        )
 
     def _with_timeout(self, d, timeout=None):
         return cancel_on_timeout(d, timeout or self.timeout, self.reactor)
 
     def abort(self):
         """Abort the current build."""
-        return self._with_timeout(self._server.callRemote('abort'))
+        return self._with_timeout(self._server.callRemote("abort"))
 
     def clean(self):
         """Clean up the waiting files and reset the worker's internal state."""
-        return self._with_timeout(self._server.callRemote('clean'))
+        return self._with_timeout(self._server.callRemote("clean"))
 
     def echo(self, *args):
         """Echo the arguments back."""
-        return self._with_timeout(self._server.callRemote('echo', *args))
+        return self._with_timeout(self._server.callRemote("echo", *args))
 
     def info(self):
         """Return the protocol version and the builder methods supported."""
-        return self._with_timeout(self._server.callRemote('info'))
+        return self._with_timeout(self._server.callRemote("info"))
 
     def status(self):
         """Return the status of the build daemon."""
-        return self._with_timeout(self._server.callRemote('status'))
+        return self._with_timeout(self._server.callRemote("status"))
 
     def ensurepresent(self, sha1sum, url, username, password):
         """Attempt to ensure the given file is present."""
@@ -215,8 +223,10 @@ class BuilderWorker:
         # downloads large files.
         return self._with_timeout(
             self._server.callRemote(
-                'ensurepresent', sha1sum, url, username, password),
-            self.timeout * 5)
+                "ensurepresent", sha1sum, url, username, password
+            ),
+            self.timeout * 5,
+        )
 
     def getURL(self, sha1):
         """Get the URL for a file on the builder with a given SHA-1."""
@@ -243,15 +253,22 @@ class BuilderWorker:
             # reactor thread).
             yield self.process_pool.doWork(
                 DownloadCommand,
-                file_url=file_url, path_to_write=path_to_write,
-                timeout=self.timeout)
+                file_url=file_url,
+                path_to_write=path_to_write,
+                timeout=self.timeout,
+            )
             if logger is not None:
                 logger.info("Grabbed %s" % file_url)
         except Exception as e:
             if logger is not None:
-                logger.info("Failed to grab %s: %s\n%s" % (
-                    file_url, e,
-                    " ".join(traceback.format_exception(*sys.exc_info()))))
+                logger.info(
+                    "Failed to grab %s: %s\n%s"
+                    % (
+                        file_url,
+                        e,
+                        " ".join(traceback.format_exception(*sys.exc_info())),
+                    )
+                )
             raise
 
     def getFiles(self, files, logger=None):
@@ -263,9 +280,12 @@ class BuilderWorker:
 
         :return: A DeferredList that calls back when the download is done.
         """
-        dl = defer.gatherResults([
-            self.getFile(builder_file, local_file, logger=logger)
-            for builder_file, local_file in files])
+        dl = defer.gatherResults(
+            [
+                self.getFile(builder_file, local_file, logger=logger)
+                for builder_file, local_file in files
+            ]
+        )
         return dl
 
     def resume(self, clock=None):
@@ -281,26 +301,28 @@ class BuilderWorker:
             (stdout, stderr, subprocess exitcode) triple
         """
         url_components = urlparse(self.url)
-        buildd_name = url_components.hostname.split('.')[0]
+        buildd_name = url_components.hostname.split(".")[0]
         resume_command = config.builddmaster.vm_resume_command % {
-            'vm_host': self._vm_host,
-            'buildd_name': buildd_name}
+            "vm_host": self._vm_host,
+            "buildd_name": buildd_name,
+        }
         # Twisted API requires string but the configuration provides unicode.
-        resume_argv = [
-            term.encode('utf-8') for term in resume_command.split()]
+        resume_argv = [term.encode("utf-8") for term in resume_command.split()]
         d = defer.Deferred()
         p = ProcessWithTimeout(d, self.timeout, clock=clock)
         p.spawnProcess(resume_argv[0], tuple(resume_argv))
         return d
 
     @defer.inlineCallbacks
-    def sendFileToWorker(self, sha1, url, username="", password="",
-                         logger=None):
+    def sendFileToWorker(
+        self, sha1, url, username="", password="", logger=None
+    ):
         """Helper to send the file at 'url' with 'sha1' to this builder."""
         if logger is not None:
             logger.info(
-                "Asking %s to ensure it has %s (%s%s)" % (
-                    self.url, sha1, url, ' with auth' if username else ''))
+                "Asking %s to ensure it has %s (%s%s)"
+                % (self.url, sha1, url, " with auth" if username else "")
+            )
         present, info = yield self.ensurepresent(sha1, url, username, password)
         if not present:
             raise CannotFetchFile(url, info)
@@ -318,15 +340,31 @@ class BuilderWorker:
         """
         if isinstance(filemap, OrderedDict):
             filemap = dict(filemap)
-        return self._with_timeout(self._server.callRemote(
-            'build', buildid, builder_type, chroot_sha1, filemap, args))
+        return self._with_timeout(
+            self._server.callRemote(
+                "build", buildid, builder_type, chroot_sha1, filemap, args
+            )
+        )
 
 
 BuilderVitals = namedtuple(
-    'BuilderVitals',
-    ('name', 'url', 'processor_names', 'virtualized', 'vm_host',
-     'vm_reset_protocol', 'builderok', 'manual', 'build_queue', 'version',
-     'clean_status', 'active', 'failure_count'))
+    "BuilderVitals",
+    (
+        "name",
+        "url",
+        "processor_names",
+        "virtualized",
+        "vm_host",
+        "vm_reset_protocol",
+        "builderok",
+        "manual",
+        "build_queue",
+        "version",
+        "clean_status",
+        "active",
+        "failure_count",
+    ),
+)
 
 _BQ_UNSPECIFIED = object()
 
@@ -335,15 +373,23 @@ def extract_vitals_from_db(builder, build_queue=_BQ_UNSPECIFIED):
     if build_queue == _BQ_UNSPECIFIED:
         build_queue = builder.currentjob
     return BuilderVitals(
-        builder.name, builder.url,
+        builder.name,
+        builder.url,
         [processor.name for processor in builder.processors],
-        builder.virtualized, builder.vm_host, builder.vm_reset_protocol,
-        builder.builderok, builder.manual, build_queue, builder.version,
-        builder.clean_status, builder.active, builder.failure_count)
+        builder.virtualized,
+        builder.vm_host,
+        builder.vm_reset_protocol,
+        builder.builderok,
+        builder.manual,
+        build_queue,
+        builder.version,
+        builder.clean_status,
+        builder.active,
+        builder.failure_count,
+    )
 
 
 class BuilderInteractor:
-
     @staticmethod
     def makeWorkerFromVitals(vitals):
         if vitals.virtualized:
@@ -351,7 +397,8 @@ class BuilderInteractor:
         else:
             timeout = config.builddmaster.socket_timeout
         return BuilderWorker.makeBuilderWorker(
-            vitals.url, vitals.vm_host, timeout)
+            vitals.url, vitals.vm_host, timeout
+        )
 
     @staticmethod
     def getBuildBehaviour(queue_item, builder, worker):
@@ -376,10 +423,10 @@ class BuilderInteractor:
             whose value is a CannotResumeHost exception.
         """
         if not vitals.virtualized:
-            return defer.fail(CannotResumeHost('Builder is not virtualized.'))
+            return defer.fail(CannotResumeHost("Builder is not virtualized."))
 
         if not vitals.vm_host:
-            return defer.fail(CannotResumeHost('Undefined vm_host.'))
+            return defer.fail(CannotResumeHost("Undefined vm_host."))
 
         logger = cls._getWorkerScannerLogger()
         logger.info("Resuming %s (%s)" % (vitals.name, vitals.url))
@@ -393,8 +440,9 @@ class BuilderInteractor:
         def got_resume_bad(failure):
             stdout, stderr, code = failure.value
             raise CannotResumeHost(
-                "Resuming failed:\nOUT:\n%s\nERR:\n%s\n" %
-                (six.ensure_str(stdout), six.ensure_str(stderr)))
+                "Resuming failed:\nOUT:\n%s\nERR:\n%s\n"
+                % (six.ensure_str(stdout), six.ensure_str(stderr))
+            )
 
         return d.addCallback(got_resume_ok).addErrback(got_resume_bad)
 
@@ -414,7 +462,8 @@ class BuilderInteractor:
                 # once resumeWorkerHost returns the worker should be
                 # running.
                 builder_factory[vitals.name].setCleanStatus(
-                    BuilderCleanStatus.CLEANING)
+                    BuilderCleanStatus.CLEANING
+                )
                 transaction.commit()
                 yield cls.resumeWorkerHost(vitals, worker)
                 # We ping the resumed worker before we try to do anything
@@ -433,37 +482,39 @@ class BuilderInteractor:
                 if vitals.clean_status == BuilderCleanStatus.DIRTY:
                     yield cls.resumeWorkerHost(vitals, worker)
                     builder_factory[vitals.name].setCleanStatus(
-                        BuilderCleanStatus.CLEANING)
+                        BuilderCleanStatus.CLEANING
+                    )
                     transaction.commit()
                     logger = cls._getWorkerScannerLogger()
                     logger.info("%s is being cleaned.", vitals.name)
                 return False
             raise CannotResumeHost(
-                "Invalid vm_reset_protocol: %r" % vitals.vm_reset_protocol)
+                "Invalid vm_reset_protocol: %r" % vitals.vm_reset_protocol
+            )
         else:
             worker_status = yield worker.status()
-            status = worker_status.get('builder_status', None)
-            if status == 'BuilderStatus.IDLE':
+            status = worker_status.get("builder_status", None)
+            if status == "BuilderStatus.IDLE":
                 # This is as clean as we can get it.
                 return True
-            elif status == 'BuilderStatus.BUILDING':
+            elif status == "BuilderStatus.BUILDING":
                 # Asynchronously abort() the worker and wait until WAITING.
                 yield worker.abort()
                 return False
-            elif status == 'BuilderStatus.ABORTING':
+            elif status == "BuilderStatus.ABORTING":
                 # Wait it out until WAITING.
                 return False
-            elif status == 'BuilderStatus.WAITING':
+            elif status == "BuilderStatus.WAITING":
                 # Just a synchronous clean() call and we'll be idle.
                 yield worker.clean()
                 return True
-            raise BuildDaemonError(
-                "Invalid status during clean: %r" % status)
+            raise BuildDaemonError("Invalid status during clean: %r" % status)
 
     @classmethod
     @defer.inlineCallbacks
-    def _startBuild(cls, build_queue_item, vitals, builder, worker, behaviour,
-                    logger):
+    def _startBuild(
+        cls, build_queue_item, vitals, builder, worker, behaviour, logger
+    ):
         """Start a build on this builder.
 
         :param build_queue_item: A BuildQueueItem to build.
@@ -478,11 +529,13 @@ class BuilderInteractor:
         # Set the build behaviour depending on the provided build queue item.
         if not builder.builderok:
             raise BuildDaemonIsolationError(
-                "Attempted to start a build on a known-bad builder.")
+                "Attempted to start a build on a known-bad builder."
+            )
 
         if builder.clean_status != BuilderCleanStatus.CLEAN:
             raise BuildDaemonIsolationError(
-                "Attempted to start build on a dirty worker.")
+                "Attempted to start build on a dirty worker."
+            )
 
         builder.setCleanStatus(BuilderCleanStatus.DIRTY)
         transaction.commit()
@@ -508,21 +561,27 @@ class BuilderInteractor:
             candidate = builder_factory.acquireBuildCandidate(vitals, builder)
             if candidate is not None:
                 if candidate.specific_source.postprocessCandidate(
-                        candidate, logger):
+                    candidate, logger
+                ):
                     break
         else:
             logger.debug("No build candidates available for builder.")
             return None
 
         new_behaviour = cls.getBuildBehaviour(candidate, builder, worker)
-        needed_bfjb = type(removeSecurityProxy(
-            IBuildFarmJobBehaviour(candidate.specific_build)))
+        needed_bfjb = type(
+            removeSecurityProxy(
+                IBuildFarmJobBehaviour(candidate.specific_build)
+            )
+        )
         if not zope_isinstance(new_behaviour, needed_bfjb):
             raise AssertionError(
-                "Inappropriate IBuildFarmJobBehaviour: %r is not a %r" %
-                (new_behaviour, needed_bfjb))
+                "Inappropriate IBuildFarmJobBehaviour: %r is not a %r"
+                % (new_behaviour, needed_bfjb)
+            )
         yield cls._startBuild(
-            candidate, vitals, builder, worker, new_behaviour, logger)
+            candidate, vitals, builder, worker, new_behaviour, logger
+        )
         return candidate
 
     @staticmethod
@@ -543,7 +602,8 @@ class BuilderInteractor:
             # fixed number of bytes from the tail of the log.  Turn it into
             # Unicode as best we can.
             logtail = worker_status.get("logtail").data.decode(
-                "UTF-8", errors="replace")
+                "UTF-8", errors="replace"
+            )
             # PostgreSQL text columns can't contain \0 characters, and since
             # we only use this for web UI display purposes there's no point
             # in going through contortions to store them.
@@ -554,8 +614,15 @@ class BuilderInteractor:
 
     @classmethod
     @defer.inlineCallbacks
-    def updateBuild(cls, vitals, worker, worker_status, builder_factory,
-                    behaviour_factory, manager):
+    def updateBuild(
+        cls,
+        vitals,
+        worker,
+        worker_status,
+        builder_factory,
+        behaviour_factory,
+        manager,
+    ):
         """Verify the current build job status.
 
         Perform the required actions for each state.
@@ -566,15 +633,19 @@ class BuilderInteractor:
         # impossible to get past the cookie check unless the worker
         # matches the DB, and this method isn't called unless the DB
         # says there's a job.
-        builder_status = worker_status['builder_status']
+        builder_status = worker_status["builder_status"]
         if builder_status not in (
-                'BuilderStatus.BUILDING', 'BuilderStatus.ABORTING',
-                'BuilderStatus.WAITING'):
+            "BuilderStatus.BUILDING",
+            "BuilderStatus.ABORTING",
+            "BuilderStatus.WAITING",
+        ):
             raise AssertionError("Unknown status %s" % builder_status)
         builder = builder_factory[vitals.name]
         behaviour = behaviour_factory(vitals.build_queue, builder, worker)
         if builder_status in (
-                'BuilderStatus.BUILDING', 'BuilderStatus.ABORTING'):
+            "BuilderStatus.BUILDING",
+            "BuilderStatus.ABORTING",
+        ):
             logtail = cls.extractLogTail(worker_status)
             if logtail is not None:
                 manager.addLogTail(vitals.build_queue.id, logtail)
@@ -589,5 +660,5 @@ class BuilderInteractor:
         # should be able to configure the root-logger instead of creating
         # a new object, then the logger lookups won't require the specific
         # name argument anymore. See bug 164203.
-        logger = logging.getLogger('worker-scanner')
+        logger = logging.getLogger("worker-scanner")
         return logger
diff --git a/lib/lp/buildmaster/interfaces/builder.py b/lib/lp/buildmaster/interfaces/builder.py
index f062feb..09e548d 100644
--- a/lib/lp/buildmaster/interfaces/builder.py
+++ b/lib/lp/buildmaster/interfaces/builder.py
@@ -4,18 +4,19 @@
 """Builder interfaces."""
 
 __all__ = [
-    'BuildDaemonError',
-    'BuildDaemonIsolationError',
-    'BuildWorkerFailure',
-    'CannotBuild',
-    'CannotFetchFile',
-    'CannotResumeHost',
-    'IBuilder',
-    'IBuilderModerateAttributes',
-    'IBuilderSet',
-    ]
+    "BuildDaemonError",
+    "BuildDaemonIsolationError",
+    "BuildWorkerFailure",
+    "CannotBuild",
+    "CannotFetchFile",
+    "CannotResumeHost",
+    "IBuilder",
+    "IBuilderModerateAttributes",
+    "IBuilderSet",
+]
 
 from lazr.restful.declarations import (
+    REQUEST_USER,
     call_with,
     collection_default_content,
     export_factory_operation,
@@ -29,40 +30,19 @@ from lazr.restful.declarations import (
     operation_parameters,
     operation_returns_collection_of,
     operation_returns_entry,
-    REQUEST_USER,
-    )
-from lazr.restful.fields import (
-    Reference,
-    ReferenceChoice,
-    )
+)
+from lazr.restful.fields import Reference, ReferenceChoice
 from lazr.restful.interface import copy_field
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Bool,
-    Choice,
-    Datetime,
-    Int,
-    List,
-    Text,
-    TextLine,
-    )
+from zope.interface import Attribute, Interface
+from zope.schema import Bool, Choice, Datetime, Int, List, Text, TextLine
 
 from lp import _
 from lp.app.validators.name import name_validator
 from lp.app.validators.url import builder_url_validator
-from lp.buildmaster.enums import (
-    BuilderCleanStatus,
-    BuilderResetProtocol,
-    )
+from lp.buildmaster.enums import BuilderCleanStatus, BuilderResetProtocol
 from lp.buildmaster.interfaces.processor import IProcessor
 from lp.registry.interfaces.role import IHasOwner
-from lp.services.fields import (
-    PersonChoice,
-    Title,
-    )
+from lp.services.fields import PersonChoice, Title
 from lp.soyuz.interfaces.buildrecords import IHasBuildRecords
 
 
@@ -98,108 +78,202 @@ class BuildWorkerFailure(BuildDaemonError):
 
 
 class IBuilderModerateAttributes(Interface):
-    manual = exported(Bool(
-        title=_('Manual Mode'), required=False, default=False,
-        description=_('The auto-build system does not dispatch '
-                      'jobs automatically for builders in manual mode.')))
+    manual = exported(
+        Bool(
+            title=_("Manual Mode"),
+            required=False,
+            default=False,
+            description=_(
+                "The auto-build system does not dispatch "
+                "jobs automatically for builders in manual mode."
+            ),
+        )
+    )
 
-    builderok = exported(Bool(
-        title=_('Builder State OK'), required=True, default=True,
-        description=_('Whether or not the builder is ok')),
-        as_of='devel')
+    builderok = exported(
+        Bool(
+            title=_("Builder State OK"),
+            required=True,
+            default=True,
+            description=_("Whether or not the builder is ok"),
+        ),
+        as_of="devel",
+    )
 
-    failnotes = exported(Text(
-        title=_('Failure Notes'), required=False,
-        description=_('The reason for a builder not being ok')))
+    failnotes = exported(
+        Text(
+            title=_("Failure Notes"),
+            required=False,
+            description=_("The reason for a builder not being ok"),
+        )
+    )
 
 
 class IBuilderView(IHasBuildRecords, IHasOwner):
 
     id = Attribute("Builder identifier")
 
-    processor = exported(ReferenceChoice(
-        title=_('Processor'), required=True, vocabulary='Processor',
-        schema=IProcessor,
-        description=_(
-            'DEPRECATED: Processor identifying jobs which can be built by '
-            'this device. Use `processors` instead to handle multiple '
-            'supported architectures.')),
-        as_of='devel')
+    processor = exported(
+        ReferenceChoice(
+            title=_("Processor"),
+            required=True,
+            vocabulary="Processor",
+            schema=IProcessor,
+            description=_(
+                "DEPRECATED: Processor identifying jobs which can be built by "
+                "this device. Use `processors` instead to handle multiple "
+                "supported architectures."
+            ),
+        ),
+        as_of="devel",
+    )
 
     processors = exported(
         List(
             title=_("Processors"),
             description=_(
                 "Processors identifying jobs which can be built by this "
-                "device."),
+                "device."
+            ),
             value_type=ReferenceChoice(
-                vocabulary='Processor', schema=IProcessor)),
-        as_of='devel')
-
-    owner = exported(PersonChoice(
-        title=_('Owner'), required=True, vocabulary='ValidOwner',
-        description=_('Builder owner, a Launchpad member which '
-                      'will be responsible for this device.')))
-
-    url = exported(TextLine(
-        title=_('URL'), required=True, constraint=builder_url_validator,
-        description=_('The URL to the build machine, used as a unique '
-                      'identifier. Includes protocol, host and port only, '
-                      'e.g.: http://farm.com:8221/')))
-
-    name = exported(TextLine(
-        title=_('Name'), required=True, constraint=name_validator,
-        description=_('The builder name used for reference purposes')))
-
-    title = exported(Title(
-        title=_('Title'), required=True,
-        description=_(
-            'The builder title. Should be just a few words.')))
-
-    virtualized = exported(Bool(
-        title=_('Virtualized'), required=True, default=False,
-        description=_('Whether or not the builder is a virtual Xen '
-                      'instance.')))
-
-    vm_host = exported(TextLine(
-        title=_('VM host'), required=False,
-        description=_('The machine hostname hosting the virtual '
-                      'buildd-worker, e.g.: foobar-host.ppa')))
-
-    vm_reset_protocol = exported(Choice(
-        title=_("VM reset protocol"), vocabulary=BuilderResetProtocol,
-        readonly=False, required=False,
-        description=_("The protocol version for resetting the VM.")))
-
-    active = exported(Bool(
-        title=_('Publicly Visible'), required=False, default=True,
-        description=_('Whether or not to present the builder publicly.')))
+                vocabulary="Processor", schema=IProcessor
+            ),
+        ),
+        as_of="devel",
+    )
+
+    owner = exported(
+        PersonChoice(
+            title=_("Owner"),
+            required=True,
+            vocabulary="ValidOwner",
+            description=_(
+                "Builder owner, a Launchpad member which "
+                "will be responsible for this device."
+            ),
+        )
+    )
+
+    url = exported(
+        TextLine(
+            title=_("URL"),
+            required=True,
+            constraint=builder_url_validator,
+            description=_(
+                "The URL to the build machine, used as a unique "
+                "identifier. Includes protocol, host and port only, "
+                "e.g.: http://farm.com:8221/"
+            ),
+        )
+    )
+
+    name = exported(
+        TextLine(
+            title=_("Name"),
+            required=True,
+            constraint=name_validator,
+            description=_("The builder name used for reference purposes"),
+        )
+    )
+
+    title = exported(
+        Title(
+            title=_("Title"),
+            required=True,
+            description=_("The builder title. Should be just a few words."),
+        )
+    )
+
+    virtualized = exported(
+        Bool(
+            title=_("Virtualized"),
+            required=True,
+            default=False,
+            description=_(
+                "Whether or not the builder is a virtual Xen instance."
+            ),
+        )
+    )
+
+    vm_host = exported(
+        TextLine(
+            title=_("VM host"),
+            required=False,
+            description=_(
+                "The machine hostname hosting the virtual "
+                "buildd-worker, e.g.: foobar-host.ppa"
+            ),
+        )
+    )
+
+    vm_reset_protocol = exported(
+        Choice(
+            title=_("VM reset protocol"),
+            vocabulary=BuilderResetProtocol,
+            readonly=False,
+            required=False,
+            description=_("The protocol version for resetting the VM."),
+        )
+    )
+
+    active = exported(
+        Bool(
+            title=_("Publicly Visible"),
+            required=False,
+            default=True,
+            description=_("Whether or not to present the builder publicly."),
+        )
+    )
 
     currentjob = Attribute("BuildQueue instance for job being processed.")
 
-    current_build = exported(Reference(
-        title=_("Current build"), required=False, readonly=True,
-        schema=Interface,  # Really IBuildFarmJob.
-        description=_("The job currently running on this builder.")),
-        as_of="devel")
+    current_build = exported(
+        Reference(
+            title=_("Current build"),
+            required=False,
+            readonly=True,
+            schema=Interface,  # Really IBuildFarmJob.
+            description=_("The job currently running on this builder."),
+        ),
+        as_of="devel",
+    )
 
-    failure_count = exported(Int(
-        title=_('Failure Count'), required=False, default=0,
-       description=_("Number of consecutive failures for this builder.")))
+    failure_count = exported(
+        Int(
+            title=_("Failure Count"),
+            required=False,
+            default=0,
+            description=_("Number of consecutive failures for this builder."),
+        )
+    )
 
-    version = exported(Text(
-        title=_('Version'), required=False,
-        description=_('The version of launchpad-buildd on the worker.')))
+    version = exported(
+        Text(
+            title=_("Version"),
+            required=False,
+            description=_("The version of launchpad-buildd on the worker."),
+        )
+    )
 
-    clean_status = exported(Choice(
-        title=_("Clean status"), vocabulary=BuilderCleanStatus, readonly=True,
-        description=_(
-            "The readiness of the builder to take a job. Only internal build "
-            "infrastructure bots need to or should write to this.")))
+    clean_status = exported(
+        Choice(
+            title=_("Clean status"),
+            vocabulary=BuilderCleanStatus,
+            readonly=True,
+            description=_(
+                "The readiness of the builder to take a job. Only internal "
+                "build infrastructure bots need to or should write to this."
+            ),
+        )
+    )
 
-    date_clean_status_changed = exported(Datetime(
-        title=_("Date clean status changed"), readonly=True,
-        description=_("The date the builder's clean status last changed.")))
+    date_clean_status_changed = exported(
+        Datetime(
+            title=_("Date clean status changed"),
+            readonly=True,
+            description=_("The date the builder's clean status last changed."),
+        )
+    )
 
     def gotFailure():
         """Increment failure_count on the builder."""
@@ -212,11 +286,10 @@ class IBuilderView(IHasBuildRecords, IHasOwner):
 
 
 class IBuilderEdit(Interface):
-
-    @mutator_for(IBuilderView['clean_status'])
-    @operation_parameters(status=copy_field(IBuilderView['clean_status']))
+    @mutator_for(IBuilderView["clean_status"])
+    @operation_parameters(status=copy_field(IBuilderView["clean_status"]))
     @export_write_operation()
-    @operation_for_version('devel')
+    @operation_for_version("devel")
     def setCleanStatus(status):
         """Update the clean status."""
 
@@ -237,15 +310,30 @@ class IBuilder(IBuilderEdit, IBuilderView, IBuilderModerateAttributes):
 
 
 class IBuilderSetAdmin(Interface):
-
     @call_with(owner=REQUEST_USER)
     @export_factory_operation(
         IBuilder,
-        ['processors', 'url', 'name', 'title', 'active', 'virtualized',
-         'vm_host'])
-    @operation_for_version('devel')
-    def new(processors, url, name, title, owner, active=True,
-            virtualized=False, vm_host=None):
+        [
+            "processors",
+            "url",
+            "name",
+            "title",
+            "active",
+            "virtualized",
+            "vm_host",
+        ],
+    )
+    @operation_for_version("devel")
+    def new(
+        processors,
+        url,
+        name,
+        title,
+        owner,
+        active=True,
+        virtualized=False,
+        vm_host=None,
+    ):
         """Create a new builder.
 
         The builder will be set to manual. An admin needs to verify its
@@ -263,7 +351,8 @@ class IBuilderSet(IBuilderSetAdmin):
     Methods on this interface should deal with the set of Builders:
     methods that affect a single Builder should be on IBuilder.
     """
-    title = Attribute('Title')
+
+    title = Attribute("Title")
 
     def __iter__():
         """Iterate over builders."""
@@ -272,7 +361,8 @@ class IBuilderSet(IBuilderSetAdmin):
         """Retrieve a builder by name"""
 
     @operation_parameters(
-        name=TextLine(title=_("Builder name"), required=True))
+        name=TextLine(title=_("Builder name"), required=True)
+    )
     @operation_returns_entry(IBuilder)
     @export_read_operation()
     @operation_for_version("beta")
@@ -293,7 +383,7 @@ class IBuilderSet(IBuilderSetAdmin):
         """Return all active configured builders."""
 
     @export_read_operation()
-    @operation_for_version('devel')
+    @operation_for_version("devel")
     def getBuildQueueSizes():
         """Return the number of pending builds for each processor.
 
@@ -315,11 +405,12 @@ class IBuilderSet(IBuilderSetAdmin):
 
     @operation_parameters(
         processor=Reference(
-            title=_("Processor"), required=True, schema=IProcessor),
-        virtualized=Bool(
-            title=_("Virtualized"), required=False, default=True))
+            title=_("Processor"), required=True, schema=IProcessor
+        ),
+        virtualized=Bool(title=_("Virtualized"), required=False, default=True),
+    )
     @operation_returns_collection_of(IBuilder)
     @export_read_operation()
-    @operation_for_version('devel')
+    @operation_for_version("devel")
     def getBuildersForQueue(processor, virtualized):
         """Return all builders for given processor/virtualization setting."""
diff --git a/lib/lp/buildmaster/interfaces/buildfarmjob.py b/lib/lp/buildmaster/interfaces/buildfarmjob.py
index 92fc062..f4200bd 100644
--- a/lib/lp/buildmaster/interfaces/buildfarmjob.py
+++ b/lib/lp/buildmaster/interfaces/buildfarmjob.py
@@ -4,18 +4,18 @@
 """Interface for Soyuz build farm jobs."""
 
 __all__ = [
-    'CannotBeRescored',
-    'CannotBeRetried',
-    'IBuildFarmJob',
-    'IBuildFarmJobAdmin',
-    'IBuildFarmJobDB',
-    'IBuildFarmJobEdit',
-    'IBuildFarmJobSet',
-    'IBuildFarmJobSource',
-    'IBuildFarmJobView',
-    'InconsistentBuildFarmJobError',
-    'ISpecificBuildFarmJobSource',
-    ]
+    "CannotBeRescored",
+    "CannotBeRetried",
+    "IBuildFarmJob",
+    "IBuildFarmJobAdmin",
+    "IBuildFarmJobDB",
+    "IBuildFarmJobEdit",
+    "IBuildFarmJobSet",
+    "IBuildFarmJobSource",
+    "IBuildFarmJobView",
+    "InconsistentBuildFarmJobError",
+    "ISpecificBuildFarmJobSource",
+]
 
 import http.client
 
@@ -26,26 +26,13 @@ from lazr.restful.declarations import (
     exported_as_webservice_entry,
     operation_for_version,
     operation_parameters,
-    )
+)
 from lazr.restful.fields import Reference
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Bool,
-    Choice,
-    Datetime,
-    Int,
-    TextLine,
-    Timedelta,
-    )
+from zope.interface import Attribute, Interface
+from zope.schema import Bool, Choice, Datetime, Int, TextLine, Timedelta
 
 from lp import _
-from lp.buildmaster.enums import (
-    BuildFarmJobType,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildFarmJobType, BuildStatus
 from lp.buildmaster.interfaces.builder import IBuilder
 from lp.buildmaster.interfaces.processor import IProcessor
 from lp.services.librarian.interfaces import ILibraryFileAlias
@@ -82,122 +69,186 @@ class IBuildFarmJobDB(Interface):
     This is deprecated while it's flattened into the concrete implementations.
     """
 
-    id = Attribute('The build farm job ID.')
+    id = Attribute("The build farm job ID.")
 
     job_type = Choice(
-        title=_("Job type"), required=True, readonly=True,
+        title=_("Job type"),
+        required=True,
+        readonly=True,
         vocabulary=BuildFarmJobType,
-        description=_("The specific type of job."))
+        description=_("The specific type of job."),
+    )
 
 
 class IBuildFarmJobView(Interface):
     """`IBuildFarmJob` attributes that require launchpad.View."""
 
-    id = Attribute('The build farm job ID.')
+    id = Attribute("The build farm job ID.")
 
-    build_farm_job = Attribute('Generic build farm job record')
+    build_farm_job = Attribute("Generic build farm job record")
 
     processor = Reference(
-        IProcessor, title=_("Processor"), required=False, readonly=True,
+        IProcessor,
+        title=_("Processor"),
+        required=False,
+        readonly=True,
         description=_(
             "The Processor required by this build farm job. "
-            "This should be None for processor-independent job types."))
+            "This should be None for processor-independent job types."
+        ),
+    )
 
     virtualized = Bool(
-        title=_('Virtualized'), required=False, readonly=True,
+        title=_("Virtualized"),
+        required=False,
+        readonly=True,
         description=_(
             "The virtualization setting required by this build farm job. "
             "This should be None for job types that do not care whether "
-            "they run virtualized."))
+            "they run virtualized."
+        ),
+    )
 
     date_created = exported(
         Datetime(
-            title=_("Date created"), required=True, readonly=True,
+            title=_("Date created"),
+            required=True,
+            readonly=True,
             description=_(
-                "The timestamp when the build farm job was created.")),
+                "The timestamp when the build farm job was created."
+            ),
+        ),
         ("1.0", dict(exported_as="datecreated")),
         as_of="beta",
-        )
+    )
 
     date_started = exported(
         Datetime(
-            title=_("Date started"), required=False, readonly=True,
+            title=_("Date started"),
+            required=False,
+            readonly=True,
             description=_(
-                "The timestamp when the build farm job was started.")),
-        as_of="devel")
+                "The timestamp when the build farm job was started."
+            ),
+        ),
+        as_of="devel",
+    )
 
     date_finished = exported(
         Datetime(
-            title=_("Date finished"), required=False, readonly=True,
+            title=_("Date finished"),
+            required=False,
+            readonly=True,
             description=_(
-                "The timestamp when the build farm job was finished.")),
+                "The timestamp when the build farm job was finished."
+            ),
+        ),
         ("1.0", dict(exported_as="datebuilt")),
         as_of="beta",
-        )
+    )
 
     duration = exported(
         Timedelta(
-            title=_("Duration"), required=False, readonly=True,
-            description=_("Duration interval, calculated when the "
-                          "result gets collected.")),
-        as_of="devel")
+            title=_("Duration"),
+            required=False,
+            readonly=True,
+            description=_(
+                "Duration interval, calculated when the "
+                "result gets collected."
+            ),
+        ),
+        as_of="devel",
+    )
 
     date_first_dispatched = exported(
         Datetime(
-            title=_("Date finished"), required=False, readonly=True,
-            description=_("The actual build start time. Set when the build "
-                          "is dispatched the first time and not changed in "
-                          "subsequent build attempts.")))
+            title=_("Date finished"),
+            required=False,
+            readonly=True,
+            description=_(
+                "The actual build start time. Set when the build "
+                "is dispatched the first time and not changed in "
+                "subsequent build attempts."
+            ),
+        )
+    )
 
     builder = exported(
         Reference(
-            title=_("Builder"), schema=IBuilder, required=False, readonly=True,
-            description=_("The builder assigned to this job.")))
+            title=_("Builder"),
+            schema=IBuilder,
+            required=False,
+            readonly=True,
+            description=_("The builder assigned to this job."),
+        )
+    )
 
     buildqueue_record = Reference(
         # Really IBuildQueue, set in _schema_circular_imports to avoid
         # circular import.
-        schema=Interface, required=True,
-        title=_("Corresponding BuildQueue record"))
+        schema=Interface,
+        required=True,
+        title=_("Corresponding BuildQueue record"),
+    )
 
     status = exported(
         Choice(
-            title=_('Status'), required=True, vocabulary=BuildStatus,
-            description=_("The current status of the job.")),
+            title=_("Status"),
+            required=True,
+            vocabulary=BuildStatus,
+            description=_("The current status of the job."),
+        ),
         ("1.0", dict(exported_as="buildstate")),
         as_of="beta",
-        )
+    )
 
     log = Reference(
-        schema=ILibraryFileAlias, required=False,
+        schema=ILibraryFileAlias,
+        required=False,
         title=_(
-            "The LibraryFileAlias containing the entire log for this job."))
+            "The LibraryFileAlias containing the entire log for this job."
+        ),
+    )
 
     log_url = exported(
         TextLine(
-            title=_("Build Log URL"), required=False,
-            description=_("A URL for the build log. None if there is no "
-                          "log available.")),
+            title=_("Build Log URL"),
+            required=False,
+            description=_(
+                "A URL for the build log. None if there is no "
+                "log available."
+            ),
+        ),
         ("1.0", dict(exported_as="build_log_url")),
         as_of="beta",
-        )
+    )
 
     is_private = Bool(
-        title=_("is private"), required=False, readonly=True,
-        description=_("Whether the build should be treated as private."))
+        title=_("is private"),
+        required=False,
+        readonly=True,
+        description=_("Whether the build should be treated as private."),
+    )
 
     job_type = Choice(
-        title=_("Job type"), required=True, readonly=True,
+        title=_("Job type"),
+        required=True,
+        readonly=True,
         vocabulary=BuildFarmJobType,
-        description=_("The specific type of job."))
+        description=_("The specific type of job."),
+    )
 
     build_cookie = Attribute(
-        "A string which uniquely identifies the job in the build farm.")
+        "A string which uniquely identifies the job in the build farm."
+    )
 
     failure_count = Int(
-        title=_("Failure Count"), required=False, readonly=True,
+        title=_("Failure Count"),
+        required=False,
+        readonly=True,
         default=0,
-        description=_("Number of consecutive failures for this job."))
+        description=_("Number of consecutive failures for this job."),
+    )
 
     def setLog(log):
         """Set the `LibraryFileAlias` that contains the job log."""
@@ -210,9 +261,14 @@ class IBuildFarmJobView(Interface):
         :param extra: Extra labels to attach to the metric.
         """
 
-    def updateStatus(status, builder=None, worker_status=None,
-                     date_started=None, date_finished=None,
-                     force_invalid_transition=False):
+    def updateStatus(
+        status,
+        builder=None,
+        worker_status=None,
+        date_started=None,
+        date_finished=None,
+        force_invalid_transition=False,
+    ):
         """Update job metadata when the build status changes.
 
         This automatically handles setting status, date_finished, builder,
@@ -242,8 +298,7 @@ class IBuildFarmJobView(Interface):
             created in a suspended state.
         """
 
-    title = exported(TextLine(title=_("Title"), required=False),
-                     as_of="beta")
+    title = exported(TextLine(title=_("Title"), required=False), as_of="beta")
 
     was_built = Attribute("Whether or not modified by the builddfarm.")
 
@@ -251,31 +306,52 @@ class IBuildFarmJobView(Interface):
     # the TAL assumes it can read this directly.
     dependencies = exported(
         TextLine(
-            title=_('Dependencies'), required=False,
+            title=_("Dependencies"),
+            required=False,
             description=_(
-                'Debian-like dependency line that must be satisfied before '
-                'attempting to build this request.')),
-        as_of="beta")
+                "Debian-like dependency line that must be satisfied before "
+                "attempting to build this request."
+            ),
+        ),
+        as_of="beta",
+    )
 
     # Only really used by IBinaryPackageBuild, but
     # get_sources_list_for_building looks up this attribute for all build
     # types.
     external_dependencies = Attribute(
         "Newline-separated list of repositories to be used to retrieve any "
-        "external build-dependencies when performing this build.")
+        "external build-dependencies when performing this build."
+    )
 
-    can_be_rescored = exported(Bool(
-        title=_("Can be rescored"), required=True, readonly=True,
-        description=_(
-            "Whether this build record can be rescored manually.")))
+    can_be_rescored = exported(
+        Bool(
+            title=_("Can be rescored"),
+            required=True,
+            readonly=True,
+            description=_(
+                "Whether this build record can be rescored manually."
+            ),
+        )
+    )
 
-    can_be_retried = exported(Bool(
-        title=_("Can be retried"), required=True, readonly=True,
-        description=_("Whether this build record can be retried.")))
+    can_be_retried = exported(
+        Bool(
+            title=_("Can be retried"),
+            required=True,
+            readonly=True,
+            description=_("Whether this build record can be retried."),
+        )
+    )
 
-    can_be_cancelled = exported(Bool(
-        title=_("Can be cancelled"), required=True, readonly=True,
-        description=_("Whether this build record can be cancelled.")))
+    can_be_cancelled = exported(
+        Bool(
+            title=_("Can be cancelled"),
+            required=True,
+            readonly=True,
+            description=_("Whether this build record can be cancelled."),
+        )
+    )
 
     def clearBuilder():
         """Clear this build record's builder.
@@ -331,7 +407,7 @@ class IBuildFarmJobAdmin(Interface):
         """Change the build's score."""
 
 
-@exported_as_webservice_entry(as_of='beta')
+@exported_as_webservice_entry(as_of="beta")
 class IBuildFarmJob(IBuildFarmJobView, IBuildFarmJobEdit, IBuildFarmJobAdmin):
     """Operations that jobs for the build farm must implement."""
 
@@ -349,14 +425,14 @@ class ISpecificBuildFarmJobSource(Interface):
         """
 
     def getByBuildFarmJobs(build_farm_jobs):
-        """"Look up the concrete `IBuildFarmJob`s for a list of BuildFarmJobs.
+        """Look up the concrete `IBuildFarmJob`s for a list of BuildFarmJobs.
 
         :param build_farm_jobs: A list of BuildFarmJobs for which to get the
             concrete jobs.
         """
 
     def getByBuildFarmJob(build_farm_job):
-        """"Look up the concrete `IBuildFarmJob` for a BuildFarmJob.
+        """Look up the concrete `IBuildFarmJob` for a BuildFarmJob.
 
         :param build_farm_job: A BuildFarmJob for which to get the concrete
             job.
@@ -389,8 +465,9 @@ class ISpecificBuildFarmJobSource(Interface):
 class IBuildFarmJobSource(Interface):
     """A utility of BuildFarmJob used to create _things_."""
 
-    def new(job_type, status=None, processor=None, virtualized=None,
-            builder=None):
+    def new(
+        job_type, status=None, processor=None, virtualized=None, builder=None
+    ):
         """Create a new `IBuildFarmJob`.
 
         :param job_type: A `BuildFarmJobType` item.
diff --git a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
index c529327..f17fc48 100644
--- a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
@@ -4,24 +4,23 @@
 """Interface for build farm job behaviours."""
 
 __all__ = [
-    'IBuildFarmJobBehaviour',
-    ]
+    "IBuildFarmJobBehaviour",
+]
 
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
+from zope.interface import Attribute, Interface
 
 
 class IBuildFarmJobBehaviour(Interface):
 
     builder_type = Attribute(
         "The name of the builder type to use for this build, corresponding "
-        "to a launchpad-buildd build manager tag.")
+        "to a launchpad-buildd build manager tag."
+    )
 
     image_types = Attribute(
         "A list of `BuildBaseImageType`s indicating which types of base "
-        "images can be used for this build.")
+        "images can be used for this build."
+    )
 
     build = Attribute("The `IBuildFarmJob` to build.")
 
diff --git a/lib/lp/buildmaster/interfaces/buildqueue.py b/lib/lp/buildmaster/interfaces/buildqueue.py
index 151972e..2888091 100644
--- a/lib/lp/buildmaster/interfaces/buildqueue.py
+++ b/lib/lp/buildmaster/interfaces/buildqueue.py
@@ -4,23 +4,13 @@
 """Build interfaces."""
 
 __all__ = [
-    'IBuildQueue',
-    'IBuildQueueSet',
-    ]
+    "IBuildQueue",
+    "IBuildQueueSet",
+]
 
 from lazr.restful.fields import Reference
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Bool,
-    Choice,
-    Datetime,
-    Int,
-    Text,
-    Timedelta,
-    )
+from zope.interface import Attribute, Interface
+from zope.schema import Bool, Choice, Datetime, Int, Text, Timedelta
 
 from lp import _
 from lp.buildmaster.enums import BuildQueueStatus
@@ -45,32 +35,47 @@ class IBuildQueue(Interface):
 
     id = Attribute("Job identifier")
     builder = Reference(
-        IBuilder, title=_("Builder"), required=True, readonly=True,
-        description=_("The IBuilder instance processing this job"))
-    logtail = Text(
-        description=_("The current tail of the log of the job"))
+        IBuilder,
+        title=_("Builder"),
+        required=True,
+        readonly=True,
+        description=_("The IBuilder instance processing this job"),
+    )
+    logtail = Text(description=_("The current tail of the log of the job"))
     lastscore = Int(description=_("This job's score."))
-    manual = Bool(
-        description=_("Whether or not the job was manually scored."))
+    manual = Bool(description=_("Whether or not the job was manually scored."))
     processor = Reference(
-        IProcessor, title=_("Processor"), required=False, readonly=True,
-        description=_("The processor required by this build farm job."))
+        IProcessor,
+        title=_("Processor"),
+        required=False,
+        readonly=True,
+        description=_("The processor required by this build farm job."),
+    )
     virtualized = Bool(
         required=False,
         description=_(
-            "The virtualization setting required by this build farm job."))
+            "The virtualization setting required by this build farm job."
+        ),
+    )
 
     status = Choice(
-        title=_("Status"), vocabulary=BuildQueueStatus, readonly=True,
-        description=_("The status of this build queue item."))
+        title=_("Status"),
+        vocabulary=BuildQueueStatus,
+        readonly=True,
+        description=_("The status of this build queue item."),
+    )
 
     estimated_duration = Timedelta(
-        title=_("Estimated Job Duration"), required=True,
-        description=_("Estimated job duration interval."))
+        title=_("Estimated Job Duration"),
+        required=True,
+        description=_("Estimated job duration interval."),
+    )
 
     current_build_duration = Timedelta(
-        title=_("Current build duration"), required=False,
-        description=_("Time spent building so far."))
+        title=_("Current build duration"),
+        required=False,
+        description=_("Time spent building so far."),
+    )
 
     def manualScore(value):
         """Manually set a score value to a queue item and lock it."""
@@ -106,15 +111,18 @@ class IBuildQueue(Interface):
     specific_source = Attribute("Type of concrete build farm job.")
 
     specific_build = Reference(
-        IBuildFarmJob, title=_("Build farm job"),
-        description=_("Concrete build farm job object."))
+        IBuildFarmJob,
+        title=_("Build farm job"),
+        description=_("Concrete build farm job object."),
+    )
 
     build_cookie = Attribute(
-        "A string which uniquely identifies the job in the build farm.")
+        "A string which uniquely identifies the job in the build farm."
+    )
 
     date_started = Datetime(
-        title=_('Start time'),
-        description=_('Time when the job started.'))
+        title=_("Start time"), description=_("Time when the job started.")
+    )
 
     def getEstimatedJobStartTime():
         """Get the estimated start time for a pending build farm job.
diff --git a/lib/lp/buildmaster/interfaces/packagebuild.py b/lib/lp/buildmaster/interfaces/packagebuild.py
index 0ff6c1e..e7d97e6 100644
--- a/lib/lp/buildmaster/interfaces/packagebuild.py
+++ b/lib/lp/buildmaster/interfaces/packagebuild.py
@@ -4,25 +4,21 @@
 """Interface for package-specific builds."""
 
 __all__ = [
-    'IPackageBuild',
-    'IPackageBuildView',
-    ]
+    "IPackageBuild",
+    "IPackageBuildView",
+]
 
 
 from lazr.restful.declarations import exported
 from lazr.restful.fields import Reference
 from zope.interface import Attribute
-from zope.schema import (
-    Choice,
-    Object,
-    TextLine,
-    )
+from zope.schema import Choice, Object, TextLine
 
 from lp import _
 from lp.buildmaster.interfaces.buildfarmjob import (
     IBuildFarmJob,
     IBuildFarmJobView,
-    )
+)
 from lp.registry.interfaces.distribution import IDistribution
 from lp.registry.interfaces.distroseries import IDistroSeries
 from lp.registry.interfaces.pocket import PackagePublishingPocket
@@ -35,43 +31,66 @@ class IPackageBuildView(IBuildFarmJobView):
 
     archive = exported(
         Reference(
-            title=_('Archive'), schema=IArchive,
-            required=True, readonly=True,
-            description=_('The Archive context for this build.')))
+            title=_("Archive"),
+            schema=IArchive,
+            required=True,
+            readonly=True,
+            description=_("The Archive context for this build."),
+        )
+    )
 
     pocket = exported(
         Choice(
-            title=_('Pocket'), required=True,
+            title=_("Pocket"),
+            required=True,
             vocabulary=PackagePublishingPocket,
-            description=_('The build targeted pocket.')))
+            description=_("The build targeted pocket."),
+        )
+    )
 
     upload_log = Object(
-        schema=ILibraryFileAlias, required=False,
-        title=_('The LibraryFileAlias containing the upload log for a'
-                'build resulting in an upload that could not be processed '
-                'successfully. Otherwise it will be None.'))
+        schema=ILibraryFileAlias,
+        required=False,
+        title=_(
+            "The LibraryFileAlias containing the upload log for a"
+            "build resulting in an upload that could not be processed "
+            "successfully. Otherwise it will be None."
+        ),
+    )
 
     upload_log_url = exported(
         TextLine(
-            title=_("Upload Log URL"), required=False,
-            description=_("A URL for failed upload logs."
-                          "Will be None if there was no failure.")))
+            title=_("Upload Log URL"),
+            required=False,
+            description=_(
+                "A URL for failed upload logs."
+                "Will be None if there was no failure."
+            ),
+        )
+    )
 
     current_component = Attribute(
-        'Component where the source related to this build was last '
-        'published.')
+        "Component where the source related to this build was last "
+        "published."
+    )
 
     distribution = exported(
         Reference(
             schema=IDistribution,
-            title=_("Distribution"), required=True,
-            description=_("Shortcut for its distribution.")))
+            title=_("Distribution"),
+            required=True,
+            description=_("Shortcut for its distribution."),
+        )
+    )
 
     distro_series = exported(
         Reference(
             schema=IDistroSeries,
-            title=_("Distribution series"), required=True,
-            description=_("Shortcut for its distribution series.")))
+            title=_("Distribution series"),
+            required=True,
+            description=_("Shortcut for its distribution series."),
+        )
+    )
 
     def verifySuccessfulUpload() -> bool:
         """Verify that the upload of this build completed successfully."""
diff --git a/lib/lp/buildmaster/interfaces/processor.py b/lib/lp/buildmaster/interfaces/processor.py
index a77e665..67f0a58 100644
--- a/lib/lp/buildmaster/interfaces/processor.py
+++ b/lib/lp/buildmaster/interfaces/processor.py
@@ -4,10 +4,10 @@
 """Processor interfaces."""
 
 __all__ = [
-    'IProcessor',
-    'IProcessorSet',
-    'ProcessorNotFound',
-    ]
+    "IProcessor",
+    "IProcessorSet",
+    "ProcessorNotFound",
+]
 
 from lazr.restful.declarations import (
     collection_default_content,
@@ -18,16 +18,9 @@ from lazr.restful.declarations import (
     operation_for_version,
     operation_parameters,
     operation_returns_entry,
-    )
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Bool,
-    Text,
-    TextLine,
-    )
+)
+from zope.interface import Attribute, Interface
+from zope.schema import Bool, Text, TextLine
 
 from lp import _
 from lp.app.errors import NameLookupFailed
@@ -35,7 +28,8 @@ from lp.app.errors import NameLookupFailed
 
 class ProcessorNotFound(NameLookupFailed):
     """Exception raised when a processor name isn't found."""
-    _message_prefix = 'No such processor'
+
+    _message_prefix = "No such processor"
 
 
 # XXX: BradCrittenden 2011-06-20 bug=760849: The following use of 'beta'
@@ -44,57 +38,76 @@ class ProcessorNotFound(NameLookupFailed):
 # the WADL generation work it must be back-dated to the earliest version.
 # Note that individual attributes and methods can and must truthfully set
 # 'devel' as their version.
-@exported_as_webservice_entry(publish_web_link=False, as_of='beta')
+@exported_as_webservice_entry(publish_web_link=False, as_of="beta")
 class IProcessor(Interface):
     """The SQLObject Processor Interface"""
 
     id = Attribute("The Processor ID")
     name = exported(
-        TextLine(title=_("Name"),
-                 description=_("The Processor Name")),
-        as_of='devel', readonly=True)
+        TextLine(title=_("Name"), description=_("The Processor Name")),
+        as_of="devel",
+        readonly=True,
+    )
     title = exported(
-        TextLine(title=_("Title"),
-                 description=_("The Processor Title")),
-        as_of='devel', readonly=True)
+        TextLine(title=_("Title"), description=_("The Processor Title")),
+        as_of="devel",
+        readonly=True,
+    )
     description = exported(
-        Text(title=_("Description"),
-             description=_("The Processor Description")),
-        as_of='devel', readonly=True)
+        Text(
+            title=_("Description"), description=_("The Processor Description")
+        ),
+        as_of="devel",
+        readonly=True,
+    )
     restricted = exported(
         Bool(title=_("Whether this processor is restricted.")),
-        as_of='devel', readonly=True)
+        as_of="devel",
+        readonly=True,
+    )
     build_by_default = exported(
-        Bool(title=_(
-            "Whether this processor is enabled on archives by default.")),
-        as_of='devel', readonly=True)
+        Bool(
+            title=_(
+                "Whether this processor is enabled on archives by default."
+            )
+        ),
+        as_of="devel",
+        readonly=True,
+    )
     supports_virtualized = exported(
         Bool(
             title=_("Supports virtualized builds"),
             description=_(
                 "Whether the processor has virtualized builders. If not, "
                 "archives that require virtualized builds won't build on "
-                "this processor.")),
-        as_of='devel', readonly=True)
+                "this processor."
+            ),
+        ),
+        as_of="devel",
+        readonly=True,
+    )
     supports_nonvirtualized = exported(
         Bool(
             title=_("Supports non-virtualized builds"),
             description=_(
                 "Whether the processor has non-virtualized builders. If not, "
                 "all builds for this processor will build on virtualized "
-                "builders, even for non-virtualized archives.")),
-        as_of='devel', readonly=True)
+                "builders, even for non-virtualized archives."
+            ),
+        ),
+        as_of="devel",
+        readonly=True,
+    )
 
 
 @exported_as_webservice_collection(IProcessor)
 class IProcessorSet(Interface):
     """Operations related to Processor instances."""
 
-    @operation_parameters(
-        name=TextLine(required=True))
+    @operation_parameters(name=TextLine(required=True))
     @operation_returns_entry(IProcessor)
     @export_read_operation()
-    @operation_for_version('devel')
+    @operation_for_version("devel")
     def getByName(name):
         """Return the IProcessor instance with the matching name.
 
@@ -107,9 +120,15 @@ class IProcessorSet(Interface):
     def getAll():
         """Return all the `IProcessor` known to Launchpad."""
 
-    def new(name, title, description, restricted=False,
-            build_by_default=False, supports_virtualized=False,
-            supports_nonvirtualized=True):
+    def new(
+        name,
+        title,
+        description,
+        restricted=False,
+        build_by_default=False,
+        supports_virtualized=False,
+        supports_nonvirtualized=True,
+    ):
         """Create a new processor.
 
         :param name: Name of the processor.
diff --git a/lib/lp/buildmaster/interfaces/webservice.py b/lib/lp/buildmaster/interfaces/webservice.py
index e6ca23e..87d2c10 100644
--- a/lib/lp/buildmaster/interfaces/webservice.py
+++ b/lib/lp/buildmaster/interfaces/webservice.py
@@ -10,25 +10,19 @@ which tells `lazr.restful` that it should look for webservice exports here.
 """
 
 __all__ = [
-    'CannotBeRescored',
-    'CannotBeRetried',
-    'IBuilder',
-    'IBuilderSet',
-    'IBuildFarmJob',
-    'IProcessor',
-    'IProcessorSet',
-    ]
+    "CannotBeRescored",
+    "CannotBeRetried",
+    "IBuilder",
+    "IBuilderSet",
+    "IBuildFarmJob",
+    "IProcessor",
+    "IProcessorSet",
+]
 
-from lp.buildmaster.interfaces.builder import (
-    IBuilder,
-    IBuilderSet,
-    )
+from lp.buildmaster.interfaces.builder import IBuilder, IBuilderSet
 from lp.buildmaster.interfaces.buildfarmjob import (
     CannotBeRescored,
     CannotBeRetried,
     IBuildFarmJob,
-    )
-from lp.buildmaster.interfaces.processor import (
-    IProcessor,
-    IProcessorSet,
-    )
+)
+from lp.buildmaster.interfaces.processor import IProcessor, IProcessorSet
diff --git a/lib/lp/buildmaster/manager.py b/lib/lp/buildmaster/manager.py
index 894e632..13eb590 100644
--- a/lib/lp/buildmaster/manager.py
+++ b/lib/lp/buildmaster/manager.py
@@ -4,31 +4,24 @@
 """Soyuz buildd worker manager logic."""
 
 __all__ = [
-    'BuilddManager',
-    'BUILDD_MANAGER_LOG_NAME',
-    'PrefetchedBuilderFactory',
-    'WorkerScanner',
-    ]
+    "BuilddManager",
+    "BUILDD_MANAGER_LOG_NAME",
+    "PrefetchedBuilderFactory",
+    "WorkerScanner",
+]
 
-from collections import defaultdict
 import datetime
 import functools
 import logging
 import os.path
 import shutil
+from collections import defaultdict
 
 import six
-from storm.expr import (
-    Column,
-    LeftJoin,
-    Table,
-    )
 import transaction
+from storm.expr import Column, LeftJoin, Table
 from twisted.application import service
-from twisted.internet import (
-    defer,
-    reactor,
-    )
+from twisted.internet import defer, reactor
 from twisted.internet.task import LoopingCall
 from twisted.python import log
 from zope.component import getUtility
@@ -37,11 +30,8 @@ from lp.buildmaster.enums import (
     BuilderCleanStatus,
     BuildQueueStatus,
     BuildStatus,
-    )
-from lp.buildmaster.interactor import (
-    BuilderInteractor,
-    extract_vitals_from_db,
-    )
+)
+from lp.buildmaster.interactor import BuilderInteractor, extract_vitals_from_db
 from lp.buildmaster.interfaces.builder import (
     BuildDaemonError,
     BuildDaemonIsolationError,
@@ -50,7 +40,7 @@ from lp.buildmaster.interfaces.builder import (
     CannotFetchFile,
     CannotResumeHost,
     IBuilderSet,
-    )
+)
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.buildmaster.interfaces.processor import IProcessorSet
 from lp.buildmaster.model.builder import Builder
@@ -58,14 +48,10 @@ from lp.buildmaster.model.buildqueue import BuildQueue
 from lp.services.config import config
 from lp.services.database.bulk import dbify_value
 from lp.services.database.interfaces import IStore
-from lp.services.database.stormexpr import (
-    BulkUpdate,
-    Values,
-    )
+from lp.services.database.stormexpr import BulkUpdate, Values
 from lp.services.propertycache import get_property_cache
 from lp.services.statsd.interfaces.statsd_client import IStatsdClient
 
-
 BUILDD_MANAGER_LOG_NAME = "worker-scanner"
 
 
@@ -97,7 +83,8 @@ class PrefetchedBuildCandidates:
     def _getBuilderGroupKeys(vitals):
         return [
             (processor_name, vitals.virtualized)
-            for processor_name in vitals.processor_names + [None]]
+            for processor_name in vitals.processor_names + [None]
+        ]
 
     @staticmethod
     def _getSortKey(candidate):
@@ -112,24 +99,31 @@ class PrefetchedBuildCandidates:
 
     def prefetchForBuilder(self, vitals):
         """Ensure that the prefetched cache is populated for this builder."""
-        missing_builder_group_keys = (
-            set(self._getBuilderGroupKeys(vitals)) - set(self.candidates))
+        missing_builder_group_keys = set(
+            self._getBuilderGroupKeys(vitals)
+        ) - set(self.candidates)
         if not missing_builder_group_keys:
             return
         processor_set = getUtility(IProcessorSet)
         processors_by_name = {
             processor_name: (
                 processor_set.getByName(processor_name)
-                if processor_name is not None else None)
-            for processor_name, _ in missing_builder_group_keys}
+                if processor_name is not None
+                else None
+            )
+            for processor_name, _ in missing_builder_group_keys
+        }
         bq_set = getUtility(IBuildQueueSet)
         for builder_group_key in missing_builder_group_keys:
             processor_name, virtualized = builder_group_key
             self._addCandidates(
                 builder_group_key,
                 bq_set.findBuildCandidates(
-                    processors_by_name[processor_name], virtualized,
-                    len(self.builder_groups[builder_group_key])))
+                    processors_by_name[processor_name],
+                    virtualized,
+                    len(self.builder_groups[builder_group_key]),
+                ),
+            )
 
     def pop(self, vitals):
         """Return a suitable build candidate for this builder.
@@ -143,10 +137,13 @@ class PrefetchedBuildCandidates:
         # each builder group, and then re-sort the combined list in exactly
         # the same way.
         grouped_candidates = sorted(
-            ((builder_group_key, self.candidates[builder_group_key][0])
-             for builder_group_key in builder_group_keys
-             if self.candidates[builder_group_key]),
-            key=lambda key_candidate: self.sort_keys[key_candidate[1]])
+            (
+                (builder_group_key, self.candidates[builder_group_key][0])
+                for builder_group_key in builder_group_keys
+                if self.candidates[builder_group_key]
+            ),
+            key=lambda key_candidate: self.sort_keys[key_candidate[1]],
+        )
         if grouped_candidates:
             builder_group_key, candidate_id = grouped_candidates[0]
             self.candidates[builder_group_key].pop(0)
@@ -237,7 +234,8 @@ class BuilderFactory(BaseBuilderFactory):
         """See `BaseBuilderFactory`."""
         return (
             extract_vitals_from_db(b)
-            for b in getUtility(IBuilderSet).__iter__())
+            for b in getUtility(IBuilderSet).__iter__()
+        )
 
     def findBuildCandidate(self, vitals):
         """See `BaseBuilderFactory`."""
@@ -262,16 +260,23 @@ class PrefetchedBuilderFactory(BaseBuilderFactory):
     def update(self):
         """See `BaseBuilderFactory`."""
         transaction.abort()
-        builders_and_current_bqs = list(IStore(Builder).using(
-            Builder, LeftJoin(BuildQueue, BuildQueue.builder == Builder.id)
-            ).find((Builder, BuildQueue)))
+        builders_and_current_bqs = list(
+            IStore(Builder)
+            .using(
+                Builder, LeftJoin(BuildQueue, BuildQueue.builder == Builder.id)
+            )
+            .find((Builder, BuildQueue))
+        )
         getUtility(IBuilderSet).preloadProcessors(
-            [b for b, _ in builders_and_current_bqs])
+            [b for b, _ in builders_and_current_bqs]
+        )
         self.vitals_map = {
             b.name: extract_vitals_from_db(b, bq)
-            for b, bq in builders_and_current_bqs}
+            for b, bq in builders_and_current_bqs
+        }
         self.candidates = PrefetchedBuildCandidates(
-            list(self.vitals_map.values()))
+            list(self.vitals_map.values())
+        )
         transaction.abort()
         self.date_updated = datetime.datetime.utcnow()
 
@@ -354,17 +359,30 @@ def recover_failure(logger, vitals, builder, retry, exception):
     # judge_failure decides who is guilty and their sentences. We're
     # just the executioner.
     builder_action, job_action = judge_failure(
-        builder.failure_count, job.specific_build.failure_count if job else 0,
-        exception, retry=retry and not cancelling)
+        builder.failure_count,
+        job.specific_build.failure_count if job else 0,
+        exception,
+        retry=retry and not cancelling,
+    )
     if job is not None:
         logger.info(
             "Judged builder %s (%d failures) with job %s (%d failures): "
-            "%r, %r", builder.name, builder.failure_count, job.build_cookie,
-            job.specific_build.failure_count, builder_action, job_action)
+            "%r, %r",
+            builder.name,
+            builder.failure_count,
+            job.build_cookie,
+            job.specific_build.failure_count,
+            builder_action,
+            job_action,
+        )
     else:
         logger.info(
             "Judged builder %s (%d failures) with no job: %r, %r",
-            builder.name, builder.failure_count, builder_action, job_action)
+            builder.name,
+            builder.failure_count,
+            builder_action,
+            job_action,
+        )
 
     if job is not None and job_action is not None:
         if cancelling:
@@ -383,12 +401,14 @@ def recover_failure(logger, vitals, builder, retry, exception):
                 # it out of the queue to avoid further corruption.
                 logger.warning(
                     "Build is already successful! Dequeuing but leaving build "
-                    "status alone. Something is very wrong.")
+                    "status alone. Something is very wrong."
+                )
             else:
                 # Whatever it was before, we want it failed. We're an
                 # error handler, so let's not risk more errors.
                 job.specific_build.updateStatus(
-                    BuildStatus.FAILEDTOBUILD, force_invalid_transition=True)
+                    BuildStatus.FAILEDTOBUILD, force_invalid_transition=True
+                )
             job.destroySelf()
         elif job_action == True:
             # Reset the job so it will be retried elsewhere.
@@ -432,10 +452,17 @@ class WorkerScanner:
     # greater than abort_timeout in launchpad-buildd's worker BuildManager.
     CANCEL_TIMEOUT = 180
 
-    def __init__(self, builder_name, builder_factory, manager, logger,
-                 clock=None, interactor_factory=BuilderInteractor,
-                 worker_factory=BuilderInteractor.makeWorkerFromVitals,
-                 behaviour_factory=BuilderInteractor.getBuildBehaviour):
+    def __init__(
+        self,
+        builder_name,
+        builder_factory,
+        manager,
+        logger,
+        clock=None,
+        interactor_factory=BuilderInteractor,
+        worker_factory=BuilderInteractor.makeWorkerFromVitals,
+        behaviour_factory=BuilderInteractor.getBuildBehaviour,
+    ):
         self.builder_name = builder_name
         self.builder_factory = builder_factory
         self.manager = manager
@@ -475,10 +502,13 @@ class WorkerScanner:
         # as it's always up to date, but PrefetchedBuilderFactory caches
         # heavily, and we don't want to eg. forget that we dispatched a
         # build in the previous cycle.
-        if (self.date_scanned is not None
-            and self.date_scanned > self.builder_factory.date_updated):
+        if (
+            self.date_scanned is not None
+            and self.date_scanned > self.builder_factory.date_updated
+        ):
             self.logger.debug(
-                "Skipping builder %s (cache out of date)" % self.builder_name)
+                "Skipping builder %s (cache out of date)" % self.builder_name
+            )
             return defer.succeed(None)
 
         self.logger.debug("Scanning builder %s" % self.builder_name)
@@ -510,14 +540,25 @@ class WorkerScanner:
         # the error.
         error_message = failure.getErrorMessage()
         if failure.check(
-            BuildWorkerFailure, CannotBuild, CannotResumeHost,
-            BuildDaemonError, CannotFetchFile):
-            self.logger.info("Scanning %s failed with: %s" % (
-                self.builder_name, error_message))
+            BuildWorkerFailure,
+            CannotBuild,
+            CannotResumeHost,
+            BuildDaemonError,
+            CannotFetchFile,
+        ):
+            self.logger.info(
+                "Scanning %s failed with: %s"
+                % (self.builder_name, error_message)
+            )
         else:
-            self.logger.info("Scanning %s failed with: %s\n%s" % (
-                self.builder_name, failure.getErrorMessage(),
-                failure.getTraceback()))
+            self.logger.info(
+                "Scanning %s failed with: %s\n%s"
+                % (
+                    self.builder_name,
+                    failure.getErrorMessage(),
+                    failure.getTraceback(),
+                )
+            )
 
         # Decide if we need to terminate the job or reset/fail the builder.
         vitals = self.builder_factory.getVitals(self.builder_name)
@@ -527,20 +568,23 @@ class WorkerScanner:
             labels = {}
             if builder.current_build is not None:
                 builder.current_build.gotFailure()
-                labels.update({
-                    'build': True,
-                    'arch': builder.current_build.processor.name,
-                    })
+                labels.update(
+                    {
+                        "build": True,
+                        "arch": builder.current_build.processor.name,
+                    }
+                )
             else:
-                labels['build'] = False
-            self.statsd_client.incr('builders.judged_failed', labels=labels)
+                labels["build"] = False
+            self.statsd_client.incr("builders.judged_failed", labels=labels)
             recover_failure(self.logger, vitals, builder, retry, failure.value)
             transaction.commit()
         except Exception:
             # Catastrophic code failure! Not much we can do.
             self.logger.error(
                 "Miserable failure when trying to handle failure:\n",
-                exc_info=True)
+                exc_info=True,
+            )
             transaction.abort()
 
     @defer.inlineCallbacks
@@ -557,8 +601,10 @@ class WorkerScanner:
         elif self.date_cancel is None:
             self.logger.info(
                 "Cancelling BuildQueue %d (%s) on %s",
-                vitals.build_queue.id, self.getExpectedCookie(vitals),
-                vitals.name)
+                vitals.build_queue.id,
+                self.getExpectedCookie(vitals),
+                vitals.name,
+            )
             yield worker.abort()
             self.date_cancel = self._clock.seconds() + self.CANCEL_TIMEOUT
         else:
@@ -568,14 +614,20 @@ class WorkerScanner:
             if self._clock.seconds() < self.date_cancel:
                 self.logger.info(
                     "Waiting for BuildQueue %d (%s) on %s to cancel",
-                    vitals.build_queue.id, self.getExpectedCookie(vitals),
-                    vitals.name)
+                    vitals.build_queue.id,
+                    self.getExpectedCookie(vitals),
+                    vitals.name,
+                )
             else:
                 raise BuildWorkerFailure(
                     "Timeout waiting for BuildQueue %d (%s) on %s to "
-                    "cancel" % (
-                    vitals.build_queue.id, self.getExpectedCookie(vitals),
-                    vitals.name))
+                    "cancel"
+                    % (
+                        vitals.build_queue.id,
+                        self.getExpectedCookie(vitals),
+                        vitals.name,
+                    )
+                )
 
     def getExpectedCookie(self, vitals):
         """Return the build cookie expected to be held by the worker.
@@ -616,29 +668,34 @@ class WorkerScanner:
                 # This is probably a grave bug with security implications,
                 # as a worker that has a job must be cleaned afterwards.
                 raise BuildDaemonIsolationError(
-                    "Non-dirty builder allegedly building.")
+                    "Non-dirty builder allegedly building."
+                )
 
             lost_reason = None
             if not vitals.builderok:
-                lost_reason = '%s is disabled' % vitals.name
+                lost_reason = "%s is disabled" % vitals.name
             else:
                 worker_status = yield worker.status()
                 # Ensure that the worker has the job that we think it
                 # should.
-                worker_cookie = worker_status.get('build_id')
+                worker_cookie = worker_status.get("build_id")
                 expected_cookie = self.getExpectedCookie(vitals)
                 if worker_cookie != expected_cookie:
-                    lost_reason = (
-                        '%s is lost (expected %r, got %r)' % (
-                            vitals.name, expected_cookie, worker_cookie))
+                    lost_reason = "%s is lost (expected %r, got %r)" % (
+                        vitals.name,
+                        expected_cookie,
+                        worker_cookie,
+                    )
 
             if lost_reason is not None:
                 # The worker is either confused or disabled, so reset and
                 # requeue the job. The next scan cycle will clean up the
                 # worker if appropriate.
                 self.logger.warning(
-                    "%s. Resetting job %s.", lost_reason,
-                    vitals.build_queue.build_cookie)
+                    "%s. Resetting job %s.",
+                    lost_reason,
+                    vitals.build_queue.build_cookie,
+                )
                 vitals.build_queue.reset()
                 transaction.commit()
                 return
@@ -650,8 +707,13 @@ class WorkerScanner:
             # ready.  Yes, "updateBuild" is a bad name.
             assert worker_status is not None
             yield interactor.updateBuild(
-                vitals, worker, worker_status, self.builder_factory,
-                self.behaviour_factory, self.manager)
+                vitals,
+                worker,
+                worker_status,
+                self.builder_factory,
+                self.behaviour_factory,
+                self.manager,
+            )
         else:
             if not vitals.builderok:
                 return
@@ -659,23 +721,26 @@ class WorkerScanner:
             # it's dirty, clean.
             if vitals.clean_status == BuilderCleanStatus.CLEAN:
                 worker_status = yield worker.status()
-                if worker_status.get('builder_status') != 'BuilderStatus.IDLE':
+                if worker_status.get("builder_status") != "BuilderStatus.IDLE":
                     raise BuildDaemonIsolationError(
-                        'Allegedly clean worker not idle (%r instead)'
-                        % worker_status.get('builder_status'))
+                        "Allegedly clean worker not idle (%r instead)"
+                        % worker_status.get("builder_status")
+                    )
                 self.updateVersion(vitals, worker_status)
                 if vitals.manual:
                     # If the builder is in manual mode, don't dispatch
                     # anything.
                     self.logger.debug(
-                        '%s is in manual mode, not dispatching.', vitals.name)
+                        "%s is in manual mode, not dispatching.", vitals.name
+                    )
                     return
                 # Try to find and dispatch a job. If it fails, don't
                 # attempt to just retry the scan; we need to reset
                 # the job so the dispatch will be reattempted.
                 builder = self.builder_factory[self.builder_name]
                 d = interactor.findAndStartJob(
-                    vitals, builder, worker, self.builder_factory)
+                    vitals, builder, worker, self.builder_factory
+                )
                 d.addErrback(functools.partial(self._scanFailed, False))
                 yield d
                 if builder.currentjob is not None:
@@ -689,11 +754,12 @@ class WorkerScanner:
                 # straight back to CLEAN, or we might have to spin
                 # through another few cycles.
                 done = yield interactor.cleanWorker(
-                    vitals, worker, self.builder_factory)
+                    vitals, worker, self.builder_factory
+                )
                 if done:
                     builder = self.builder_factory[self.builder_name]
                     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
-                    self.logger.debug('%s has been cleaned.', vitals.name)
+                    self.logger.debug("%s has been cleaned.", vitals.name)
                     transaction.commit()
 
 
@@ -731,7 +797,7 @@ class BuilddManager(service.Service):
         # Redirect the output to the twisted log module.
         channel = logging.StreamHandler(log.StdioOnnaStick())
         channel.setLevel(level)
-        channel.setFormatter(logging.Formatter('%(message)s'))
+        channel.setFormatter(logging.Formatter("%(message)s"))
 
         logger.addHandler(channel)
         logger.setLevel(level)
@@ -740,7 +806,8 @@ class BuilddManager(service.Service):
     def checkForNewBuilders(self):
         """Add and return any new builders."""
         new_builders = {
-            vitals.name for vitals in self.builder_factory.iterVitals()}
+            vitals.name for vitals in self.builder_factory.iterVitals()
+        }
         old_builders = set(self.current_builders)
         extra_builders = new_builders.difference(old_builders)
         self.current_builders.extend(extra_builders)
@@ -755,8 +822,8 @@ class BuilddManager(service.Service):
             self.addScanForBuilders(new_builders)
         except Exception:
             self.logger.error(
-                "Failure while updating builders:\n",
-                exc_info=True)
+                "Failure while updating builders:\n", exc_info=True
+            )
             transaction.abort()
         self.logger.debug("Builder refresh complete.")
 
@@ -774,19 +841,28 @@ class BuilddManager(service.Service):
                 new_logtails_expr = Values(
                     new_logtails.name,
                     [("buildqueue", "integer"), ("logtail", "text")],
-                    [[dbify_value(BuildQueue.id, buildqueue_id),
-                      dbify_value(BuildQueue.logtail, logtail)]
-                     for buildqueue_id, logtail in pending_logtails.items()])
+                    [
+                        [
+                            dbify_value(BuildQueue.id, buildqueue_id),
+                            dbify_value(BuildQueue.logtail, logtail),
+                        ]
+                        for buildqueue_id, logtail in pending_logtails.items()
+                    ],
+                )
                 store = IStore(BuildQueue)
-                store.execute(BulkUpdate(
-                    {BuildQueue.logtail: Column("logtail", new_logtails)},
-                    table=BuildQueue, values=new_logtails_expr,
-                    where=(
-                        BuildQueue.id == Column("buildqueue", new_logtails))))
+                store.execute(
+                    BulkUpdate(
+                        {BuildQueue.logtail: Column("logtail", new_logtails)},
+                        table=BuildQueue,
+                        values=new_logtails_expr,
+                        where=(
+                            BuildQueue.id == Column("buildqueue", new_logtails)
+                        ),
+                    )
+                )
                 transaction.commit()
         except Exception:
-            self.logger.exception(
-                "Failure while flushing log tail updates:\n")
+            self.logger.exception("Failure while flushing log tail updates:\n")
             transaction.abort()
         self.logger.debug("Flushing log tail updates complete.")
 
@@ -812,11 +888,14 @@ class BuilddManager(service.Service):
             pass
         # Add and start WorkerScanners for each current builder, and any
         # added in the future.
-        self.scan_builders_loop, self.scan_builders_deferred = (
-            self._startLoop(self.SCAN_BUILDERS_INTERVAL, self.scanBuilders))
+        self.scan_builders_loop, self.scan_builders_deferred = self._startLoop(
+            self.SCAN_BUILDERS_INTERVAL, self.scanBuilders
+        )
         # Schedule bulk flushes for build queue logtail updates.
-        self.flush_logtails_loop, self.flush_logtails_deferred = (
-            self._startLoop(self.FLUSH_LOGTAILS_INTERVAL, self.flushLogTails))
+        (
+            self.flush_logtails_loop,
+            self.flush_logtails_deferred,
+        ) = self._startLoop(self.FLUSH_LOGTAILS_INTERVAL, self.flushLogTails)
 
     def stopService(self):
         """Callback for when we need to shut down."""
@@ -841,7 +920,8 @@ class BuilddManager(service.Service):
         """Set up scanner objects for the builders specified."""
         for builder in builders:
             worker_scanner = WorkerScanner(
-                builder, self.builder_factory, self, self.logger)
+                builder, self.builder_factory, self, self.logger
+            )
             self.workers.append(worker_scanner)
             worker_scanner.startCycle()
 
diff --git a/lib/lp/buildmaster/model/builder.py b/lib/lp/buildmaster/model/builder.py
index 1fd92ec..8d8937f 100644
--- a/lib/lp/buildmaster/model/builder.py
+++ b/lib/lp/buildmaster/model/builder.py
@@ -2,62 +2,39 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'Builder',
-    'BuilderProcessor',
-    'BuilderSet',
-    ]
+    "Builder",
+    "BuilderProcessor",
+    "BuilderSet",
+]
 
 import pytz
-from storm.expr import (
-    Coalesce,
-    Count,
-    Sum,
-    )
-from storm.properties import (
-    Bool,
-    DateTime,
-    Int,
-    Unicode,
-    )
+from storm.expr import Coalesce, Count, Sum
+from storm.properties import Bool, DateTime, Int, Unicode
 from storm.references import Reference
 from storm.store import Store
 from zope.component import getUtility
 from zope.interface import implementer
 
-from lp.app.errors import (
-    IncompatibleArguments,
-    NotFoundError,
-    )
+from lp.app.errors import IncompatibleArguments, NotFoundError
 from lp.buildmaster.enums import (
     BuilderCleanStatus,
     BuilderResetProtocol,
     BuildQueueStatus,
-    )
-from lp.buildmaster.interfaces.builder import (
-    IBuilder,
-    IBuilderSet,
-    )
+)
+from lp.buildmaster.interfaces.builder import IBuilder, IBuilderSet
 from lp.buildmaster.interfaces.buildfarmjob import IBuildFarmJobSet
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.buildmaster.model.buildqueue import BuildQueue
 from lp.buildmaster.model.processor import Processor
 from lp.registry.interfaces.person import validate_public_person
-from lp.services.database.bulk import (
-    load,
-    load_related,
-    )
+from lp.services.database.bulk import load, load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import (
-    IStandbyStore,
-    IStore,
-    )
+from lp.services.database.interfaces import IStandbyStore, IStore
 from lp.services.database.stormbase import StormBase
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
+from lp.services.propertycache import cachedproperty, get_property_cache
+
 # XXX Michael Nelson 2010-01-13 bug=491330
 # These dependencies on soyuz will be removed when getBuildRecords()
 # is moved.
@@ -67,32 +44,45 @@ from lp.soyuz.interfaces.buildrecords import IHasBuildRecords
 
 @implementer(IBuilder, IHasBuildRecords)
 class Builder(StormBase):
-    __storm_table__ = 'Builder'
-    __storm_order__ = ['id']
+    __storm_table__ = "Builder"
+    __storm_order__ = ["id"]
 
     id = Int(primary=True)
-    url = Unicode(name='url', allow_none=False)
-    name = Unicode(name='name', allow_none=False)
-    title = Unicode(name='title', allow_none=False)
+    url = Unicode(name="url", allow_none=False)
+    name = Unicode(name="name", allow_none=False)
+    title = Unicode(name="title", allow_none=False)
     owner_id = Int(
-        name='owner', validator=validate_public_person, allow_none=False)
-    owner = Reference(owner_id, 'Person.id')
-    _builderok = Bool(name='builderok', allow_none=False)
-    failnotes = Unicode(name='failnotes')
-    virtualized = Bool(name='virtualized', default=True, allow_none=False)
-    manual = Bool(name='manual', default=False)
-    vm_host = Unicode(name='vm_host')
-    active = Bool(name='active', allow_none=False, default=True)
-    failure_count = Int(name='failure_count', default=0, allow_none=False)
-    version = Unicode(name='version')
+        name="owner", validator=validate_public_person, allow_none=False
+    )
+    owner = Reference(owner_id, "Person.id")
+    _builderok = Bool(name="builderok", allow_none=False)
+    failnotes = Unicode(name="failnotes")
+    virtualized = Bool(name="virtualized", default=True, allow_none=False)
+    manual = Bool(name="manual", default=False)
+    vm_host = Unicode(name="vm_host")
+    active = Bool(name="active", allow_none=False, default=True)
+    failure_count = Int(name="failure_count", default=0, allow_none=False)
+    version = Unicode(name="version")
     clean_status = DBEnum(
-        enum=BuilderCleanStatus, default=BuilderCleanStatus.DIRTY)
+        enum=BuilderCleanStatus, default=BuilderCleanStatus.DIRTY
+    )
     vm_reset_protocol = DBEnum(enum=BuilderResetProtocol)
     date_clean_status_changed = DateTime(tzinfo=pytz.UTC)
 
-    def __init__(self, processors, url, name, title, owner, active=True,
-                 virtualized=True, vm_host=None, vm_reset_protocol=None,
-                 builderok=True, manual=False):
+    def __init__(
+        self,
+        processors,
+        url,
+        name,
+        title,
+        owner,
+        active=True,
+        virtualized=True,
+        vm_host=None,
+        vm_reset_protocol=None,
+        builderok=True,
+        manual=False,
+    ):
         super().__init__()
         # The processors cache starts out empty so that the processors
         # property setter doesn't issue an additional query.
@@ -137,10 +127,15 @@ class Builder(StormBase):
         """See `IBuilder`."""
         # This _cache method is a quick hack to get a settable
         # cachedproperty, mostly for the webservice's benefit.
-        return list(Store.of(self).find(
-            Processor,
-            BuilderProcessor.processor_id == Processor.id,
-            BuilderProcessor.builder == self).order_by(Processor.name))
+        return list(
+            Store.of(self)
+            .find(
+                Processor,
+                BuilderProcessor.processor_id == Processor.id,
+                BuilderProcessor.builder == self,
+            )
+            .order_by(Processor.name)
+        )
 
     @property
     def processors(self):
@@ -151,7 +146,7 @@ class Builder(StormBase):
         existing = set(self.processors)
         wanted = set(processors)
         # Enable the wanted but missing.
-        for processor in (wanted - existing):
+        for processor in wanted - existing:
             bp = BuilderProcessor()
             bp.builder = self
             bp.processor = processor
@@ -161,7 +156,9 @@ class Builder(StormBase):
             BuilderProcessor,
             BuilderProcessor.builder == self,
             BuilderProcessor.processor_id.is_in(
-                processor.id for processor in existing - wanted)).remove()
+                processor.id for processor in existing - wanted
+            ),
+        ).remove()
         del get_property_cache(self)._processors_cache
 
     @property
@@ -203,28 +200,38 @@ class Builder(StormBase):
         self.builderok = False
         self.failnotes = reason
 
-    def getBuildRecords(self, build_state=None, name=None, pocket=None,
-                        arch_tag=None, user=None, binary_only=True):
+    def getBuildRecords(
+        self,
+        build_state=None,
+        name=None,
+        pocket=None,
+        arch_tag=None,
+        user=None,
+        binary_only=True,
+    ):
         """See IHasBuildRecords."""
         if binary_only:
             return getUtility(IBinaryPackageBuildSet).getBuildsForBuilder(
-                self.id, build_state, name, pocket, arch_tag, user)
+                self.id, build_state, name, pocket, arch_tag, user
+            )
         else:
             if arch_tag is not None or name is not None or pocket is not None:
                 raise IncompatibleArguments(
                     "The 'arch_tag', 'name', and 'pocket' parameters can be "
-                    "used only with binary_only=True.")
+                    "used only with binary_only=True."
+                )
             return getUtility(IBuildFarmJobSet).getBuildsForBuilder(
-                self, status=build_state, user=user)
+                self, status=build_state, user=user
+            )
 
 
 class BuilderProcessor(StormBase):
-    __storm_table__ = 'BuilderProcessor'
-    __storm_primary__ = ('builder_id', 'processor_id')
+    __storm_table__ = "BuilderProcessor"
+    __storm_primary__ = ("builder_id", "processor_id")
 
-    builder_id = Int(name='builder', allow_none=False)
+    builder_id = Int(name="builder", allow_none=False)
     builder = Reference(builder_id, Builder.id)
-    processor_id = Int(name='processor', allow_none=False)
+    processor_id = Int(name="processor", allow_none=False)
     processor = Reference(processor_id, Processor.id)
 
 
@@ -245,15 +252,33 @@ class BuilderSet:
     def __getitem__(self, name):
         return self.getByName(name)
 
-    def new(self, processors, url, name, title, owner, active=True,
-            virtualized=False, vm_host=None, vm_reset_protocol=None,
-            manual=True):
+    def new(
+        self,
+        processors,
+        url,
+        name,
+        title,
+        owner,
+        active=True,
+        virtualized=False,
+        vm_host=None,
+        vm_reset_protocol=None,
+        manual=True,
+    ):
         """See IBuilderSet."""
         return Builder(
-            processors=processors, url=url, name=name, title=title,
-            owner=owner, active=active, virtualized=virtualized,
-            vm_host=vm_host, vm_reset_protocol=vm_reset_protocol,
-            builderok=True, manual=manual)
+            processors=processors,
+            url=url,
+            name=name,
+            title=title,
+            owner=owner,
+            active=active,
+            virtualized=virtualized,
+            vm_host=vm_host,
+            vm_reset_protocol=vm_reset_protocol,
+            builderok=True,
+            manual=manual,
+        )
 
     def get(self, builder_id):
         """See IBuilderSet."""
@@ -272,11 +297,15 @@ class BuilderSet:
         # Builders' processor caches.
         store = IStore(BuilderProcessor)
         builders_by_id = {b.id: b for b in builders}
-        pairs = list(store.using(BuilderProcessor, Processor).find(
-            (BuilderProcessor.builder_id, BuilderProcessor.processor_id),
-            BuilderProcessor.processor_id == Processor.id,
-            BuilderProcessor.builder_id.is_in(builders_by_id)).order_by(
-                BuilderProcessor.builder_id, Processor.name))
+        pairs = list(
+            store.using(BuilderProcessor, Processor)
+            .find(
+                (BuilderProcessor.builder_id, BuilderProcessor.processor_id),
+                BuilderProcessor.processor_id == Processor.id,
+                BuilderProcessor.builder_id.is_in(builders_by_id),
+            )
+            .order_by(BuilderProcessor.builder_id, Processor.name)
+        )
         load(Processor, [pid for bid, pid in pairs])
         for builder in builders:
             get_property_cache(builder)._processors_cache = []
@@ -287,36 +316,45 @@ class BuilderSet:
     def getBuilders(self):
         """See IBuilderSet."""
         from lp.registry.model.person import Person
-        rs = IStore(Builder).find(
-            Builder, Builder.active == True).order_by(
-                Builder.virtualized, Builder.name)
+
+        rs = (
+            IStore(Builder)
+            .find(Builder, Builder.active == True)
+            .order_by(Builder.virtualized, Builder.name)
+        )
 
         def preload(rows):
             self.preloadProcessors(rows)
-            load_related(Person, rows, ['owner_id'])
+            load_related(Person, rows, ["owner_id"])
             bqs = getUtility(IBuildQueueSet).preloadForBuilders(rows)
             BuildQueue.preloadSpecificBuild(bqs)
+
         return DecoratedResultSet(rs, pre_iter_hook=preload)
 
     def getBuildQueueSizes(self):
         """See `IBuilderSet`."""
-        results = IStandbyStore(BuildQueue).find((
-            Count(),
-            Sum(BuildQueue.estimated_duration),
-            Processor,
-            Coalesce(BuildQueue.virtualized, True)),
-            Processor.id == BuildQueue.processor_id,
-            BuildQueue.status == BuildQueueStatus.WAITING).group_by(
-                Processor, Coalesce(BuildQueue.virtualized, True))
-
-        result_dict = {'virt': {}, 'nonvirt': {}}
+        results = (
+            IStandbyStore(BuildQueue)
+            .find(
+                (
+                    Count(),
+                    Sum(BuildQueue.estimated_duration),
+                    Processor,
+                    Coalesce(BuildQueue.virtualized, True),
+                ),
+                Processor.id == BuildQueue.processor_id,
+                BuildQueue.status == BuildQueueStatus.WAITING,
+            )
+            .group_by(Processor, Coalesce(BuildQueue.virtualized, True))
+        )
+
+        result_dict = {"virt": {}, "nonvirt": {}}
         for size, duration, processor, virtualized in results:
             if virtualized is False:
-                virt_str = 'nonvirt'
+                virt_str = "nonvirt"
             else:
-                virt_str = 'virt'
-            result_dict[virt_str][processor.name] = (
-                size, duration)
+                virt_str = "virt"
+            result_dict[virt_str][processor.name] = (size, duration)
 
         return result_dict
 
@@ -327,4 +365,5 @@ class BuilderSet:
             Builder._builderok == True,
             Builder.virtualized == virtualized,
             BuilderProcessor.builder_id == Builder.id,
-            BuilderProcessor.processor == processor)
+            BuilderProcessor.processor == processor,
+        )
diff --git a/lib/lp/buildmaster/model/buildfarmjob.py b/lib/lp/buildmaster/model/buildfarmjob.py
index 9d86194..160d635 100644
--- a/lib/lp/buildmaster/model/buildfarmjob.py
+++ b/lib/lp/buildmaster/model/buildfarmjob.py
@@ -3,36 +3,21 @@
 # LICENSE).
 
 __all__ = [
-    'BuildFarmJob',
-    'BuildFarmJobMixin',
-    'SpecificBuildFarmJobSourceMixin',
-    ]
+    "BuildFarmJob",
+    "BuildFarmJobMixin",
+    "SpecificBuildFarmJobSourceMixin",
+]
 
 import datetime
 
 import pytz
-from storm.expr import (
-    Desc,
-    LeftJoin,
-    Or,
-    )
-from storm.locals import (
-    DateTime,
-    Int,
-    Reference,
-    Storm,
-    )
+from storm.expr import Desc, LeftJoin, Or
+from storm.locals import DateTime, Int, Reference, Storm
 from storm.store import Store
 from zope.component import getUtility
-from zope.interface import (
-    implementer,
-    provider,
-    )
+from zope.interface import implementer, provider
 
-from lp.buildmaster.enums import (
-    BuildFarmJobType,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildFarmJobType, BuildStatus
 from lp.buildmaster.interfaces.buildfarmjob import (
     CannotBeRescored,
     CannotBeRetried,
@@ -40,20 +25,13 @@ from lp.buildmaster.interfaces.buildfarmjob import (
     IBuildFarmJobDB,
     IBuildFarmJobSet,
     IBuildFarmJobSource,
-    )
+)
 from lp.buildmaster.model.buildqueue import BuildQueue
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import (
-    IMasterStore,
-    IStore,
-    )
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
+from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.services.statsd.interfaces.statsd_client import IStatsdClient
 
-
 VALID_STATUS_TRANSITIONS = {
     BuildStatus.NEEDSBUILD: tuple(BuildStatus.items),
     BuildStatus.FULLYBUILT: (),
@@ -64,59 +42,79 @@ VALID_STATUS_TRANSITIONS = {
     BuildStatus.BUILDING: tuple(BuildStatus.items),
     BuildStatus.FAILEDTOUPLOAD: (BuildStatus.NEEDSBUILD,),
     BuildStatus.UPLOADING: (
-        BuildStatus.FULLYBUILT, BuildStatus.FAILEDTOUPLOAD,
-        BuildStatus.SUPERSEDED),
+        BuildStatus.FULLYBUILT,
+        BuildStatus.FAILEDTOUPLOAD,
+        BuildStatus.SUPERSEDED,
+    ),
     BuildStatus.CANCELLING: (BuildStatus.CANCELLED,),
     BuildStatus.CANCELLED: (BuildStatus.NEEDSBUILD,),
-    }
+}
 
 
 @implementer(IBuildFarmJob, IBuildFarmJobDB)
 @provider(IBuildFarmJobSource)
 class BuildFarmJob(Storm):
     """A base implementation for `IBuildFarmJob` classes."""
-    __storm_table__ = 'BuildFarmJob'
+
+    __storm_table__ = "BuildFarmJob"
 
     id = Int(primary=True)
 
     date_created = DateTime(
-        name='date_created', allow_none=False, tzinfo=pytz.UTC)
+        name="date_created", allow_none=False, tzinfo=pytz.UTC
+    )
 
     date_finished = DateTime(
-        name='date_finished', allow_none=True, tzinfo=pytz.UTC)
+        name="date_finished", allow_none=True, tzinfo=pytz.UTC
+    )
 
-    builder_id = Int(name='builder', allow_none=True)
-    builder = Reference(builder_id, 'Builder.id')
+    builder_id = Int(name="builder", allow_none=True)
+    builder = Reference(builder_id, "Builder.id")
 
-    status = DBEnum(name='status', allow_none=False, enum=BuildStatus)
+    status = DBEnum(name="status", allow_none=False, enum=BuildStatus)
 
-    job_type = DBEnum(
-        name='job_type', allow_none=False, enum=BuildFarmJobType)
+    job_type = DBEnum(name="job_type", allow_none=False, enum=BuildFarmJobType)
 
-    archive_id = Int(name='archive')
-    archive = Reference(archive_id, 'Archive.id')
+    archive_id = Int(name="archive")
+    archive = Reference(archive_id, "Archive.id")
 
-    def __init__(self, job_type, status=BuildStatus.NEEDSBUILD,
-                 date_created=None, builder=None, archive=None):
+    def __init__(
+        self,
+        job_type,
+        status=BuildStatus.NEEDSBUILD,
+        date_created=None,
+        builder=None,
+        archive=None,
+    ):
         super().__init__()
         (self.job_type, self.status, self.builder, self.archive) = (
-             job_type, status, builder, archive)
+            job_type,
+            status,
+            builder,
+            archive,
+        )
         if date_created is not None:
             self.date_created = date_created
 
     @classmethod
-    def new(cls, job_type, status=BuildStatus.NEEDSBUILD, date_created=None,
-            builder=None, archive=None):
+    def new(
+        cls,
+        job_type,
+        status=BuildStatus.NEEDSBUILD,
+        date_created=None,
+        builder=None,
+        archive=None,
+    ):
         """See `IBuildFarmJobSource`."""
         build_farm_job = BuildFarmJob(
-            job_type, status, date_created, builder, archive)
+            job_type, status, date_created, builder, archive
+        )
         store = IMasterStore(BuildFarmJob)
         store.add(build_farm_job)
         return build_farm_job
 
 
 class BuildFarmJobMixin:
-
     @property
     def dependencies(self):
         return None
@@ -140,8 +138,11 @@ class BuildFarmJobMixin:
     @cachedproperty
     def buildqueue_record(self):
         """See `IBuildFarmJob`."""
-        return Store.of(self).find(
-            BuildQueue, _build_farm_job_id=self.build_farm_job_id).one()
+        return (
+            Store.of(self)
+            .find(BuildQueue, _build_farm_job_id=self.build_farm_job_id)
+            .one()
+        )
 
     @property
     def is_private(self):
@@ -164,17 +165,19 @@ class BuildFarmJobMixin:
     @property
     def was_built(self):
         """See `IBuild`"""
-        return self.status not in [BuildStatus.NEEDSBUILD,
-                                   BuildStatus.BUILDING,
-                                   BuildStatus.CANCELLED,
-                                   BuildStatus.CANCELLING,
-                                   BuildStatus.UPLOADING,
-                                   BuildStatus.SUPERSEDED]
+        return self.status not in [
+            BuildStatus.NEEDSBUILD,
+            BuildStatus.BUILDING,
+            BuildStatus.CANCELLED,
+            BuildStatus.CANCELLING,
+            BuildStatus.UPLOADING,
+            BuildStatus.SUPERSEDED,
+        ]
 
     @property
     def build_cookie(self):
         """See `IBuildFarmJob`."""
-        return '%s-%s' % (self.job_type.name, self.id)
+        return "%s-%s" % (self.job_type.name, self.id)
 
     def setLog(self, log):
         """See `IBuildFarmJob`."""
@@ -186,24 +189,35 @@ class BuildFarmJobMixin:
         if self.processor is not None:
             labels["arch"] = self.processor.name
         if self.builder is not None:
-            labels.update({
-                "builder_name": self.builder.name,
-                "virtualized": str(self.builder.virtualized),
-                })
+            labels.update(
+                {
+                    "builder_name": self.builder.name,
+                    "virtualized": str(self.builder.virtualized),
+                }
+            )
         labels.update(extra)
         getUtility(IStatsdClient).incr("build.%s" % metric_name, labels=labels)
 
-    def updateStatus(self, status, builder=None, worker_status=None,
-                     date_started=None, date_finished=None,
-                     force_invalid_transition=False):
+    def updateStatus(
+        self,
+        status,
+        builder=None,
+        worker_status=None,
+        date_started=None,
+        date_finished=None,
+        force_invalid_transition=False,
+    ):
         """See `IBuildFarmJob`."""
-        if (not force_invalid_transition
-                and status != self.build_farm_job.status
-                and status not in VALID_STATUS_TRANSITIONS[
-                    self.build_farm_job.status]):
+        if (
+            not force_invalid_transition
+            and status != self.build_farm_job.status
+            and status
+            not in VALID_STATUS_TRANSITIONS[self.build_farm_job.status]
+        ):
             raise AssertionError(
                 "Can't change build status from %s to %s."
-                % (self.build_farm_job.status.name, status.name))
+                % (self.build_farm_job.status.name, status.name)
+            )
 
         self.build_farm_job.status = self.status = status
 
@@ -225,15 +239,22 @@ class BuildFarmJobMixin:
 
         # If we're in a final build state (or UPLOADING, which sort of
         # is), set date_finished if date_started is.
-        if (self.date_started is not None and self.date_finished is None
-            and status not in (
-                BuildStatus.NEEDSBUILD, BuildStatus.BUILDING,
-                BuildStatus.CANCELLING)):
+        if (
+            self.date_started is not None
+            and self.date_finished is None
+            and status
+            not in (
+                BuildStatus.NEEDSBUILD,
+                BuildStatus.BUILDING,
+                BuildStatus.CANCELLING,
+            )
+        ):
             # XXX cprov 20060615 bug=120584: Currently buildduration includes
             # the scanner latency, it should really be asking the worker for
             # the duration spent building locally.
-            self.build_farm_job.date_finished = self.date_finished = (
-                date_finished or datetime.datetime.now(pytz.UTC))
+            self.build_farm_job.date_finished = (
+                self.date_finished
+            ) = date_finished or datetime.datetime.now(pytz.UTC)
             self.emitMetric("finished", status=status.name)
 
     def gotFailure(self):
@@ -254,7 +275,9 @@ class BuildFarmJobMixin:
         queue_entry = BuildQueue(
             estimated_duration=duration_estimate,
             build_farm_job=self.build_farm_job,
-            processor=self.processor, virtualized=self.virtualized)
+            processor=self.processor,
+            virtualized=self.virtualized,
+        )
 
         # This build queue job is to be created in a suspended state.
         if suspended:
@@ -279,7 +302,7 @@ class BuildFarmJobMixin:
             BuildStatus.FAILEDTOUPLOAD,
             BuildStatus.CANCELLED,
             BuildStatus.SUPERSEDED,
-            ]
+        ]
 
         # If the build is currently in any of the failed states,
         # it may be retried.
@@ -289,8 +312,9 @@ class BuildFarmJobMixin:
     def can_be_rescored(self):
         """See `IBuildFarmJob`."""
         return (
-            self.buildqueue_record is not None and
-            self.status is BuildStatus.NEEDSBUILD)
+            self.buildqueue_record is not None
+            and self.status is BuildStatus.NEEDSBUILD
+        )
 
     @property
     def can_be_cancelled(self):
@@ -301,7 +325,7 @@ class BuildFarmJobMixin:
         cancellable_statuses = [
             BuildStatus.BUILDING,
             BuildStatus.NEEDSBUILD,
-            ]
+        ]
         return self.status in cancellable_statuses
 
     def clearBuilder(self):
@@ -345,11 +369,10 @@ class BuildFarmJobMixin:
 
 
 class SpecificBuildFarmJobSourceMixin:
-
     @staticmethod
     def addCandidateSelectionCriteria():
         """See `ISpecificBuildFarmJobSource`."""
-        return ('')
+        return ""
 
     @staticmethod
     def postprocessCandidate(job, logger):
@@ -359,18 +382,15 @@ class SpecificBuildFarmJobSourceMixin:
 
 @implementer(IBuildFarmJobSet)
 class BuildFarmJobSet:
-
     def getBuildsForBuilder(self, builder_id, status=None, user=None):
         """See `IBuildFarmJobSet`."""
         # Imported here to avoid circular imports.
-        from lp.soyuz.model.archive import (
-            Archive,
-            get_archive_privacy_filter,
-            )
+        from lp.soyuz.model.archive import Archive, get_archive_privacy_filter
 
         clauses = [
             BuildFarmJob.builder == builder_id,
-            Or(Archive.id == None, get_archive_privacy_filter(user))]
+            Or(Archive.id == None, get_archive_privacy_filter(user)),
+        ]
         if status is not None:
             clauses.append(BuildFarmJob.status == status)
 
@@ -381,11 +401,14 @@ class BuildFarmJobSet:
         origin = [
             BuildFarmJob,
             LeftJoin(Archive, Archive.id == BuildFarmJob.archive_id),
-            ]
+        ]
 
-        return IStore(BuildFarmJob).using(*origin).find(
-            BuildFarmJob, *clauses).order_by(
-                Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
+        return (
+            IStore(BuildFarmJob)
+            .using(*origin)
+            .find(BuildFarmJob, *clauses)
+            .order_by(Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
+        )
 
     def getBuildsForArchive(self, archive, status=None):
         """See `IBuildFarmJobSet`."""
@@ -396,7 +419,8 @@ class BuildFarmJobSet:
             extra_exprs.append(BuildFarmJob.status == status)
 
         result_set = IStore(BuildFarmJob).find(
-            BuildFarmJob, BuildFarmJob.archive == archive, *extra_exprs)
+            BuildFarmJob, BuildFarmJob.archive == archive, *extra_exprs
+        )
 
         # When we have a set of builds that may include pending or
         # superseded builds, we order by -date_created (as we won't
@@ -407,12 +431,14 @@ class BuildFarmJobSet:
             BuildStatus.BUILDING,
             BuildStatus.UPLOADING,
             BuildStatus.SUPERSEDED,
-            ]
+        ]
         if status is None or status in unfinished_states:
             result_set.order_by(
-                Desc(BuildFarmJob.date_created), BuildFarmJob.id)
+                Desc(BuildFarmJob.date_created), BuildFarmJob.id
+            )
         else:
             result_set.order_by(
-                Desc(BuildFarmJob.date_finished), BuildFarmJob.id)
+                Desc(BuildFarmJob.date_finished), BuildFarmJob.id
+            )
 
         return result_set
diff --git a/lib/lp/buildmaster/model/buildfarmjobbehaviour.py b/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
index 50ec99f..6c894c0 100644
--- a/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
@@ -4,15 +4,15 @@
 """Base and idle BuildFarmJobBehaviour classes."""
 
 __all__ = [
-    'BuildFarmJobBehaviourBase',
-    ]
+    "BuildFarmJobBehaviourBase",
+]
 
-from collections import OrderedDict
-from datetime import datetime
 import gzip
 import logging
 import os
 import tempfile
+from collections import OrderedDict
+from datetime import datetime
 
 import transaction
 from twisted.internet import defer
@@ -23,11 +23,8 @@ from lp.buildmaster.enums import (
     BuildBaseImageType,
     BuildFarmJobType,
     BuildStatus,
-    )
-from lp.buildmaster.interfaces.builder import (
-    BuildDaemonError,
-    CannotBuild,
-    )
+)
+from lp.buildmaster.interfaces.builder import BuildDaemonError, CannotBuild
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.config import config
 from lp.services.helpers import filenameToContentType
@@ -38,8 +35,7 @@ from lp.services.statsd.interfaces.statsd_client import IStatsdClient
 from lp.services.utils import sanitise_urls
 from lp.services.webapp import canonical_url
 
-
-WORKER_LOG_FILENAME = 'buildlog'
+WORKER_LOG_FILENAME = "buildlog"
 
 
 class BuildFarmJobBehaviourBase:
@@ -58,8 +54,9 @@ class BuildFarmJobBehaviourBase:
     @cachedproperty
     def _authserver(self):
         return xmlrpc.Proxy(
-            config.builddmaster.authentication_endpoint.encode('UTF-8'),
-            connectTimeout=config.builddmaster.authentication_timeout)
+            config.builddmaster.authentication_endpoint.encode("UTF-8"),
+            connectTimeout=config.builddmaster.authentication_timeout,
+        )
 
     @property
     def archive(self):
@@ -93,7 +90,8 @@ class BuildFarmJobBehaviourBase:
 
     def issueMacaroon(self):
         raise NotImplementedError(
-            "This build type does not support accessing private resources.")
+            "This build type does not support accessing private resources."
+        )
 
     def extraBuildArgs(self, logger=None):
         """The default behaviour is to send only common extra arguments."""
@@ -110,8 +108,12 @@ class BuildFarmJobBehaviourBase:
         args = yield self.extraBuildArgs(logger=logger)
         filemap = yield self.determineFilesToSend()
         return (
-            self.builder_type, self.distro_arch_series, self.pocket,
-            filemap, args)
+            self.builder_type,
+            self.distro_arch_series,
+            self.pocket,
+            filemap,
+            args,
+        )
 
     def verifyBuildRequest(self, logger):
         """The default behaviour is a no-op."""
@@ -123,65 +125,83 @@ class BuildFarmJobBehaviourBase:
         cookie = self.build.build_cookie
         logger.info(
             "Preparing job %s (%s) on %s."
-            % (cookie, self.build.title, self._builder.url))
+            % (cookie, self.build.title, self._builder.url)
+        )
 
         builder_type, das, pocket, files, args = yield (
-            self.composeBuildRequest(logger))
+            self.composeBuildRequest(logger)
+        )
 
         # First cache the chroot and any other files that the job needs.
         pocket_chroot = None
         for image_type in self.image_types:
             pocket_chroot = das.getPocketChroot(
-                pocket=pocket, image_type=image_type)
+                pocket=pocket, image_type=image_type
+            )
             if pocket_chroot is not None:
                 break
         if pocket_chroot is None:
             raise CannotBuild(
-                "Unable to find a chroot for %s" % das.displayname)
+                "Unable to find a chroot for %s" % das.displayname
+            )
         chroot = pocket_chroot.chroot
         args["image_type"] = pocket_chroot.image_type.name.lower()
 
         filename_to_sha1 = OrderedDict()
         dl = []
-        dl.append(self._worker.sendFileToWorker(
-            logger=logger, url=chroot.http_url, sha1=chroot.content.sha1))
+        dl.append(
+            self._worker.sendFileToWorker(
+                logger=logger, url=chroot.http_url, sha1=chroot.content.sha1
+            )
+        )
         for filename, params in files.items():
-            filename_to_sha1[filename] = params['sha1']
+            filename_to_sha1[filename] = params["sha1"]
             dl.append(self._worker.sendFileToWorker(logger=logger, **params))
         yield defer.gatherResults(dl)
 
         combined_args = {
-            'builder_type': builder_type, 'chroot_sha1': chroot.content.sha1,
-            'filemap': filename_to_sha1, 'args': args}
+            "builder_type": builder_type,
+            "chroot_sha1": chroot.content.sha1,
+            "filemap": filename_to_sha1,
+            "args": args,
+        }
         logger.info(
             "Dispatching job %s (%s) to %s:\n%s"
-            % (cookie, self.build.title, self._builder.url,
-               sanitise_urls(repr(combined_args))))
+            % (
+                cookie,
+                self.build.title,
+                self._builder.url,
+                sanitise_urls(repr(combined_args)),
+            )
+        )
 
         (status, info) = yield self._worker.build(
-            cookie, builder_type, chroot.content.sha1, filename_to_sha1, args)
+            cookie, builder_type, chroot.content.sha1, filename_to_sha1, args
+        )
 
         # Update stats
-        job_type = getattr(self.build, 'job_type', None)
-        job_type_name = job_type.name if job_type else 'UNKNOWN'
+        job_type = getattr(self.build, "job_type", None)
+        job_type_name = job_type.name if job_type else "UNKNOWN"
         statsd_client = getUtility(IStatsdClient)
         statsd_client.incr(
-            'build.count',
+            "build.count",
             labels={
-                'job_type': job_type_name,
-                'builder_name': self._builder.name,
-                })
+                "job_type": job_type_name,
+                "builder_name": self._builder.name,
+            },
+        )
 
         logger.info(
             "Job %s (%s) started on %s: %s %s"
-            % (cookie, self.build.title, self._builder.url, status, info))
+            % (cookie, self.build.title, self._builder.url, status, info)
+        )
 
     def getUploadDirLeaf(self, build_cookie, now=None):
         """See `IPackageBuild`."""
         if now is None:
             now = datetime.now()
         timestamp = now.strftime("%Y%m%d-%H%M%S")
-        return '%s-%s' % (timestamp, build_cookie)
+        return "%s-%s" % (timestamp, build_cookie)
 
     def transferWorkerFileToLibrarian(self, file_sha1, filename, private):
         """Transfer a file from the worker to the librarian.
@@ -201,25 +221,28 @@ class BuildFarmJobBehaviourBase:
             try:
                 # If the requested file is the 'buildlog' compress it
                 # using gzip before storing in Librarian.
-                if file_sha1 == 'buildlog':
-                    out_file = open(out_file_name, 'rb')
-                    filename += '.gz'
-                    out_file_name += '.gz'
-                    gz_file = gzip.GzipFile(out_file_name, mode='wb')
+                if file_sha1 == "buildlog":
+                    out_file = open(out_file_name, "rb")
+                    filename += ".gz"
+                    out_file_name += ".gz"
+                    gz_file = gzip.GzipFile(out_file_name, mode="wb")
                     copy_and_close(out_file, gz_file)
-                    os.remove(out_file_name.replace('.gz', ''))
+                    os.remove(out_file_name.replace(".gz", ""))
 
                 # Open the file, seek to its end position, count and seek to
                 # beginning, ready for adding to the Librarian.
-                out_file = open(out_file_name, 'rb')
+                out_file = open(out_file_name, "rb")
                 out_file.seek(0, 2)
                 bytes_written = out_file.tell()
                 out_file.seek(0)
 
                 library_file = getUtility(ILibraryFileAliasSet).create(
-                    filename, bytes_written, out_file,
+                    filename,
+                    bytes_written,
+                    out_file,
                     contentType=filenameToContentType(filename),
-                    restricted=private)
+                    restricted=private,
+                )
             finally:
                 # Remove the temporary file.
                 os.remove(out_file_name)
@@ -232,12 +255,13 @@ class BuildFarmJobBehaviourBase:
 
     def getLogFileName(self):
         """Return the preferred file name for this job's log."""
-        return 'buildlog.txt'
+        return "buildlog.txt"
 
     def getLogFromWorker(self, queue_item):
         """Return a Deferred which fires when the log is in the librarian."""
         d = self.transferWorkerFileToLibrarian(
-            WORKER_LOG_FILENAME, self.getLogFileName(), self.build.is_private)
+            WORKER_LOG_FILENAME, self.getLogFileName(), self.build.is_private
+        )
         return d
 
     @defer.inlineCallbacks
@@ -254,9 +278,13 @@ class BuildFarmJobBehaviourBase:
         # Explode before collecting a binary that is denied in this
         # distroseries/pocket/archive
         assert build.archive.canModifySuite(
-            build.distro_series, build.pocket), (
-                "%s (%s) can not be built for pocket %s in %s: illegal status"
-                % (build.title, build.id, build.pocket.name, build.archive))
+            build.distro_series, build.pocket
+        ), "%s (%s) can not be built for pocket %s in %s: illegal status" % (
+            build.title,
+            build.id,
+            build.pocket.name,
+            build.archive,
+        )
 
     @staticmethod
     def extractBuildStatus(worker_status):
@@ -265,25 +293,28 @@ class BuildFarmJobBehaviourBase:
         :param worker_status: build status dict from BuilderWorker.status.
         :return: the unqualified status name, e.g. "OK".
         """
-        status_string = worker_status['build_status']
-        lead_string = 'BuildStatus.'
+        status_string = worker_status["build_status"]
+        lead_string = "BuildStatus."
         assert status_string.startswith(lead_string), (
-            "Malformed status string: '%s'" % status_string)
-        return status_string[len(lead_string):]
+            "Malformed status string: '%s'" % status_string
+        )
+        return status_string[len(lead_string) :]
 
     # The list of build status values for which email notifications are
     # allowed to be sent. It is up to each callback as to whether it will
     # consider sending a notification but it won't do so if the status is not
     # in this list.
-    ALLOWED_STATUS_NOTIFICATIONS = ['PACKAGEFAIL', 'CHROOTFAIL']
+    ALLOWED_STATUS_NOTIFICATIONS = ["PACKAGEFAIL", "CHROOTFAIL"]
 
     @defer.inlineCallbacks
     def handleStatus(self, bq, worker_status):
         """See `IBuildFarmJobBehaviour`."""
         if bq != self.build.buildqueue_record:
             raise AssertionError(
-                "%r != %r" % (bq, self.build.buildqueue_record))
+                "%r != %r" % (bq, self.build.buildqueue_record)
+            )
         from lp.buildmaster.manager import BUILDD_MANAGER_LOG_NAME
+
         logger = logging.getLogger(BUILDD_MANAGER_LOG_NAME)
         builder_status = worker_status["builder_status"]
 
@@ -292,19 +323,24 @@ class BuildFarmJobBehaviourBase:
             status = self.extractBuildStatus(worker_status)
             notify = status in self.ALLOWED_STATUS_NOTIFICATIONS
             fail_status_map = {
-                'PACKAGEFAIL': BuildStatus.FAILEDTOBUILD,
-                'DEPFAIL': BuildStatus.MANUALDEPWAIT,
-                'CHROOTFAIL': BuildStatus.CHROOTWAIT,
-                }
+                "PACKAGEFAIL": BuildStatus.FAILEDTOBUILD,
+                "DEPFAIL": BuildStatus.MANUALDEPWAIT,
+                "CHROOTFAIL": BuildStatus.CHROOTWAIT,
+            }
             if self.build.status == BuildStatus.CANCELLING:
-                fail_status_map['ABORTED'] = BuildStatus.CANCELLED
+                fail_status_map["ABORTED"] = BuildStatus.CANCELLED
 
             logger.info(
-                'Processing finished job %s (%s) from builder %s: %s'
-                % (self.build.build_cookie, self.build.title,
-                   self.build.buildqueue_record.builder.name, status))
+                "Processing finished job %s (%s) from builder %s: %s"
+                % (
+                    self.build.build_cookie,
+                    self.build.title,
+                    self.build.buildqueue_record.builder.name,
+                    status,
+                )
+            )
             build_status = None
-            if status == 'OK':
+            if status == "OK":
                 yield self.storeLogFromWorker(worker_status)
                 # handleSuccess will sometimes perform write operations
                 # outside the database transaction, so a failure between
@@ -317,7 +353,8 @@ class BuildFarmJobBehaviourBase:
                 build_status = fail_status_map[status]
             else:
                 raise BuildDaemonError(
-                    "Build returned unexpected status: %r" % status)
+                    "Build returned unexpected status: %r" % status
+                )
         else:
             # The build status remains unchanged.
             build_status = bq.specific_build.status
@@ -327,7 +364,8 @@ class BuildFarmJobBehaviourBase:
         # process-upload, so doing that before we've removed the BuildQueue
         # causes races.
         self.build.updateStatus(
-            build_status, builder=bq.builder, worker_status=worker_status)
+            build_status, builder=bq.builder, worker_status=worker_status
+        )
 
         if builder_status == "BuilderStatus.WAITING":
             if notify:
@@ -341,15 +379,18 @@ class BuildFarmJobBehaviourBase:
         filemap = worker_status["filemap"]
         filenames_to_download = []
         for filename, sha1 in filemap.items():
-            logger.info("Grabbing file: %s (%s)" % (
-                filename, self._worker.getURL(sha1)))
+            logger.info(
+                "Grabbing file: %s (%s)"
+                % (filename, self._worker.getURL(sha1))
+            )
             out_file_name = os.path.join(upload_path, filename)
             # If the evaluated output file name is not within our
             # upload path, then we don't try to copy this or any
             # subsequent files.
             if not os.path.realpath(out_file_name).startswith(upload_path):
                 raise BuildDaemonError(
-                    "Build returned a file named '%s'." % filename)
+                    "Build returned a file named '%s'." % filename
+                )
             filenames_to_download.append((sha1, out_file_name))
         yield self._worker.getFiles(filenames_to_download, logger=logger)
 
@@ -386,7 +427,8 @@ class BuildFarmJobBehaviourBase:
         #       <archive_id>/distribution_name
         # for all destination archive types.
         upload_path = os.path.join(
-            grab_dir, str(build.archive.id), build.distribution.name)
+            grab_dir, str(build.archive.id), build.distribution.name
+        )
         os.makedirs(upload_path)
 
         yield self._downloadFiles(worker_status, upload_path, logger)
@@ -398,7 +440,8 @@ class BuildFarmJobBehaviourBase:
         # uploads.
         logger.info(
             "Gathered %s completely. Moving %s to uploader queue."
-            % (build.build_cookie, upload_leaf))
+            % (build.build_cookie, upload_leaf)
+        )
         target_dir = os.path.join(root, "incoming")
         if not os.path.exists(target_dir):
             os.mkdir(target_dir)
diff --git a/lib/lp/buildmaster/model/buildqueue.py b/lib/lp/buildmaster/model/buildqueue.py
index b0da009..cc44137 100644
--- a/lib/lp/buildmaster/model/buildqueue.py
+++ b/lib/lp/buildmaster/model/buildqueue.py
@@ -2,36 +2,21 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'BuildQueue',
-    'BuildQueueSet',
-    ]
+    "BuildQueue",
+    "BuildQueueSet",
+]
 
+import logging
 from datetime import datetime
 from itertools import groupby
-import logging
 from operator import attrgetter
 
 import pytz
-from storm.expr import (
-    And,
-    Desc,
-    Exists,
-    Or,
-    SQL,
-    )
-from storm.properties import (
-    Bool,
-    DateTime,
-    Int,
-    TimeDelta,
-    Unicode,
-    )
+from storm.expr import SQL, And, Desc, Exists, Or
+from storm.properties import Bool, DateTime, Int, TimeDelta, Unicode
 from storm.references import Reference
 from storm.store import Store
-from zope.component import (
-    getSiteManager,
-    getUtility,
-    )
+from zope.component import getSiteManager, getUtility
 from zope.interface import implementer
 from zope.security.proxy import removeSecurityProxy
 
@@ -40,28 +25,16 @@ from lp.buildmaster.enums import (
     BuildFarmJobType,
     BuildQueueStatus,
     BuildStatus,
-    )
+)
 from lp.buildmaster.interfaces.buildfarmjob import ISpecificBuildFarmJobSource
-from lp.buildmaster.interfaces.buildqueue import (
-    IBuildQueue,
-    IBuildQueueSet,
-    )
-from lp.services.database.bulk import (
-    load_referencing,
-    load_related,
-    )
-from lp.services.database.constants import (
-    DEFAULT,
-    UTC_NOW,
-    )
+from lp.buildmaster.interfaces.buildqueue import IBuildQueue, IBuildQueueSet
+from lp.services.database.bulk import load_referencing, load_related
+from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.enumcol import DBEnum
 from lp.services.database.interfaces import IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.features import getFeatureFlag
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
+from lp.services.propertycache import cachedproperty, get_property_cache
 
 
 def specific_build_farm_job_sources():
@@ -71,7 +44,8 @@ def specific_build_farm_job_sources():
     # interface.
     components = getSiteManager()
     implementations = sorted(
-        components.getUtilitiesFor(ISpecificBuildFarmJobSource))
+        components.getUtilitiesFor(ISpecificBuildFarmJobSource)
+    )
     # The above yields a collection of 2-tuples where the first element
     # is the name of the `BuildFarmJobType` enum and the second element
     # is the implementing class respectively.
@@ -89,8 +63,14 @@ class BuildQueue(StormBase):
     __storm_table__ = "BuildQueue"
     __storm_order__ = "id"
 
-    def __init__(self, build_farm_job, estimated_duration=DEFAULT,
-                 virtualized=DEFAULT, processor=DEFAULT, lastscore=None):
+    def __init__(
+        self,
+        build_farm_job,
+        estimated_duration=DEFAULT,
+        virtualized=DEFAULT,
+        processor=DEFAULT,
+        lastscore=None,
+    ):
         super().__init__()
         self._build_farm_job = build_farm_job
         self.estimated_duration = estimated_duration
@@ -102,20 +82,20 @@ class BuildQueue(StormBase):
 
     id = Int(primary=True)
 
-    _build_farm_job_id = Int(name='build_farm_job')
-    _build_farm_job = Reference(_build_farm_job_id, 'BuildFarmJob.id')
+    _build_farm_job_id = Int(name="build_farm_job")
+    _build_farm_job = Reference(_build_farm_job_id, "BuildFarmJob.id")
     status = DBEnum(enum=BuildQueueStatus, default=BuildQueueStatus.WAITING)
     date_started = DateTime(tzinfo=pytz.UTC)
 
-    builder_id = Int(name='builder', default=None)
-    builder = Reference(builder_id, 'Builder.id')
-    logtail = Unicode(name='logtail', default=None)
-    lastscore = Int(name='lastscore', default=0)
-    manual = Bool(name='manual', default=False)
+    builder_id = Int(name="builder", default=None)
+    builder = Reference(builder_id, "Builder.id")
+    logtail = Unicode(name="logtail", default=None)
+    lastscore = Int(name="lastscore", default=0)
+    manual = Bool(name="manual", default=False)
     estimated_duration = TimeDelta()
-    processor_id = Int(name='processor')
-    processor = Reference(processor_id, 'Processor.id')
-    virtualized = Bool(name='virtualized')
+    processor_id = Int(name="processor")
+    processor = Reference(processor_id, "Processor.id")
+    virtualized = Bool(name="virtualized")
 
     @property
     def specific_source(self):
@@ -138,14 +118,16 @@ class BuildQueue(StormBase):
     @staticmethod
     def preloadSpecificBuild(queues):
         from lp.buildmaster.model.buildfarmjob import BuildFarmJob
+
         queues = [removeSecurityProxy(bq) for bq in queues]
-        load_related(BuildFarmJob, queues, ['_build_farm_job_id'])
+        load_related(BuildFarmJob, queues, ["_build_farm_job_id"])
         bfj_to_bq = {bq._build_farm_job: bq for bq in queues}
-        key = attrgetter('_build_farm_job.job_type')
+        key = attrgetter("_build_farm_job.job_type")
         for job_type, group in groupby(sorted(queues, key=key), key=key):
             source = getUtility(ISpecificBuildFarmJobSource, job_type.name)
             builds = source.getByBuildFarmJobs(
-                [bq._build_farm_job for bq in group])
+                [bq._build_farm_job for bq in group]
+            )
             for build in builds:
                 bq = bfj_to_bq[removeSecurityProxy(build).build_farm_job]
                 get_property_cache(bq).specific_build = build
@@ -231,7 +213,8 @@ class BuildQueue(StormBase):
             self.specific_build.updateStatus(BuildStatus.CANCELLING)
         else:
             raise AssertionError(
-                "Tried to cancel %r from %s" % (self, self.status.name))
+                "Tried to cancel %r from %s" % (self, self.status.name)
+            )
 
     def markAsCancelled(self):
         """See `IBuildQueue`."""
@@ -241,6 +224,7 @@ class BuildQueue(StormBase):
     def getEstimatedJobStartTime(self, now=None):
         """See `IBuildQueue`."""
         from lp.buildmaster.queuedepth import estimate_job_start_time
+
         return estimate_job_start_time(self, now or self._now())
 
     @staticmethod
@@ -266,9 +250,8 @@ class BuildQueueSet:
 
     def preloadForBuilders(self, builders):
         # Populate builders' currentjob cachedproperty.
-        queues = load_referencing(BuildQueue, builders, ['builder_id'])
-        queue_builders = {
-            queue.builder_id: queue for queue in queues}
+        queues = load_referencing(BuildQueue, builders, ["builder_id"])
+        queue_builders = {queue.builder_id: queue for queue in queues}
         for builder in builders:
             cache = get_property_cache(builder)
             cache.currentjob = queue_builders.get(builder.id, None)
@@ -277,17 +260,24 @@ class BuildQueueSet:
     def preloadForBuildFarmJobs(self, builds):
         """See `IBuildQueueSet`."""
         from lp.buildmaster.model.builder import Builder
-        bqs = list(IStore(BuildQueue).find(
-            BuildQueue,
-            BuildQueue._build_farm_job_id.is_in(
-                [removeSecurityProxy(b).build_farm_job_id for b in builds])))
-        load_related(Builder, bqs, ['builder_id'])
+
+        bqs = list(
+            IStore(BuildQueue).find(
+                BuildQueue,
+                BuildQueue._build_farm_job_id.is_in(
+                    [removeSecurityProxy(b).build_farm_job_id for b in builds]
+                ),
+            )
+        )
+        load_related(Builder, bqs, ["builder_id"])
         prefetched_data = {
             removeSecurityProxy(buildqueue)._build_farm_job_id: buildqueue
-            for buildqueue in bqs}
+            for buildqueue in bqs
+        }
         for build in builds:
             bq = prefetched_data.get(
-                removeSecurityProxy(build).build_farm_job_id)
+                removeSecurityProxy(build).build_farm_job_id
+            )
             get_property_cache(build).buildqueue_record = bq
         return bqs
 
@@ -297,7 +287,7 @@ class BuildQueueSet:
         # should be able to configure the root-logger instead of creating
         # a new object, then the logger lookups won't require the specific
         # name argument anymore. See bug 164203.
-        logger = logging.getLogger('worker-scanner')
+        logger = logging.getLogger("worker-scanner")
         return logger
 
     def findBuildCandidates(self, processor, virtualized, limit):
@@ -313,9 +303,8 @@ class BuildQueueSet:
             query = job_source.addCandidateSelectionCriteria()
             if query:
                 job_type_conditions.append(
-                    Or(
-                        BuildFarmJob.job_type != job_type,
-                        Exists(SQL(query))))
+                    Or(BuildFarmJob.job_type != job_type, Exists(SQL(query)))
+                )
 
         def get_int_feature_flag(flag):
             value_str = getFeatureFlag(flag)
@@ -328,9 +317,12 @@ class BuildQueueSet:
         score_conditions = []
         minimum_scores = set()
         if processor is not None:
-            minimum_scores.add(get_int_feature_flag(
-                'buildmaster.minimum_score.%s' % processor.name))
-        minimum_scores.add(get_int_feature_flag('buildmaster.minimum_score'))
+            minimum_scores.add(
+                get_int_feature_flag(
+                    "buildmaster.minimum_score.%s" % processor.name
+                )
+            )
+        minimum_scores.add(get_int_feature_flag("buildmaster.minimum_score"))
         minimum_scores.discard(None)
         # If there are minimum scores set for any of the processors
         # supported by this builder, use the highest of them.  This is a bit
@@ -338,17 +330,22 @@ class BuildQueueSet:
         # option and avoids substantially complicating the candidate query.
         if minimum_scores:
             score_conditions.append(
-                BuildQueue.lastscore >= max(minimum_scores))
+                BuildQueue.lastscore >= max(minimum_scores)
+            )
 
         store = IStore(BuildQueue)
-        return list(store.using(BuildQueue, BuildFarmJob).find(
-            BuildQueue,
-            BuildFarmJob.id == BuildQueue._build_farm_job_id,
-            BuildQueue.status == BuildQueueStatus.WAITING,
-            BuildQueue.processor == processor,
-            BuildQueue.virtualized == virtualized,
-            BuildQueue.builder == None,
-            And(*(job_type_conditions + score_conditions))
-            # This must match the ordering used in
-            # PrefetchedBuildCandidates._getSortKey.
-            ).order_by(Desc(BuildQueue.lastscore), BuildQueue.id)[:limit])
+        return list(
+            store.using(BuildQueue, BuildFarmJob)
+            .find(
+                BuildQueue,
+                BuildFarmJob.id == BuildQueue._build_farm_job_id,
+                BuildQueue.status == BuildQueueStatus.WAITING,
+                BuildQueue.processor == processor,
+                BuildQueue.virtualized == virtualized,
+                BuildQueue.builder == None,
+                And(*(job_type_conditions + score_conditions))
+                # This must match the ordering used in
+                # PrefetchedBuildCandidates._getSortKey.
+            )
+            .order_by(Desc(BuildQueue.lastscore), BuildQueue.id)[:limit]
+        )
diff --git a/lib/lp/buildmaster/model/packagebuild.py b/lib/lp/buildmaster/model/packagebuild.py
index 7c550c5..b1efd51 100644
--- a/lib/lp/buildmaster/model/packagebuild.py
+++ b/lib/lp/buildmaster/model/packagebuild.py
@@ -2,8 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'PackageBuildMixin',
-    ]
+    "PackageBuildMixin",
+]
 
 import io
 
@@ -17,12 +17,11 @@ from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
 from lp.soyuz.adapters.archivedependencies import (
     default_component_dependency_name,
-    )
+)
 from lp.soyuz.interfaces.component import IComponentSet
 
 
 class PackageBuildMixin(BuildFarmJobMixin):
-
     @property
     def current_component(self):
         """See `IPackageBuild`."""
@@ -47,18 +46,32 @@ class PackageBuildMixin(BuildFarmJobMixin):
         """See `IBuildFarmJob`"""
         return self.archive.private
 
-    def updateStatus(self, status, builder=None, worker_status=None,
-                     date_started=None, date_finished=None,
-                     force_invalid_transition=False):
+    def updateStatus(
+        self,
+        status,
+        builder=None,
+        worker_status=None,
+        date_started=None,
+        date_finished=None,
+        force_invalid_transition=False,
+    ):
         super().updateStatus(
-            status, builder=builder, worker_status=worker_status,
-            date_started=date_started, date_finished=date_finished,
-            force_invalid_transition=force_invalid_transition)
-
-        if (status == BuildStatus.MANUALDEPWAIT and worker_status is not None
-            and worker_status.get('dependencies') is not None):
+            status,
+            builder=builder,
+            worker_status=worker_status,
+            date_started=date_started,
+            date_finished=date_finished,
+            force_invalid_transition=force_invalid_transition,
+        )
+
+        if (
+            status == BuildStatus.MANUALDEPWAIT
+            and worker_status is not None
+            and worker_status.get("dependencies") is not None
+        ):
             self.dependencies = six.ensure_text(
-                worker_status.get('dependencies'))
+                worker_status.get("dependencies")
+            )
         else:
             self.dependencies = None
 
@@ -76,11 +89,12 @@ class PackageBuildMixin(BuildFarmJobMixin):
         # object's 'upload_log' attribute will point to the
         # `LibrarianFileAlias`.
 
-        assert self.upload_log is None, (
-            "Upload log information already exists and cannot be overridden.")
+        assert (
+            self.upload_log is None
+        ), "Upload log information already exists and cannot be overridden."
 
         if filename is None:
-            filename = 'upload_%s_log.txt' % self.id
+            filename = "upload_%s_log.txt" % self.id
         contentType = filenameToContentType(filename)
         content = six.ensure_binary(content)
         file_size = len(content)
@@ -88,8 +102,12 @@ class PackageBuildMixin(BuildFarmJobMixin):
         restricted = self.is_private
 
         return getUtility(ILibraryFileAliasSet).create(
-            filename, file_size, file_content, contentType=contentType,
-            restricted=restricted)
+            filename,
+            file_size,
+            file_content,
+            contentType=contentType,
+            restricted=restricted,
+        )
 
     def storeUploadLog(self, content):
         """See `IPackageBuild`."""
diff --git a/lib/lp/buildmaster/model/processor.py b/lib/lp/buildmaster/model/processor.py
index 314e12a..15a3871 100644
--- a/lib/lp/buildmaster/model/processor.py
+++ b/lib/lp/buildmaster/model/processor.py
@@ -2,9 +2,9 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'Processor',
-    'ProcessorSet',
-    ]
+    "Processor",
+    "ProcessorSet",
+]
 
 from storm.locals import Bool
 from zope.interface import implementer
@@ -13,7 +13,7 @@ from lp.buildmaster.interfaces.processor import (
     IProcessor,
     IProcessorSet,
     ProcessorNotFound,
-    )
+)
 from lp.services.database.interfaces import IStore
 from lp.services.database.sqlbase import SQLBase
 from lp.services.database.sqlobject import StringCol
@@ -21,11 +21,11 @@ from lp.services.database.sqlobject import StringCol
 
 @implementer(IProcessor)
 class Processor(SQLBase):
-    _table = 'Processor'
+    _table = "Processor"
 
-    name = StringCol(dbName='name', notNull=True)
-    title = StringCol(dbName='title', notNull=True)
-    description = StringCol(dbName='description', notNull=True)
+    name = StringCol(dbName="name", notNull=True)
+    title = StringCol(dbName="title", notNull=True)
+    description = StringCol(dbName="description", notNull=True)
     restricted = Bool(allow_none=False, default=False)
 
     # When setting this to true you may want to add missing
@@ -51,8 +51,9 @@ class ProcessorSet:
 
     def getByName(self, name):
         """See `IProcessorSet`."""
-        processor = IStore(Processor).find(
-            Processor, Processor.name == name).one()
+        processor = (
+            IStore(Processor).find(Processor, Processor.name == name).one()
+        )
         if processor is None:
             raise ProcessorNotFound(name)
         return processor
@@ -61,12 +62,23 @@ class ProcessorSet:
         """See `IProcessorSet`."""
         return IStore(Processor).find(Processor)
 
-    def new(self, name, title, description, restricted=False,
-            build_by_default=False, supports_virtualized=False,
-            supports_nonvirtualized=True):
+    def new(
+        self,
+        name,
+        title,
+        description,
+        restricted=False,
+        build_by_default=False,
+        supports_virtualized=False,
+        supports_nonvirtualized=True,
+    ):
         """See `IProcessorSet`."""
         return Processor(
-            name=name, title=title, description=description,
-            restricted=restricted, build_by_default=build_by_default,
+            name=name,
+            title=title,
+            description=description,
+            restricted=restricted,
+            build_by_default=build_by_default,
             supports_virtualized=supports_virtualized,
-            supports_nonvirtualized=supports_nonvirtualized)
+            supports_nonvirtualized=supports_nonvirtualized,
+        )
diff --git a/lib/lp/buildmaster/queuedepth.py b/lib/lp/buildmaster/queuedepth.py
index ed53342..1a92c14 100644
--- a/lib/lp/buildmaster/queuedepth.py
+++ b/lib/lp/buildmaster/queuedepth.py
@@ -2,23 +2,17 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'estimate_job_start_time',
-    ]
+    "estimate_job_start_time",
+]
 
 from collections import defaultdict
-from datetime import (
-    datetime,
-    timedelta,
-    )
+from datetime import datetime, timedelta
 
 from pytz import utc
 from storm.expr import Count
 
 from lp.buildmaster.enums import BuildQueueStatus
-from lp.buildmaster.model.builder import (
-    Builder,
-    BuilderProcessor,
-    )
+from lp.buildmaster.model.builder import Builder, BuilderProcessor
 from lp.buildmaster.model.buildqueue import BuildQueue
 from lp.services.database.interfaces import IStore
 from lp.services.database.sqlbase import sqlvalues
@@ -28,16 +22,29 @@ def get_builder_data():
     """How many working builders are there, how are they configured?"""
     # XXX: This is broken with multi-Processor buildds, as it only
     # considers competition from the same processor.
-    per_arch_totals = list(IStore(Builder).find(
-        (BuilderProcessor.processor_id, Builder.virtualized,
-         Count(Builder.id)),
-        BuilderProcessor.builder_id == Builder.id,
-        Builder._builderok == True, Builder.manual == False).group_by(
-            BuilderProcessor.processor_id, Builder.virtualized))
-    per_virt_totals = list(IStore(Builder).find(
-        (Builder.virtualized, Count(Builder.id)),
-        Builder._builderok == True, Builder.manual == False).group_by(
-            Builder.virtualized))
+    per_arch_totals = list(
+        IStore(Builder)
+        .find(
+            (
+                BuilderProcessor.processor_id,
+                Builder.virtualized,
+                Count(Builder.id),
+            ),
+            BuilderProcessor.builder_id == Builder.id,
+            Builder._builderok == True,
+            Builder.manual == False,
+        )
+        .group_by(BuilderProcessor.processor_id, Builder.virtualized)
+    )
+    per_virt_totals = list(
+        IStore(Builder)
+        .find(
+            (Builder.virtualized, Count(Builder.id)),
+            Builder._builderok == True,
+            Builder.manual == False,
+        )
+        .group_by(Builder.virtualized)
+    )
 
     builder_stats = defaultdict(int)
     for virtualized, count in per_virt_totals:
@@ -57,12 +64,16 @@ def get_free_builders_count(processor, virtualized):
             AND id NOT IN (
                 SELECT builder FROM BuildQueue WHERE builder IS NOT NULL)
             AND virtualized = %s
-        """ % sqlvalues(virtualized)
+        """ % sqlvalues(
+        virtualized
+    )
     if processor is not None:
         query += """
             AND id IN (
                 SELECT builder FROM BuilderProcessor WHERE processor = %s)
-        """ % sqlvalues(processor)
+        """ % sqlvalues(
+            processor
+        )
     result_set = IStore(BuildQueue).execute(query)
     free_builders = result_set.get_one()[0]
     return free_builders
@@ -78,7 +89,7 @@ def get_head_job_platform(bq):
     :return: A (processor, virtualized) tuple which is the head job's
     platform or None if the JOI is the head job.
     """
-    my_platform = (getattr(bq.processor, 'id', None), bq.virtualized)
+    my_platform = (getattr(bq.processor, "id", None), bq.virtualized)
     query = """
         SELECT
             processor,
@@ -91,7 +102,7 @@ def get_head_job_platform(bq):
         ORDER BY lastscore DESC, id LIMIT 1
         """
     result = IStore(BuildQueue).execute(query).get_one()
-    return (my_platform if result is None else result)
+    return my_platform if result is None else result
 
 
 def estimate_time_to_next_builder(bq, now=None):
@@ -151,18 +162,21 @@ def estimate_time_to_next_builder(bq, now=None):
             AND BuildQueue.status = %s
             AND Builder.virtualized = %s
         """ % sqlvalues(
-            now, now, BuildQueueStatus.RUNNING, head_job_virtualized)
+        now, now, BuildQueueStatus.RUNNING, head_job_virtualized
+    )
 
     if head_job_processor is not None:
         # Only look at builders with specific processor types.
         delay_query += """
             AND Builder.id IN (
                 SELECT builder FROM BuilderProcessor WHERE processor = %s)
-            """ % sqlvalues(head_job_processor)
+            """ % sqlvalues(
+            head_job_processor
+        )
 
     result_set = IStore(BuildQueue).execute(delay_query)
     head_job_delay = result_set.get_one()[0]
-    return (0 if head_job_delay is None else int(head_job_delay))
+    return 0 if head_job_delay is None else int(head_job_delay)
 
 
 def get_pending_jobs_clauses(bq):
@@ -178,15 +192,21 @@ def get_pending_jobs_clauses(bq):
             (BuildQueue.lastscore = %s AND BuildQueue.id < %s))
         AND buildqueue.virtualized = %s
         """ % sqlvalues(
-            BuildQueueStatus.WAITING, bq.lastscore, bq.lastscore, bq.id,
-            bq.virtualized)
+        BuildQueueStatus.WAITING,
+        bq.lastscore,
+        bq.lastscore,
+        bq.id,
+        bq.virtualized,
+    )
     processor_clause = """
         AND (
             -- The processor values either match or the candidate
             -- job is processor-independent.
             buildqueue.processor = %s OR
             buildqueue.processor IS NULL)
-        """ % sqlvalues(bq.processor)
+        """ % sqlvalues(
+        bq.processor
+    )
     # We don't care about processors if the estimation is for a
     # processor-independent job.
     if bq.processor is not None:
@@ -226,7 +246,7 @@ def estimate_job_delay(bq, builder_stats):
             # virtualization settings.
             return a == b
 
-    my_platform = (getattr(bq.processor, 'id', None), bq.virtualized)
+    my_platform = (getattr(bq.processor, "id", None), bq.virtualized)
     query = """
         SELECT
             BuildQueue.processor,
@@ -278,7 +298,7 @@ def estimate_job_delay(bq, builder_stats):
         builders = builder_stats[platform]
         # If there are less jobs than builders that can take them on,
         # the delays should be averaged/divided by the number of jobs.
-        denominator = (jobs if jobs < builders else builders)
+        denominator = jobs if jobs < builders else builders
         if denominator > 1:
             duration = int(duration / float(denominator))
 
@@ -302,13 +322,14 @@ def estimate_job_start_time(bq, now=None):
     # This method may only be invoked for pending jobs.
     if bq.status != BuildQueueStatus.WAITING:
         raise AssertionError(
-            "The start time is only estimated for pending jobs.")
+            "The start time is only estimated for pending jobs."
+        )
 
     # XXX: This is broken with multi-Processor buildds, as it only
     # considers competition from the same processor.
 
     builder_stats = get_builder_data()
-    platform = (getattr(bq.processor, 'id', None), bq.virtualized)
+    platform = (getattr(bq.processor, "id", None), bq.virtualized)
     if builder_stats[platform] == 0:
         # No builders that can run the job at hand
         #   -> no dispatch time estimation available.
diff --git a/lib/lp/buildmaster/security.py b/lib/lp/buildmaster/security.py
index abd59af..2c90386 100644
--- a/lib/lp/buildmaster/security.py
+++ b/lib/lp/buildmaster/security.py
@@ -4,9 +4,9 @@
 """Security adapters for the buildmaster package."""
 
 __all__ = [
-    'ViewBuilder',
-    'ViewProcessor',
-    ]
+    "ViewBuilder",
+    "ViewProcessor",
+]
 
 from lp.app.security import AnonymousAuthorization
 from lp.buildmaster.interfaces.builder import IBuilder
@@ -15,9 +15,11 @@ from lp.buildmaster.interfaces.processor import IProcessor
 
 class ViewBuilder(AnonymousAuthorization):
     """Anyone can view a `IBuilder`."""
+
     usedfor = IBuilder
 
 
 class ViewProcessor(AnonymousAuthorization):
     """Anyone can view an `IProcessor`."""
+
     usedfor = IProcessor
diff --git a/lib/lp/buildmaster/tests/builderproxy.py b/lib/lp/buildmaster/tests/builderproxy.py
index 1fa6225..3f7fc4f 100644
--- a/lib/lp/buildmaster/tests/builderproxy.py
+++ b/lib/lp/buildmaster/tests/builderproxy.py
@@ -3,28 +3,17 @@
 
 """Fixtures for dealing with the build time HTTP proxy."""
 
-from datetime import datetime
 import json
+import uuid
+from datetime import datetime
 from textwrap import dedent
 from urllib.parse import urlsplit
-import uuid
 
 import fixtures
-from testtools.matchers import (
-    Equals,
-    HasLength,
-    MatchesStructure,
-    )
-from twisted.internet import (
-    defer,
-    endpoints,
-    reactor,
-    )
+from testtools.matchers import Equals, HasLength, MatchesStructure
+from twisted.internet import defer, endpoints, reactor
 from twisted.python.compat import nativeString
-from twisted.web import (
-    resource,
-    server,
-    )
+from twisted.web import resource, server
 
 from lp.services.config import config
 
@@ -40,18 +29,22 @@ class ProxyAuthAPITokensResource(resource.Resource):
 
     def render_POST(self, request):
         content = json.loads(request.content.read().decode("UTF-8"))
-        self.requests.append({
-            "method": request.method,
-            "uri": request.uri,
-            "headers": dict(request.requestHeaders.getAllRawHeaders()),
-            "json": content,
-            })
+        self.requests.append(
+            {
+                "method": request.method,
+                "uri": request.uri,
+                "headers": dict(request.requestHeaders.getAllRawHeaders()),
+                "json": content,
+            }
+        )
         username = content["username"]
-        return json.dumps({
-            "username": username,
-            "secret": uuid.uuid4().hex,
-            "timestamp": datetime.utcnow().isoformat(),
-            }).encode("UTF-8")
+        return json.dumps(
+            {
+                "username": username,
+                "secret": uuid.uuid4().hex,
+                "timestamp": datetime.utcnow().isoformat(),
+            }
+        ).encode("UTF-8")
 
 
 class InProcessProxyAuthAPIFixture(fixtures.Fixture):
@@ -83,14 +76,19 @@ class InProcessProxyAuthAPIFixture(fixtures.Fixture):
         self.addCleanup(site.stopFactory)
         port = yield endpoint.listen(site)
         self.addCleanup(port.stopListening)
-        config.push("in-process-proxy-auth-api-fixture", dedent("""
-            [builddmaster]
-            builder_proxy_auth_api_admin_secret: admin-secret
-            builder_proxy_auth_api_admin_username: admin-launchpad.test
-            builder_proxy_auth_api_endpoint: http://{host}:{port}/tokens
-            builder_proxy_host: {host}
-            builder_proxy_port: {port}
-            """).format(host=port.getHost().host, port=port.getHost().port))
+        config.push(
+            "in-process-proxy-auth-api-fixture",
+            dedent(
+                """
+                [builddmaster]
+                builder_proxy_auth_api_admin_secret: admin-secret
+                builder_proxy_auth_api_admin_username: admin-launchpad.test
+                builder_proxy_auth_api_endpoint: http://{host}:{port}/tokens
+                builder_proxy_host: {host}
+                builder_proxy_port: {port}
+                """
+            ).format(host=port.getHost().host, port=port.getHost().port),
+        )
         self.addCleanup(config.pop, "in-process-proxy-auth-api-fixture")
 
 
@@ -100,12 +98,12 @@ class ProxyURLMatcher(MatchesStructure):
     def __init__(self, job, now):
         super().__init__(
             scheme=Equals("http"),
-            username=Equals("{}-{}".format(
-                job.build.build_cookie, int(now))),
+            username=Equals("{}-{}".format(job.build.build_cookie, int(now))),
             password=HasLength(32),
             hostname=Equals(config.builddmaster.builder_proxy_host),
             port=Equals(config.builddmaster.builder_proxy_port),
-            path=Equals(""))
+            path=Equals(""),
+        )
 
     def match(self, matchee):
         super().match(urlsplit(matchee))
@@ -118,4 +116,7 @@ class RevocationEndpointMatcher(Equals):
         super().__init__(
             "{}/{}-{}".format(
                 config.builddmaster.builder_proxy_auth_api_endpoint,
-                job.build.build_cookie, int(now)))
+                job.build.build_cookie,
+                int(now),
+            )
+        )
diff --git a/lib/lp/buildmaster/tests/harness.py b/lib/lp/buildmaster/tests/harness.py
index d8a1617..a372fca 100644
--- a/lib/lp/buildmaster/tests/harness.py
+++ b/lib/lp/buildmaster/tests/harness.py
@@ -4,8 +4,8 @@
 """TacHandler for `buildd-manager` daemon."""
 
 __all__ = [
-    'BuilddManagerTestSetup',
-    ]
+    "BuilddManagerTestSetup",
+]
 
 
 import os
@@ -42,12 +42,12 @@ class BuilddManagerTestSetup(TacTestSetup):
     @property
     def root(self):
         """Directory where log and pid files will be stored."""
-        return '/var/tmp/buildd-manager/'
+        return "/var/tmp/buildd-manager/"
 
     @property
     def tacfile(self):
         """Absolute path to the 'buildd-manager' tac file."""
-        return os.path.join(self.daemon_directory, 'buildd-manager.tac')
+        return os.path.join(self.daemon_directory, "buildd-manager.tac")
 
     @property
     def pidfile(self):
@@ -55,7 +55,7 @@ class BuilddManagerTestSetup(TacTestSetup):
 
         Will be created when the tac file actually runs.
         """
-        return os.path.join(self.root, 'buildd-manager.pid')
+        return os.path.join(self.root, "buildd-manager.pid")
 
     @property
     def logfile(self):
@@ -63,4 +63,4 @@ class BuilddManagerTestSetup(TacTestSetup):
 
         Will be created when the tac file actually runs.
         """
-        return os.path.join(self.root, 'buildd-manager.log')
+        return os.path.join(self.root, "buildd-manager.log")
diff --git a/lib/lp/buildmaster/tests/mock_workers.py b/lib/lp/buildmaster/tests/mock_workers.py
index c76e76c..4c56031 100644
--- a/lib/lp/buildmaster/tests/mock_workers.py
+++ b/lib/lp/buildmaster/tests/mock_workers.py
@@ -4,23 +4,23 @@
 """Mock Build objects for tests soyuz buildd-system."""
 
 __all__ = [
-    'AbortingWorker',
-    'BrokenWorker',
-    'BuildingWorker',
-    'DeadProxy',
-    'LostBuildingBrokenWorker',
-    'make_publisher',
-    'MockBuilder',
-    'OkWorker',
-    'TrivialBehaviour',
-    'WaitingWorker',
-    'WorkerTestHelpers',
-    ]
+    "AbortingWorker",
+    "BrokenWorker",
+    "BuildingWorker",
+    "DeadProxy",
+    "LostBuildingBrokenWorker",
+    "make_publisher",
+    "MockBuilder",
+    "OkWorker",
+    "TrivialBehaviour",
+    "WaitingWorker",
+    "WorkerTestHelpers",
+]
 
-from collections import OrderedDict
 import os
 import sys
 import xmlrpc.client
+from collections import OrderedDict
 
 import fixtures
 from lpbuildd.tests.harness import BuilddTestSetup
@@ -28,10 +28,7 @@ from testtools.content import attach_file
 from twisted.internet import defer
 from twisted.web.xmlrpc import Proxy
 
-from lp.buildmaster.enums import (
-    BuilderCleanStatus,
-    BuilderResetProtocol,
-    )
+from lp.buildmaster.enums import BuilderCleanStatus, BuilderResetProtocol
 from lp.buildmaster.interactor import BuilderWorker
 from lp.buildmaster.interfaces.builder import CannotFetchFile
 from lp.services.config import config
@@ -44,18 +41,27 @@ def make_publisher():
     """Make a Soyuz test publisher."""
     # Avoid circular imports.
     from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
+
     return SoyuzTestPublisher()
 
 
 class MockBuilder:
     """Emulates a IBuilder class."""
 
-    def __init__(self, name='mock-builder', builderok=True, manual=False,
-                 processors=None, virtualized=True, vm_host=None,
-                 url='http://fake:0000', version=None,
-                 clean_status=BuilderCleanStatus.DIRTY,
-                 vm_reset_protocol=BuilderResetProtocol.PROTO_1_1,
-                 active=True):
+    def __init__(
+        self,
+        name="mock-builder",
+        builderok=True,
+        manual=False,
+        processors=None,
+        virtualized=True,
+        vm_host=None,
+        url="http://fake:0000",
+        version=None,
+        clean_status=BuilderCleanStatus.DIRTY,
+        vm_reset_protocol=BuilderResetProtocol.PROTO_1_1,
+        active=True,
+    ):
         self.currentjob = None
         self.builderok = builderok
         self.manual = manual
@@ -96,83 +102,92 @@ class OkWorker:
         return [(x[0] if isinstance(x, tuple) else x) for x in self.call_log]
 
     def status(self):
-        self.call_log.append('status')
-        worker_status = {'builder_status': 'BuilderStatus.IDLE'}
+        self.call_log.append("status")
+        worker_status = {"builder_status": "BuilderStatus.IDLE"}
         if self.version is not None:
-            worker_status['builder_version'] = self.version
+            worker_status["builder_version"] = self.version
         return defer.succeed(worker_status)
 
     def ensurepresent(self, sha1, url, user=None, password=None):
-        self.call_log.append(('ensurepresent', url, user, password))
+        self.call_log.append(("ensurepresent", url, user, password))
         return defer.succeed((True, None))
 
     def build(self, buildid, buildtype, chroot, filemap, args):
         self.call_log.append(
-            ('build', buildid, buildtype, chroot, list(filemap), args))
-        return defer.succeed(('BuildStatus.BUILDING', buildid))
+            ("build", buildid, buildtype, chroot, list(filemap), args)
+        )
+        return defer.succeed(("BuildStatus.BUILDING", buildid))
 
     def echo(self, *args):
-        self.call_log.append(('echo',) + args)
+        self.call_log.append(("echo",) + args)
         return defer.succeed(args)
 
     def clean(self):
-        self.call_log.append('clean')
+        self.call_log.append("clean")
         return defer.succeed(None)
 
     def abort(self):
-        self.call_log.append('abort')
+        self.call_log.append("abort")
         return defer.succeed(None)
 
     def info(self):
-        self.call_log.append('info')
-        return defer.succeed(('1.0', self.arch_tag, 'binarypackage'))
+        self.call_log.append("info")
+        return defer.succeed(("1.0", self.arch_tag, "binarypackage"))
 
     def resume(self):
-        self.call_log.append('resume')
+        self.call_log.append("resume")
         return defer.succeed(("", "", 0))
 
     @defer.inlineCallbacks
-    def sendFileToWorker(self, sha1, url, username="", password="",
-                         logger=None):
+    def sendFileToWorker(
+        self, sha1, url, username="", password="", logger=None
+    ):
         present, info = yield self.ensurepresent(sha1, url, username, password)
         if not present:
             raise CannotFetchFile(url, info)
 
     def getURL(self, sha1):
-        return urlappend(
-            'http://localhost:8221/filecache/', sha1).encode('utf8')
+        return urlappend("http://localhost:8221/filecache/", sha1).encode(
+            "utf8"
+        )
 
     def getFiles(self, files, logger=None):
-        dl = defer.gatherResults([
-            self.getFile(builder_file, local_file)
-            for builder_file, local_file in files])
+        dl = defer.gatherResults(
+            [
+                self.getFile(builder_file, local_file)
+                for builder_file, local_file in files
+            ]
+        )
         return dl
 
 
 class BuildingWorker(OkWorker):
     """A mock worker that looks like it's currently building."""
 
-    def __init__(self, build_id='1-1'):
+    def __init__(self, build_id="1-1"):
         super().__init__()
         self.build_id = build_id
         self.status_count = 0
 
     def status(self):
-        self.call_log.append('status')
+        self.call_log.append("status")
         buildlog = xmlrpc.client.Binary(
-            b"This is a build log: %d" % self.status_count)
+            b"This is a build log: %d" % self.status_count
+        )
         self.status_count += 1
-        return defer.succeed({
-            'builder_status': 'BuilderStatus.BUILDING',
-            'build_id': self.build_id,
-            'logtail': buildlog,
-            })
+        return defer.succeed(
+            {
+                "builder_status": "BuilderStatus.BUILDING",
+                "build_id": self.build_id,
+                "logtail": buildlog,
+            }
+        )
 
     def getFile(self, sum, file_to_write):
-        self.call_log.append('getFile')
+        self.call_log.append("getFile")
         if sum == "buildlog":
             if isinstance(file_to_write, str):
-                file_to_write = open(file_to_write, 'wb')
+                file_to_write = open(file_to_write, "wb")
             file_to_write.write(b"This is a build log")
             file_to_write.close()
         return defer.succeed(None)
@@ -181,8 +196,13 @@ class BuildingWorker(OkWorker):
 class WaitingWorker(OkWorker):
     """A mock worker that looks like it's currently waiting."""
 
-    def __init__(self, state='BuildStatus.OK', dependencies=None,
-                 build_id='1-1', filemap=None):
+    def __init__(
+        self,
+        state="BuildStatus.OK",
+        dependencies=None,
+        build_id="1-1",
+        filemap=None,
+    ):
         super().__init__()
         self.state = state
         self.dependencies = dependencies
@@ -194,28 +214,30 @@ class WaitingWorker(OkWorker):
 
         # By default, the worker only has a buildlog, but callsites
         # can update this list as needed.
-        self.valid_files = {'buildlog': ''}
+        self.valid_files = {"buildlog": ""}
         self._got_file_record = []
 
     def status(self):
-        self.call_log.append('status')
-        return defer.succeed({
-            'builder_status': 'BuilderStatus.WAITING',
-            'build_status': self.state,
-            'build_id': self.build_id,
-            'filemap': self.filemap,
-            'dependencies': self.dependencies,
-            })
+        self.call_log.append("status")
+        return defer.succeed(
+            {
+                "builder_status": "BuilderStatus.WAITING",
+                "build_status": self.state,
+                "build_id": self.build_id,
+                "filemap": self.filemap,
+                "dependencies": self.dependencies,
+            }
+        )
 
     def getFile(self, hash, file_to_write):
-        self.call_log.append('getFile')
+        self.call_log.append("getFile")
         if hash in self.valid_files:
             if isinstance(file_to_write, str):
-                file_to_write = open(file_to_write, 'wb')
+                file_to_write = open(file_to_write, "wb")
             if not self.valid_files[hash]:
                 content = ("This is a %s" % hash).encode("ASCII")
             else:
-                with open(self.valid_files[hash], 'rb') as source:
+                with open(self.valid_files[hash], "rb") as source:
                     content = source.read()
             file_to_write.write(content)
             file_to_write.close()
@@ -227,11 +249,13 @@ class AbortingWorker(OkWorker):
     """A mock worker that looks like it's in the process of aborting."""
 
     def status(self):
-        self.call_log.append('status')
-        return defer.succeed({
-            'builder_status': 'BuilderStatus.ABORTING',
-            'build_id': '1-1',
-            })
+        self.call_log.append("status")
+        return defer.succeed(
+            {
+                "builder_status": "BuilderStatus.ABORTING",
+                "build_id": "1-1",
+            }
+        )
 
 
 class LostBuildingBrokenWorker:
@@ -244,18 +268,20 @@ class LostBuildingBrokenWorker:
         self.call_log = []
 
     def status(self):
-        self.call_log.append('status')
-        return defer.succeed({
-            'builder_status': 'BuilderStatus.BUILDING',
-            'build_id': '1000-10000',
-            })
+        self.call_log.append("status")
+        return defer.succeed(
+            {
+                "builder_status": "BuilderStatus.BUILDING",
+                "build_id": "1000-10000",
+            }
+        )
 
     def abort(self):
-        self.call_log.append('abort')
+        self.call_log.append("abort")
         return defer.fail(xmlrpc.client.Fault(8002, "Could not abort"))
 
     def resume(self):
-        self.call_log.append('resume')
+        self.call_log.append("resume")
         return defer.succeed(("", "", 0))
 
 
@@ -266,7 +292,7 @@ class BrokenWorker:
         self.call_log = []
 
     def status(self):
-        self.call_log.append('status')
+        self.call_log.append("status")
         return defer.fail(xmlrpc.client.Fault(8001, "Broken worker"))
 
 
@@ -288,17 +314,14 @@ class LPBuilddTestSetup(BuilddTestSetup):
     """A BuilddTestSetup that uses the LP virtualenv."""
 
     def setUp(self):
-        super().setUp(
-            python_path=sys.executable,
-            twistd_script=twistd_script)
+        super().setUp(python_path=sys.executable, twistd_script=twistd_script)
 
 
 class WorkerTestHelpers(fixtures.Fixture):
-
     @property
     def base_url(self):
         """The URL for the XML-RPC service set up by `BuilddTestSetup`."""
-        return 'http://localhost:%d' % LPBuilddTestSetup().daemon_port
+        return "http://localhost:%d" % LPBuilddTestSetup().daemon_port
 
     def getServerWorker(self):
         """Set up a test build worker server.
@@ -307,20 +330,28 @@ class WorkerTestHelpers(fixtures.Fixture):
         """
         tachandler = self.useFixture(LPBuilddTestSetup())
         attach_file(
-            self, tachandler.logfile, name='xmlrpc-log-file', buffer_now=False)
+            self, tachandler.logfile, name="xmlrpc-log-file", buffer_now=False
+        )
         return tachandler
 
-    def getClientWorker(self, reactor=None, proxy=None,
-                        pool=None, process_pool=None):
+    def getClientWorker(
+        self, reactor=None, proxy=None, pool=None, process_pool=None
+    ):
         """Return a `BuilderWorker` for use in testing.
 
         Points to a fixed URL that is also used by `BuilddTestSetup`.
         """
         return BuilderWorker.makeBuilderWorker(
-            self.base_url, 'vmhost', config.builddmaster.socket_timeout,
-            reactor=reactor, proxy=proxy, pool=pool, process_pool=process_pool)
-
-    def makeCacheFile(self, tachandler, filename, contents=b'something'):
+            self.base_url,
+            "vmhost",
+            config.builddmaster.socket_timeout,
+            reactor=reactor,
+            proxy=proxy,
+            pool=pool,
+            process_pool=process_pool,
+        )
+
+    def makeCacheFile(self, tachandler, filename, contents=b"something"):
         """Make a cache file available on the remote worker.
 
         :param tachandler: The TacTestSetup object used to start the remote
@@ -329,8 +360,8 @@ class WorkerTestHelpers(fixtures.Fixture):
             area.
         :param contents: Bytes to write to the file.
         """
-        path = os.path.join(tachandler.root, 'filecache', filename)
-        with open(path, 'wb') as fd:
+        path = os.path.join(tachandler.root, "filecache", filename)
+        with open(path, "wb") as fd:
             fd.write(contents)
         self.addCleanup(os.unlink, path)
 
@@ -344,21 +375,25 @@ class WorkerTestHelpers(fixtures.Fixture):
         :return: The build id returned by the worker.
         """
         if build_id is None:
-            build_id = 'random-build-id'
+            build_id = "random-build-id"
         tachandler = self.getServerWorker()
-        chroot_file = 'fake-chroot'
-        dsc_file = 'thing'
+        chroot_file = "fake-chroot"
+        dsc_file = "thing"
         self.makeCacheFile(tachandler, chroot_file)
         self.makeCacheFile(tachandler, dsc_file)
         extra_args = {
-            'distribution': 'ubuntu',
-            'series': 'precise',
-            'suite': 'precise',
-            'ogrecomponent': 'main',
-            }
+            "distribution": "ubuntu",
+            "series": "precise",
+            "suite": "precise",
+            "ogrecomponent": "main",
+        }
         return worker.build(
-            build_id, 'binarypackage', chroot_file,
+            build_id,
+            "binarypackage",
+            chroot_file,
             # Although a single-element dict obviously has stable ordering,
             # we use an OrderedDict anyway to test that BuilderWorker
             # serializes it correctly over XML-RPC.
-            OrderedDict([('.dsc', dsc_file)]), extra_args)
+            OrderedDict([(".dsc", dsc_file)]),
+            extra_args,
+        )
diff --git a/lib/lp/buildmaster/tests/test_builder.py b/lib/lp/buildmaster/tests/test_builder.py
index 731d8e2..0adba4a 100644
--- a/lib/lp/buildmaster/tests/test_builder.py
+++ b/lib/lp/buildmaster/tests/test_builder.py
@@ -7,14 +7,8 @@ from fixtures import FakeLogger
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
-from lp.buildmaster.enums import (
-    BuilderCleanStatus,
-    BuildStatus,
-    )
-from lp.buildmaster.interfaces.builder import (
-    IBuilder,
-    IBuilderSet,
-    )
+from lp.buildmaster.enums import BuilderCleanStatus, BuildStatus
+from lp.buildmaster.interfaces.builder import IBuilder, IBuilderSet
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.buildmaster.interfaces.processor import IProcessorSet
 from lp.buildmaster.model.buildqueue import BuildQueue
@@ -22,20 +16,14 @@ from lp.buildmaster.tests.mock_workers import make_publisher
 from lp.services.database.interfaces import IStore
 from lp.services.database.sqlbase import flush_database_updates
 from lp.services.features.testing import FeatureFixture
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import ArchivePurpose, PackagePublishingStatus
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.testing import (
+    TestCaseWithFactory,
     admin_logged_in,
     celebrity_logged_in,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    LaunchpadZopelessLayer,
-    )
+)
+from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadZopelessLayer
 
 
 class TestBuilder(TestCaseWithFactory):
@@ -46,7 +34,7 @@ class TestBuilder(TestCaseWithFactory):
     def test_providesInterface(self):
         # Builder provides IBuilder
         builder = self.factory.makeBuilder()
-        with celebrity_logged_in('buildd_admin'):
+        with celebrity_logged_in("buildd_admin"):
             self.assertProvides(builder, IBuilder)
 
     def test_default_values(self):
@@ -74,7 +62,7 @@ class TestBuilder(TestCaseWithFactory):
     def test_setCleanStatus(self):
         builder = self.factory.makeBuilder()
         self.assertEqual(BuilderCleanStatus.DIRTY, builder.clean_status)
-        with celebrity_logged_in('buildd_admin'):
+        with celebrity_logged_in("buildd_admin"):
             builder.setCleanStatus(BuilderCleanStatus.CLEAN)
         self.assertEqual(BuilderCleanStatus.CLEAN, builder.clean_status)
 
@@ -109,22 +97,22 @@ class TestFindBuildCandidatesBase(TestCaseWithFactory):
         self.publisher = make_publisher()
         self.publisher.prepareBreezyAutotest()
 
-        self.proc_386 = getUtility(IProcessorSet).getByName('386')
+        self.proc_386 = getUtility(IProcessorSet).getByName("386")
 
         # Create some i386 builders ready to build PPA builds.  Two
         # already exist in sampledata so we'll use those first.
-        self.builder1 = getUtility(IBuilderSet)['bob']
-        self.frog_builder = getUtility(IBuilderSet)['frog']
-        self.builder3 = self.factory.makeBuilder(name='builder3')
-        self.builder4 = self.factory.makeBuilder(name='builder4')
-        self.builder5 = self.factory.makeBuilder(name='builder5')
+        self.builder1 = getUtility(IBuilderSet)["bob"]
+        self.frog_builder = getUtility(IBuilderSet)["frog"]
+        self.builder3 = self.factory.makeBuilder(name="builder3")
+        self.builder4 = self.factory.makeBuilder(name="builder4")
+        self.builder5 = self.factory.makeBuilder(name="builder5")
         self.builders = [
             self.builder1,
             self.frog_builder,
             self.builder3,
             self.builder4,
             self.builder5,
-            ]
+        ]
 
         # Ensure all builders are operational.
         for builder in self.builders:
@@ -143,7 +131,8 @@ class TestFindBuildCandidatesGeneralCases(TestFindBuildCandidatesBase):
         bq1 = self.factory.makeBinaryPackageBuild().queueBuild()
         bq2 = self.factory.makeBinaryPackageBuild().queueBuild()
         bq3 = self.factory.makeBinaryPackageBuild(
-            processor=bq2.processor).queueBuild()
+            processor=bq2.processor
+        ).queueBuild()
 
         # No job is returned for a fresh processor.
         proc = self.factory.makeProcessor()
@@ -151,24 +140,26 @@ class TestFindBuildCandidatesGeneralCases(TestFindBuildCandidatesBase):
 
         # bq1 is the best candidate for its processor.
         self.assertEqual(
-            [bq1], self.bq_set.findBuildCandidates(bq1.processor, True, 3))
+            [bq1], self.bq_set.findBuildCandidates(bq1.processor, True, 3)
+        )
 
         # bq2's score doesn't matter when finding candidates for bq1's
         # processor.
         bq2.manualScore(3000)
         self.assertEqual([], self.bq_set.findBuildCandidates(proc, True, 3))
         self.assertEqual(
-            [bq1], self.bq_set.findBuildCandidates(bq1.processor, True, 3))
+            [bq1], self.bq_set.findBuildCandidates(bq1.processor, True, 3)
+        )
 
         # When looking at bq2's processor, the build with the higher score
         # wins.
         self.assertEqual(
-            [bq2, bq3],
-            self.bq_set.findBuildCandidates(bq2.processor, True, 3))
+            [bq2, bq3], self.bq_set.findBuildCandidates(bq2.processor, True, 3)
+        )
         bq3.manualScore(4000)
         self.assertEqual(
-            [bq3, bq2],
-            self.bq_set.findBuildCandidates(bq2.processor, True, 3))
+            [bq3, bq2], self.bq_set.findBuildCandidates(bq2.processor, True, 3)
+        )
 
     def test_findBuildCandidates_honours_limit(self):
         # BuildQueueSet.findBuildCandidates returns no more than the number
@@ -176,15 +167,20 @@ class TestFindBuildCandidatesGeneralCases(TestFindBuildCandidatesBase):
         processor = self.factory.makeProcessor()
         bqs = [
             self.factory.makeBinaryPackageBuild(
-                processor=processor).queueBuild()
-            for _ in range(10)]
+                processor=processor
+            ).queueBuild()
+            for _ in range(10)
+        ]
 
         self.assertEqual(
-            bqs[:5], self.bq_set.findBuildCandidates(processor, True, 5))
+            bqs[:5], self.bq_set.findBuildCandidates(processor, True, 5)
+        )
         self.assertEqual(
-            bqs, self.bq_set.findBuildCandidates(processor, True, 10))
+            bqs, self.bq_set.findBuildCandidates(processor, True, 10)
+        )
         self.assertEqual(
-            bqs, self.bq_set.findBuildCandidates(processor, True, 11))
+            bqs, self.bq_set.findBuildCandidates(processor, True, 11)
+        )
 
     def test_findBuildCandidates_honours_minimum_score(self):
         # Sometimes there's an emergency that requires us to lock down the
@@ -199,7 +195,8 @@ class TestFindBuildCandidatesGeneralCases(TestFindBuildCandidatesBase):
             bqs.append([])
             for score in (100000, 99999):
                 bq = self.factory.makeBinaryPackageBuild(
-                    processor=processors[-1]).queueBuild()
+                    processor=processors[-1]
+                ).queueBuild()
                 bq.manualScore(score)
                 bqs[-1].append(bq)
         processors.append(self.factory.makeProcessor())
@@ -207,53 +204,62 @@ class TestFindBuildCandidatesGeneralCases(TestFindBuildCandidatesBase):
         # By default, each processor has the two builds we just created for
         # it as candidates, with the highest score first.
         self.assertEqual(
-            bqs[0], self.bq_set.findBuildCandidates(processors[0], True, 3))
+            bqs[0], self.bq_set.findBuildCandidates(processors[0], True, 3)
+        )
         self.assertEqual(
-            bqs[1], self.bq_set.findBuildCandidates(processors[1], True, 3))
+            bqs[1], self.bq_set.findBuildCandidates(processors[1], True, 3)
+        )
 
         # If we set a minimum score, then only builds above that threshold
         # are candidates.
-        with FeatureFixture({'buildmaster.minimum_score': '100000'}):
+        with FeatureFixture({"buildmaster.minimum_score": "100000"}):
             self.assertEqual(
                 [bqs[0][0]],
-                self.bq_set.findBuildCandidates(processors[0], True, 3))
+                self.bq_set.findBuildCandidates(processors[0], True, 3),
+            )
             self.assertEqual(
                 [bqs[1][0]],
-                self.bq_set.findBuildCandidates(processors[1], True, 3))
+                self.bq_set.findBuildCandidates(processors[1], True, 3),
+            )
 
         # We can similarly set a minimum score for individual processors.
         cases = [
-            ({0: '99999'}, [bqs[0], bqs[1], []]),
-            ({1: '99999'}, [bqs[0], bqs[1], []]),
-            ({2: '99999'}, [bqs[0], bqs[1], []]),
-            ({0: '100000'}, [[bqs[0][0]], bqs[1], []]),
-            ({1: '100000'}, [bqs[0], [bqs[1][0]], []]),
-            ({2: '100000'}, [bqs[0], bqs[1], []]),
-            ]
+            ({0: "99999"}, [bqs[0], bqs[1], []]),
+            ({1: "99999"}, [bqs[0], bqs[1], []]),
+            ({2: "99999"}, [bqs[0], bqs[1], []]),
+            ({0: "100000"}, [[bqs[0][0]], bqs[1], []]),
+            ({1: "100000"}, [bqs[0], [bqs[1][0]], []]),
+            ({2: "100000"}, [bqs[0], bqs[1], []]),
+        ]
         for feature_spec, expected_bqs in cases:
             features = {
-                'buildmaster.minimum_score.%s' % processors[i].name: score
-                for i, score in feature_spec.items()}
+                "buildmaster.minimum_score.%s" % processors[i].name: score
+                for i, score in feature_spec.items()
+            }
             with FeatureFixture(features):
                 for i, processor in enumerate(processors):
                     self.assertEqual(
                         expected_bqs[i],
-                        self.bq_set.findBuildCandidates(processor, True, 3))
+                        self.bq_set.findBuildCandidates(processor, True, 3),
+                    )
 
         # If we set an invalid minimum score, buildd-manager doesn't
         # explode.
         with FakeLogger() as logger:
-            with FeatureFixture({'buildmaster.minimum_score': 'nonsense'}):
+            with FeatureFixture({"buildmaster.minimum_score": "nonsense"}):
                 self.assertEqual(
                     bqs[0],
-                    self.bq_set.findBuildCandidates(processors[0], True, 3))
+                    self.bq_set.findBuildCandidates(processors[0], True, 3),
+                )
                 self.assertEqual(
                     bqs[1],
-                    self.bq_set.findBuildCandidates(processors[1], True, 3))
+                    self.bq_set.findBuildCandidates(processors[1], True, 3),
+                )
             self.assertEqual(
                 "invalid buildmaster.minimum_score: nonsense\n"
                 "invalid buildmaster.minimum_score: nonsense\n",
-                logger.output)
+                logger.output,
+            )
 
 
 class TestFindBuildCandidatesPPABase(TestFindBuildCandidatesBase):
@@ -261,8 +267,9 @@ class TestFindBuildCandidatesPPABase(TestFindBuildCandidatesBase):
     ppa_joe_private = False
     ppa_jim_private = False
 
-    def _setBuildsBuildingForArch(self, builds_list, num_builds,
-                                  archtag="i386"):
+    def _setBuildsBuildingForArch(
+        self, builds_list, num_builds, archtag="i386"
+    ):
         """Helper function.
 
         Set the first `num_builds` in `builds_list` with `archtag` as
@@ -272,7 +279,8 @@ class TestFindBuildCandidatesPPABase(TestFindBuildCandidatesBase):
         for build in builds_list[:num_builds]:
             if build.distro_arch_series.architecturetag == archtag:
                 build.updateStatus(
-                    BuildStatus.BUILDING, builder=self.builders[count])
+                    BuildStatus.BUILDING, builder=self.builders[count]
+                )
             count += 1
 
     def setUp(self):
@@ -281,67 +289,84 @@ class TestFindBuildCandidatesPPABase(TestFindBuildCandidatesBase):
 
         # Create two PPAs and add some builds to each.
         self.ppa_joe = self.factory.makeArchive(
-            name="joesppa", private=self.ppa_joe_private)
+            name="joesppa", private=self.ppa_joe_private
+        )
         self.ppa_jim = self.factory.makeArchive(
-            name="jimsppa", private=self.ppa_jim_private)
+            name="jimsppa", private=self.ppa_jim_private
+        )
 
         self.joe_builds = []
         self.joe_builds.extend(
             self.publisher.getPubSource(
-                sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_joe).createMissingBuilds())
+                sourcename="gedit",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.ppa_joe,
+            ).createMissingBuilds()
+        )
         self.joe_builds.extend(
             self.publisher.getPubSource(
                 sourcename="firefox",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_joe).createMissingBuilds())
+                archive=self.ppa_joe,
+            ).createMissingBuilds()
+        )
         self.joe_builds.extend(
             self.publisher.getPubSource(
                 sourcename="cobblers",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_joe).createMissingBuilds())
+                archive=self.ppa_joe,
+            ).createMissingBuilds()
+        )
         self.joe_builds.extend(
             self.publisher.getPubSource(
                 sourcename="thunderpants",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_joe).createMissingBuilds())
+                archive=self.ppa_joe,
+            ).createMissingBuilds()
+        )
 
         self.jim_builds = []
         self.jim_builds.extend(
             self.publisher.getPubSource(
                 sourcename="gedit",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_jim).createMissingBuilds())
+                archive=self.ppa_jim,
+            ).createMissingBuilds()
+        )
         self.jim_builds.extend(
             self.publisher.getPubSource(
                 sourcename="firefox",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.ppa_jim).createMissingBuilds())
+                archive=self.ppa_jim,
+            ).createMissingBuilds()
+        )
 
         # Set the first three builds in joe's PPA as building, which
         # leaves two builders free.
         self._setBuildsBuildingForArch(self.joe_builds, 3)
         num_active_builders = len(
-            [build for build in self.joe_builds if build.builder is not None])
+            [build for build in self.joe_builds if build.builder is not None]
+        )
         num_free_builders = len(self.builders) - num_active_builders
         self.assertEqual(num_free_builders, 2)
 
 
 class TestFindBuildCandidatesPPA(TestFindBuildCandidatesPPABase):
-
     def test_findBuildCandidate(self):
         # joe's fourth i386 build will be the next build candidate.
         [next_job] = self.bq_set.findBuildCandidates(self.proc_386, True, 1)
         build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(next_job)
-        self.assertEqual('joesppa', build.archive.name)
+        self.assertEqual("joesppa", build.archive.name)
 
     def test_findBuildCandidate_with_disabled_archive(self):
         # Disabled archives should not be considered for dispatching
         # builds.
         [disabled_job] = self.bq_set.findBuildCandidates(
-            self.proc_386, True, 1)
+            self.proc_386, True, 1
+        )
         build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(
-            disabled_job)
+            disabled_job
+        )
         build.archive.disable()
         [next_job] = self.bq_set.findBuildCandidates(self.proc_386, True, 1)
         self.assertNotEqual(disabled_job, next_job)
@@ -355,7 +380,7 @@ class TestFindBuildCandidatesPrivatePPA(TestFindBuildCandidatesPPABase):
         # joe's fourth i386 build will be the next build candidate.
         [next_job] = self.bq_set.findBuildCandidates(self.proc_386, True, 1)
         build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(next_job)
-        self.assertEqual('joesppa', build.archive.name)
+        self.assertEqual("joesppa", build.archive.name)
 
         # If the source for the build is still pending, it won't be
         # dispatched because the builder has to fetch the source files
@@ -367,37 +392,46 @@ class TestFindBuildCandidatesPrivatePPA(TestFindBuildCandidatesPPABase):
 
 
 class TestFindBuildCandidatesDistroArchive(TestFindBuildCandidatesBase):
-
     def setUp(self):
         """Publish some builds for the test archive."""
         super().setUp()
         # Create a primary archive and publish some builds for the
         # queue.
         self.non_ppa = self.factory.makeArchive(
-            name="primary", purpose=ArchivePurpose.PRIMARY)
+            name="primary", purpose=ArchivePurpose.PRIMARY
+        )
 
         self.gedit_build = self.publisher.getPubSource(
-            sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
-            archive=self.non_ppa).createMissingBuilds()[0]
+            sourcename="gedit",
+            status=PackagePublishingStatus.PUBLISHED,
+            archive=self.non_ppa,
+        ).createMissingBuilds()[0]
         self.firefox_build = self.publisher.getPubSource(
-            sourcename="firefox", status=PackagePublishingStatus.PUBLISHED,
-            archive=self.non_ppa).createMissingBuilds()[0]
+            sourcename="firefox",
+            status=PackagePublishingStatus.PUBLISHED,
+            archive=self.non_ppa,
+        ).createMissingBuilds()[0]
 
     def test_findBuildCandidate_for_non_ppa(self):
         # Normal archives are not restricted to serial builds per
         # arch.
         self.assertEqual(
-            [self.gedit_build.buildqueue_record,
-             self.firefox_build.buildqueue_record],
-            self.bq_set.findBuildCandidates(self.proc_386, True, 3))
+            [
+                self.gedit_build.buildqueue_record,
+                self.firefox_build.buildqueue_record,
+            ],
+            self.bq_set.findBuildCandidates(self.proc_386, True, 3),
+        )
 
         # Now even if we set the build building, we'll still get the
         # second non-ppa build for the same archive as the next candidate.
         self.gedit_build.updateStatus(
-            BuildStatus.BUILDING, builder=self.frog_builder)
+            BuildStatus.BUILDING, builder=self.frog_builder
+        )
         self.assertEqual(
             [self.firefox_build.buildqueue_record],
-            self.bq_set.findBuildCandidates(self.proc_386, True, 3))
+            self.bq_set.findBuildCandidates(self.proc_386, True, 3),
+        )
 
     def test_findBuildCandidate_for_recipe_build(self):
         # Recipe builds with a higher score are selected first.
@@ -406,19 +440,24 @@ class TestFindBuildCandidatesDistroArchive(TestFindBuildCandidatesBase):
         self.assertEqual(self.firefox_build.buildqueue_record.lastscore, 2505)
 
         das = self.factory.makeDistroArchSeries(
-            processor=getUtility(IProcessorSet).getByName('386'))
+            processor=getUtility(IProcessorSet).getByName("386")
+        )
         das.distroseries.nominatedarchindep = das
         recipe_build_job = self.factory.makeSourcePackageRecipeBuild(
-            distroseries=das.distroseries).queueBuild()
+            distroseries=das.distroseries
+        ).queueBuild()
         recipe_build_job.manualScore(9999)
 
         self.assertEqual(recipe_build_job.lastscore, 9999)
 
         self.assertEqual(
-            [recipe_build_job,
-             self.gedit_build.buildqueue_record,
-             self.firefox_build.buildqueue_record],
-            self.bq_set.findBuildCandidates(self.proc_386, True, 3))
+            [
+                recipe_build_job,
+                self.gedit_build.buildqueue_record,
+                self.firefox_build.buildqueue_record,
+            ],
+            self.bq_set.findBuildCandidates(self.proc_386, True, 3),
+        )
 
 
 class TestFindRecipeBuildCandidates(TestFindBuildCandidatesBase):
@@ -436,17 +475,21 @@ class TestFindRecipeBuildCandidates(TestFindBuildCandidatesBase):
         # Create a primary archive and publish some builds for the
         # queue.
         self.non_ppa = self.factory.makeArchive(
-            name="primary", purpose=ArchivePurpose.PRIMARY)
+            name="primary", purpose=ArchivePurpose.PRIMARY
+        )
 
         das = self.factory.makeDistroArchSeries(
-            processor=getUtility(IProcessorSet).getByName('386'))
+            processor=getUtility(IProcessorSet).getByName("386")
+        )
         das.distroseries.nominatedarchindep = das
         self.clearBuildQueue()
         self.bq1 = self.factory.makeSourcePackageRecipeBuild(
-            distroseries=das.distroseries).queueBuild()
+            distroseries=das.distroseries
+        ).queueBuild()
         self.bq1.manualScore(3333)
         self.bq2 = self.factory.makeSourcePackageRecipeBuild(
-            distroseries=das.distroseries).queueBuild()
+            distroseries=das.distroseries
+        ).queueBuild()
         self.bq2.manualScore(4333)
 
     def test_findBuildCandidate_with_highest_score(self):
@@ -454,4 +497,5 @@ class TestFindRecipeBuildCandidates(TestFindBuildCandidatesBase):
         # This test is run in a "recipe builds only" context.
         self.assertEqual(
             [self.bq2, self.bq1],
-            self.bq_set.findBuildCandidates(self.proc_386, True, 2))
+            self.bq_set.findBuildCandidates(self.proc_386, True, 2),
+        )
diff --git a/lib/lp/buildmaster/tests/test_buildfarmjob.py b/lib/lp/buildmaster/tests/test_buildfarmjob.py
index 911582f..68f9335 100644
--- a/lib/lp/buildmaster/tests/test_buildfarmjob.py
+++ b/lib/lp/buildmaster/tests/test_buildfarmjob.py
@@ -3,10 +3,7 @@
 
 """Tests for `IBuildFarmJob`."""
 
-from datetime import (
-    datetime,
-    timedelta,
-    )
+from datetime import datetime, timedelta
 
 import pytz
 from storm.store import Store
@@ -16,27 +13,17 @@ from zope.security.interfaces import Unauthorized
 from zope.security.proxy import removeSecurityProxy
 
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
-from lp.buildmaster.enums import (
-    BuildFarmJobType,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildFarmJobType, BuildStatus
 from lp.buildmaster.interfaces.buildfarmjob import (
     CannotBeRetried,
     IBuildFarmJob,
     IBuildFarmJobSet,
     IBuildFarmJobSource,
-    )
+)
 from lp.buildmaster.model.buildfarmjob import BuildFarmJob
 from lp.services.database.sqlbase import flush_database_updates
-from lp.testing import (
-    admin_logged_in,
-    login,
-    TestCaseWithFactory,
-    )
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    LaunchpadFunctionalLayer,
-    )
+from lp.testing import TestCaseWithFactory, admin_logged_in, login
+from lp.testing.layers import DatabaseFunctionalLayer, LaunchpadFunctionalLayer
 
 
 class TestBuildFarmJobBase:
@@ -48,10 +35,14 @@ class TestBuildFarmJobBase:
         super().setUp()
         self.build_farm_job = self.makeBuildFarmJob()
 
-    def makeBuildFarmJob(self, builder=None,
-                         job_type=BuildFarmJobType.PACKAGEBUILD,
-                         status=BuildStatus.NEEDSBUILD,
-                         date_finished=None, archive=None):
+    def makeBuildFarmJob(
+        self,
+        builder=None,
+        job_type=BuildFarmJobType.PACKAGEBUILD,
+        status=BuildStatus.NEEDSBUILD,
+        date_finished=None,
+        archive=None,
+    ):
         """A factory method for creating PackageBuilds.
 
         This is not included in the launchpad test factory because
@@ -60,7 +51,8 @@ class TestBuildFarmJobBase:
         or eventually a SPRecipeBuild).
         """
         build_farm_job = getUtility(IBuildFarmJobSource).new(
-            job_type=job_type, status=status, archive=archive)
+            job_type=job_type, status=status, archive=archive
+        )
         removeSecurityProxy(build_farm_job).builder = builder
         removeSecurityProxy(build_farm_job).date_started = date_finished
         removeSecurityProxy(build_farm_job).date_finished = date_finished
@@ -75,8 +67,8 @@ class TestBuildFarmJob(TestBuildFarmJobBase, TestCaseWithFactory):
         flush_database_updates()
         store = Store.of(self.build_farm_job)
         retrieved_job = store.find(
-            BuildFarmJob,
-            BuildFarmJob.id == self.build_farm_job.id).one()
+            BuildFarmJob, BuildFarmJob.id == self.build_farm_job.id
+        ).one()
         self.assertEqual(self.build_farm_job, retrieved_job)
 
     def test_default_values(self):
@@ -84,13 +76,11 @@ class TestBuildFarmJob(TestBuildFarmJobBase, TestCaseWithFactory):
         # are set for various attributes.
         flush_database_updates()
         bfj = removeSecurityProxy(self.build_farm_job)
-        self.assertEqual(
-            BuildStatus.NEEDSBUILD, bfj.status)
+        self.assertEqual(BuildStatus.NEEDSBUILD, bfj.status)
         # The date_created is set automatically.
         self.assertTrue(bfj.date_created is not None)
         # The job type is required to create a build farm job.
-        self.assertEqual(
-            BuildFarmJobType.PACKAGEBUILD, bfj.job_type)
+        self.assertEqual(BuildFarmJobType.PACKAGEBUILD, bfj.job_type)
         # Other attributes are unset by default.
         self.assertEqual(None, bfj.date_finished)
         self.assertEqual(None, bfj.builder)
@@ -101,10 +91,11 @@ class TestBuildFarmJob(TestBuildFarmJobBase, TestCaseWithFactory):
         # when transactions are committed.
         ten_years_ago = datetime.now(pytz.UTC) - timedelta(365 * 10)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
-            job_type=BuildFarmJobType.PACKAGEBUILD,
-            date_created=ten_years_ago)
+            job_type=BuildFarmJobType.PACKAGEBUILD, date_created=ten_years_ago
+        )
         self.assertEqual(
-            ten_years_ago, removeSecurityProxy(build_farm_job).date_created)
+            ten_years_ago, removeSecurityProxy(build_farm_job).date_created
+        )
 
 
 class TestBuildFarmJobMixin(TestCaseWithFactory):
@@ -120,7 +111,7 @@ class TestBuildFarmJobMixin(TestCaseWithFactory):
 
     def test_providesInterface(self):
         # BuildFarmJobMixin derivatives provide IBuildFarmJob
-        login('admin@canonical.com')
+        login("admin@canonical.com")
         self.assertProvides(self.build_farm_job, IBuildFarmJob)
 
     def test_duration_none(self):
@@ -136,21 +127,21 @@ class TestBuildFarmJobMixin(TestCaseWithFactory):
         now = datetime.now(pytz.UTC)
         duration = timedelta(1)
         self.build_farm_job.updateStatus(
-            BuildStatus.BUILDING, date_started=now)
+            BuildStatus.BUILDING, date_started=now
+        )
         self.build_farm_job.updateStatus(
-            BuildStatus.FULLYBUILT, date_finished=now + duration)
+            BuildStatus.FULLYBUILT, date_finished=now + duration
+        )
         self.assertEqual(duration, self.build_farm_job.duration)
 
     def test_view_build_farm_job(self):
         # Anonymous access can read public builds, but not edit.
-        self.assertEqual(
-            BuildStatus.NEEDSBUILD, self.build_farm_job.status)
-        self.assertRaises(
-            Unauthorized, getattr, self.build_farm_job, 'retry')
+        self.assertEqual(BuildStatus.NEEDSBUILD, self.build_farm_job.status)
+        self.assertRaises(Unauthorized, getattr, self.build_farm_job, "retry")
 
     def test_edit_build_farm_job(self):
         # Users with edit access can update attributes.
-        login('admin@canonical.com')
+        login("admin@canonical.com")
         self.assertRaises(CannotBeRetried, self.build_farm_job.retry)
 
     def test_updateStatus_sets_status(self):
@@ -164,7 +155,8 @@ class TestBuildFarmJobMixin(TestCaseWithFactory):
         builder = self.factory.makeBuilder()
         self.assertIs(None, self.build_farm_job.builder)
         self.build_farm_job.updateStatus(
-            BuildStatus.FULLYBUILT, builder=builder)
+            BuildStatus.FULLYBUILT, builder=builder
+        )
         self.assertEqual(builder, self.build_farm_job.builder)
 
     def test_updateStatus_BUILDING_sets_date_started(self):
@@ -204,16 +196,22 @@ class TestBuildFarmJobMixin(TestCaseWithFactory):
         # UPLOADING counts as the end of the job. date_finished doesn't
         # include the upload time.
         for status in (
-                BuildStatus.FULLYBUILT, BuildStatus.FAILEDTOBUILD,
-                BuildStatus.CHROOTWAIT, BuildStatus.MANUALDEPWAIT,
-                BuildStatus.UPLOADING, BuildStatus.FAILEDTOUPLOAD,
-                BuildStatus.CANCELLED, BuildStatus.SUPERSEDED):
+            BuildStatus.FULLYBUILT,
+            BuildStatus.FAILEDTOBUILD,
+            BuildStatus.CHROOTWAIT,
+            BuildStatus.MANUALDEPWAIT,
+            BuildStatus.UPLOADING,
+            BuildStatus.FAILEDTOUPLOAD,
+            BuildStatus.CANCELLED,
+            BuildStatus.SUPERSEDED,
+        ):
             build = self.factory.makeBinaryPackageBuild()
             build.updateStatus(status)
             self.assertIs(None, build.date_started)
             self.assertIs(None, build.date_finished)
             build.updateStatus(
-                BuildStatus.NEEDSBUILD, force_invalid_transition=True)
+                BuildStatus.NEEDSBUILD, force_invalid_transition=True
+            )
             build.updateStatus(BuildStatus.BUILDING)
             self.assertIsNot(None, build.date_started)
             self.assertIs(None, build.date_finished)
@@ -247,14 +245,17 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         # status.
         successful_builds = [
             self.makeBuildFarmJob(
-                builder=self.builder, status=BuildStatus.FULLYBUILT),
+                builder=self.builder, status=BuildStatus.FULLYBUILT
+            ),
             self.makeBuildFarmJob(
-                builder=self.builder, status=BuildStatus.FULLYBUILT),
-            ]
+                builder=self.builder, status=BuildStatus.FULLYBUILT
+            ),
+        ]
         self.makeBuildFarmJob(builder=self.builder)
 
         query_by_status = self.build_farm_job_set.getBuildsForBuilder(
-                self.builder, status=BuildStatus.FULLYBUILT)
+            self.builder, status=BuildStatus.FULLYBUILT
+        )
 
         self.assertContentEqual(successful_builds, query_by_status)
 
@@ -264,7 +265,8 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
             owning_team = self.factory.makeTeam()
         archive = self.factory.makeArchive(owner=owning_team, private=True)
         private_build = self.factory.makeBinaryPackageBuild(
-            archive=archive, builder=self.builder)
+            archive=archive, builder=self.builder
+        )
         private_build = removeSecurityProxy(private_build).build_farm_job
         other_build = self.makeBuildFarmJob(builder=self.builder)
         return (private_build, other_build)
@@ -283,7 +285,8 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         private_build, other_build = self._makePrivateAndNonPrivateBuilds()
 
         result = self.build_farm_job_set.getBuildsForBuilder(
-            self.builder, user=self.factory.makePerson())
+            self.builder, user=self.factory.makePerson()
+        )
 
         self.assertContentEqual([other_build], result)
 
@@ -293,7 +296,8 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         private_build, other_build = self._makePrivateAndNonPrivateBuilds()
 
         result = self.build_farm_job_set.getBuildsForBuilder(
-            self.builder, user=admin_team.teamowner)
+            self.builder, user=admin_team.teamowner
+        )
 
         self.assertContentEqual([private_build, other_build], result)
 
@@ -301,11 +305,12 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         # Similarly, if the user is in the owning team they can see it.
         owning_team = self.factory.makeTeam()
         private_build, other_build = self._makePrivateAndNonPrivateBuilds(
-            owning_team=owning_team)
+            owning_team=owning_team
+        )
 
         result = self.build_farm_job_set.getBuildsForBuilder(
-            self.builder,
-            user=owning_team.teamowner)
+            self.builder, user=owning_team.teamowner
+        )
 
         self.assertContentEqual([private_build, other_build], result)
 
@@ -313,13 +318,16 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         # Results are returned with the oldest build last.
         build_1 = self.makeBuildFarmJob(
             builder=self.builder,
-            date_finished=datetime(2008, 10, 10, tzinfo=pytz.UTC))
+            date_finished=datetime(2008, 10, 10, tzinfo=pytz.UTC),
+        )
         build_2 = self.makeBuildFarmJob(
             builder=self.builder,
-            date_finished=datetime(2008, 11, 10, tzinfo=pytz.UTC))
+            date_finished=datetime(2008, 11, 10, tzinfo=pytz.UTC),
+        )
         build_3 = self.makeBuildFarmJob(
             builder=self.builder,
-            date_finished=datetime(2008, 9, 10, tzinfo=pytz.UTC))
+            date_finished=datetime(2008, 9, 10, tzinfo=pytz.UTC),
+        )
 
         result = self.build_farm_job_set.getBuildsForBuilder(self.builder)
         self.assertEqual([build_2, build_1, build_3], list(result))
@@ -329,8 +337,9 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         builds = [
             self.makeBuildFarmJob(archive=archive),
             self.makeBuildFarmJob(
-                archive=archive, status=BuildStatus.BUILDING),
-            ]
+                archive=archive, status=BuildStatus.BUILDING
+            ),
+        ]
         return (archive, builds)
 
     def test_getBuildsForArchive_all(self):
@@ -338,7 +347,8 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         # archive.
         archive, builds = self.makeBuildsForArchive()
         self.assertContentEqual(
-            builds, self.build_farm_job_set.getBuildsForArchive(archive))
+            builds, self.build_farm_job_set.getBuildsForArchive(archive)
+        )
 
     def test_getBuildsForArchive_by_status(self):
         # If the status arg is used, the results will be filtered by
@@ -347,4 +357,6 @@ class TestBuildFarmJobSet(TestBuildFarmJobBase, TestCaseWithFactory):
         self.assertContentEqual(
             builds[1:],
             self.build_farm_job_set.getBuildsForArchive(
-                archive, status=BuildStatus.BUILDING))
+                archive, status=BuildStatus.BUILDING
+            ),
+        )
diff --git a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
index 3bdbf55..924b131 100644
--- a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
@@ -3,12 +3,12 @@
 
 """Unit tests for BuildFarmJobBehaviourBase."""
 
-from collections import OrderedDict
-from datetime import datetime
 import hashlib
 import os
 import shutil
 import tempfile
+from collections import OrderedDict
+from datetime import datetime
 
 import six
 from testtools import ExpectedException
@@ -18,37 +18,31 @@ from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.archiveuploader.uploadprocessor import parse_build_upload_leaf_name
-from lp.buildmaster.enums import (
-    BuildBaseImageType,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildBaseImageType, BuildStatus
 from lp.buildmaster.interactor import (
     BuilderInteractor,
     shut_down_default_process_pool,
-    )
+)
 from lp.buildmaster.interfaces.builder import BuildDaemonError
 from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
     IBuildFarmJobBehaviour,
-    )
+)
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.buildmaster.interfaces.processor import IProcessorSet
 from lp.buildmaster.model.buildfarmjobbehaviour import (
     BuildFarmJobBehaviourBase,
-    )
+)
 from lp.buildmaster.tests.mock_workers import (
     MockBuilder,
     OkWorker,
     WaitingWorker,
-    )
+)
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.config import config
 from lp.services.log.logger import BufferLogger
 from lp.services.statsd.tests import StatsMixin
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCase, TestCaseWithFactory
 from lp.testing.dbuser import dbuser
 from lp.testing.factory import LaunchpadObjectFactory
 from lp.testing.fakemethod import FakeMethod
@@ -56,49 +50,46 @@ from lp.testing.layers import (
     LaunchpadZopelessLayer,
     ZopelessDatabaseLayer,
     ZopelessLayer,
-    )
+)
 from lp.testing.mail_helpers import pop_notifications
 
 
 class FakeBuildFarmJob:
     """Dummy BuildFarmJob."""
 
-    build_cookie = 'PACKAGEBUILD-1'
-    title = 'some job for something'
+    build_cookie = "PACKAGEBUILD-1"
+    title = "some job for something"
 
 
 class FakeLibraryFileContent:
-
     def __init__(self, filename):
         self.sha1 = hashlib.sha1(six.ensure_binary(filename)).hexdigest()
 
 
 class FakeLibraryFileAlias:
-
     def __init__(self, filename):
         self.filename = filename
         self.content = FakeLibraryFileContent(filename)
-        self.http_url = 'http://librarian.test/%s' % filename
+        self.http_url = "http://librarian.test/%s" % filename
 
 
 class FakePocketChroot:
-
     def __init__(self, chroot, image_type):
         self.chroot = chroot
         self.image_type = image_type
 
 
 class FakeDistroArchSeries:
-
     def __init__(self):
         self.images = {
-            BuildBaseImageType.CHROOT: 'chroot-fooix-bar-y86.tar.gz',
-            }
+            BuildBaseImageType.CHROOT: "chroot-fooix-bar-y86.tar.gz",
+        }
 
     def getPocketChroot(self, pocket, exact_pocket=False, image_type=None):
         if image_type in self.images:
             return FakePocketChroot(
-                FakeLibraryFileAlias(self.images[image_type]), image_type)
+                FakeLibraryFileAlias(self.images[image_type]), image_type
+            )
         else:
             return None
 
@@ -118,17 +109,21 @@ class TestBuildFarmJobBehaviourBase(TestCaseWithFactory):
 
     def _makeBuild(self):
         """Create a `Build` object."""
-        x86 = getUtility(IProcessorSet).getByName('386')
+        x86 = getUtility(IProcessorSet).getByName("386")
         distroarchseries = self.factory.makeDistroArchSeries(
-            architecturetag='x86', processor=x86)
+            architecturetag="x86", processor=x86
+        )
         distroseries = distroarchseries.distroseries
         archive = self.factory.makeArchive(
-            distribution=distroseries.distribution)
+            distribution=distroseries.distribution
+        )
         pocket = PackagePublishingPocket.RELEASE
         spr = self.factory.makeSourcePackageRelease(
-            distroseries=distroseries, archive=archive)
+            distroseries=distroseries, archive=archive
+        )
         return getUtility(IBinaryPackageBuildSet).new(
-            spr, archive, distroarchseries, pocket)
+            spr, archive, distroarchseries, pocket
+        )
 
     def test_getUploadDirLeaf(self):
         # getUploadDirLeaf returns the current time, followed by the build
@@ -136,10 +131,12 @@ class TestBuildFarmJobBehaviourBase(TestCaseWithFactory):
         now = datetime.now()
         build_cookie = self.factory.getUniqueString()
         upload_leaf = self._makeBehaviour().getUploadDirLeaf(
-            build_cookie, now=now)
+            build_cookie, now=now
+        )
         self.assertEqual(
-            '%s-%s' % (now.strftime("%Y%m%d-%H%M%S"), build_cookie),
-            upload_leaf)
+            "%s-%s" % (now.strftime("%Y%m%d-%H%M%S"), build_cookie),
+            upload_leaf,
+        )
 
     def test_extraBuildArgs_virtualized(self):
         # If the builder is virtualized, extraBuildArgs sends
@@ -161,15 +158,18 @@ class TestBuildFarmJobBehaviourBase(TestCaseWithFactory):
         worker_status = {"build_status": "BuildStatus.BUILDING"}
         self.assertEqual(
             "BUILDING",
-            BuildFarmJobBehaviourBase.extractBuildStatus(worker_status))
+            BuildFarmJobBehaviourBase.extractBuildStatus(worker_status),
+        )
 
     def test_extractBuildStatus_malformed(self):
         # extractBuildStatus errors out when the status string is not
         # of the form it expects.
         worker_status = {"build_status": "BUILDING"}
         self.assertRaises(
-            AssertionError, BuildFarmJobBehaviourBase.extractBuildStatus,
-            worker_status)
+            AssertionError,
+            BuildFarmJobBehaviourBase.extractBuildStatus,
+            worker_status,
+        )
 
 
 class TestDispatchBuildToWorker(StatsMixin, TestCase):
@@ -178,33 +178,58 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
     run_tests_with = AsynchronousDeferredRunTest
 
     def makeBehaviour(self, das):
-        files = OrderedDict([
-            ('foo.dsc', {'url': 'http://host/foo.dsc', 'sha1': '0'}),
-            ('bar.tar', {
-                'url': 'http://host/bar.tar', 'sha1': '0',
-                'username': 'admin', 'password': 'sekrit'}),
-            ])
+        files = OrderedDict(
+            [
+                ("foo.dsc", {"url": "http://host/foo.dsc";, "sha1": "0"}),
+                (
+                    "bar.tar",
+                    {
+                        "url": "http://host/bar.tar";,
+                        "sha1": "0",
+                        "username": "admin",
+                        "password": "sekrit",
+                    },
+                ),
+            ]
+        )
 
         behaviour = BuildFarmJobBehaviourBase(FakeBuildFarmJob())
         behaviour.composeBuildRequest = FakeMethod(
-            ('foobuild', das, PackagePublishingPocket.RELEASE, files,
-             {'some': 'arg', 'archives': ['http://admin:sekrit@blah/']}))
+            (
+                "foobuild",
+                das,
+                PackagePublishingPocket.RELEASE,
+                files,
+                {"some": "arg", "archives": ["http://admin:sekrit@blah/"]},
+            )
+        )
         return behaviour
 
     def assertDispatched(self, worker, logger, chroot_filename, image_type):
         # The worker's been asked to cache the chroot and both source
         # files, and then to start the build.
         expected_calls = [
-            ('ensurepresent',
-             'http://librarian.test/%s' % chroot_filename, '', ''),
-            ('ensurepresent', 'http://host/foo.dsc', '', ''),
-            ('ensurepresent', 'http://host/bar.tar', 'admin', 'sekrit'),
-            ('build', 'PACKAGEBUILD-1', 'foobuild',
-             hashlib.sha1(six.ensure_binary(chroot_filename)).hexdigest(),
-             ['foo.dsc', 'bar.tar'],
-             {'archives': ['http://admin:sekrit@blah/'],
-              'image_type': image_type,
-              'some': 'arg'})]
+            (
+                "ensurepresent",
+                "http://librarian.test/%s"; % chroot_filename,
+                "",
+                "",
+            ),
+            ("ensurepresent", "http://host/foo.dsc";, "", ""),
+            ("ensurepresent", "http://host/bar.tar";, "admin", "sekrit"),
+            (
+                "build",
+                "PACKAGEBUILD-1",
+                "foobuild",
+                hashlib.sha1(six.ensure_binary(chroot_filename)).hexdigest(),
+                ["foo.dsc", "bar.tar"],
+                {
+                    "archives": ["http://admin:sekrit@blah/";],
+                    "image_type": image_type,
+                    "some": "arg",
+                },
+            ),
+        ]
         self.assertEqual(expected_calls, worker.call_log)
 
         # And details have been logged, including the build arguments
@@ -214,13 +239,15 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
             "INFO Preparing job PACKAGEBUILD-1 (some job for something) on "
             "http://fake:0000.\n";
             "INFO Dispatching job PACKAGEBUILD-1 (some job for something) to "
-            "http://fake:0000:\n{";)
-        self.assertIn('http://<redacted>@blah/', logger.getLogBuffer())
-        self.assertNotIn('sekrit', logger.getLogBuffer())
+            "http://fake:0000:\n{";,
+        )
+        self.assertIn("http://<redacted>@blah/", logger.getLogBuffer())
+        self.assertNotIn("sekrit", logger.getLogBuffer())
         self.assertEndsWith(
             logger.getLogBuffer(),
             "INFO Job PACKAGEBUILD-1 (some job for something) started on "
-            "http://fake:0000: BuildStatus.BUILDING PACKAGEBUILD-1\n")
+            "http://fake:0000: BuildStatus.BUILDING PACKAGEBUILD-1\n",
+        )
 
     @defer.inlineCallbacks
     def test_dispatchBuildToWorker(self):
@@ -232,14 +259,15 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
         yield behaviour.dispatchBuildToWorker(logger)
 
         self.assertDispatched(
-            worker, logger, 'chroot-fooix-bar-y86.tar.gz', 'chroot')
+            worker, logger, "chroot-fooix-bar-y86.tar.gz", "chroot"
+        )
 
     @defer.inlineCallbacks
     def test_dispatchBuildToWorker_with_other_image_available(self):
         # If a base image is available but isn't in the behaviour's image
         # types, it isn't used.
         das = FakeDistroArchSeries()
-        das.images[BuildBaseImageType.LXD] = 'lxd-fooix-bar-y86.tar.gz'
+        das.images[BuildBaseImageType.LXD] = "lxd-fooix-bar-y86.tar.gz"
         behaviour = self.makeBehaviour(das)
         builder = MockBuilder()
         worker = OkWorker()
@@ -248,15 +276,18 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
         yield behaviour.dispatchBuildToWorker(logger)
 
         self.assertDispatched(
-            worker, logger, 'chroot-fooix-bar-y86.tar.gz', 'chroot')
+            worker, logger, "chroot-fooix-bar-y86.tar.gz", "chroot"
+        )
 
     @defer.inlineCallbacks
     def test_dispatchBuildToWorker_lxd(self):
         das = FakeDistroArchSeries()
-        das.images[BuildBaseImageType.LXD] = 'lxd-fooix-bar-y86.tar.gz'
+        das.images[BuildBaseImageType.LXD] = "lxd-fooix-bar-y86.tar.gz"
         behaviour = self.makeBehaviour(das)
         behaviour.image_types = [
-            BuildBaseImageType.LXD, BuildBaseImageType.CHROOT]
+            BuildBaseImageType.LXD,
+            BuildBaseImageType.CHROOT,
+        ]
         builder = MockBuilder()
         worker = OkWorker()
         logger = BufferLogger()
@@ -264,13 +295,16 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
         yield behaviour.dispatchBuildToWorker(logger)
 
         self.assertDispatched(
-            worker, logger, 'lxd-fooix-bar-y86.tar.gz', 'lxd')
+            worker, logger, "lxd-fooix-bar-y86.tar.gz", "lxd"
+        )
 
     @defer.inlineCallbacks
     def test_dispatchBuildToWorker_fallback(self):
         behaviour = self.makeBehaviour(FakeDistroArchSeries())
         behaviour.image_types = [
-            BuildBaseImageType.LXD, BuildBaseImageType.CHROOT]
+            BuildBaseImageType.LXD,
+            BuildBaseImageType.CHROOT,
+        ]
         builder = MockBuilder()
         worker = OkWorker()
         logger = BufferLogger()
@@ -278,7 +312,8 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
         yield behaviour.dispatchBuildToWorker(logger)
 
         self.assertDispatched(
-            worker, logger, 'chroot-fooix-bar-y86.tar.gz', 'chroot')
+            worker, logger, "chroot-fooix-bar-y86.tar.gz", "chroot"
+        )
 
     @defer.inlineCallbacks
     def test_dispatchBuildToWorker_stats(self):
@@ -292,8 +327,11 @@ class TestDispatchBuildToWorker(StatsMixin, TestCase):
         self.assertEqual(1, self.stats_client.incr.call_count)
         self.assertEqual(
             self.stats_client.incr.call_args_list[0][0],
-            ('build.count,builder_name=mock-builder,env=test,'
-             'job_type=UNKNOWN',))
+            (
+                "build.count,builder_name=mock-builder,env=test,"
+                "job_type=UNKNOWN",
+            ),
+        )
 
 
 class TestGetUploadMethodsMixin:
@@ -309,7 +347,8 @@ class TestGetUploadMethodsMixin:
         super().setUp()
         self.build = self.makeBuild()
         self.behaviour = IBuildFarmJobBehaviour(
-            self.build.buildqueue_record.specific_build)
+            self.build.buildqueue_record.specific_build
+        )
 
     def test_getUploadDirLeafCookie_parseable(self):
         # getUploadDirLeaf should return a directory name
@@ -317,7 +356,8 @@ class TestGetUploadMethodsMixin:
         upload_leaf = self.behaviour.getUploadDirLeaf(self.build.build_cookie)
         (job_type, job_id) = parse_build_upload_leaf_name(upload_leaf)
         self.assertEqual(
-            (self.build.job_type.name, self.build.id), (job_type, job_id))
+            (self.build.job_type.name, self.build.id), (job_type, job_id)
+        )
 
 
 class TestVerifySuccessfulBuildMixin:
@@ -338,7 +378,8 @@ class TestVerifySuccessfulBuildMixin:
         # says is modifiable.
         build = self.makeBuild()
         behaviour = IBuildFarmJobBehaviour(
-            build.buildqueue_record.specific_build)
+            build.buildqueue_record.specific_build
+        )
         behaviour.verifySuccessfulBuild()
 
     def test_verifySuccessfulBuild_denies_unmodifiable_suite(self):
@@ -346,7 +387,8 @@ class TestVerifySuccessfulBuildMixin:
         # archive says is unmodifiable.
         build = self.makeUnmodifiableBuild()
         behaviour = IBuildFarmJobBehaviour(
-            build.buildqueue_record.specific_build)
+            build.buildqueue_record.specific_build
+        )
         self.assertRaises(AssertionError, behaviour.verifySuccessfulBuild)
 
 
@@ -368,32 +410,38 @@ class TestHandleStatusMixin:
         # handleStatus_OK can get a reference to the worker.
         self.builder = self.factory.makeBuilder()
         self.build.buildqueue_record.markAsBuilding(self.builder)
-        self.worker = WaitingWorker('BuildStatus.OK')
-        self.worker.valid_files['test_file_hash'] = ''
+        self.worker = WaitingWorker("BuildStatus.OK")
+        self.worker.valid_files["test_file_hash"] = ""
         self.interactor = BuilderInteractor()
         self.behaviour = self.interactor.getBuildBehaviour(
-            self.build.buildqueue_record, self.builder, self.worker)
+            self.build.buildqueue_record, self.builder, self.worker
+        )
         self.addCleanup(shut_down_default_process_pool)
 
         # We overwrite the buildmaster root to use a temp directory.
         tempdir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tempdir)
         self.upload_root = tempdir
-        tmp_builddmaster_root = """
+        tmp_builddmaster_root = (
+            """
         [builddmaster]
         root: %s
-        """ % self.upload_root
-        config.push('tmp_builddmaster_root', tmp_builddmaster_root)
+        """
+            % self.upload_root
+        )
+        config.push("tmp_builddmaster_root", tmp_builddmaster_root)
 
         # We stub out our builds getUploaderCommand() method so
         # we can check whether it was called as well as
         # verifySuccessfulUpload().
         removeSecurityProxy(self.build).verifySuccessfulUpload = FakeMethod(
-            result=True)
+            result=True
+        )
 
     def assertResultCount(self, count, result):
         self.assertEqual(
-            1, len(os.listdir(os.path.join(self.upload_root, result))))
+            1, len(os.listdir(os.path.join(self.upload_root, result)))
+        )
 
     @defer.inlineCallbacks
     def test_handleStatus_BUILDING(self):
@@ -406,17 +454,23 @@ class TestHandleStatusMixin:
         removeSecurityProxy(self.build).updateStatus = FakeMethod()
         with dbuser(config.builddmaster.dbuser):
             yield self.behaviour.handleStatus(
-                self.build.buildqueue_record, worker_status)
+                self.build.buildqueue_record, worker_status
+            )
         self.assertEqual(None, self.build.log)
         self.assertEqual(0, len(os.listdir(self.upload_root)))
         self.assertEqual(
-            [((initial_status,),
-              {"builder": self.builder, "worker_status": worker_status})],
-            removeSecurityProxy(self.build).updateStatus.calls)
+            [
+                (
+                    (initial_status,),
+                    {"builder": self.builder, "worker_status": worker_status},
+                )
+            ],
+            removeSecurityProxy(self.build).updateStatus.calls,
+        )
         self.assertEqual(0, len(pop_notifications()), "Notifications received")
         self.assertEqual(
-            self.build.buildqueue_record,
-            getUtility(IBuildQueueSet).get(bq_id))
+            self.build.buildqueue_record, getUtility(IBuildQueueSet).get(bq_id)
+        )
 
     @defer.inlineCallbacks
     def test_handleStatus_WAITING_OK_normal_file(self):
@@ -426,9 +480,12 @@ class TestHandleStatusMixin:
         with dbuser(config.builddmaster.dbuser):
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {'builder_status': 'BuilderStatus.WAITING',
-                 'build_status': 'BuildStatus.OK',
-                 'filemap': {'myfile.py': 'test_file_hash'}})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.OK",
+                    "filemap": {"myfile.py": "test_file_hash"},
+                },
+            )
         self.assertEqual(BuildStatus.UPLOADING, self.build.status)
         self.assertResultCount(1, "incoming")
 
@@ -437,28 +494,34 @@ class TestHandleStatusMixin:
         # A filemap that tries to write to files outside of the upload
         # directory will not be collected.
         with ExpectedException(
-                BuildDaemonError,
-                "Build returned a file named '/tmp/myfile.py'."):
+            BuildDaemonError, "Build returned a file named '/tmp/myfile.py'."
+        ):
             with dbuser(config.builddmaster.dbuser):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {'builder_status': 'BuilderStatus.WAITING',
-                     'build_status': 'BuildStatus.OK',
-                     'filemap': {'/tmp/myfile.py': 'test_file_hash'}})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.OK",
+                        "filemap": {"/tmp/myfile.py": "test_file_hash"},
+                    },
+                )
 
     @defer.inlineCallbacks
     def test_handleStatus_WAITING_OK_relative_filepath(self):
         # A filemap that tries to write to files outside of
         # the upload directory will not be collected.
         with ExpectedException(
-                BuildDaemonError,
-                "Build returned a file named '../myfile.py'."):
+            BuildDaemonError, "Build returned a file named '../myfile.py'."
+        ):
             with dbuser(config.builddmaster.dbuser):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {'builder_status': 'BuilderStatus.WAITING',
-                     'build_status': 'BuildStatus.OK',
-                     'filemap': {'../myfile.py': 'test_file_hash'}})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.OK",
+                        "filemap": {"../myfile.py": "test_file_hash"},
+                    },
+                )
 
     @defer.inlineCallbacks
     def test_handleStatus_WAITING_OK_sets_build_log(self):
@@ -467,9 +530,12 @@ class TestHandleStatusMixin:
         with dbuser(config.builddmaster.dbuser):
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {'builder_status': 'BuilderStatus.WAITING',
-                 'build_status': 'BuildStatus.OK',
-                 'filemap': {'myfile.py': 'test_file_hash'}})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.OK",
+                    "filemap": {"myfile.py": "test_file_hash"},
+                },
+            )
         self.assertNotEqual(None, self.build.log)
 
     @defer.inlineCallbacks
@@ -477,20 +543,26 @@ class TestHandleStatusMixin:
         # An email notification is sent for a given build status if
         # notifications are allowed for that status.
         expected_notification = (
-            status in self.behaviour.ALLOWED_STATUS_NOTIFICATIONS)
+            status in self.behaviour.ALLOWED_STATUS_NOTIFICATIONS
+        )
 
         with dbuser(config.builddmaster.dbuser):
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {'builder_status': 'BuilderStatus.WAITING',
-                 'build_status': 'BuildStatus.%s' % status})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.%s" % status,
+                },
+            )
 
         if expected_notification:
             self.assertNotEqual(
-                0, len(pop_notifications()), "Notifications received")
+                0, len(pop_notifications()), "Notifications received"
+            )
         else:
             self.assertEqual(
-                0, len(pop_notifications()), "Notifications received")
+                0, len(pop_notifications()), "Notifications received"
+            )
 
     def test_handleStatus_WAITING_DEPFAIL_notifies(self):
         return self._test_handleStatus_WAITING_notifies("DEPFAIL")
@@ -507,8 +579,11 @@ class TestHandleStatusMixin:
             self.build.updateStatus(BuildStatus.CANCELLING)
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {"builder_status": "BuilderStatus.WAITING",
-                 "build_status": "BuildStatus.ABORTED"})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.ABORTED",
+                },
+            )
         self.assertEqual(0, len(pop_notifications()), "Notifications received")
         self.assertEqual(BuildStatus.CANCELLED, self.build.status)
 
@@ -516,16 +591,21 @@ class TestHandleStatusMixin:
     def test_handleStatus_WAITING_ABORTED_illegal_when_building(self):
         self.builder.vm_host = "fake_vm_host"
         self.behaviour = self.interactor.getBuildBehaviour(
-            self.build.buildqueue_record, self.builder, self.worker)
+            self.build.buildqueue_record, self.builder, self.worker
+        )
         with dbuser(config.builddmaster.dbuser):
             self.build.updateStatus(BuildStatus.BUILDING)
             with ExpectedException(
-                    BuildDaemonError,
-                    "Build returned unexpected status: %r" % 'ABORTED'):
+                BuildDaemonError,
+                "Build returned unexpected status: %r" % "ABORTED",
+            ):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {"builder_status": "BuilderStatus.WAITING",
-                     "build_status": "BuildStatus.ABORTED"})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.ABORTED",
+                    },
+                )
 
     @defer.inlineCallbacks
     def test_handleStatus_WAITING_ABORTED_cancelling_sets_build_log(self):
@@ -535,8 +615,11 @@ class TestHandleStatusMixin:
             self.build.updateStatus(BuildStatus.CANCELLING)
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {"builder_status": "BuilderStatus.WAITING",
-                 "build_status": "BuildStatus.ABORTED"})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.ABORTED",
+                },
+            )
         self.assertNotEqual(None, self.build.log)
 
     @defer.inlineCallbacks
@@ -546,40 +629,54 @@ class TestHandleStatusMixin:
         with dbuser(config.builddmaster.dbuser):
             yield self.behaviour.handleStatus(
                 self.build.buildqueue_record,
-                {'builder_status': 'BuilderStatus.WAITING',
-                 'build_status': 'BuildStatus.OK',
-                 'filemap': {'myfile.py': 'test_file_hash'}})
+                {
+                    "builder_status": "BuilderStatus.WAITING",
+                    "build_status": "BuildStatus.OK",
+                    "filemap": {"myfile.py": "test_file_hash"},
+                },
+            )
         self.assertNotEqual(None, self.build.date_finished)
 
     @defer.inlineCallbacks
     def test_givenback_collection(self):
         with ExpectedException(
-                BuildDaemonError,
-                "Build returned unexpected status: %r" % 'GIVENBACK'):
+            BuildDaemonError,
+            "Build returned unexpected status: %r" % "GIVENBACK",
+        ):
             with dbuser(config.builddmaster.dbuser):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {"builder_status": "BuilderStatus.WAITING",
-                     "build_status": "BuildStatus.GIVENBACK"})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.GIVENBACK",
+                    },
+                )
 
     @defer.inlineCallbacks
     def test_builderfail_collection(self):
         with ExpectedException(
-                BuildDaemonError,
-                "Build returned unexpected status: %r" % 'BUILDERFAIL'):
+            BuildDaemonError,
+            "Build returned unexpected status: %r" % "BUILDERFAIL",
+        ):
             with dbuser(config.builddmaster.dbuser):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {"builder_status": "BuilderStatus.WAITING",
-                     "build_status": "BuildStatus.BUILDERFAIL"})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.BUILDERFAIL",
+                    },
+                )
 
     @defer.inlineCallbacks
     def test_invalid_status_collection(self):
         with ExpectedException(
-                BuildDaemonError,
-                "Build returned unexpected status: %r" % 'BORKED'):
+            BuildDaemonError, "Build returned unexpected status: %r" % "BORKED"
+        ):
             with dbuser(config.builddmaster.dbuser):
                 yield self.behaviour.handleStatus(
                     self.build.buildqueue_record,
-                    {"builder_status": "BuilderStatus.WAITING",
-                     "build_status": "BuildStatus.BORKED"})
+                    {
+                        "builder_status": "BuilderStatus.WAITING",
+                        "build_status": "BuildStatus.BORKED",
+                    },
+                )
diff --git a/lib/lp/buildmaster/tests/test_buildqueue.py b/lib/lp/buildmaster/tests/test_buildqueue.py
index 5b41e41..4870532 100644
--- a/lib/lp/buildmaster/tests/test_buildqueue.py
+++ b/lib/lp/buildmaster/tests/test_buildqueue.py
@@ -8,43 +8,34 @@ from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.app.errors import NotFoundError
-from lp.buildmaster.enums import (
-    BuildQueueStatus,
-    BuildStatus,
-    )
+from lp.buildmaster.enums import BuildQueueStatus, BuildStatus
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.services.database.interfaces import IStore
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import ArchivePurpose, PackagePublishingStatus
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
 from lp.testing import TestCaseWithFactory
 from lp.testing.fakemethod import FakeMethod
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
-def find_job(test, name, processor='386'):
+def find_job(test, name, processor="386"):
     """Find build and queue instance for the given source and processor."""
 
     def processor_matches(bq):
         if processor is None:
-            return (bq.processor is None)
+            return bq.processor is None
         else:
-            return (processor == bq.processor.name)
+            return processor == bq.processor.name
 
     for build in test.builds:
         bq = build.buildqueue_record
         source = None
-        for attr in ('source_package_release', 'recipe'):
+        for attr in ("source_package_release", "recipe"):
             source = getattr(build, attr, None)
             if source is not None:
                 break
-        if (source.name == name and processor_matches(bq)):
+        if source.name == name and processor_matches(bq):
             return (build, bq)
     return (None, None)
 
@@ -153,29 +144,35 @@ class TestPlatformData(TestCaseWithFactory):
 
         # We test builds that target a primary archive.
         self.non_ppa = self.factory.makeArchive(
-            name="primary", purpose=ArchivePurpose.PRIMARY)
+            name="primary", purpose=ArchivePurpose.PRIMARY
+        )
         self.non_ppa.require_virtualized = False
 
         self.builds = []
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="gedit",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
 
     def test_JobPlatformSettings(self):
         """The `BuildQueue` instance shares the processor/virtualized
         properties with the associated `Build`."""
-        build, bq = find_job(self, 'gedit')
+        build, bq = find_job(self, "gedit")
 
         # Make sure the 'processor' properties are the same.
         self.assertEqual(
-            bq.processor, build.processor,
-            "The 'processor' property deviates.")
+            bq.processor, build.processor, "The 'processor' property deviates."
+        )
 
         # Make sure the 'virtualized' properties are the same.
         self.assertEqual(
-            bq.virtualized, build.virtualized,
-            "The 'virtualized' property deviates.")
+            bq.virtualized,
+            build.virtualized,
+            "The 'virtualized' property deviates.",
+        )
 
 
 class TestBuildQueueManual(TestCaseWithFactory):
diff --git a/lib/lp/buildmaster/tests/test_doc.py b/lib/lp/buildmaster/tests/test_doc.py
index b1b22c5..91adba7 100644
--- a/lib/lp/buildmaster/tests/test_doc.py
+++ b/lib/lp/buildmaster/tests/test_doc.py
@@ -6,30 +6,26 @@
 import os
 
 from lp.services.testing import build_test_suite
-from lp.testing.layers import (
-    LaunchpadFunctionalLayer,
-    LaunchpadZopelessLayer,
-    )
-from lp.testing.systemdocs import (
-    LayeredDocFileSuite,
-    setUp,
-    tearDown,
-    )
-
+from lp.testing.layers import LaunchpadFunctionalLayer, LaunchpadZopelessLayer
+from lp.testing.systemdocs import LayeredDocFileSuite, setUp, tearDown
 
 here = os.path.dirname(os.path.realpath(__file__))
 
 
 special = {
-    'builder.rst': LayeredDocFileSuite(
-        '../doc/builder.rst',
-        setUp=setUp, tearDown=tearDown,
-        layer=LaunchpadFunctionalLayer),
-    'buildqueue.rst': LayeredDocFileSuite(
-        '../doc/buildqueue.rst',
-        setUp=setUp, tearDown=tearDown,
-        layer=LaunchpadFunctionalLayer),
-    }
+    "builder.rst": LayeredDocFileSuite(
+        "../doc/builder.rst",
+        setUp=setUp,
+        tearDown=tearDown,
+        layer=LaunchpadFunctionalLayer,
+    ),
+    "buildqueue.rst": LayeredDocFileSuite(
+        "../doc/buildqueue.rst",
+        setUp=setUp,
+        tearDown=tearDown,
+        layer=LaunchpadFunctionalLayer,
+    ),
+}
 
 
 def test_suite():
diff --git a/lib/lp/buildmaster/tests/test_interactor.py b/lib/lp/buildmaster/tests/test_interactor.py
index 583fe1b..bfd4c48 100644
--- a/lib/lp/buildmaster/tests/test_interactor.py
+++ b/lib/lp/buildmaster/tests/test_interactor.py
@@ -4,9 +4,9 @@
 """Test BuilderInteractor features."""
 
 __all__ = [
-    'FakeBuildQueue',
-    'MockBuilderFactory',
-    ]
+    "FakeBuildQueue",
+    "MockBuilderFactory",
+]
 
 import hashlib
 import os
@@ -14,16 +14,16 @@ import signal
 import tempfile
 import xmlrpc.client
 
-from lpbuildd.builder import BuilderStatus
 import six
+import treq
+from lpbuildd.builder import BuilderStatus
 from testtools.matchers import ContainsAll
 from testtools.testcase import ExpectedException
 from testtools.twistedsupport import (
-    assert_fails_with,
     AsynchronousDeferredRunTest,
     AsynchronousDeferredRunTestForBrokenTwisted,
-    )
-import treq
+    assert_fails_with,
+)
 from twisted.internet import defer
 from twisted.internet.task import Clock
 from twisted.python.failure import Failure
@@ -33,23 +33,20 @@ from lp.buildmaster.enums import (
     BuilderResetProtocol,
     BuildQueueStatus,
     BuildStatus,
-    )
+)
 from lp.buildmaster.interactor import (
     BuilderInteractor,
     BuilderWorker,
     extract_vitals_from_db,
     make_download_process_pool,
     shut_down_default_process_pool,
-    )
+)
 from lp.buildmaster.interfaces.builder import (
     BuildDaemonIsolationError,
     CannotFetchFile,
     CannotResumeHost,
-    )
-from lp.buildmaster.manager import (
-    BaseBuilderFactory,
-    PrefetchedBuilderFactory,
-    )
+)
+from lp.buildmaster.manager import BaseBuilderFactory, PrefetchedBuilderFactory
 from lp.buildmaster.tests.mock_workers import (
     AbortingWorker,
     BuildingWorker,
@@ -59,28 +56,21 @@ from lp.buildmaster.tests.mock_workers import (
     OkWorker,
     WaitingWorker,
     WorkerTestHelpers,
-    )
+)
 from lp.services.config import config
 from lp.services.twistedsupport.testing import TReqFixture
 from lp.services.twistedsupport.treq import check_status
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.model.binarypackagebuildbehaviour import (
     BinaryPackageBuildBehaviour,
-    )
-from lp.testing import (
-    TestCase,
-    TestCaseWithFactory,
-    )
+)
+from lp.testing import TestCase, TestCaseWithFactory
 from lp.testing.fakemethod import FakeMethod
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
 
 
 class FakeBuildQueue:
-
-    def __init__(self, cookie='PACKAGEBUILD-1'):
+    def __init__(self, cookie="PACKAGEBUILD-1"):
         self.build_cookie = cookie
         self.reset = FakeMethod()
         self.status = BuildQueueStatus.RUNNING
@@ -124,7 +114,8 @@ class TestBuilderInteractor(TestCase):
     def resumeWorkerHost(self, builder):
         vitals = extract_vitals_from_db(builder)
         return BuilderInteractor.resumeWorkerHost(
-            vitals, BuilderInteractor.makeWorkerFromVitals(vitals))
+            vitals, BuilderInteractor.makeWorkerFromVitals(vitals)
+        )
 
     def test_resumeWorkerHost_nonvirtual(self):
         d = self.resumeWorkerHost(MockBuilder(virtualized=False))
@@ -139,22 +130,26 @@ class TestBuilderInteractor(TestCase):
             [builddmaster]
             vm_resume_command: /bin/echo -n snap %(buildd_name)s %(vm_host)s
             """
-        config.push('reset', reset_config)
-        self.addCleanup(config.pop, 'reset')
+        config.push("reset", reset_config)
+        self.addCleanup(config.pop, "reset")
 
-        d = self.resumeWorkerHost(MockBuilder(
-            url="http://crackle.ppa/";, virtualized=True, vm_host="pop"))
+        d = self.resumeWorkerHost(
+            MockBuilder(
+                url="http://crackle.ppa/", virtualized=True, vm_host="pop"
+            )
+        )
 
         def got_resume(output):
-            self.assertEqual((b'snap crackle pop', b''), output)
+            self.assertEqual((b"snap crackle pop", b""), output)
+
         return d.addCallback(got_resume)
 
     def test_resumeWorkerHost_command_failed(self):
         reset_fail_config = """
             [builddmaster]
             vm_resume_command: /bin/false"""
-        config.push('reset fail', reset_fail_config)
-        self.addCleanup(config.pop, 'reset fail')
+        config.push("reset fail", reset_fail_config)
+        self.addCleanup(config.pop, "reset fail")
         d = self.resumeWorkerHost(MockBuilder(virtualized=True, vm_host="pop"))
         return assert_fails_with(d, CannotResumeHost)
 
@@ -181,8 +176,10 @@ class TestBuilderInteractorCleanWorker(TestCase):
     @defer.inlineCallbacks
     def assertCleanCalls(self, builder, worker, calls, done):
         actually_done = yield BuilderInteractor.cleanWorker(
-            extract_vitals_from_db(builder), worker,
-            MockBuilderFactory(builder, None))
+            extract_vitals_from_db(builder),
+            worker,
+            MockBuilderFactory(builder, None),
+        )
         self.assertEqual(done, actually_done)
         self.assertEqual(calls, worker.method_log)
 
@@ -191,10 +188,14 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # Virtual builders using protocol 1.1 get reset, and once the
         # trigger completes we're happy that it's clean.
         builder = MockBuilder(
-            virtualized=True, clean_status=BuilderCleanStatus.DIRTY,
-            vm_host='lol', vm_reset_protocol=BuilderResetProtocol.PROTO_1_1)
+            virtualized=True,
+            clean_status=BuilderCleanStatus.DIRTY,
+            vm_host="lol",
+            vm_reset_protocol=BuilderResetProtocol.PROTO_1_1,
+        )
         yield self.assertCleanCalls(
-            builder, OkWorker(), ['resume', 'echo'], True)
+            builder, OkWorker(), ["resume", "echo"], True
+        )
 
     @defer.inlineCallbacks
     def test_virtual_2_0_dirty(self):
@@ -202,9 +203,12 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # CLEANING. It's then up to the non-Launchpad reset code to set
         # the builder back to CLEAN using the webservice.
         builder = MockBuilder(
-            virtualized=True, clean_status=BuilderCleanStatus.DIRTY,
-            vm_host='lol', vm_reset_protocol=BuilderResetProtocol.PROTO_2_0)
-        yield self.assertCleanCalls(builder, OkWorker(), ['resume'], False)
+            virtualized=True,
+            clean_status=BuilderCleanStatus.DIRTY,
+            vm_host="lol",
+            vm_reset_protocol=BuilderResetProtocol.PROTO_2_0,
+        )
+        yield self.assertCleanCalls(builder, OkWorker(), ["resume"], False)
         self.assertEqual(BuilderCleanStatus.CLEANING, builder.clean_status)
 
     @defer.inlineCallbacks
@@ -213,8 +217,11 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # they're DIRTY. Once they're cleaning, they're not our problem
         # until they return to CLEAN, so we ignore them.
         builder = MockBuilder(
-            virtualized=True, clean_status=BuilderCleanStatus.CLEANING,
-            vm_host='lol', vm_reset_protocol=BuilderResetProtocol.PROTO_2_0)
+            virtualized=True,
+            clean_status=BuilderCleanStatus.CLEANING,
+            vm_host="lol",
+            vm_reset_protocol=BuilderResetProtocol.PROTO_2_0,
+        )
         yield self.assertCleanCalls(builder, OkWorker(), [], False)
         self.assertEqual(BuilderCleanStatus.CLEANING, builder.clean_status)
 
@@ -223,22 +230,31 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # Virtual builders fail to clean unless vm_reset_protocol is
         # set.
         builder = MockBuilder(
-            virtualized=True, clean_status=BuilderCleanStatus.DIRTY,
-            vm_host='lol')
+            virtualized=True,
+            clean_status=BuilderCleanStatus.DIRTY,
+            vm_host="lol",
+        )
         builder.vm_reset_protocol = None
         with ExpectedException(
-                CannotResumeHost, "Invalid vm_reset_protocol: None"):
+            CannotResumeHost, "Invalid vm_reset_protocol: None"
+        ):
             yield BuilderInteractor.cleanWorker(
-                extract_vitals_from_db(builder), OkWorker(),
-                MockBuilderFactory(builder, None))
+                extract_vitals_from_db(builder),
+                OkWorker(),
+                MockBuilderFactory(builder, None),
+            )
 
     @defer.inlineCallbacks
     def test_nonvirtual_idle(self):
         # An IDLE non-virtual worker is already as clean as we can get it.
         yield self.assertCleanCalls(
             MockBuilder(
-                virtualized=False, clean_status=BuilderCleanStatus.DIRTY),
-            OkWorker(), ['status'], True)
+                virtualized=False, clean_status=BuilderCleanStatus.DIRTY
+            ),
+            OkWorker(),
+            ["status"],
+            True,
+        )
 
     @defer.inlineCallbacks
     def test_nonvirtual_building(self):
@@ -246,8 +262,12 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # through ABORTING and eventually be picked up from WAITING.
         yield self.assertCleanCalls(
             MockBuilder(
-                virtualized=False, clean_status=BuilderCleanStatus.DIRTY),
-            BuildingWorker(), ['status', 'abort'], False)
+                virtualized=False, clean_status=BuilderCleanStatus.DIRTY
+            ),
+            BuildingWorker(),
+            ["status", "abort"],
+            False,
+        )
 
     @defer.inlineCallbacks
     def test_nonvirtual_aborting(self):
@@ -255,30 +275,40 @@ class TestBuilderInteractorCleanWorker(TestCase):
         # hit WAITING eventually.
         yield self.assertCleanCalls(
             MockBuilder(
-                virtualized=False, clean_status=BuilderCleanStatus.DIRTY),
-            AbortingWorker(), ['status'], False)
+                virtualized=False, clean_status=BuilderCleanStatus.DIRTY
+            ),
+            AbortingWorker(),
+            ["status"],
+            False,
+        )
 
     @defer.inlineCallbacks
     def test_nonvirtual_waiting(self):
         # A WAITING non-virtual worker just needs clean() called.
         yield self.assertCleanCalls(
             MockBuilder(
-                virtualized=False, clean_status=BuilderCleanStatus.DIRTY),
-            WaitingWorker(), ['status', 'clean'], True)
+                virtualized=False, clean_status=BuilderCleanStatus.DIRTY
+            ),
+            WaitingWorker(),
+            ["status", "clean"],
+            True,
+        )
 
     @defer.inlineCallbacks
     def test_nonvirtual_broken(self):
         # A broken non-virtual builder is probably unrecoverable, so the
         # method just crashes.
         builder = MockBuilder(
-            virtualized=False, clean_status=BuilderCleanStatus.DIRTY)
+            virtualized=False, clean_status=BuilderCleanStatus.DIRTY
+        )
         vitals = extract_vitals_from_db(builder)
         worker = LostBuildingBrokenWorker()
         try:
             yield BuilderInteractor.cleanWorker(
-                vitals, worker, MockBuilderFactory(builder, None))
+                vitals, worker, MockBuilderFactory(builder, None)
+            )
         except xmlrpc.client.Fault:
-            self.assertEqual(['status', 'abort'], worker.call_log)
+            self.assertEqual(["status", "abort"], worker.call_log)
         else:
             self.fail("abort() should crash.")
 
@@ -290,9 +320,16 @@ class TestBuilderWorkerStatus(TestCase):
     run_tests_with = AsynchronousDeferredRunTest
 
     @defer.inlineCallbacks
-    def assertStatus(self, worker, builder_status=None, build_status=None,
-                     build_id=False, logtail=False, filemap=None,
-                     dependencies=None):
+    def assertStatus(
+        self,
+        worker,
+        builder_status=None,
+        build_status=None,
+        build_id=False,
+        logtail=False,
+        filemap=None,
+        dependencies=None,
+    ):
         status = yield worker.status()
 
         expected = {}
@@ -316,22 +353,31 @@ class TestBuilderWorkerStatus(TestCase):
         self.assertEqual(expected, status)
 
     def test_status_idle_worker(self):
-        self.assertStatus(OkWorker(), builder_status='BuilderStatus.IDLE')
+        self.assertStatus(OkWorker(), builder_status="BuilderStatus.IDLE")
 
     def test_status_building_worker(self):
         self.assertStatus(
-            BuildingWorker(), builder_status='BuilderStatus.BUILDING',
-            build_id=True, logtail=True)
+            BuildingWorker(),
+            builder_status="BuilderStatus.BUILDING",
+            build_id=True,
+            logtail=True,
+        )
 
     def test_status_waiting_worker(self):
         self.assertStatus(
-            WaitingWorker(), builder_status='BuilderStatus.WAITING',
-            build_status='BuildStatus.OK', build_id=True, filemap={})
+            WaitingWorker(),
+            builder_status="BuilderStatus.WAITING",
+            build_status="BuildStatus.OK",
+            build_id=True,
+            filemap={},
+        )
 
     def test_status_aborting_worker(self):
         self.assertStatus(
-            AbortingWorker(), builder_status='BuilderStatus.ABORTING',
-            build_id=True)
+            AbortingWorker(),
+            builder_status="BuilderStatus.ABORTING",
+            build_id=True,
+        )
 
 
 class TestBuilderInteractorDB(TestCaseWithFactory):
@@ -344,13 +390,14 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         """An idle builder has no build behaviour."""
         self.assertIs(
             None,
-            BuilderInteractor.getBuildBehaviour(None, MockBuilder(), None))
+            BuilderInteractor.getBuildBehaviour(None, MockBuilder(), None),
+        )
 
     def test_getBuildBehaviour_building(self):
         """The current behaviour is set automatically from the current job."""
         # Set the builder attribute on the buildqueue record so that our
         # builder will think it has a current build.
-        builder = self.factory.makeBuilder(name='builder')
+        builder = self.factory.makeBuilder(name="builder")
         worker = BuildingWorker()
         build = self.factory.makeBinaryPackageBuild()
         bq = build.queueBuild()
@@ -363,13 +410,16 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
     def _setupBuilder(self):
         processor = self.factory.makeProcessor(name="i386")
         builder = self.factory.makeBuilder(
-            processors=[processor], virtualized=True, vm_host="bladh")
+            processors=[processor], virtualized=True, vm_host="bladh"
+        )
         builder.setCleanStatus(BuilderCleanStatus.CLEAN)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(OkWorker()))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(OkWorker()))
         distroseries = self.factory.makeDistroSeries()
         das = self.factory.makeDistroArchSeries(
-            distroseries=distroseries, architecturetag="i386",
-            processor=processor)
+            distroseries=distroseries,
+            architecturetag="i386",
+            processor=processor,
+        )
         chroot = self.factory.makeLibraryFileAlias(db_only=True)
         das.addOrUpdateChroot(chroot)
         distroseries.nominatedarchindep = das
@@ -380,7 +430,8 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         # recipe, returning both.
         builder, distroseries, distroarchseries = self._setupBuilder()
         build = self.factory.makeSourcePackageRecipeBuild(
-            distroseries=distroseries)
+            distroseries=distroseries
+        )
         return builder, build
 
     def _setupBinaryBuildAndBuilder(self):
@@ -388,7 +439,8 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         # binary package, returning both.
         builder, distroseries, distroarchseries = self._setupBuilder()
         build = self.factory.makeBinaryPackageBuild(
-            distroarchseries=distroarchseries, builder=builder)
+            distroarchseries=distroarchseries, builder=builder
+        )
         return builder, build
 
     def test_findAndStartJob_returns_candidate(self):
@@ -402,7 +454,8 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         builder_factory.findBuildCandidate = FakeMethod(result=candidate)
         vitals = extract_vitals_from_db(builder)
         d = BuilderInteractor.findAndStartJob(
-            vitals, builder, OkWorker(), builder_factory)
+            vitals, builder, OkWorker(), builder_factory
+        )
         return d.addCallback(self.assertEqual, candidate)
 
     @defer.inlineCallbacks
@@ -413,8 +466,10 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         builder, distroseries, distroarchseries = self._setupBuilder()
         builds = [
             self.factory.makeBinaryPackageBuild(
-                distroarchseries=distroarchseries)
-            for _ in range(3)]
+                distroarchseries=distroarchseries
+            )
+            for _ in range(3)
+        ]
         candidates = [build.queueBuild() for build in builds]
         builder_factory = PrefetchedBuilderFactory()
         candidates_iter = iter(candidates)
@@ -429,12 +484,17 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         # Starting a job selects a non-superseded candidate, and supersedes
         # the candidates that have superseded source packages.
         candidate = yield BuilderInteractor.findAndStartJob(
-            vitals, builder, OkWorker(), builder_factory)
+            vitals, builder, OkWorker(), builder_factory
+        )
         self.assertEqual(candidates[2], candidate)
         self.assertEqual(
-            [BuildStatus.SUPERSEDED, BuildStatus.SUPERSEDED,
-             BuildStatus.BUILDING],
-            [build.status for build in builds])
+            [
+                BuildStatus.SUPERSEDED,
+                BuildStatus.SUPERSEDED,
+                BuildStatus.BUILDING,
+            ],
+            [build.status for build in builds],
+        )
 
     def test_findAndStartJob_starts_job(self):
         # findAndStartJob finds the next queued job using findBuildCandidate
@@ -446,7 +506,8 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         builder_factory.findBuildCandidate = FakeMethod(result=candidate)
         vitals = extract_vitals_from_db(builder)
         d = BuilderInteractor.findAndStartJob(
-            vitals, builder, OkWorker(), builder_factory)
+            vitals, builder, OkWorker(), builder_factory
+        )
 
         def check_build_started(candidate):
             self.assertEqual(candidate.builder, builder)
@@ -464,10 +525,12 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         builder_factory.findBuildCandidate = FakeMethod(result=candidate)
         vitals = extract_vitals_from_db(builder)
         with ExpectedException(
-                BuildDaemonIsolationError,
-                "Attempted to start build on a dirty worker."):
+            BuildDaemonIsolationError,
+            "Attempted to start build on a dirty worker.",
+        ):
             yield BuilderInteractor.findAndStartJob(
-                vitals, builder, OkWorker(), builder_factory)
+                vitals, builder, OkWorker(), builder_factory
+            )
 
     @defer.inlineCallbacks
     def test_findAndStartJob_dirties_worker(self):
@@ -478,7 +541,8 @@ class TestBuilderInteractorDB(TestCaseWithFactory):
         builder_factory.findBuildCandidate = FakeMethod(result=candidate)
         vitals = extract_vitals_from_db(builder)
         yield BuilderInteractor.findAndStartJob(
-            vitals, builder, OkWorker(), builder_factory)
+            vitals, builder, OkWorker(), builder_factory
+        )
         self.assertEqual(BuilderCleanStatus.DIRTY, builder.clean_status)
 
 
@@ -499,7 +563,7 @@ class TestWorker(TestCase):
     def test_abort(self):
         worker = self.worker_helper.getClientWorker()
         # We need to be in a BUILDING state before we can abort.
-        build_id = 'some-id'
+        build_id = "some-id"
         response = yield self.worker_helper.triggerGoodBuild(worker, build_id)
         self.assertEqual([BuilderStatus.BUILDING, build_id], response)
         response = yield worker.abort()
@@ -510,7 +574,7 @@ class TestWorker(TestCase):
         # Calling 'build' with an expected builder type, a good build id,
         # valid chroot & filemaps works and returns a BuilderStatus of
         # BUILDING.
-        build_id = 'some-id'
+        build_id = "some-id"
         worker = self.worker_helper.getClientWorker()
         response = yield self.worker_helper.triggerGoodBuild(worker, build_id)
         self.assertEqual([BuilderStatus.BUILDING, build_id], response)
@@ -521,7 +585,7 @@ class TestWorker(TestCase):
         # it to be in either the WAITING or ABORTED states, and both of these
         # states are very difficult to achieve in a test environment. For the
         # time being, we'll just assert that a clean attribute exists.
-        self.assertNotEqual(getattr(worker, 'clean', None), None)
+        self.assertNotEqual(getattr(worker, "clean", None), None)
 
     @defer.inlineCallbacks
     def test_echo(self):
@@ -529,8 +593,8 @@ class TestWorker(TestCase):
         # gave it.
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
-        response = yield worker.echo('foo', 'bar', 42)
-        self.assertEqual(['foo', 'bar', 42], response)
+        response = yield worker.echo("foo", "bar", 42)
+        self.assertEqual(["foo", "bar", 42], response)
 
     @defer.inlineCallbacks
     def test_info(self):
@@ -543,12 +607,19 @@ class TestWorker(TestCase):
         # into the tac file for the remote worker and config is returned from
         # the configuration file.
         self.assertEqual(3, len(info))
-        self.assertEqual(['1.0', 'i386'], info[:2])
+        self.assertEqual(["1.0", "i386"], info[:2])
         self.assertThat(
             info[2],
             ContainsAll(
-                ('sourcepackagerecipe', 'translation-templates',
-                 'binarypackage', 'livefs', 'snap')))
+                (
+                    "sourcepackagerecipe",
+                    "translation-templates",
+                    "binarypackage",
+                    "livefs",
+                    "snap",
+                )
+            ),
+        )
 
     @defer.inlineCallbacks
     def test_initial_status(self):
@@ -557,7 +628,7 @@ class TestWorker(TestCase):
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
         status = yield worker.status()
-        self.assertEqual(BuilderStatus.IDLE, status['builder_status'])
+        self.assertEqual(BuilderStatus.IDLE, status["builder_status"])
 
     @defer.inlineCallbacks
     def test_status_after_build(self):
@@ -566,50 +637,51 @@ class TestWorker(TestCase):
         # WAITING if the build finishes before we have a chance to check its
         # status.)
         worker = self.worker_helper.getClientWorker()
-        build_id = 'status-build-id'
+        build_id = "status-build-id"
         response = yield self.worker_helper.triggerGoodBuild(worker, build_id)
         self.assertEqual([BuilderStatus.BUILDING, build_id], response)
         status = yield worker.status()
         self.assertIn(
-            status['builder_status'],
-            {BuilderStatus.BUILDING, BuilderStatus.WAITING})
-        self.assertEqual(build_id, status['build_id'])
+            status["builder_status"],
+            {BuilderStatus.BUILDING, BuilderStatus.WAITING},
+        )
+        self.assertEqual(build_id, status["build_id"])
         # We only see a logtail if the build is still in the BUILDING
         # status.
-        if 'logtail' in status:
-            self.assertIsInstance(status['logtail'], xmlrpc.client.Binary)
+        if "logtail" in status:
+            self.assertIsInstance(status["logtail"], xmlrpc.client.Binary)
 
     @defer.inlineCallbacks
     def test_ensurepresent_not_there(self):
         # ensurepresent checks to see if a file is there.
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
-        response = yield worker.ensurepresent('blahblah', None, None, None)
-        self.assertEqual([False, 'No URL'], response)
+        response = yield worker.ensurepresent("blahblah", None, None, None)
+        self.assertEqual([False, "No URL"], response)
 
     @defer.inlineCallbacks
     def test_ensurepresent_actually_there(self):
         # ensurepresent checks to see if a file is there.
         tachandler = self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
-        self.worker_helper.makeCacheFile(tachandler, 'blahblah')
-        response = yield worker.ensurepresent('blahblah', None, None, None)
-        self.assertEqual([True, 'No URL'], response)
+        self.worker_helper.makeCacheFile(tachandler, "blahblah")
+        response = yield worker.ensurepresent("blahblah", None, None, None)
+        self.assertEqual([True, "No URL"], response)
 
     def test_sendFileToWorker_not_there(self):
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
-        d = worker.sendFileToWorker('blahblah', None, None, None)
+        d = worker.sendFileToWorker("blahblah", None, None, None)
         return assert_fails_with(d, CannotFetchFile)
 
     @defer.inlineCallbacks
     def test_sendFileToWorker_actually_there(self):
         tachandler = self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
-        self.worker_helper.makeCacheFile(tachandler, 'blahblah')
-        yield worker.sendFileToWorker('blahblah', None, None, None)
-        response = yield worker.ensurepresent('blahblah', None, None, None)
-        self.assertEqual([True, 'No URL'], response)
+        self.worker_helper.makeCacheFile(tachandler, "blahblah")
+        yield worker.sendFileToWorker("blahblah", None, None, None)
+        response = yield worker.ensurepresent("blahblah", None, None, None)
+        self.assertEqual([True, "No URL"], response)
 
     @defer.inlineCallbacks
     def test_resumeHost_success(self):
@@ -620,7 +692,8 @@ class TestWorker(TestCase):
 
         # The configuration testing command-line.
         self.assertEqual(
-            'echo %(vm_host)s', config.builddmaster.vm_resume_command)
+            "echo %(vm_host)s", config.builddmaster.vm_resume_command
+        )
 
         out, err, code = yield worker.resume()
         self.assertEqual(os.EX_OK, code)
@@ -640,8 +713,8 @@ class TestWorker(TestCase):
         [builddmaster]
         vm_resume_command: test "%(vm_host)s = 'no-sir'"
         """
-        config.push('failed_resume_command', failed_config)
-        self.addCleanup(config.pop, 'failed_resume_command')
+        config.push("failed_resume_command", failed_config)
+        self.addCleanup(config.pop, "failed_resume_command")
 
         # On failures, the response is a twisted `Failure` object containing
         # a tuple.
@@ -649,6 +722,7 @@ class TestWorker(TestCase):
             out, err, code = failure.value
             # The process will exit with a return code of "1".
             self.assertEqual(code, 1)
+
         d = worker.resume()
         d.addBoth(check_resume_failure)
         return d
@@ -663,8 +737,8 @@ class TestWorker(TestCase):
         vm_resume_command: sleep 5
         socket_timeout: 1
         """
-        config.push('timeout_resume_command', timeout_config)
-        self.addCleanup(config.pop, 'timeout_resume_command')
+        config.push("timeout_resume_command", timeout_config)
+        self.addCleanup(config.pop, "timeout_resume_command")
 
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
@@ -675,6 +749,7 @@ class TestWorker(TestCase):
             self.assertIsInstance(failure, Failure)
             out, err, code = failure.value
             self.assertEqual(code, signal.SIGKILL)
+
         clock = Clock()
         d = worker.resume(clock=clock)
         # Move the clock beyond the socket_timeout but earlier than the
@@ -690,7 +765,8 @@ class TestWorkerTimeouts(TestCase):
     # as required.
 
     run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory(
-        timeout=30)
+        timeout=30
+    )
 
     def setUp(self):
         super().setUp()
@@ -698,7 +774,8 @@ class TestWorkerTimeouts(TestCase):
         self.clock = Clock()
         self.proxy = DeadProxy(b"url")
         self.worker = self.worker_helper.getClientWorker(
-            reactor=self.clock, proxy=self.proxy)
+            reactor=self.clock, proxy=self.proxy
+        )
         self.addCleanup(shut_down_default_process_pool)
 
     def assertCancelled(self, d, timeout=None):
@@ -723,11 +800,13 @@ class TestWorkerTimeouts(TestCase):
     def test_timeout_ensurepresent(self):
         return self.assertCancelled(
             self.worker.ensurepresent(None, None, None, None),
-            config.builddmaster.socket_timeout * 5)
+            config.builddmaster.socket_timeout * 5,
+        )
 
     def test_timeout_build(self):
         return self.assertCancelled(
-            self.worker.build(None, None, None, None, None))
+            self.worker.build(None, None, None, None, None)
+        )
 
 
 class TestWorkerConnectionTimeouts(TestCase):
@@ -748,7 +827,7 @@ class TestWorkerConnectionTimeouts(TestCase):
     def test_connection_timeout(self):
         # The default timeout of 30 seconds should not cause a timeout,
         # only the config value should.
-        self.pushConfig('builddmaster', socket_timeout=180)
+        self.pushConfig("builddmaster", socket_timeout=180)
 
         worker = self.worker_helper.getClientWorker(reactor=self.clock)
         d = worker.echo()
@@ -769,7 +848,8 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
 
     layer = LaunchpadZopelessLayer
     run_tests_with = AsynchronousDeferredRunTestForBrokenTwisted.make_factory(
-        timeout=30)
+        timeout=30
+    )
 
     def setUp(self):
         super().setUp()
@@ -783,12 +863,13 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
 
         # Use the Librarian because it's a "convenient" web server.
         lf = self.factory.makeLibraryFileAlias(
-            'HelloWorld.txt', content="Hello World")
+            "HelloWorld.txt", content="Hello World"
+        )
         self.layer.txn.commit()
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
         d = worker.ensurepresent(lf.content.sha1, lf.http_url, "", "")
-        d.addCallback(self.assertEqual, [True, 'Download'])
+        d.addCallback(self.assertEqual, [True, "Download"])
         return d
 
     @defer.inlineCallbacks
@@ -797,12 +878,16 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
         # filename made from the sha1 of the content underneath the
         # 'filecache' directory.
         from twisted.internet import reactor
+
         content = b"Hello World"
         lf = self.factory.makeLibraryFileAlias(
-            'HelloWorld.txt', content=content)
+            "HelloWorld.txt", content=content
+        )
         self.layer.txn.commit()
-        expected_url = '%s/filecache/%s' % (
-            self.worker_helper.base_url, lf.content.sha1)
+        expected_url = "%s/filecache/%s" % (
+            self.worker_helper.base_url,
+            lf.content.sha1,
+        )
         self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
         yield worker.ensurepresent(lf.content.sha1, lf.http_url, "", "")
@@ -837,7 +922,7 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
         # content_map so we can compare downloads later.
         dl = []
         for content in contents:
-            filename = content + '.txt'
+            filename = content + ".txt"
             lf = self.factory.makeLibraryFileAlias(filename, content=content)
             content_map[lf.content.sha1] = content
             files.append((lf.content.sha1, tempfile.mkstemp()[1]))
@@ -881,7 +966,7 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
         # content_map so we can compare downloads later.
         dl = []
         for content in contents:
-            filename = content + '.txt'
+            filename = content + ".txt"
             lf = self.factory.makeLibraryFileAlias(filename, content=content)
             content_map[lf.content.sha1] = content
             files.append((lf.content.sha1, tempfile.mkstemp()[1]))
@@ -899,11 +984,11 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
         worker = self.worker_helper.getClientWorker()
         temp_fd, temp_name = tempfile.mkstemp()
         self.addCleanup(os.remove, temp_name)
-        empty_sha1 = hashlib.sha1(b'').hexdigest()
-        self.worker_helper.makeCacheFile(tachandler, empty_sha1, contents=b'')
+        empty_sha1 = hashlib.sha1(b"").hexdigest()
+        self.worker_helper.makeCacheFile(tachandler, empty_sha1, contents=b"")
         yield worker.getFiles([(empty_sha1, temp_name)])
-        with open(temp_name, 'rb') as f:
-            self.assertEqual(b'', f.read())
+        with open(temp_name, "rb") as f:
+            self.assertEqual(b"", f.read())
 
     @defer.inlineCallbacks
     def test_getFiles_to_subdirectory(self):
@@ -912,9 +997,9 @@ class TestWorkerWithLibrarian(TestCaseWithFactory):
         tachandler = self.worker_helper.getServerWorker()
         worker = self.worker_helper.getClientWorker()
         temp_dir = self.makeTemporaryDirectory()
-        temp_name = os.path.join(temp_dir, 'build:0', 'log')
-        empty_sha1 = hashlib.sha1(b'').hexdigest()
-        self.worker_helper.makeCacheFile(tachandler, empty_sha1, contents=b'')
+        temp_name = os.path.join(temp_dir, "build:0", "log")
+        empty_sha1 = hashlib.sha1(b"").hexdigest()
+        self.worker_helper.makeCacheFile(tachandler, empty_sha1, contents=b"")
         yield worker.getFiles([(empty_sha1, temp_name)])
-        with open(temp_name, 'rb') as f:
-            self.assertEqual(b'', f.read())
+        with open(temp_name, "rb") as f:
+            self.assertEqual(b"", f.read())
diff --git a/lib/lp/buildmaster/tests/test_manager.py b/lib/lp/buildmaster/tests/test_manager.py
index 1716b94..b7d300e 100644
--- a/lib/lp/buildmaster/tests/test_manager.py
+++ b/lib/lp/buildmaster/tests/test_manager.py
@@ -6,18 +6,14 @@
 import os
 import signal
 import time
-from unittest import mock
 import xmlrpc.client
+from unittest import mock
 
+import transaction
 from testtools.matchers import Equals
 from testtools.testcase import ExpectedException
 from testtools.twistedsupport import AsynchronousDeferredRunTest
-import transaction
-from twisted.internet import (
-    defer,
-    reactor,
-    task,
-    )
+from twisted.internet import defer, reactor, task
 from twisted.internet.task import deferLater
 from twisted.python.failure import Failure
 from zope.component import getUtility
@@ -27,44 +23,44 @@ from lp.buildmaster.enums import (
     BuilderCleanStatus,
     BuildQueueStatus,
     BuildStatus,
-    )
+)
 from lp.buildmaster.interactor import (
     BuilderInteractor,
     BuilderWorker,
     extract_vitals_from_db,
     shut_down_default_process_pool,
-    )
+)
 from lp.buildmaster.interfaces.builder import (
     BuildDaemonIsolationError,
     BuildWorkerFailure,
     IBuilderSet,
-    )
+)
 from lp.buildmaster.interfaces.buildqueue import IBuildQueueSet
 from lp.buildmaster.manager import (
-    BuilddManager,
     BUILDER_FAILURE_THRESHOLD,
-    BuilderFactory,
     JOB_RESET_THRESHOLD,
-    judge_failure,
+    BuilddManager,
+    BuilderFactory,
     PrefetchedBuilderFactory,
-    recover_failure,
     WorkerScanner,
-    )
+    judge_failure,
+    recover_failure,
+)
 from lp.buildmaster.tests.harness import BuilddManagerTestSetup
 from lp.buildmaster.tests.mock_workers import (
     BrokenWorker,
     BuildingWorker,
     LostBuildingBrokenWorker,
-    make_publisher,
     MockBuilder,
     OkWorker,
     TrivialBehaviour,
     WaitingWorker,
-    )
+    make_publisher,
+)
 from lp.buildmaster.tests.test_interactor import (
     FakeBuildQueue,
     MockBuilderFactory,
-    )
+)
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.services.config import config
 from lp.services.log.logger import BufferLogger
@@ -72,14 +68,14 @@ from lp.services.statsd.tests import StatsMixin
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.model.binarypackagebuildbehaviour import (
     BinaryPackageBuildBehaviour,
-    )
+)
 from lp.testing import (
     ANONYMOUS,
-    login,
     StormStatementRecorder,
     TestCase,
     TestCaseWithFactory,
-    )
+    login,
+)
 from lp.testing.dbuser import switch_dbuser
 from lp.testing.factory import LaunchpadObjectFactory
 from lp.testing.fakemethod import FakeMethod
@@ -87,7 +83,7 @@ from lp.testing.layers import (
     LaunchpadScriptLayer,
     LaunchpadZopelessLayer,
     ZopelessDatabaseLayer,
-    )
+)
 from lp.testing.matchers import HasQueryCount
 from lp.testing.sampledata import BOB_THE_BUILDER_NAME
 
@@ -97,6 +93,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
 
     This method uses the old framework for scanning and dispatching builds.
     """
+
     layer = ZopelessDatabaseLayer
     run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=20)
 
@@ -105,8 +102,8 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         super().setUp()
         # Creating the required chroots needed for dispatching.
         self.test_publisher = make_publisher()
-        ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
-        hoary = ubuntu.getSeries('hoary')
+        ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
+        hoary = ubuntu.getSeries("hoary")
         self.test_publisher.setUpDefaultDistroSeries(hoary)
         self.test_publisher.addFakeChroots(db_only=True)
         self.addCleanup(shut_down_default_process_pool)
@@ -126,7 +123,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
     def assertBuildingJob(self, job, builder, logtail=None):
         """Assert the given job is building on the given builder."""
         if logtail is None:
-            logtail = 'Dummy sampledata entry, not processing'
+            logtail = "Dummy sampledata entry, not processing"
 
         self.assertTrue(job is not None)
         self.assertEqual(job.builder, builder)
@@ -147,9 +144,9 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         manager = BuilddManager(builder_factory=builder_factory, clock=clock)
         manager.logger = BufferLogger()
         scanner = WorkerScanner(
-            builder_name, builder_factory, manager, BufferLogger(),
-            clock=clock)
-        scanner.logger.name = 'worker-scanner'
+            builder_name, builder_factory, manager, BufferLogger(), clock=clock
+        )
+        scanner.logger.name = "worker-scanner"
 
         return scanner
 
@@ -160,7 +157,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # Reset sampledata builder.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         self._resetBuilder(builder)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(OkWorker()))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(OkWorker()))
         # Set this to 1 here so that _checkDispatch can make sure it's
         # reset to 0 after a successful dispatch.
         builder.failure_count = 1
@@ -200,8 +197,10 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # Sampledata builder is enabled and is assigned to an active job.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         self.patch(
-            BuilderWorker, 'makeBuilderWorker',
-            FakeMethod(BuildingWorker(build_id='PACKAGEBUILD-8')))
+            BuilderWorker,
+            "makeBuilderWorker",
+            FakeMethod(BuildingWorker(build_id="PACKAGEBUILD-8")),
+        )
         self.assertTrue(builder.builderok)
         job = builder.currentjob
         self.assertBuildingJob(job, builder)
@@ -219,7 +218,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         self.assertIs(None, builder.currentjob)
         self._checkJobRescued(builder, job)
 
-    def _checkJobUpdated(self, builder, job, logtail='This is a build log: 0'):
+    def _checkJobUpdated(self, builder, job, logtail="This is a build log: 0"):
         """`WorkerScanner.scan` updates legitimate jobs.
 
         Job is kept assigned to the active builder and its 'logtail' is
@@ -237,10 +236,13 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # worker. It will respond as if it was building the sampledata job.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
 
-        login('foo.bar@xxxxxxxxxxxxx')
+        login("foo.bar@xxxxxxxxxxxxx")
         builder.builderok = True
-        self.patch(BuilderWorker, 'makeBuilderWorker',
-                   FakeMethod(BuildingWorker(build_id='PACKAGEBUILD-8')))
+        self.patch(
+            BuilderWorker,
+            "makeBuilderWorker",
+            FakeMethod(BuildingWorker(build_id="PACKAGEBUILD-8")),
+        )
         transaction.commit()
         login(ANONYMOUS)
 
@@ -259,7 +261,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         factory = LaunchpadObjectFactory()
         builder = factory.makeBuilder()
         builder.setCleanStatus(BuilderCleanStatus.CLEAN)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(OkWorker()))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(OkWorker()))
         transaction.commit()
         scanner = self._getScanner(builder_name=builder.name)
         yield scanner.scan()
@@ -270,7 +272,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # Reset sampledata builder.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         self._resetBuilder(builder)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(OkWorker()))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(OkWorker()))
         builder.manual = True
         transaction.commit()
         scanner = self._getScanner()
@@ -282,7 +284,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # Reset sampledata builder.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         self._resetBuilder(builder)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(OkWorker()))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(OkWorker()))
         builder.builderok = False
         transaction.commit()
         scanner = self._getScanner()
@@ -295,7 +297,8 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         self._resetBuilder(builder)
         self.patch(
-            BuilderWorker, 'makeBuilderWorker', FakeMethod(BrokenWorker()))
+            BuilderWorker, "makeBuilderWorker", FakeMethod(BrokenWorker())
+        )
         builder.failure_count = 0
         transaction.commit()
         scanner = self._getScanner(builder_name=builder.name)
@@ -314,11 +317,13 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
                 return status
 
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
-        login('foo.bar@xxxxxxxxxxxxx')
+        login("foo.bar@xxxxxxxxxxxxx")
         builder.builderok = True
         self.patch(
-            BuilderWorker, 'makeBuilderWorker',
-            FakeMethod(BrokenUTF8Worker(build_id='PACKAGEBUILD-8')))
+            BuilderWorker,
+            "makeBuilderWorker",
+            FakeMethod(BrokenUTF8Worker(build_id="PACKAGEBUILD-8")),
+        )
         transaction.commit()
         login(ANONYMOUS)
 
@@ -343,11 +348,13 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
                 return status
 
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
-        login('foo.bar@xxxxxxxxxxxxx')
+        login("foo.bar@xxxxxxxxxxxxx")
         builder.builderok = True
         self.patch(
-            BuilderWorker, 'makeBuilderWorker',
-            FakeMethod(NULWorker(build_id='PACKAGEBUILD-8')))
+            BuilderWorker,
+            "makeBuilderWorker",
+            FakeMethod(NULWorker(build_id="PACKAGEBUILD-8")),
+        )
         transaction.commit()
         login(ANONYMOUS)
 
@@ -393,6 +400,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         def _fake_scan():
             fake_scan()
             return defer.succeed(None)
+
         scanner.scan = _fake_scan
         self.assertEqual(0, fake_scan.call_count)
 
@@ -424,10 +432,12 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
                 return status
 
         build = self.factory.makeSnapBuild(
-            distroarchseries=self.test_publisher.distroseries.architectures[0])
+            distroarchseries=self.test_publisher.distroseries.architectures[0]
+        )
         job = build.queueBuild()
         builder = self.factory.makeBuilder(
-            processors=[job.processor], vm_host="fake_vm_host")
+            processors=[job.processor], vm_host="fake_vm_host"
+        )
         job.markAsBuilding(builder)
         worker = SnapBuildingWorker(build_id="SNAPBUILD-%d" % build.id)
         self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(worker))
@@ -445,18 +455,25 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         self.assertEqual("dummy", build.revision_id)
 
     @defer.inlineCallbacks
-    def _assertFailureCounting(self, builder_count, job_count,
-                               expected_builder_count, expected_job_count):
+    def _assertFailureCounting(
+        self,
+        builder_count,
+        job_count,
+        expected_builder_count,
+        expected_job_count,
+    ):
         # If scan() fails with an exception, failure_counts should be
         # incremented.  What we do with the results of the failure
         # counts is tested below separately, this test just makes sure that
         # scan() is setting the counts.
         def failing_scan():
             return defer.fail(Exception("fake exception"))
+
         scanner = self._getScanner()
         scanner.scan = failing_scan
         from lp.buildmaster import manager as manager_module
-        self.patch(manager_module, 'recover_failure', FakeMethod())
+
+        self.patch(manager_module, "recover_failure", FakeMethod())
         builder = getUtility(IBuilderSet)[scanner.builder_name]
 
         builder.failure_count = builder_count
@@ -475,35 +492,45 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # in TestFailureAssessments.
         self.assertEqual(expected_builder_count, builder.failure_count)
         self.assertEqual(
-            expected_job_count,
-            builder.current_build.failure_count)
+            expected_job_count, builder.current_build.failure_count
+        )
         self.assertEqual(1, manager_module.recover_failure.call_count)
 
     def test_scan_first_fail(self):
         # The first failure of a job should result in the failure_count
         # on the job and the builder both being incremented.
         return self._assertFailureCounting(
-            builder_count=0, job_count=0, expected_builder_count=1,
-            expected_job_count=1)
+            builder_count=0,
+            job_count=0,
+            expected_builder_count=1,
+            expected_job_count=1,
+        )
 
     def test_scan_second_builder_fail(self):
         # The first failure of a job should result in the failure_count
         # on the job and the builder both being incremented.
         return self._assertFailureCounting(
-            builder_count=1, job_count=0, expected_builder_count=2,
-            expected_job_count=1)
+            builder_count=1,
+            job_count=0,
+            expected_builder_count=2,
+            expected_job_count=1,
+        )
 
     def test_scan_second_job_fail(self):
         # The first failure of a job should result in the failure_count
         # on the job and the builder both being incremented.
         return self._assertFailureCounting(
-            builder_count=0, job_count=1, expected_builder_count=1,
-            expected_job_count=2)
+            builder_count=0,
+            job_count=1,
+            expected_builder_count=1,
+            expected_job_count=2,
+        )
 
     @defer.inlineCallbacks
     def test_scanFailed_handles_lack_of_a_job_on_the_builder(self):
         def failing_scan():
             return defer.fail(Exception("fake exception"))
+
         scanner = self._getScanner()
         scanner.scan = failing_scan
         builder = getUtility(IBuilderSet)[scanner.builder_name]
@@ -518,6 +545,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
     def test_scanFailed_increments_counter(self):
         def failing_scan():
             return defer.fail(Exception("fake exception"))
+
         scanner = self._getScanner()
         scanner.scan = failing_scan
         builder = getUtility(IBuilderSet)[scanner.builder_name]
@@ -527,10 +555,14 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
 
         yield scanner.singleCycle()
         self.assertEqual(2, self.stats_client.incr.call_count)
-        self.stats_client.incr.assert_has_calls([
-            mock.call("build.reset,arch=386,env=test,job_type=PACKAGEBUILD"),
-            mock.call("builders.judged_failed,build=False,env=test"),
-            ])
+        self.stats_client.incr.assert_has_calls(
+            [
+                mock.call(
+                    "build.reset,arch=386,env=test,job_type=PACKAGEBUILD"
+                ),
+                mock.call("builders.judged_failed,build=False,env=test"),
+            ]
+        )
 
     @defer.inlineCallbacks
     def test_fail_to_resume_leaves_it_dirty(self):
@@ -540,16 +572,18 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         # Make a worker with a failing resume() method.
         worker = OkWorker()
         worker.resume = lambda: deferLater(
-            reactor, 0, defer.fail, Failure(('out', 'err', 1)))
+            reactor, 0, defer.fail, Failure(("out", "err", 1))
+        )
 
         # Reset sampledata builder.
         builder = removeSecurityProxy(
-            getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME])
+            getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
+        )
         self._resetBuilder(builder)
         builder.setCleanStatus(BuilderCleanStatus.DIRTY)
         builder.virtualized = True
         self.assertEqual(0, builder.failure_count)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(worker))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(worker))
         builder.vm_host = "fake_vm_host"
         transaction.commit()
 
@@ -578,7 +612,8 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         yield self._getScanner().singleCycle()
         self.assertFalse(builder.builderok)
         self.assertEqual(
-            'Non-dirty builder allegedly building.', builder.failnotes)
+            "Non-dirty builder allegedly building.", builder.failnotes
+        )
         self.assertIsNone(build.buildqueue_record)
         self.assertEqual(BuildStatus.FAILEDTOBUILD, build.status)
 
@@ -590,7 +625,7 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
         builder.version = "99"
         self._resetBuilder(builder)
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(worker))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(worker))
         scanner = self._getScanner()
         yield scanner.scan()
         self.assertEqual("100", builder.version)
@@ -618,12 +653,12 @@ class TestWorkerScannerScan(StatsMixin, TestCaseWithFactory):
 
         # Set the sample data builder building with the worker from above.
         builder = getUtility(IBuilderSet)[BOB_THE_BUILDER_NAME]
-        login('foo.bar@xxxxxxxxxxxxx')
+        login("foo.bar@xxxxxxxxxxxxx")
         builder.builderok = True
         # For now, we can only cancel virtual builds.
         builder.virtualized = True
         builder.vm_host = "fake_vm_host"
-        self.patch(BuilderWorker, 'makeBuilderWorker', FakeMethod(worker))
+        self.patch(BuilderWorker, "makeBuilderWorker", FakeMethod(worker))
         transaction.commit()
         login(ANONYMOUS)
         buildqueue = builder.currentjob
@@ -676,24 +711,27 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         # a second build.
         build = self.factory.makeBinaryPackageBuild()
         build.distro_arch_series.addOrUpdateChroot(
-            self.factory.makeLibraryFileAlias(db_only=True))
+            self.factory.makeLibraryFileAlias(db_only=True)
+        )
         bq = build.queueBuild()
         bq.manualScore(9000)
         build2 = self.factory.makeBinaryPackageBuild(
-            distroarchseries=build.distro_arch_series)
+            distroarchseries=build.distro_arch_series
+        )
         bq2 = build2.queueBuild()
         bq2.manualScore(8900)
 
         builder = self.factory.makeBuilder(
-            processors=[bq.processor], manual=False, vm_host='VMHOST')
+            processors=[bq.processor], manual=False, vm_host="VMHOST"
+        )
         transaction.commit()
 
         # Mock out the build behaviour's handleSuccess so it doesn't
         # try to upload things to the librarian or queue.
         def handleSuccess(self, worker_status, logger):
             return BuildStatus.UPLOADING
-        self.patch(
-            BinaryPackageBuildBehaviour, 'handleSuccess', handleSuccess)
+
+        self.patch(BinaryPackageBuildBehaviour, "handleSuccess", handleSuccess)
 
         # And create a WorkerScanner with a worker and a clock that we
         # control.
@@ -702,14 +740,19 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         manager = BuilddManager(clock=clock)
         manager.logger = BufferLogger()
         scanner = WorkerScanner(
-            builder.name, BuilderFactory(), manager, BufferLogger(),
-            worker_factory=get_worker, clock=clock)
+            builder.name,
+            BuilderFactory(),
+            manager,
+            BufferLogger(),
+            worker_factory=get_worker,
+            clock=clock,
+        )
 
         # The worker is idle and dirty, so the first scan will clean it
         # with a reset.
         self.assertEqual(BuilderCleanStatus.DIRTY, builder.clean_status)
         yield scanner.scan()
-        self.assertEqual(['resume', 'echo'], get_worker.result.method_log)
+        self.assertEqual(["resume", "echo"], get_worker.result.method_log)
         self.assertEqual(BuilderCleanStatus.CLEAN, builder.clean_status)
         self.assertIs(None, builder.currentjob)
 
@@ -718,8 +761,8 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         get_worker.result = OkWorker()
         yield scanner.scan()
         self.assertEqual(
-            ['status', 'ensurepresent', 'build'],
-            get_worker.result.method_log)
+            ["status", "ensurepresent", "build"], get_worker.result.method_log
+        )
         self.assertEqual(bq, builder.currentjob)
         self.assertEqual(BuildQueueStatus.RUNNING, bq.status)
         self.assertEqual(BuildStatus.BUILDING, build.status)
@@ -739,7 +782,8 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         yield scanner.manager.flushLogTails()
         self.assertEqual("This is a build log: 2", bq.logtail)
         self.assertEqual(
-            ['status', 'status', 'status'], get_worker.result.method_log)
+            ["status", "status", "status"], get_worker.result.method_log
+        )
 
         # When the build finishes, the scanner will notice and call
         # handleStatus(). Our fake handleSuccess() doesn't do anything
@@ -748,7 +792,7 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         # The builder remains dirty afterward.
         get_worker.result = WaitingWorker(build_id=build.build_cookie)
         yield scanner.scan()
-        self.assertEqual(['status', 'getFile'], get_worker.result.method_log)
+        self.assertEqual(["status", "getFile"], get_worker.result.method_log)
         self.assertIs(None, builder.currentjob)
         self.assertEqual(BuildStatus.UPLOADING, build.status)
         self.assertEqual(builder, build.builder)
@@ -758,7 +802,7 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         # confirm that the worker gets cleaned.
         get_worker.result = OkWorker()
         yield scanner.scan()
-        self.assertEqual(['resume', 'echo'], get_worker.result.method_log)
+        self.assertEqual(["resume", "echo"], get_worker.result.method_log)
         self.assertIs(None, builder.currentjob)
         self.assertEqual(BuilderCleanStatus.CLEAN, builder.clean_status)
 
@@ -768,8 +812,8 @@ class TestWorkerScannerWithLibrarian(TestCaseWithFactory):
         get_worker.result = OkWorker()
         yield scanner.scan()
         self.assertEqual(
-            ['status', 'ensurepresent', 'build'],
-            get_worker.result.method_log)
+            ["status", "ensurepresent", "build"], get_worker.result.method_log
+        )
         self.assertEqual(bq2, builder.currentjob)
         self.assertEqual(BuildQueueStatus.RUNNING, bq2.status)
         self.assertEqual(BuildStatus.BUILDING, build2.status)
@@ -866,7 +910,8 @@ class TestPrefetchedBuilderFactory(TestCaseWithFactory):
         # with the non-prefetching BuilderFactory.
         self.assertEqual(7, len(all_vitals))
         self.assertEqual(
-            4, len([v for v in all_vitals if v.build_queue is not None]))
+            4, len([v for v in all_vitals if v.build_queue is not None])
+        )
         self.assertContentEqual(BuilderFactory().iterVitals(), all_vitals)
 
     def test_findBuildCandidate_avoids_duplicates(self):
@@ -874,12 +919,14 @@ class TestPrefetchedBuilderFactory(TestCaseWithFactory):
         # of candidates, so a second call returns a different job.
         das = self.factory.makeDistroArchSeries()
         builders = [
-            self.factory.makeBuilder(
-                processors=[das.processor]) for _ in range(2)]
+            self.factory.makeBuilder(processors=[das.processor])
+            for _ in range(2)
+        ]
         builder_names = [builder.name for builder in builders]
         for _ in range(5):
             self.factory.makeBinaryPackageBuild(
-                distroarchseries=das).queueBuild()
+                distroarchseries=das
+            ).queueBuild()
         transaction.commit()
         pbf = PrefetchedBuilderFactory()
         pbf.update()
@@ -889,7 +936,8 @@ class TestPrefetchedBuilderFactory(TestCaseWithFactory):
         transaction.abort()
         with StormStatementRecorder() as recorder:
             candidate1 = pbf.findBuildCandidate(
-                pbf.getVitals(builder_names[1]))
+                pbf.getVitals(builder_names[1])
+            )
         self.assertIsNotNone(candidate1)
         self.assertNotEqual(candidate0, candidate1)
         # The second call made only a single query, to fetch the candidate
@@ -906,7 +954,8 @@ class TestPrefetchedBuilderFactory(TestCaseWithFactory):
         pbf.update()
 
         candidate = pbf.acquireBuildCandidate(
-            pbf.getVitals(builder.name), builder)
+            pbf.getVitals(builder.name), builder
+        )
         self.assertEqual(BuildQueueStatus.RUNNING, candidate.status)
 
 
@@ -928,11 +977,17 @@ class TestWorkerScannerWithoutDB(TestCase):
         super().setUp()
         self.addCleanup(shut_down_default_process_pool)
 
-    def getScanner(self, builder_factory=None, interactor=None, worker=None,
-                   behaviour=None):
+    def getScanner(
+        self,
+        builder_factory=None,
+        interactor=None,
+        worker=None,
+        behaviour=None,
+    ):
         if builder_factory is None:
             builder_factory = MockBuilderFactory(
-                MockBuilder(virtualized=False), None)
+                MockBuilder(virtualized=False), None
+            )
         if interactor is None:
             interactor = BuilderInteractor()
             interactor.updateBuild = FakeMethod()
@@ -941,43 +996,50 @@ class TestWorkerScannerWithoutDB(TestCase):
         if behaviour is None:
             behaviour = TrivialBehaviour()
         return WorkerScanner(
-            'mock', builder_factory, FakeBuilddManager(), BufferLogger(),
+            "mock",
+            builder_factory,
+            FakeBuilddManager(),
+            BufferLogger(),
             interactor_factory=FakeMethod(interactor),
             worker_factory=FakeMethod(worker),
-            behaviour_factory=FakeMethod(behaviour))
+            behaviour_factory=FakeMethod(behaviour),
+        )
 
     @defer.inlineCallbacks
     def test_scan_with_job(self):
         # WorkerScanner.scan calls updateBuild() when a job is building.
-        worker = BuildingWorker('trivial')
-        bq = FakeBuildQueue('trivial')
+        worker = BuildingWorker("trivial")
+        bq = FakeBuildQueue("trivial")
         scanner = self.getScanner(
             builder_factory=MockBuilderFactory(MockBuilder(), bq),
-            worker=worker)
+            worker=worker,
+        )
 
         yield scanner.scan()
-        self.assertEqual(['status'], worker.call_log)
+        self.assertEqual(["status"], worker.call_log)
         self.assertEqual(
-            1, scanner.interactor_factory.result.updateBuild.call_count)
+            1, scanner.interactor_factory.result.updateBuild.call_count
+        )
         self.assertEqual(0, bq.reset.call_count)
 
     @defer.inlineCallbacks
     def test_scan_recovers_lost_worker_with_job(self):
         # WorkerScanner.scan identifies workers that aren't building what
         # they should be, resets the jobs, and then aborts the workers.
-        worker = BuildingWorker('nontrivial')
-        bq = FakeBuildQueue('trivial')
+        worker = BuildingWorker("nontrivial")
+        bq = FakeBuildQueue("trivial")
         builder = MockBuilder(virtualized=False)
         scanner = self.getScanner(
-            builder_factory=MockBuilderFactory(builder, bq),
-            worker=worker)
+            builder_factory=MockBuilderFactory(builder, bq), worker=worker
+        )
 
         # A single scan will call status(), notice that the worker is lost,
         # and reset() the job without calling updateBuild().
         yield scanner.scan()
-        self.assertEqual(['status'], worker.call_log)
+        self.assertEqual(["status"], worker.call_log)
         self.assertEqual(
-            0, scanner.interactor_factory.result.updateBuild.call_count)
+            0, scanner.interactor_factory.result.updateBuild.call_count
+        )
         self.assertEqual(1, bq.reset.call_count)
         # The reset would normally have unset build_queue.
         scanner.builder_factory.updateTestData(builder, None)
@@ -985,7 +1047,7 @@ class TestWorkerScannerWithoutDB(TestCase):
         # The next scan will see a dirty idle builder with a BUILDING
         # worker, and abort() it.
         yield scanner.scan()
-        self.assertEqual(['status', 'status', 'abort'], worker.call_log)
+        self.assertEqual(["status", "status", "abort"], worker.call_log)
 
     @defer.inlineCallbacks
     def test_scan_recovers_lost_worker_when_idle(self):
@@ -994,7 +1056,7 @@ class TestWorkerScannerWithoutDB(TestCase):
         worker = BuildingWorker()
         scanner = self.getScanner(worker=worker)
         yield scanner.scan()
-        self.assertEqual(['status', 'abort'], worker.call_log)
+        self.assertEqual(["status", "abort"], worker.call_log)
 
     @defer.inlineCallbacks
     def test_scan_building_but_not_dirty_builder_explodes(self):
@@ -1004,11 +1066,12 @@ class TestWorkerScannerWithoutDB(TestCase):
         builder = MockBuilder(clean_status=BuilderCleanStatus.CLEAN)
         bq = FakeBuildQueue()
         scanner = self.getScanner(
-            worker=worker, builder_factory=MockBuilderFactory(builder, bq))
+            worker=worker, builder_factory=MockBuilderFactory(builder, bq)
+        )
 
         with ExpectedException(
-                BuildDaemonIsolationError,
-                "Non-dirty builder allegedly building."):
+            BuildDaemonIsolationError, "Non-dirty builder allegedly building."
+        ):
             yield scanner.scan()
         self.assertEqual([], worker.call_log)
 
@@ -1019,56 +1082,62 @@ class TestWorkerScannerWithoutDB(TestCase):
         worker = BuildingWorker()
         builder = MockBuilder(clean_status=BuilderCleanStatus.CLEAN)
         scanner = self.getScanner(
-            worker=worker, builder_factory=MockBuilderFactory(builder, None))
+            worker=worker, builder_factory=MockBuilderFactory(builder, None)
+        )
 
         with ExpectedException(
-                BuildDaemonIsolationError,
-                r"Allegedly clean worker not idle \(%r instead\)" %
-                'BuilderStatus.BUILDING'):
+            BuildDaemonIsolationError,
+            r"Allegedly clean worker not idle \(%r instead\)"
+            % "BuilderStatus.BUILDING",
+        ):
             yield scanner.scan()
-        self.assertEqual(['status'], worker.call_log)
+        self.assertEqual(["status"], worker.call_log)
 
     def test_getExpectedCookie_caches(self):
-        bq = FakeBuildQueue('trivial')
+        bq = FakeBuildQueue("trivial")
         bf = MockBuilderFactory(MockBuilder(), bq)
         manager = BuilddManager()
         manager.logger = BufferLogger()
         scanner = WorkerScanner(
-            'mock', bf, manager, BufferLogger(),
+            "mock",
+            bf,
+            manager,
+            BufferLogger(),
             interactor_factory=FakeMethod(None),
             worker_factory=FakeMethod(None),
-            behaviour_factory=FakeMethod(TrivialBehaviour()))
+            behaviour_factory=FakeMethod(TrivialBehaviour()),
+        )
 
         # The first call retrieves the cookie from the BuildQueue.
-        cookie1 = scanner.getExpectedCookie(bf.getVitals('foo'))
-        self.assertEqual('trivial', cookie1)
+        cookie1 = scanner.getExpectedCookie(bf.getVitals("foo"))
+        self.assertEqual("trivial", cookie1)
 
         # A second call with the same BuildQueue will not reretrieve it.
-        bq.build_cookie = 'nontrivial'
-        cookie2 = scanner.getExpectedCookie(bf.getVitals('foo'))
-        self.assertEqual('trivial', cookie2)
+        bq.build_cookie = "nontrivial"
+        cookie2 = scanner.getExpectedCookie(bf.getVitals("foo"))
+        self.assertEqual("trivial", cookie2)
 
         # But a call with a new BuildQueue will regrab.
-        bf.updateTestData(bf._builder, FakeBuildQueue('complicated'))
-        cookie3 = scanner.getExpectedCookie(bf.getVitals('foo'))
-        self.assertEqual('complicated', cookie3)
+        bf.updateTestData(bf._builder, FakeBuildQueue("complicated"))
+        cookie3 = scanner.getExpectedCookie(bf.getVitals("foo"))
+        self.assertEqual("complicated", cookie3)
 
         # And unsetting the BuildQueue returns None again.
         bf.updateTestData(bf._builder, None)
-        cookie4 = scanner.getExpectedCookie(bf.getVitals('foo'))
+        cookie4 = scanner.getExpectedCookie(bf.getVitals("foo"))
         self.assertIs(None, cookie4)
 
 
 class TestJudgeFailure(TestCase):
-
     def test_same_count_below_threshold(self):
         # A few consecutive failures aren't any cause for alarm, as it
         # could just be a network glitch.
         self.assertEqual(
             (None, None),
             judge_failure(
-                JOB_RESET_THRESHOLD - 1, JOB_RESET_THRESHOLD - 1,
-                Exception()))
+                JOB_RESET_THRESHOLD - 1, JOB_RESET_THRESHOLD - 1, Exception()
+            ),
+        )
 
     def test_same_count_exceeding_threshold(self):
         # Several consecutive failures suggest that something might be
@@ -1076,7 +1145,9 @@ class TestJudgeFailure(TestCase):
         self.assertEqual(
             (None, True),
             judge_failure(
-                JOB_RESET_THRESHOLD, JOB_RESET_THRESHOLD, Exception()))
+                JOB_RESET_THRESHOLD, JOB_RESET_THRESHOLD, Exception()
+            ),
+        )
 
     def test_same_count_no_retries(self):
         # A single failure of both causes a job reset if retries are
@@ -1084,8 +1155,12 @@ class TestJudgeFailure(TestCase):
         self.assertEqual(
             (None, True),
             judge_failure(
-                JOB_RESET_THRESHOLD - 1, JOB_RESET_THRESHOLD - 1, Exception(),
-                retry=False))
+                JOB_RESET_THRESHOLD - 1,
+                JOB_RESET_THRESHOLD - 1,
+                Exception(),
+                retry=False,
+            ),
+        )
 
     def test_bad_builder(self):
         # A bad builder resets its job and dirties itself. The next scan
@@ -1093,23 +1168,23 @@ class TestJudgeFailure(TestCase):
         # case, or just retrying for non-virts).
         self.assertEqual(
             (True, True),
-            judge_failure(BUILDER_FAILURE_THRESHOLD - 1, 1, Exception()))
+            judge_failure(BUILDER_FAILURE_THRESHOLD - 1, 1, Exception()),
+        )
 
     def test_bad_builder_gives_up(self):
         # A persistently bad builder resets its job and fails itself.
         self.assertEqual(
             (False, True),
-            judge_failure(BUILDER_FAILURE_THRESHOLD, 1, Exception()))
+            judge_failure(BUILDER_FAILURE_THRESHOLD, 1, Exception()),
+        )
 
     def test_bad_job_fails(self):
-        self.assertEqual(
-            (None, False),
-            judge_failure(1, 2, Exception()))
+        self.assertEqual((None, False), judge_failure(1, 2, Exception()))
 
     def test_isolation_violation_double_kills(self):
         self.assertEqual(
-            (False, False),
-            judge_failure(1, 1, BuildDaemonIsolationError()))
+            (False, False), judge_failure(1, 1, BuildDaemonIsolationError())
+        )
 
 
 class TestCancellationChecking(TestCaseWithFactory):
@@ -1133,8 +1208,9 @@ class TestCancellationChecking(TestCaseWithFactory):
         manager = BuilddManager(clock=clock)
         manager.logger = BufferLogger()
         scanner = WorkerScanner(
-            None, BuilderFactory(), manager, BufferLogger(), clock=clock)
-        scanner.logger.name = 'worker-scanner'
+            None, BuilderFactory(), manager, BufferLogger(), clock=clock
+        )
+        scanner.logger.name = "worker-scanner"
         return scanner
 
     def test_ignores_build_not_cancelling(self):
@@ -1175,7 +1251,8 @@ class TestCancellationChecking(TestCaseWithFactory):
 
         clock.advance(WorkerScanner.CANCEL_TIMEOUT)
         with ExpectedException(
-                BuildWorkerFailure, "Timeout waiting for .* to cancel"):
+            BuildWorkerFailure, "Timeout waiting for .* to cancel"
+        ):
             yield scanner.checkCancellation(self.vitals, worker)
 
     @defer.inlineCallbacks
@@ -1185,7 +1262,8 @@ class TestCancellationChecking(TestCaseWithFactory):
         worker = LostBuildingBrokenWorker()
         self.builder.current_build.cancel()
         with ExpectedException(
-                xmlrpc.client.Fault, "<Fault 8002: %r>" % 'Could not abort'):
+            xmlrpc.client.Fault, "<Fault 8002: %r>" % "Could not abort"
+        ):
             yield self._getScanner().checkCancellation(self.vitals, worker)
 
 
@@ -1196,15 +1274,14 @@ class TestBuilddManager(TestCase):
     def _stub_out_scheduleNextScanCycle(self):
         # stub out the code that adds a callLater, so that later tests
         # don't get surprises.
-        self.patch(WorkerScanner, 'startCycle', FakeMethod())
+        self.patch(WorkerScanner, "startCycle", FakeMethod())
 
     def test_addScanForBuilders(self):
         # Test that addScanForBuilders generates WorkerScanner objects.
         self._stub_out_scheduleNextScanCycle()
 
         manager = BuilddManager()
-        builder_names = {
-            builder.name for builder in getUtility(IBuilderSet)}
+        builder_names = {builder.name for builder in getUtility(IBuilderSet)}
         scanners = manager.addScanForBuilders(builder_names)
         scanner_names = {scanner.builder_name for scanner in scanners}
         self.assertEqual(builder_names, scanner_names)
@@ -1272,8 +1349,12 @@ class TestFailureAssessments(TestCaseWithFactory):
         # Helper for recover_failure boilerplate.
         logger = BufferLogger()
         recover_failure(
-            logger, extract_vitals_from_db(self.builder), self.builder,
-            retry, Exception(fail_notes))
+            logger,
+            extract_vitals_from_db(self.builder),
+            self.builder,
+            retry,
+            Exception(fail_notes),
+        )
         return logger.getLogBuffer()
 
     def test_job_reset_threshold_with_retry(self):
@@ -1423,8 +1504,10 @@ class TestNewBuilders(TestCase):
         advance = manager.SCAN_BUILDERS_INTERVAL + 1
         clock.advance(advance)
         self.assertNotEqual(
-            0, manager.scanBuilders.call_count,
-            "startService did not schedule a scanBuilders loop")
+            0,
+            manager.scanBuilders.call_count,
+            "startService did not schedule a scanBuilders loop",
+        )
 
     def test_checkForNewBuilders(self):
         # Test that checkForNewBuilders() detects a new builder
@@ -1473,7 +1556,8 @@ class TestNewBuilders(TestCase):
         clock = task.Clock()
         manager = self._getManager(clock=clock)
         manager.checkForNewBuilders = FakeMethod(
-            failure=Exception("CHAOS REIGNS"))
+            failure=Exception("CHAOS REIGNS")
+        )
         manager.startService()
         self.addCleanup(manager.stopService)
         self.assertEqual(1, manager.checkForNewBuilders.call_count)
@@ -1506,15 +1590,18 @@ class TestFlushLogTails(TestCaseWithFactory):
 
         clock.advance(manager.FLUSH_LOGTAILS_INTERVAL + 1)
         self.assertNotEqual(
-            0, manager.flushLogTails.call_count,
-            "scheduleUpdate did not schedule a flushLogTails loop")
+            0,
+            manager.flushLogTails.call_count,
+            "scheduleUpdate did not schedule a flushLogTails loop",
+        )
 
     def test_flushLogTails(self):
         # flushLogTails flushes pending log tail updates to the database.
         manager = self._getManager()
         bqs = [
             self.factory.makeBinaryPackageBuild().queueBuild()
-            for _ in range(3)]
+            for _ in range(3)
+        ]
         manager.addLogTail(bqs[0].id, "A log tail")
         manager.addLogTail(bqs[1].id, "Another log tail")
         transaction.commit()
@@ -1545,7 +1632,8 @@ class TestFlushLogTails(TestCaseWithFactory):
         self.addCleanup(manager.stopService)
         self.assertIn(
             "Failure while flushing log tail updates:",
-            manager.logger.getLogBuffer())
+            manager.logger.getLogBuffer(),
+        )
 
         # Even though the previous flushUpdates raised an exception, further
         # updates will happen as normal.
@@ -1619,7 +1707,7 @@ class TestBuilddManagerScript(TestCaseWithFactory):
         self.assertTrue(is_file_growing(logfilepath))
         # After rotating the log, the process keeps using the old file, no
         # new file is created.
-        rotated_logfilepath = logfilepath + '.1'
+        rotated_logfilepath = logfilepath + ".1"
         os.rename(logfilepath, rotated_logfilepath)
         self.assertTrue(is_file_growing(rotated_logfilepath))
         self.assertFalse(os.access(logfilepath, os.F_OK))
@@ -1635,14 +1723,16 @@ class TestBuilddManagerScript(TestCaseWithFactory):
         # 1000000 bytes but this is deactivated for the buildd manager.
         test_setup = BuilddManagerTestSetup()
         logfilepath = test_setup.logfile
-        rotated_logfilepath = logfilepath + '.1'
+        rotated_logfilepath = logfilepath + ".1"
         # Prefill the log file to just under 1000000 bytes.
         test_setup.precreateLogfile(
-            "2010-07-27 12:36:54+0200 [-] Starting scanning cycle.\n", 18518)
+            "2010-07-27 12:36:54+0200 [-] Starting scanning cycle.\n", 18518
+        )
         self.useFixture(test_setup)
         # The process logs to the logfile.
         self.assertTrue(is_file_growing(logfilepath))
         # No rotation occured.
         self.assertFalse(
             os.access(rotated_logfilepath, os.F_OK),
-            "Twistd's log file was rotated by twistd.")
+            "Twistd's log file was rotated by twistd.",
+        )
diff --git a/lib/lp/buildmaster/tests/test_packagebuild.py b/lib/lp/buildmaster/tests/test_packagebuild.py
index 47a5798..2930754 100644
--- a/lib/lp/buildmaster/tests/test_packagebuild.py
+++ b/lib/lp/buildmaster/tests/test_packagebuild.py
@@ -10,11 +10,7 @@ from zope.security.management import checkPermission
 
 from lp.buildmaster.enums import BuildStatus
 from lp.buildmaster.interfaces.packagebuild import IPackageBuild
-from lp.testing import (
-    login,
-    login_person,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, login, login_person
 from lp.testing.layers import LaunchpadFunctionalLayer
 
 
@@ -31,41 +27,45 @@ class TestPackageBuildMixin(TestCaseWithFactory):
         joe = self.factory.makePerson(name="joe")
         joes_ppa = self.factory.makeArchive(owner=joe, name="ppa")
         self.package_build = self.factory.makeSourcePackageRecipeBuild(
-            archive=joes_ppa)
+            archive=joes_ppa
+        )
 
     def test_providesInterface(self):
         # PackageBuild provides IPackageBuild
-        login('admin@xxxxxxxxxxxxx')
+        login("admin@xxxxxxxxxxxxx")
         self.assertProvides(self.package_build, IPackageBuild)
 
     def test_updateStatus_MANUALDEPWAIT_sets_dependencies(self):
         # updateStatus sets dependencies for a MANUALDEPWAIT build.
         self.package_build.updateStatus(
-            BuildStatus.MANUALDEPWAIT, worker_status={'dependencies': 'deps'})
-        self.assertEqual('deps', self.package_build.dependencies)
+            BuildStatus.MANUALDEPWAIT, worker_status={"dependencies": "deps"}
+        )
+        self.assertEqual("deps", self.package_build.dependencies)
         self.package_build.updateStatus(
-            BuildStatus.MANUALDEPWAIT, worker_status={})
+            BuildStatus.MANUALDEPWAIT, worker_status={}
+        )
         self.assertEqual(None, self.package_build.dependencies)
 
     def test_updateStatus_unsets_dependencies_for_other_statuses(self):
         # updateStatus unsets existing dependencies when transitioning
         # to another state.
         self.package_build.updateStatus(
-            BuildStatus.MANUALDEPWAIT, worker_status={'dependencies': 'deps'})
-        self.assertEqual('deps', self.package_build.dependencies)
+            BuildStatus.MANUALDEPWAIT, worker_status={"dependencies": "deps"}
+        )
+        self.assertEqual("deps", self.package_build.dependencies)
         self.package_build.updateStatus(BuildStatus.NEEDSBUILD)
         self.assertEqual(None, self.package_build.dependencies)
 
     def test_log_url(self):
         # The url of the build log file is determined by the PackageBuild.
-        lfa = self.factory.makeLibraryFileAlias('mybuildlog.txt')
+        lfa = self.factory.makeLibraryFileAlias("mybuildlog.txt")
         self.package_build.setLog(lfa)
         log_url = self.package_build.log_url
         self.assertEqual(
-            'http://launchpad.test/~joe/+archive/ubuntu/ppa/'
-            '+recipebuild/%d/+files/mybuildlog.txt' % (
-                self.package_build.id),
-            log_url)
+            "http://launchpad.test/~joe/+archive/ubuntu/ppa/"
+            "+recipebuild/%d/+files/mybuildlog.txt" % (self.package_build.id),
+            log_url,
+        )
 
     def test_storeUploadLog(self):
         # The given content is uploaded to the librarian and linked as
@@ -74,12 +74,13 @@ class TestPackageBuildMixin(TestCaseWithFactory):
         self.assertIsNotNone(self.package_build.upload_log)
         self.assertEqual(
             hashlib.sha1(b"Some content").hexdigest(),
-            self.package_build.upload_log.content.sha1)
+            self.package_build.upload_log.content.sha1,
+        )
 
     def test_storeUploadLog_private(self):
         # A private package build will store the upload log on the
         # restricted librarian.
-        login('admin@xxxxxxxxxxxxx')
+        login("admin@xxxxxxxxxxxxx")
         self.package_build.archive.private = True
         self.assertTrue(self.package_build.is_private)
         self.package_build.storeUploadLog("Some content")
@@ -91,8 +92,9 @@ class TestPackageBuildMixin(TestCaseWithFactory):
         self.package_build.storeUploadLog(unicode_content)
         self.assertIsNotNone(self.package_build.upload_log)
         self.assertEqual(
-            hashlib.sha1(unicode_content.encode('utf-8')).hexdigest(),
-            self.package_build.upload_log.content.sha1)
+            hashlib.sha1(unicode_content.encode("utf-8")).hexdigest(),
+            self.package_build.upload_log.content.sha1,
+        )
 
     def test_upload_log_url(self):
         # The url of the upload log file is determined by the PackageBuild.
@@ -100,31 +102,32 @@ class TestPackageBuildMixin(TestCaseWithFactory):
         self.package_build.storeUploadLog("Some content")
         log_url = self.package_build.upload_log_url
         self.assertEqual(
-            'http://launchpad.test/~joe/+archive/ubuntu/ppa/'
-            '+recipebuild/%d/+files/upload_%d_log.txt' % (
-                self.package_build.id, self.package_build.id),
-            log_url)
+            "http://launchpad.test/~joe/+archive/ubuntu/ppa/"
+            "+recipebuild/%d/+files/upload_%d_log.txt"
+            % (self.package_build.id, self.package_build.id),
+            log_url,
+        )
 
     def test_view_package_build(self):
         # Anonymous access can read public builds, but not edit.
-        self.assertTrue(checkPermission('launchpad.View', self.package_build))
-        self.assertFalse(checkPermission('launchpad.Edit', self.package_build))
+        self.assertTrue(checkPermission("launchpad.View", self.package_build))
+        self.assertFalse(checkPermission("launchpad.Edit", self.package_build))
 
     def test_edit_package_build(self):
         # An authenticated user who belongs to the owning archive team
         # can edit the build.
         login_person(self.package_build.archive.owner)
-        self.assertTrue(checkPermission('launchpad.View', self.package_build))
-        self.assertTrue(checkPermission('launchpad.Edit', self.package_build))
+        self.assertTrue(checkPermission("launchpad.View", self.package_build))
+        self.assertTrue(checkPermission("launchpad.Edit", self.package_build))
 
         # But other users cannot.
         other_person = self.factory.makePerson()
         login_person(other_person)
-        self.assertTrue(checkPermission('launchpad.View', self.package_build))
-        self.assertFalse(checkPermission('launchpad.Edit', self.package_build))
+        self.assertTrue(checkPermission("launchpad.View", self.package_build))
+        self.assertFalse(checkPermission("launchpad.Edit", self.package_build))
 
     def test_admin_package_build(self):
         # Users with edit access can update attributes.
-        login('admin@xxxxxxxxxxxxx')
-        self.assertTrue(checkPermission('launchpad.View', self.package_build))
-        self.assertTrue(checkPermission('launchpad.Edit', self.package_build))
+        login("admin@xxxxxxxxxxxxx")
+        self.assertTrue(checkPermission("launchpad.View", self.package_build))
+        self.assertTrue(checkPermission("launchpad.Edit", self.package_build))
diff --git a/lib/lp/buildmaster/tests/test_processor.py b/lib/lp/buildmaster/tests/test_processor.py
index 756dfda..b51c90d 100644
--- a/lib/lp/buildmaster/tests/test_processor.py
+++ b/lib/lp/buildmaster/tests/test_processor.py
@@ -9,13 +9,10 @@ from lp.buildmaster.interfaces.processor import (
     IProcessor,
     IProcessorSet,
     ProcessorNotFound,
-    )
+)
 from lp.buildmaster.model.processor import Processor
 from lp.services.database.interfaces import IStore
-from lp.testing import (
-    ExpectedException,
-    TestCaseWithFactory,
-    )
+from lp.testing import ExpectedException, TestCaseWithFactory
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.pages import webservice_for_person
 
@@ -25,31 +22,35 @@ class ProcessorSetTests(TestCaseWithFactory):
 
     def test_getByName(self):
         processor_set = getUtility(IProcessorSet)
-        q1 = self.factory.makeProcessor(name='q1')
-        self.assertEqual(q1, processor_set.getByName('q1'))
+        q1 = self.factory.makeProcessor(name="q1")
+        self.assertEqual(q1, processor_set.getByName("q1"))
 
     def test_getByName_not_found(self):
         processor_set = getUtility(IProcessorSet)
-        with ExpectedException(ProcessorNotFound, 'No such processor.*'):
-            processor_set.getByName('q1')
+        with ExpectedException(ProcessorNotFound, "No such processor.*"):
+            processor_set.getByName("q1")
 
     def test_getAll(self):
         processor_set = getUtility(IProcessorSet)
         # Make it easy to filter out sample data
         store = IStore(Processor)
         store.execute("UPDATE Processor SET name = 'sample_data_' || name")
-        self.factory.makeProcessor(name='q1')
-        self.factory.makeProcessor(name='i686')
-        self.factory.makeProcessor(name='g4')
+        self.factory.makeProcessor(name="q1")
+        self.factory.makeProcessor(name="i686")
+        self.factory.makeProcessor(name="g4")
         self.assertEqual(
-            ['g4', 'i686', 'q1'],
+            ["g4", "i686", "q1"],
             sorted(
-            processor.name for processor in processor_set.getAll()
-            if not processor.name.startswith('sample_data_')))
+                processor.name
+                for processor in processor_set.getAll()
+                if not processor.name.startswith("sample_data_")
+            ),
+        )
 
     def test_new(self):
         proc = getUtility(IProcessorSet).new(
-            "avr2001", "The 2001 AVR", "Fast as light.")
+            "avr2001", "The 2001 AVR", "Fast as light."
+        )
         self.assertProvides(proc, IProcessor)
 
 
@@ -57,27 +58,31 @@ class ProcessorSetWebServiceTests(TestCaseWithFactory):
     layer = DatabaseFunctionalLayer
 
     def test_getByName(self):
-        self.factory.makeProcessor(name='transmeta')
+        self.factory.makeProcessor(name="transmeta")
 
         webservice = webservice_for_person(None)
         processor = webservice.named_get(
-            '/+processors', 'getByName', name='transmeta',
-            api_version='devel').jsonBody()
-        self.assertEqual('transmeta', processor['name'])
+            "/+processors", "getByName", name="transmeta", api_version="devel"
+        ).jsonBody()
+        self.assertEqual("transmeta", processor["name"])
 
     def test_default_collection(self):
         # Make it easy to filter out sample data
         store = IStore(Processor)
         store.execute("UPDATE Processor SET name = 'sample_data_' || name")
-        self.factory.makeProcessor(name='q1')
-        self.factory.makeProcessor(name='i686')
-        self.factory.makeProcessor(name='g4')
+        self.factory.makeProcessor(name="q1")
+        self.factory.makeProcessor(name="i686")
+        self.factory.makeProcessor(name="g4")
 
         webservice = webservice_for_person(None)
         collection = webservice.get(
-            '/+processors?ws.size=10', api_version='devel').jsonBody()
+            "/+processors?ws.size=10", api_version="devel"
+        ).jsonBody()
         self.assertEqual(
-            ['g4', 'i686', 'q1'],
+            ["g4", "i686", "q1"],
             sorted(
-            processor['name'] for processor in collection['entries']
-            if not processor['name'].startswith('sample_data_')))
+                processor["name"]
+                for processor in collection["entries"]
+                if not processor["name"].startswith("sample_data_")
+            ),
+        )
diff --git a/lib/lp/buildmaster/tests/test_queuedepth.py b/lib/lp/buildmaster/tests/test_queuedepth.py
index 5dacf21..6261ff2 100644
--- a/lib/lp/buildmaster/tests/test_queuedepth.py
+++ b/lib/lp/buildmaster/tests/test_queuedepth.py
@@ -2,10 +2,7 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 """Test BuildQueue start time estimation."""
 
-from datetime import (
-    datetime,
-    timedelta,
-    )
+from datetime import datetime, timedelta
 
 from pytz import utc
 from zope.component import getUtility
@@ -20,13 +17,10 @@ from lp.buildmaster.queuedepth import (
     estimate_time_to_next_builder,
     get_builder_data,
     get_free_builders_count,
-    )
+)
 from lp.buildmaster.tests.test_buildqueue import find_job
 from lp.services.database.interfaces import IStore
-from lp.soyuz.enums import (
-    ArchivePurpose,
-    PackagePublishingStatus,
-    )
+from lp.soyuz.enums import ArchivePurpose, PackagePublishingStatus
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
 from lp.testing import TestCaseWithFactory
@@ -37,25 +31,29 @@ def check_mintime_to_builder(test, bq, min_time):
     """Test the estimated time until a builder becomes available."""
     time_stamp = bq.date_started or datetime.now(utc)
     delay = estimate_time_to_next_builder(
-        removeSecurityProxy(bq), now=time_stamp)
+        removeSecurityProxy(bq), now=time_stamp
+    )
     test.assertTrue(
         delay <= min_time,
         "Wrong min time to next available builder (%s > %s)"
-        % (delay, min_time))
+        % (delay, min_time),
+    )
 
 
 def set_remaining_time_for_running_job(bq, remainder):
     """Set remaining running time for job."""
     offset = bq.estimated_duration.seconds - remainder
-    removeSecurityProxy(bq).date_started = (
-        datetime.now(utc) - timedelta(seconds=offset))
+    removeSecurityProxy(bq).date_started = datetime.now(utc) - timedelta(
+        seconds=offset
+    )
 
 
 def check_delay_for_job(test, the_job, delay):
     # Obtain the builder statistics pertaining to this job.
     builder_data = get_builder_data()
     estimated_delay = estimate_job_delay(
-        removeSecurityProxy(the_job), builder_data)
+        removeSecurityProxy(the_job), builder_data
+    )
     test.assertEqual(delay, estimated_delay)
 
 
@@ -68,7 +66,7 @@ def total_builders():
 def builders_for_job(job):
     """How many available builders can run the given job?"""
     builder_data = get_builder_data()
-    return builder_data[(getattr(job.processor, 'id', None), job.virtualized)]
+    return builder_data[(getattr(job.processor, "id", None), job.virtualized)]
 
 
 def check_estimate(test, job, delay_in_seconds):
@@ -76,15 +74,18 @@ def check_estimate(test, job, delay_in_seconds):
     estimate = job.getEstimatedJobStartTime(now=time_stamp)
     if delay_in_seconds is None:
         test.assertEqual(
-            delay_in_seconds, estimate,
+            delay_in_seconds,
+            estimate,
             "An estimate should not be possible at present but one was "
-            "returned (%s) nevertheless." % estimate)
+            "returned (%s) nevertheless." % estimate,
+        )
     else:
         estimate -= time_stamp
         test.assertTrue(
             estimate.seconds <= delay_in_seconds,
-            "The estimated delay deviates from the expected one (%s > %s)" %
-            (estimate.seconds, delay_in_seconds))
+            "The estimated delay deviates from the expected one (%s > %s)"
+            % (estimate.seconds, delay_in_seconds),
+        )
 
 
 def disable_builders(test, processor_name, virtualized):
@@ -100,12 +101,12 @@ def nth_builder(test, bq, n):
 
     def builder_key(job):
         """Access key for builders capable of running the given job."""
-        return (getattr(job.processor, 'id', None), job.virtualized)
+        return (getattr(job.processor, "id", None), job.virtualized)
 
     builder = None
     builders = test.builders.get(builder_key(bq), [])
     try:
-        for builder in builders[n - 1:]:
+        for builder in builders[n - 1 :]:
             if builder.builderok:
                 break
     except IndexError:
@@ -113,7 +114,7 @@ def nth_builder(test, bq, n):
     return builder
 
 
-def assign_to_builder(test, job_name, builder_number, processor='386'):
+def assign_to_builder(test, job_name, builder_number, processor="386"):
     """Simulate assigning a build to a builder."""
     build, bq = find_job(test, job_name, processor)
     builder = nth_builder(test, bq, builder_number)
@@ -131,54 +132,75 @@ class TestBuildQueueBase(TestCaseWithFactory):
         self.publisher.prepareBreezyAutotest()
 
         # First make nine 'i386' builders.
-        self.i1 = self.factory.makeBuilder(name='i386-v-1')
-        self.i2 = self.factory.makeBuilder(name='i386-v-2')
-        self.i3 = self.factory.makeBuilder(name='i386-v-3')
-        self.i4 = self.factory.makeBuilder(name='i386-v-4')
-        self.i5 = self.factory.makeBuilder(name='i386-v-5')
-        self.i6 = self.factory.makeBuilder(name='i386-n-6', virtualized=False)
-        self.i7 = self.factory.makeBuilder(name='i386-n-7', virtualized=False)
-        self.i8 = self.factory.makeBuilder(name='i386-n-8', virtualized=False)
-        self.i9 = self.factory.makeBuilder(name='i386-n-9', virtualized=False)
+        self.i1 = self.factory.makeBuilder(name="i386-v-1")
+        self.i2 = self.factory.makeBuilder(name="i386-v-2")
+        self.i3 = self.factory.makeBuilder(name="i386-v-3")
+        self.i4 = self.factory.makeBuilder(name="i386-v-4")
+        self.i5 = self.factory.makeBuilder(name="i386-v-5")
+        self.i6 = self.factory.makeBuilder(name="i386-n-6", virtualized=False)
+        self.i7 = self.factory.makeBuilder(name="i386-n-7", virtualized=False)
+        self.i8 = self.factory.makeBuilder(name="i386-n-8", virtualized=False)
+        self.i9 = self.factory.makeBuilder(name="i386-n-9", virtualized=False)
 
         # Next make seven 'hppa' builders.
-        self.hppa_proc = getUtility(IProcessorSet).getByName('hppa')
+        self.hppa_proc = getUtility(IProcessorSet).getByName("hppa")
         self.h1 = self.factory.makeBuilder(
-            name='hppa-v-1', processors=[self.hppa_proc])
+            name="hppa-v-1", processors=[self.hppa_proc]
+        )
         self.h2 = self.factory.makeBuilder(
-            name='hppa-v-2', processors=[self.hppa_proc])
+            name="hppa-v-2", processors=[self.hppa_proc]
+        )
         self.h3 = self.factory.makeBuilder(
-            name='hppa-v-3', processors=[self.hppa_proc])
+            name="hppa-v-3", processors=[self.hppa_proc]
+        )
         self.h4 = self.factory.makeBuilder(
-            name='hppa-v-4', processors=[self.hppa_proc])
+            name="hppa-v-4", processors=[self.hppa_proc]
+        )
         self.h5 = self.factory.makeBuilder(
-            name='hppa-n-5', processors=[self.hppa_proc], virtualized=False)
+            name="hppa-n-5", processors=[self.hppa_proc], virtualized=False
+        )
         self.h6 = self.factory.makeBuilder(
-            name='hppa-n-6', processors=[self.hppa_proc], virtualized=False)
+            name="hppa-n-6", processors=[self.hppa_proc], virtualized=False
+        )
         self.h7 = self.factory.makeBuilder(
-            name='hppa-n-7', processors=[self.hppa_proc], virtualized=False)
+            name="hppa-n-7", processors=[self.hppa_proc], virtualized=False
+        )
 
         # Finally make five 'amd64' builders.
-        self.amd_proc = getUtility(IProcessorSet).getByName('amd64')
+        self.amd_proc = getUtility(IProcessorSet).getByName("amd64")
         self.a1 = self.factory.makeBuilder(
-            name='amd64-v-1', processors=[self.amd_proc])
+            name="amd64-v-1", processors=[self.amd_proc]
+        )
         self.a2 = self.factory.makeBuilder(
-            name='amd64-v-2', processors=[self.amd_proc])
+            name="amd64-v-2", processors=[self.amd_proc]
+        )
         self.a3 = self.factory.makeBuilder(
-            name='amd64-v-3', processors=[self.amd_proc])
+            name="amd64-v-3", processors=[self.amd_proc]
+        )
         self.a4 = self.factory.makeBuilder(
-            name='amd64-n-4', processors=[self.amd_proc], virtualized=False)
+            name="amd64-n-4", processors=[self.amd_proc], virtualized=False
+        )
         self.a5 = self.factory.makeBuilder(
-            name='amd64-n-5', processors=[self.amd_proc], virtualized=False)
+            name="amd64-n-5", processors=[self.amd_proc], virtualized=False
+        )
 
         self.builders = dict()
-        self.x86_proc = getUtility(IProcessorSet).getByName('386')
+        self.x86_proc = getUtility(IProcessorSet).getByName("386")
         # x86 native
         self.builders[(self.x86_proc.id, False)] = [
-            self.i6, self.i7, self.i8, self.i9]
+            self.i6,
+            self.i7,
+            self.i8,
+            self.i9,
+        ]
         # x86 virtual
         self.builders[(self.x86_proc.id, True)] = [
-            self.i1, self.i2, self.i3, self.i4, self.i5]
+            self.i1,
+            self.i2,
+            self.i3,
+            self.i4,
+            self.i5,
+        ]
 
         # amd64 native
         self.builders[(self.amd_proc.id, False)] = [self.a4, self.a5]
@@ -190,10 +212,14 @@ class TestBuildQueueBase(TestCaseWithFactory):
             self.h5,
             self.h6,
             self.h7,
-            ]
+        ]
         # hppa virtual
         self.builders[(self.hppa_proc.id, True)] = [
-            self.h1, self.h2, self.h3, self.h4]
+            self.h1,
+            self.h2,
+            self.h3,
+            self.h4,
+        ]
 
         # Ensure all builders are operational.
         for builders in self.builders.values():
@@ -204,37 +230,51 @@ class TestBuildQueueBase(TestCaseWithFactory):
         # Native builders irrespective of processor.
         self.builders[(None, False)] = []
         self.builders[(None, False)].extend(
-            self.builders[(self.x86_proc.id, False)])
+            self.builders[(self.x86_proc.id, False)]
+        )
         self.builders[(None, False)].extend(
-            self.builders[(self.amd_proc.id, False)])
+            self.builders[(self.amd_proc.id, False)]
+        )
         self.builders[(None, False)].extend(
-            self.builders[(self.hppa_proc.id, False)])
+            self.builders[(self.hppa_proc.id, False)]
+        )
 
         # Virtual builders irrespective of processor.
         self.builders[(None, True)] = []
         self.builders[(None, True)].extend(
-            self.builders[(self.x86_proc.id, True)])
+            self.builders[(self.x86_proc.id, True)]
+        )
         self.builders[(None, True)].extend(
-            self.builders[(self.amd_proc.id, True)])
+            self.builders[(self.amd_proc.id, True)]
+        )
         self.builders[(None, True)].extend(
-            self.builders[(self.hppa_proc.id, True)])
+            self.builders[(self.hppa_proc.id, True)]
+        )
 
         # Disable the sample data builders.
-        getUtility(IBuilderSet)['bob'].builderok = False
-        getUtility(IBuilderSet)['frog'].builderok = False
-
-    def makeCustomBuildQueue(self, score=9876, virtualized=True,
-                             estimated_duration=64, sourcename=None,
-                             recipe_build=None):
+        getUtility(IBuilderSet)["bob"].builderok = False
+        getUtility(IBuilderSet)["frog"].builderok = False
+
+    def makeCustomBuildQueue(
+        self,
+        score=9876,
+        virtualized=True,
+        estimated_duration=64,
+        sourcename=None,
+        recipe_build=None,
+    ):
         """Create a `SourcePackageRecipeBuild` and a `BuildQueue` for
         testing."""
         if recipe_build is None:
             recipe_build = self.factory.makeSourcePackageRecipeBuild(
-                sourcename=sourcename)
+                sourcename=sourcename
+            )
         bq = BuildQueue(
-            build_farm_job=recipe_build.build_farm_job, lastscore=score,
+            build_farm_job=recipe_build.build_farm_job,
+            lastscore=score,
             estimated_duration=timedelta(seconds=estimated_duration),
-            virtualized=virtualized)
+            virtualized=virtualized,
+        )
         IStore(BuildQueue).add(bq)
         return bq
 
@@ -267,44 +307,67 @@ class SingleArchBuildsBase(TestBuildQueueBase):
 
         # We test builds that target a primary archive.
         self.non_ppa = self.factory.makeArchive(
-            name="primary", purpose=ArchivePurpose.PRIMARY)
+            name="primary", purpose=ArchivePurpose.PRIMARY
+        )
         self.non_ppa.require_virtualized = False
 
         self.builds = []
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="gedit",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
                 sourcename="firefox",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="apg", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="apg",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="vim", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="vim",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="gcc", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="gcc",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="bison", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="bison",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="flex", status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                sourcename="flex",
+                status=PackagePublishingStatus.PUBLISHED,
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
                 sourcename="postgres",
                 status=PackagePublishingStatus.PUBLISHED,
-                archive=self.non_ppa).createMissingBuilds())
+                archive=self.non_ppa,
+            ).createMissingBuilds()
+        )
         # Set up the builds for test.
         score = 1000
         duration = 0
@@ -326,64 +389,88 @@ class TestBuilderData(SingleArchBuildsBase):
         # be the same for all of our builds.
         bq = self.builds[0].buildqueue_record
         self.assertEqual(
-            21, total_builders(),
-            "The total number of builders is wrong.")
+            21, total_builders(), "The total number of builders is wrong."
+        )
         self.assertEqual(
-            4, builders_for_job(bq),
+            4,
+            builders_for_job(bq),
             "[1] The total number of builders that can build the job in "
-            "question is wrong.")
+            "question is wrong.",
+        )
         builder_stats = get_builder_data()
         self.assertEqual(
-            4, builder_stats[(self.x86_proc.id, False)],
-            "The number of native x86 builders is wrong")
+            4,
+            builder_stats[(self.x86_proc.id, False)],
+            "The number of native x86 builders is wrong",
+        )
         self.assertEqual(
-            5, builder_stats[(self.x86_proc.id, True)],
-            "The number of virtual x86 builders is wrong")
+            5,
+            builder_stats[(self.x86_proc.id, True)],
+            "The number of virtual x86 builders is wrong",
+        )
         self.assertEqual(
-            2, builder_stats[(self.amd_proc.id, False)],
-            "The number of native amd64 builders is wrong")
+            2,
+            builder_stats[(self.amd_proc.id, False)],
+            "The number of native amd64 builders is wrong",
+        )
         self.assertEqual(
-            3, builder_stats[(self.amd_proc.id, True)],
-            "The number of virtual amd64 builders is wrong")
+            3,
+            builder_stats[(self.amd_proc.id, True)],
+            "The number of virtual amd64 builders is wrong",
+        )
         self.assertEqual(
-            3, builder_stats[(self.hppa_proc.id, False)],
-            "The number of native hppa builders is wrong")
+            3,
+            builder_stats[(self.hppa_proc.id, False)],
+            "The number of native hppa builders is wrong",
+        )
         self.assertEqual(
-            4, builder_stats[(self.hppa_proc.id, True)],
-            "The number of virtual hppa builders is wrong")
+            4,
+            builder_stats[(self.hppa_proc.id, True)],
+            "The number of virtual hppa builders is wrong",
+        )
         self.assertEqual(
-            9, builder_stats[(None, False)],
-            "The number of *virtual* builders across all processors is wrong")
+            9,
+            builder_stats[(None, False)],
+            "The number of *native* builders across all processors is wrong",
+        )
         self.assertEqual(
-            12, builder_stats[(None, True)],
-            "The number of *native* builders across all processors is wrong")
+            12,
+            builder_stats[(None, True)],
+            "The number of *virtual* builders across all processors is wrong",
+        )
         # Disable the native x86 builders.
         for builder in self.builders[(self.x86_proc.id, False)]:
             builder.builderok = False
         # Since all native x86 builders were disabled there are none left
         # to build the job.
         self.assertEqual(
-            0, builders_for_job(bq),
+            0,
+            builders_for_job(bq),
             "[2] The total number of builders that can build the job in "
-            "question is wrong.")
+            "question is wrong.",
+        )
         # Re-enable one of them.
         for builder in self.builders[(self.x86_proc.id, False)]:
             builder.builderok = True
             break
         # Now there should be one builder available to build the job.
         self.assertEqual(
-            1, builders_for_job(bq),
+            1,
+            builders_for_job(bq),
             "[3] The total number of builders that can build the job in "
-            "question is wrong.")
+            "question is wrong.",
+        )
         # Disable the *virtual* x86 builders -- should not make any
         # difference.
         for builder in self.builders[(self.x86_proc.id, True)]:
             builder.builderok = False
         # There should still be one builder available to build the job.
         self.assertEqual(
-            1, builders_for_job(bq),
+            1,
+            builders_for_job(bq),
             "[4] The total number of builders that can build the job in "
-            "question is wrong.")
+            "question is wrong.",
+        )
 
     def test_free_builder_counts(self):
         # Make sure the builder numbers are correct. The builder data will
@@ -399,40 +486,48 @@ class TestBuilderData(SingleArchBuildsBase):
         builder_stats = get_builder_data()
         # We have 4 x86 native builders.
         self.assertEqual(
-            4, builder_stats[(self.x86_proc.id, False)],
-            "The number of native x86 builders is wrong")
+            4,
+            builder_stats[(self.x86_proc.id, False)],
+            "The number of native x86 builders is wrong",
+        )
         # Initially all 4 builders are free.
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(4, free_count)
         # Once we assign a build to one of them we should see the free
         # builders count drop by one.
-        assign_to_builder(self, 'postgres', 1)
+        assign_to_builder(self, "postgres", 1)
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(3, free_count)
         # When we assign another build to one of them we should see the free
         # builders count drop by one again.
-        assign_to_builder(self, 'gcc', 2)
+        assign_to_builder(self, "gcc", 2)
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(2, free_count)
         # Let's use up another builder.
-        assign_to_builder(self, 'apg', 3)
+        assign_to_builder(self, "apg", 3)
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(1, free_count)
         # And now for the last one.
-        assign_to_builder(self, 'flex', 4)
+        assign_to_builder(self, "flex", 4)
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(0, free_count)
         # If we reset the 'flex' build the builder that was assigned to it
         # will be free again.
-        build, bq = find_job(self, 'flex')
+        build, bq = find_job(self, "flex")
         bq.reset()
         free_count = get_free_builders_count(
-            build.processor, build.virtualized)
+            build.processor, build.virtualized
+        )
         self.assertEqual(1, free_count)
 
 
@@ -460,23 +555,23 @@ class TestMinTimeToNextBuilder(SingleArchBuildsBase):
         # p=processor, v=virtualized, e=estimated_duration, s=score
 
         # This will be the job of interest.
-        apg_build, apg_job = find_job(self, 'apg')
+        apg_build, apg_job = find_job(self, "apg")
         # One of four builders for the 'apg' build is immediately available.
         check_mintime_to_builder(self, apg_job, 0)
 
         # Assign the postgres job to a builder.
-        assign_to_builder(self, 'postgres', 1)
+        assign_to_builder(self, "postgres", 1)
         # Now one builder is gone. But there should still be a builder
         # immediately available.
         check_mintime_to_builder(self, apg_job, 0)
 
-        assign_to_builder(self, 'flex', 2)
+        assign_to_builder(self, "flex", 2)
         check_mintime_to_builder(self, apg_job, 0)
 
-        assign_to_builder(self, 'bison', 3)
+        assign_to_builder(self, "bison", 3)
         check_mintime_to_builder(self, apg_job, 0)
 
-        assign_to_builder(self, 'gcc', 4)
+        assign_to_builder(self, "gcc", 4)
         # Now that no builder is immediately available, the shortest
         # remaining build time (based on the estimated duration) is returned:
         #   300 seconds
@@ -486,13 +581,13 @@ class TestMinTimeToNextBuilder(SingleArchBuildsBase):
         # Now we pretend that the 'postgres' started 6 minutes ago. Its
         # remaining execution time should be 2 minutes = 120 seconds and
         # it now becomes the job whose builder becomes available next.
-        build, bq = find_job(self, 'postgres')
+        build, bq = find_job(self, "postgres")
         set_remaining_time_for_running_job(bq, 120)
         check_mintime_to_builder(self, apg_job, 120)
 
         # What happens when jobs overdraw the estimated duration? Let's
         # pretend the 'flex' job started 8 minutes ago.
-        build, bq = find_job(self, 'flex')
+        build, bq = find_job(self, "flex")
         set_remaining_time_for_running_job(bq, -60)
         # In such a case we assume that the job will complete within 2
         # minutes, this is a guess that has worked well so far.
@@ -500,7 +595,7 @@ class TestMinTimeToNextBuilder(SingleArchBuildsBase):
 
         # If there's a job that will complete within a shorter time then
         # we expect to be given that time frame.
-        build, bq = find_job(self, 'postgres')
+        build, bq = find_job(self, "postgres")
         set_remaining_time_for_running_job(bq, 30)
         check_mintime_to_builder(self, apg_job, 30)
 
@@ -516,8 +611,11 @@ class TestMinTimeToNextBuilder(SingleArchBuildsBase):
 
         # The following job can only run on a native builder.
         job = self.makeCustomBuildQueue(
-            estimated_duration=111, sourcename='xxr-gftp', score=1055,
-            virtualized=False)
+            estimated_duration=111,
+            sourcename="xxr-gftp",
+            score=1055,
+            virtualized=False,
+        )
         self.builds.append(job.specific_build)
 
         # Disable all native builders.
@@ -567,57 +665,80 @@ class MultiArchBuildsBase(TestBuildQueueBase):
 
         # We test builds that target a primary archive.
         self.non_ppa = self.factory.makeArchive(
-            name="primary", purpose=ArchivePurpose.PRIMARY)
+            name="primary", purpose=ArchivePurpose.PRIMARY
+        )
         self.non_ppa.require_virtualized = False
 
         self.builds = []
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="gedit",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
                 sourcename="firefox",
                 status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="apg", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="apg",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="vim", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="vim",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="gcc", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="gcc",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="bison", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="bison",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
-                sourcename="flex", status=PackagePublishingStatus.PUBLISHED,
+                sourcename="flex",
+                status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         self.builds.extend(
             self.publisher.getPubSource(
                 sourcename="postgres",
                 status=PackagePublishingStatus.PUBLISHED,
                 archive=self.non_ppa,
-                architecturehintlist='any').createMissingBuilds())
+                architecturehintlist="any",
+            ).createMissingBuilds()
+        )
         # Set up the builds for test.
         score = 1000
         duration = 0
         for build in self.builds:
-            score += getattr(self, 'score_increment', 1)
+            score += getattr(self, "score_increment", 1)
             score += 1
             duration += 60
             bq = build.buildqueue_record
@@ -633,19 +754,19 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
         the queue becoming available?"""
         # XXX AaronBentley 2010-03-19 bug=541914: Fails spuriously
         # One of four builders for the 'apg' build is immediately available.
-        apg_build, apg_job = find_job(self, 'apg', 'hppa')
+        apg_build, apg_job = find_job(self, "apg", "hppa")
         check_mintime_to_builder(self, apg_job, 0)
 
         # Assign the postgres job to a builder.
-        assign_to_builder(self, 'postgres', 1, 'hppa')
+        assign_to_builder(self, "postgres", 1, "hppa")
         # Now one builder is gone. But there should still be a builder
         # immediately available.
         check_mintime_to_builder(self, apg_job, 0)
 
-        assign_to_builder(self, 'flex', 2, 'hppa')
+        assign_to_builder(self, "flex", 2, "hppa")
         check_mintime_to_builder(self, apg_job, 0)
 
-        assign_to_builder(self, 'bison', 3, 'hppa')
+        assign_to_builder(self, "bison", 3, "hppa")
         # Now that no builder is immediately available, the shortest
         # remaining build time (based on the estimated duration) is returned:
         #   660 seconds
@@ -655,13 +776,13 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
         # Now we pretend that the 'postgres' started 13 minutes ago. Its
         # remaining execution time should be 2 minutes = 120 seconds and
         # it now becomes the job whose builder becomes available next.
-        build, bq = find_job(self, 'postgres', 'hppa')
+        build, bq = find_job(self, "postgres", "hppa")
         set_remaining_time_for_running_job(bq, 120)
         check_mintime_to_builder(self, apg_job, 120)
 
         # What happens when jobs overdraw the estimated duration? Let's
         # pretend the 'flex' job started 14 minutes ago.
-        build, bq = find_job(self, 'flex', 'hppa')
+        build, bq = find_job(self, "flex", "hppa")
         set_remaining_time_for_running_job(bq, -60)
         # In such a case we assume that the job will complete within 2
         # minutes, this is a guess that has worked well so far.
@@ -669,7 +790,7 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
 
         # If there's a job that will complete within a shorter time then
         # we expect to be given that time frame.
-        build, bq = find_job(self, 'postgres', 'hppa')
+        build, bq = find_job(self, "postgres", "hppa")
         set_remaining_time_for_running_job(bq, 30)
         check_mintime_to_builder(self, apg_job, 30)
 
@@ -683,8 +804,11 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
 
         # Let's add a processor-independent job to the mix.
         job = self.makeCustomBuildQueue(
-            virtualized=False, estimated_duration=22,
-            sourcename='my-recipe-digikam', score=9999)
+            virtualized=False,
+            estimated_duration=22,
+            sourcename="my-recipe-digikam",
+            score=9999,
+        )
         # There are still builders available for the processor-independent
         # job.
         self.assertEqual(6, builders_for_job(job))
@@ -692,7 +816,8 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
         self.assertTrue(
             bq._getFreeBuildersCount(job.processor, job.virtualized) > 0,
             "Builders are immediately available for processor-independent "
-            "jobs.")
+            "jobs.",
+        )
         check_mintime_to_builder(self, job, 0)
 
         # Let's disable all builders.
@@ -719,8 +844,8 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
 
         # Now this builder is the one that becomes available next (29 minutes
         # remaining build time).
-        assign_to_builder(self, 'gcc', 1, '386')
-        build, bq = find_job(self, 'gcc', '386')
+        assign_to_builder(self, "gcc", 1, "386")
+        build, bq = find_job(self, "gcc", "386")
         set_remaining_time_for_running_job(bq, 29)
 
         check_mintime_to_builder(self, apg_job, 29)
@@ -735,6 +860,7 @@ class TestMinTimeToNextBuilderMulti(MultiArchBuildsBase):
 
 class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
     """Test estimated job delays with various processors."""
+
     score_increment = 2
 
     def setUp(self):
@@ -745,59 +871,65 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
         In case of jobs with equal scores the one with the lesser 'job' value
         (i.e. the older one wins).
 
-            3,              gedit, p: hppa, v:False e:0:01:00 *** s: 1003
-            4,              gedit, p:  386, v:False e:0:02:00 *** s: 1006
-            5,            firefox, p: hppa, v:False e:0:03:00 *** s: 1009
-            6,            firefox, p:  386, v:False e:0:04:00 *** s: 1012
-            7,                apg, p: hppa, v:False e:0:05:00 *** s: 1015
-            9,                vim, p: hppa, v:False e:0:07:00 *** s: 1021
-           10,                vim, p:  386, v:False e:0:08:00 *** s: 1024
-            8,                apg, p:  386, v:False e:0:06:00 *** s: 1024
-      -->  19,     xx-recipe-bash, p: None, v:False e:0:00:22 *** s: 1025
-           11,                gcc, p: hppa, v:False e:0:09:00 *** s: 1027
-           12,                gcc, p:  386, v:False e:0:10:00 *** s: 1030
-           13,              bison, p: hppa, v:False e:0:11:00 *** s: 1033
-           14,              bison, p:  386, v:False e:0:12:00 *** s: 1036
-           15,               flex, p: hppa, v:False e:0:13:00 *** s: 1039
-           16,               flex, p:  386, v:False e:0:14:00 *** s: 1042
-           17,           postgres, p: hppa, v:False e:0:15:00 *** s: 1045
-           18,           postgres, p:  386, v:False e:0:16:00 *** s: 1048
-      -->  20,      xx-recipe-zsh, p: None, v:False e:0:03:42 *** s: 1053
-
-         p=processor, v=virtualized, e=estimated_duration, s=score
+              3,              gedit, p: hppa, v:False e:0:01:00 *** s: 1003
+              4,              gedit, p:  386, v:False e:0:02:00 *** s: 1006
+              5,            firefox, p: hppa, v:False e:0:03:00 *** s: 1009
+              6,            firefox, p:  386, v:False e:0:04:00 *** s: 1012
+              7,                apg, p: hppa, v:False e:0:05:00 *** s: 1015
+              9,                vim, p: hppa, v:False e:0:07:00 *** s: 1021
+             10,                vim, p:  386, v:False e:0:08:00 *** s: 1024
+              8,                apg, p:  386, v:False e:0:06:00 *** s: 1024
+        -->  19,     xx-recipe-bash, p: None, v:False e:0:00:22 *** s: 1025
+             11,                gcc, p: hppa, v:False e:0:09:00 *** s: 1027
+             12,                gcc, p:  386, v:False e:0:10:00 *** s: 1030
+             13,              bison, p: hppa, v:False e:0:11:00 *** s: 1033
+             14,              bison, p:  386, v:False e:0:12:00 *** s: 1036
+             15,               flex, p: hppa, v:False e:0:13:00 *** s: 1039
+             16,               flex, p:  386, v:False e:0:14:00 *** s: 1042
+             17,           postgres, p: hppa, v:False e:0:15:00 *** s: 1045
+             18,           postgres, p:  386, v:False e:0:16:00 *** s: 1048
+        -->  20,      xx-recipe-zsh, p: None, v:False e:0:03:42 *** s: 1053
+
+        p=processor, v=virtualized, e=estimated_duration, s=score
         """
         super().setUp()
 
         job = self.makeCustomBuildQueue(
-            virtualized=False, estimated_duration=22,
-            sourcename='xx-recipe-bash', score=1025)
+            virtualized=False,
+            estimated_duration=22,
+            sourcename="xx-recipe-bash",
+            score=1025,
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            virtualized=False, estimated_duration=222,
-            sourcename='xx-recipe-zsh', score=1053)
+            virtualized=False,
+            estimated_duration=222,
+            sourcename="xx-recipe-zsh",
+            score=1053,
+        )
         self.builds.append(job.specific_build)
 
         # Assign the same score to the '386' vim and apg build jobs.
-        _apg_build, apg_job = find_job(self, 'apg', '386')
+        _apg_build, apg_job = find_job(self, "apg", "386")
         apg_job.lastscore = 1024
 
     def disabled_test_job_delay_for_binary_builds(self):
         # One of four builders for the 'flex' build is immediately available.
-        flex_build, flex_job = find_job(self, 'flex', 'hppa')
+        flex_build, flex_job = find_job(self, "flex", "hppa")
         check_mintime_to_builder(self, flex_job, 0)
 
         # The delay will be 900 (= 15*60) + 222 seconds
         check_delay_for_job(self, flex_job, 1122)
 
         # Assign the postgres job to a builder.
-        assign_to_builder(self, 'postgres', 1, 'hppa')
+        assign_to_builder(self, "postgres", 1, "hppa")
         # The 'postgres' job is not pending any more.  Now only the 222
         # seconds (the estimated duration of the platform-independent job)
         # should be returned.
         check_delay_for_job(self, flex_job, 222)
 
         # How about some estimates for x86 builds?
-        _bison_build, bison_job = find_job(self, 'bison', '386')
+        _bison_build, bison_job = find_job(self, "bison", "386")
         check_mintime_to_builder(self, bison_job, 0)
         # The delay will be 900 (= (14+16)*60/2) + 222 seconds.
         check_delay_for_job(self, bison_job, 1122)
@@ -809,13 +941,13 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
 
         # Also, this tests that jobs with equal score but a lower 'job' value
         # (i.e. older jobs) are queued ahead of the job of interest (JOI).
-        _vim_build, vim_job = find_job(self, 'vim', '386')
+        _vim_build, vim_job = find_job(self, "vim", "386")
         check_mintime_to_builder(self, vim_job, 0)
         # The delay will be 870 (= (6+10+12+14+16)*60/4) + 122 (= (222+22)/2)
         # seconds.
         check_delay_for_job(self, vim_job, 992)
 
-        _gedit_build, gedit_job = find_job(self, 'gedit', '386')
+        _gedit_build, gedit_job = find_job(self, "gedit", "386")
         check_mintime_to_builder(self, gedit_job, 0)
         # The delay will be
         #   1080 (= (4+6+8+10+12+14+16)*60/4) + 122 (= (222+22)/2)
@@ -824,7 +956,7 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
 
     def disabled_test_job_delay_for_recipe_builds(self):
         # One of the 9 builders for the 'bash' build is immediately available.
-        bash_build, bash_job = find_job(self, 'xx-recipe-bash', None)
+        bash_build, bash_job = find_job(self, "xx-recipe-bash", None)
         check_mintime_to_builder(self, bash_job, 0)
 
         # The delay will be 960 + 780 + 222 = 1962, where
@@ -833,7 +965,7 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
         check_delay_for_job(self, bash_job, 1962)
 
         # One of the 9 builders for the 'zsh' build is immediately available.
-        zsh_build, zsh_job = find_job(self, 'xx-recipe-zsh', None)
+        zsh_build, zsh_job = find_job(self, "xx-recipe-zsh", None)
         check_mintime_to_builder(self, zsh_job, 0)
 
         # The delay will be 0 since this is the head job.
@@ -841,7 +973,7 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
 
         # Assign the zsh job to a builder.
         self.assertEqual((None, False), bash_job._getHeadJobPlatform())
-        assign_to_builder(self, 'xx-recipe-zsh', 1, None)
+        assign_to_builder(self, "xx-recipe-zsh", 1, None)
         self.assertEqual((1, False), bash_job._getHeadJobPlatform())
 
         # Now that the highest-scored job is out of the way, the estimation
@@ -852,7 +984,7 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
         #    386 job delays: 780 = (10+12+14+16)*60/4
         check_delay_for_job(self, bash_job, 1740)
 
-        _postgres_build, postgres_job = find_job(self, 'postgres', '386')
+        _postgres_build, postgres_job = find_job(self, "postgres", "386")
         # The delay will be 0 since this is the head job now.
         check_delay_for_job(self, postgres_job, 0)
         # Also, the platform of the postgres job is returned since it *is*
@@ -863,6 +995,7 @@ class TestMultiArchJobDelayEstimation(MultiArchBuildsBase):
 
 class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
     """Test estimated job delays with various processors."""
+
     score_increment = 2
 
     def setUp(self):
@@ -899,34 +1032,46 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
         super().setUp()
 
         job = self.makeCustomBuildQueue(
-            virtualized=False, estimated_duration=332,
-            sourcename='xxr-aptitude', score=1025)
+            virtualized=False,
+            estimated_duration=332,
+            sourcename="xxr-aptitude",
+            score=1025,
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            virtualized=False, estimated_duration=443,
-            sourcename='xxr-auto-apt', score=1053)
+            virtualized=False,
+            estimated_duration=443,
+            sourcename="xxr-auto-apt",
+            score=1053,
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=554, sourcename='xxr-daptup', score=1051)
+            estimated_duration=554, sourcename="xxr-daptup", score=1051
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=665, sourcename='xxr-cron-apt', score=1043)
+            estimated_duration=665, sourcename="xxr-cron-apt", score=1043
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=776, sourcename='xxr-apt-build', score=1043)
+            estimated_duration=776, sourcename="xxr-apt-build", score=1043
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=887, sourcename='xxr-debdelta', score=1044)
+            estimated_duration=887, sourcename="xxr-debdelta", score=1044
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=998, sourcename='xxr-apt', score=1044)
+            estimated_duration=998, sourcename="xxr-apt", score=1044
+        )
         self.builds.append(job.specific_build)
         job = self.makeCustomBuildQueue(
-            estimated_duration=1110, sourcename='xxr-cupt', score=1044)
+            estimated_duration=1110, sourcename="xxr-cupt", score=1044
+        )
         self.builds.append(job.specific_build)
 
         # Assign the same score to the '386' vim and apg build jobs.
-        _apg_build, apg_job = find_job(self, 'apg', '386')
+        _apg_build, apg_job = find_job(self, "apg", "386")
         apg_job.lastscore = 1024
 
         # Also, toggle the 'virtualized' flag for all '386' jobs.
@@ -937,12 +1082,12 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
 
     def test_pending_jobs_only(self):
         # Let's see the assertion fail for a job that's not pending any more.
-        assign_to_builder(self, 'gedit', 1, 'hppa')
-        gedit_build, gedit_job = find_job(self, 'gedit', 'hppa')
+        assign_to_builder(self, "gedit", 1, "hppa")
+        gedit_build, gedit_job = find_job(self, "gedit", "hppa")
         self.assertRaises(AssertionError, gedit_job.getEstimatedJobStartTime)
 
     def test_estimation_binary_virtual(self):
-        gcc_build, gcc_job = find_job(self, 'gcc', '386')
+        gcc_build, gcc_job = find_job(self, "gcc", "386")
         # The delay of 1671 seconds is calculated as follows:
         #                     386 jobs: (12+14+16)*60/3           = 840
         #   processor-independent jobs:
@@ -951,7 +1096,7 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
         self.assertEqual(5, builders_for_job(gcc_job))
 
     def test_proc_indep_virtual_true(self):
-        xxr_build, xxr_job = find_job(self, 'xxr-apt-build', None)
+        xxr_build, xxr_job = find_job(self, "xxr-apt-build", None)
         # The delay of 1802 seconds is calculated as follows:
         #                     386 jobs: 16*60                    = 960
         #   processor-independent jobs:
@@ -959,7 +1104,7 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
         check_estimate(self, xxr_job, 1802)
 
     def test_estimation_binary_virtual_long_queue(self):
-        gedit_build, gedit_job = find_job(self, 'gedit', '386')
+        gedit_build, gedit_job = find_job(self, "gedit", "386")
         # The delay of 1671 seconds is calculated as follows:
         #                     386 jobs:
         #       (4+6+8+10+12+14+16)*60/5                          = 840
@@ -968,26 +1113,26 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
         check_estimate(self, gedit_job, 1671)
 
     def test_proc_indep_virtual_null_headjob(self):
-        xxr_build, xxr_job = find_job(self, 'xxr-daptup', None)
+        xxr_build, xxr_job = find_job(self, "xxr-daptup", None)
         # This job is at the head of the queue for virtualized builders and
         # will get dispatched within the next 5 seconds.
         check_estimate(self, xxr_job, 5)
 
     def test_proc_indep_virtual_false(self):
-        xxr_build, xxr_job = find_job(self, 'xxr-aptitude', None)
+        xxr_build, xxr_job = find_job(self, "xxr-aptitude", None)
         # The delay of 1403 seconds is calculated as follows:
         #                    hppa jobs: (9+11+13+15)*60/3        = 960
         #   processor-independent jobs: 7:23                     = 443
         check_estimate(self, xxr_job, 1403)
 
     def test_proc_indep_virtual_false_headjob(self):
-        xxr_build, xxr_job = find_job(self, 'xxr-auto-apt', None)
+        xxr_build, xxr_job = find_job(self, "xxr-auto-apt", None)
         # This job is at the head of the queue for native builders and
         # will get dispatched within the next 5 seconds.
         check_estimate(self, xxr_job, 5)
 
     def test_estimation_binary_virtual_same_score(self):
-        vim_build, vim_job = find_job(self, 'vim', '386')
+        vim_build, vim_job = find_job(self, "vim", "386")
         # The apg job is ahead of the vim job.
         # The delay of 1527 seconds is calculated as follows:
         #                     386 jobs: (6+10+12+14+16)*60/5      = 696
@@ -998,32 +1143,32 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
     def test_no_builder_no_estimate(self):
         # No dispatch estimate is provided in the absence of builders that
         # can run the job of interest (JOI).
-        disable_builders(self, '386', True)
-        vim_build, vim_job = find_job(self, 'vim', '386')
+        disable_builders(self, "386", True)
+        vim_build, vim_job = find_job(self, "vim", "386")
         check_estimate(self, vim_job, None)
 
     def disabled_test_estimates_with_small_builder_pool(self):
         # Test that a reduced builder pool results in longer dispatch time
         # estimates.
-        vim_build, vim_job = find_job(self, 'vim', '386')
-        disable_builders(self, '386', True)
+        vim_build, vim_job = find_job(self, "vim", "386")
+        disable_builders(self, "386", True)
         # Re-enable one builder.
         builder = self.builders[(self.x86_proc.id, True)][0]
         builder.builderok = True
         # Dispatch the firefox job to it.
-        assign_to_builder(self, 'firefox', 1, '386')
+        assign_to_builder(self, "firefox", 1, "386")
         # Dispatch the head job, making postgres/386 the new head job and
         # resulting in a 240 seconds head job dispatch delay.
-        assign_to_builder(self, 'xxr-daptup', 1, None)
+        assign_to_builder(self, "xxr-daptup", 1, None)
         check_mintime_to_builder(self, vim_job, 240)
         # Re-enable another builder.
         builder = self.builders[(self.x86_proc.id, True)][1]
         builder.builderok = True
         # Assign a job to it.
-        assign_to_builder(self, 'gedit', 2, '386')
+        assign_to_builder(self, "gedit", 2, "386")
         check_mintime_to_builder(self, vim_job, 120)
 
-        xxr_build, xxr_job = find_job(self, 'xxr-apt', None)
+        xxr_build, xxr_job = find_job(self, "xxr-apt", None)
         # The delay of 2627+120 seconds is calculated as follows:
         #                     386 jobs : (6+10+12+14+16)*60/2     = 1740
         #   processor-independent jobs :
@@ -1035,13 +1180,13 @@ class TestJobDispatchTimeEstimation(MultiArchBuildsBase):
 
     def test_estimation_binary_virtual_headjob(self):
         # The head job only waits for the next builder to become available.
-        disable_builders(self, '386', True)
+        disable_builders(self, "386", True)
         # Re-enable one builder.
         builder = self.builders[(self.x86_proc.id, True)][0]
         builder.builderok = True
         # Assign a job to it.
-        assign_to_builder(self, 'gedit', 1, '386')
+        assign_to_builder(self, "gedit", 1, "386")
         # Dispatch the head job, making postgres/386 the new head job.
-        assign_to_builder(self, 'xxr-daptup', 2, None)
-        postgres_build, postgres_job = find_job(self, 'postgres', '386')
+        assign_to_builder(self, "xxr-daptup", 2, None)
+        postgres_build, postgres_job = find_job(self, "postgres", "386")
         check_estimate(self, postgres_job, 120)
diff --git a/lib/lp/buildmaster/tests/test_webservice.py b/lib/lp/buildmaster/tests/test_webservice.py
index bc96432..8b7fd7a 100644
--- a/lib/lp/buildmaster/tests/test_webservice.py
+++ b/lib/lp/buildmaster/tests/test_webservice.py
@@ -13,18 +13,15 @@ from lp.registry.interfaces.person import IPersonSet
 from lp.services.webapp import canonical_url
 from lp.services.webapp.interfaces import OAuthPermission
 from lp.testing import (
+    RequestTimelineCollector,
+    TestCaseWithFactory,
     admin_logged_in,
     api_url,
     logout,
-    RequestTimelineCollector,
-    TestCaseWithFactory,
-    )
+)
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import HasQueryCount
-from lp.testing.pages import (
-    LaunchpadWebServiceCaller,
-    webservice_for_person,
-    )
+from lp.testing.pages import LaunchpadWebServiceCaller, webservice_for_person
 
 
 class TestBuildersCollection(TestCaseWithFactory):
@@ -35,18 +32,21 @@ class TestBuildersCollection(TestCaseWithFactory):
         self.webservice = LaunchpadWebServiceCaller()
 
     def test_list(self):
-        names = ['bob', 'frog']
+        names = ["bob", "frog"]
         for i in range(3):
             builder = self.factory.makeBuilder()
             self.factory.makeBinaryPackageBuild().queueBuild().markAsBuilding(
-                builder)
+                builder
+            )
             names.append(builder.name)
         logout()
         with RequestTimelineCollector() as recorder:
             builders = self.webservice.get(
-                '/builders', api_version='devel').jsonBody()
+                "/builders", api_version="devel"
+            ).jsonBody()
         self.assertContentEqual(
-            names, [b['name'] for b in builders['entries']])
+            names, [b["name"] for b in builders["entries"]]
+        )
         self.assertThat(recorder, HasQueryCount(Equals(19)))
 
     def test_list_with_private_builds(self):
@@ -54,74 +54,94 @@ class TestBuildersCollection(TestCaseWithFactory):
         # current_build fields.
         with admin_logged_in():
             rbpb = self.factory.makeBinaryPackageBuild(
-                archive=self.factory.makeArchive(private=True))
+                archive=self.factory.makeArchive(private=True)
+            )
             rbpb.queueBuild().markAsBuilding(
-                self.factory.makeBuilder(name='restricted'))
+                self.factory.makeBuilder(name="restricted")
+            )
             bpb = self.factory.makeBinaryPackageBuild(
-                archive=self.factory.makeArchive(private=False))
+                archive=self.factory.makeArchive(private=False)
+            )
             bpb.queueBuild().markAsBuilding(
-                self.factory.makeBuilder(name='public'))
+                self.factory.makeBuilder(name="public")
+            )
             bpb_url = canonical_url(bpb, path_only_if_possible=True)
         logout()
 
         builders = self.webservice.get(
-            '/builders', api_version='devel').jsonBody()
+            "/builders", api_version="devel"
+        ).jsonBody()
         current_builds = {
-            b['name']: b['current_build_link'] for b in builders['entries']}
+            b["name"]: b["current_build_link"] for b in builders["entries"]
+        }
         self.assertEqual(
-            'tag:launchpad.net:2008:redacted', current_builds['restricted'])
+            "tag:launchpad.net:2008:redacted", current_builds["restricted"]
+        )
         self.assertEqual(
-            'http://api.launchpad.test/devel' + bpb_url,
-            current_builds['public'])
+            "http://api.launchpad.test/devel" + bpb_url,
+            current_builds["public"],
+        )
 
     def test_getBuildQueueSizes(self):
         logout()
         results = self.webservice.named_get(
-            '/builders', 'getBuildQueueSizes', api_version='devel')
+            "/builders", "getBuildQueueSizes", api_version="devel"
+        )
         self.assertEqual(
-            ['nonvirt', 'virt'], sorted(results.jsonBody().keys()))
+            ["nonvirt", "virt"], sorted(results.jsonBody().keys())
+        )
 
     def test_getBuildersForQueue(self):
-        g1 = self.factory.makeProcessor('g1')
-        quantum = self.factory.makeProcessor('quantum')
-        self.factory.makeBuilder(
-            processors=[quantum], name='quantum_builder1')
-        self.factory.makeBuilder(
-            processors=[quantum], name='quantum_builder2')
+        g1 = self.factory.makeProcessor("g1")
+        quantum = self.factory.makeProcessor("quantum")
+        self.factory.makeBuilder(processors=[quantum], name="quantum_builder1")
+        self.factory.makeBuilder(processors=[quantum], name="quantum_builder2")
         self.factory.makeBuilder(
-            processors=[quantum], name='quantum_builder3', virtualized=False)
+            processors=[quantum], name="quantum_builder3", virtualized=False
+        )
         self.factory.makeBuilder(
-            processors=[g1], name='g1_builder', virtualized=False)
+            processors=[g1], name="g1_builder", virtualized=False
+        )
 
         logout()
         results = self.webservice.named_get(
-            '/builders', 'getBuildersForQueue',
-            processor=api_url(quantum), virtualized=True,
-            api_version='devel').jsonBody()
+            "/builders",
+            "getBuildersForQueue",
+            processor=api_url(quantum),
+            virtualized=True,
+            api_version="devel",
+        ).jsonBody()
         self.assertEqual(
-            ['quantum_builder1', 'quantum_builder2'],
-            sorted(builder['name'] for builder in results['entries']))
+            ["quantum_builder1", "quantum_builder2"],
+            sorted(builder["name"] for builder in results["entries"]),
+        )
 
     def test_new(self):
         person = self.factory.makePerson()
-        badmins = getUtility(IPersonSet).getByName('launchpad-buildd-admins')
+        badmins = getUtility(IPersonSet).getByName("launchpad-buildd-admins")
         webservice = webservice_for_person(
-            person, permission=OAuthPermission.WRITE_PRIVATE)
+            person, permission=OAuthPermission.WRITE_PRIVATE
+        )
         args = dict(
-            name='foo', processors=['/+processors/386'], title='foobar',
-            url='http://foo.buildd:8221/', virtualized=False,
-            api_version='devel')
+            name="foo",
+            processors=["/+processors/386"],
+            title="foobar",
+            url="http://foo.buildd:8221/",
+            virtualized=False,
+            api_version="devel",
+        )
 
-        response = webservice.named_post('/builders', 'new', **args)
+        response = webservice.named_post("/builders", "new", **args)
         self.assertEqual(401, response.status)
 
         with admin_logged_in():
             badmins.addMember(person, badmins)
-        response = webservice.named_post('/builders', 'new', **args)
+        response = webservice.named_post("/builders", "new", **args)
         self.assertEqual(201, response.status)
 
         self.assertEqual(
-            'foobar', webservice.get('/builders/foo').jsonBody()['title'])
+            "foobar", webservice.get("/builders/foo").jsonBody()["title"]
+        )
 
 
 class TestBuilderEntry(TestCaseWithFactory):
@@ -139,62 +159,79 @@ class TestBuilderEntry(TestCaseWithFactory):
         builder = self.factory.makeBuilder()
         user = self.factory.makePerson()
         user_webservice = webservice_for_person(
-            user, permission=OAuthPermission.WRITE_PUBLIC)
-        clean_status_patch = dumps({'clean_status': 'Cleaning'})
+            user, permission=OAuthPermission.WRITE_PUBLIC
+        )
+        clean_status_patch = dumps({"clean_status": "Cleaning"})
         logout()
 
         # A normal user is unauthorized.
         response = user_webservice.patch(
-            api_url(builder), 'application/json',
-            clean_status_patch, api_version='devel')
+            api_url(builder),
+            "application/json",
+            clean_status_patch,
+            api_version="devel",
+        )
         self.assertEqual(401, response.status)
 
         # But a buildd admin can set the attribute.
         with admin_logged_in():
             buildd_admins = getUtility(IPersonSet).getByName(
-                'launchpad-buildd-admins')
+                "launchpad-buildd-admins"
+            )
             buildd_admins.addMember(user, buildd_admins.teamowner)
         response = user_webservice.patch(
-            api_url(builder), 'application/json',
-            clean_status_patch, api_version='devel')
+            api_url(builder),
+            "application/json",
+            clean_status_patch,
+            api_version="devel",
+        )
         self.assertEqual(209, response.status)
-        self.assertEqual('Cleaning', response.jsonBody()['clean_status'])
+        self.assertEqual("Cleaning", response.jsonBody()["clean_status"])
 
     def test_security_builder_reset(self):
-        builder = getUtility(IBuilderSet)['bob']
+        builder = getUtility(IBuilderSet)["bob"]
         person = self.factory.makePerson()
         user_webservice = webservice_for_person(
-            person, permission=OAuthPermission.WRITE_PUBLIC)
-        change_patch = dumps({'builderok': False, 'manual': False,
-                              'failnotes': 'test notes'})
+            person, permission=OAuthPermission.WRITE_PUBLIC
+        )
+        change_patch = dumps(
+            {"builderok": False, "manual": False, "failnotes": "test notes"}
+        )
         logout()
 
         # A normal user is unauthorized.
         response = user_webservice.patch(
-            api_url(builder), 'application/json', change_patch,
-            api_version='devel')
+            api_url(builder),
+            "application/json",
+            change_patch,
+            api_version="devel",
+        )
         self.assertEqual(401, response.status)
 
         # But a registry expert can set the attributes.
         with admin_logged_in():
-            reg_expert = getUtility(IPersonSet).getByName('registry')
+            reg_expert = getUtility(IPersonSet).getByName("registry")
             reg_expert.addMember(person, reg_expert)
         response = user_webservice.patch(
-            api_url(builder), 'application/json', change_patch,
-            api_version='devel')
+            api_url(builder),
+            "application/json",
+            change_patch,
+            api_version="devel",
+        )
         self.assertEqual(209, response.status)
-        self.assertEqual(False, response.jsonBody()['builderok'])
-        self.assertEqual(False, response.jsonBody()['manual'])
-        self.assertEqual('test notes', response.jsonBody()['failnotes'])
+        self.assertEqual(False, response.jsonBody()["builderok"])
+        self.assertEqual(False, response.jsonBody()["manual"])
+        self.assertEqual("test notes", response.jsonBody()["failnotes"])
 
     def test_exports_processor(self):
-        processor = self.factory.makeProcessor('s1')
+        processor = self.factory.makeProcessor("s1")
         builder = self.factory.makeBuilder(processors=[processor])
 
         logout()
         entry = self.webservice.get(
-            api_url(builder), api_version='devel').jsonBody()
-        self.assertEndsWith(entry['processor_link'], '/+processors/s1')
+            api_url(builder), api_version="devel"
+        ).jsonBody()
+        self.assertEndsWith(entry["processor_link"], "/+processors/s1")
 
     def test_getBuildRecords(self):
         builder = self.factory.makeBuilder()
@@ -203,11 +240,18 @@ class TestBuilderEntry(TestCaseWithFactory):
 
         logout()
         results = self.webservice.named_get(
-            api_url(builder), 'getBuildRecords', pocket='Release',
-            api_version='devel').jsonBody()
+            api_url(builder),
+            "getBuildRecords",
+            pocket="Release",
+            api_version="devel",
+        ).jsonBody()
         self.assertEqual(
-            [build_title], [entry['title'] for entry in results['entries']])
+            [build_title], [entry["title"] for entry in results["entries"]]
+        )
         results = self.webservice.named_get(
-            api_url(builder), 'getBuildRecords', pocket='Proposed',
-            api_version='devel').jsonBody()
-        self.assertEqual(0, len(results['entries']))
+            api_url(builder),
+            "getBuildRecords",
+            pocket="Proposed",
+            api_version="devel",
+        ).jsonBody()
+        self.assertEqual(0, len(results["entries"]))
diff --git a/lib/lp/buildmaster/vocabularies.py b/lib/lp/buildmaster/vocabularies.py
index 2752b44..c8c224f 100644
--- a/lib/lp/buildmaster/vocabularies.py
+++ b/lib/lp/buildmaster/vocabularies.py
@@ -4,8 +4,8 @@
 """Soyuz vocabularies."""
 
 __all__ = [
-    'ProcessorVocabulary',
-    ]
+    "ProcessorVocabulary",
+]
 
 from lp.buildmaster.model.processor import Processor
 from lp.services.webapp.vocabulary import NamedSQLObjectVocabulary
@@ -13,6 +13,6 @@ from lp.services.webapp.vocabulary import NamedSQLObjectVocabulary
 
 class ProcessorVocabulary(NamedSQLObjectVocabulary):
 
-    displayname = 'Select a processor'
+    displayname = "Select a processor"
     _table = Processor
-    _orderBy = 'name'
+    _orderBy = "name"