← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:black-services into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:black-services into launchpad:master.

Commit message:
lp.services: Apply black

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/427273
-- 
The attached diff has been truncated due to its size.
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-services into launchpad:master.
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index f03ace6..19c4d14 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -82,3 +82,5 @@ ed7d7b97b8fb4ebe92799f922b0fa9c4bd1714e8
 0a9f5f09fc0c930b73619e77524e60f2740595ed
 # apply black to lp.scripts
 66301b8ad409deeb9092dfe62c8e4ef6f3093302
+# apply black to lp.services
+4719b7aa672a2674c7fdbbde58772871b77c3301
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c678d42..d56d052 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -55,6 +55,7 @@ repos:
             |oci
             |registry
             |scripts
+            |services
           )/
 -   repo: https://github.com/PyCQA/isort
     rev: 5.9.2
@@ -86,6 +87,7 @@ repos:
             |oci
             |registry
             |scripts
+            |services
           )/
     -   id: isort
         alias: isort-black
@@ -107,6 +109,7 @@ repos:
             |oci
             |registry
             |scripts
+            |services
           )/
 -   repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
diff --git a/lib/lp/services/apachelogparser/base.py b/lib/lp/services/apachelogparser/base.py
index 6b3a15a..32ea7dc 100644
--- a/lib/lp/services/apachelogparser/base.py
+++ b/lib/lp/services/apachelogparser/base.py
@@ -1,18 +1,15 @@
 # Copyright 2009-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from datetime import datetime
 import gzip
 import os
 import struct
+from datetime import datetime
 
-from contrib import apachelog
-from lazr.uri import (
-    InvalidURIError,
-    URI,
-    )
 import pytz
 import six
+from contrib import apachelog
+from lazr.uri import URI, InvalidURIError
 from zope.component import getUtility
 
 from lp.services.apachelogparser.model.parsedapachelog import ParsedApacheLog
@@ -20,8 +17,7 @@ from lp.services.config import config
 from lp.services.database.interfaces import IStore
 from lp.services.geoip.interfaces import IGeoIP
 
-
-parser = apachelog.parser(apachelog.formats['extended'])
+parser = apachelog.parser(apachelog.formats["extended"])
 
 
 def get_files_to_parse(file_paths):
@@ -36,7 +32,7 @@ def get_files_to_parse(file_paths):
     file_paths = sorted(file_paths, key=lambda path: os.stat(path).st_mtime)
     for file_path in file_paths:
         fd, file_size = get_fd_and_file_size(file_path)
-        first_line = six.ensure_text(fd.readline(), errors='replace')
+        first_line = six.ensure_text(fd.readline(), errors="replace")
         parsed_file = store.find(ParsedApacheLog, first_line=first_line).one()
         position = 0
         if parsed_file is not None:
@@ -63,16 +59,16 @@ def get_fd_and_file_size(file_path):
     The file size returned is that of the uncompressed file, in case the given
     file_path points to a gzipped file.
     """
-    if file_path.endswith('.gz'):
+    if file_path.endswith(".gz"):
         # The last 4 bytes of the file contains the uncompressed file's
         # size, modulo 2**32.  This code is somewhat stolen from the gzip
         # module in Python 2.6.
         fd = gzip.open(file_path)
         fd.fileobj.seek(-4, os.SEEK_END)
-        file_size = struct.unpack('<I', fd.fileobj.read(4))[0]
+        file_size = struct.unpack("<I", fd.fileobj.read(4))[0]
         fd.fileobj.seek(0)
     else:
-        fd = open(file_path, 'rb')
+        fd = open(file_path, "rb")
         file_size = os.path.getsize(file_path)
     return fd, file_size
 
@@ -99,14 +95,15 @@ def parse_file(fd, start_position, logger, get_download_key, parsed_lines=0):
 
     # Check for an optional max_parsed_lines config option.
     max_parsed_lines = getattr(
-        config.launchpad, 'logparser_max_parsed_lines', None)
+        config.launchpad, "logparser_max_parsed_lines", None
+    )
 
     while next_line:
         if max_parsed_lines is not None and parsed_lines >= max_parsed_lines:
             break
 
         line = next_line
-        line_text = six.ensure_text(line, errors='replace')
+        line_text = six.ensure_text(line, errors="replace")
 
         # Always skip the last line as it may be truncated since we're
         # rsyncing live logs, unless there is only one line for us to
@@ -123,14 +120,15 @@ def parse_file(fd, start_position, logger, get_download_key, parsed_lines=0):
             parsed_lines += 1
             parsed_bytes += len(line)
             host, date, status, request = get_host_date_status_and_request(
-                line_text)
+                line_text
+            )
 
-            if status != '200':
+            if status != "200":
                 continue
 
             method, path = get_method_and_path(request)
 
-            if method != 'GET':
+            if method != "GET":
                 continue
 
             download_key = get_download_key(path)
@@ -163,17 +161,22 @@ def parse_file(fd, start_position, logger, get_download_key, parsed_lines=0):
             break
 
     if parsed_lines > 0:
-        logger.info('Parsed %d lines resulting in %d download stats.' % (
-            parsed_lines, len(downloads)))
+        logger.info(
+            "Parsed %d lines resulting in %d download stats."
+            % (parsed_lines, len(downloads))
+        )
 
     return downloads, parsed_bytes, parsed_lines
 
 
 def create_or_update_parsedlog_entry(first_line, parsed_bytes):
     """Create or update the ParsedApacheLog with the given first_line."""
-    first_line = six.ensure_text(first_line, errors='replace')
-    parsed_file = IStore(ParsedApacheLog).find(
-        ParsedApacheLog, first_line=first_line).one()
+    first_line = six.ensure_text(first_line, errors="replace")
+    parsed_file = (
+        IStore(ParsedApacheLog)
+        .find(ParsedApacheLog, first_line=first_line)
+        .one()
+    )
     if parsed_file is None:
         ParsedApacheLog(first_line, parsed_bytes)
     else:
@@ -194,26 +197,26 @@ def get_host_date_status_and_request(line):
     """Extract the host, date, status and request from the given line."""
     # The keys in the 'data' dictionary below are the Apache log format codes.
     data = parser.parse(line)
-    return data['%h'], data['%t'], data['%>s'], data['%r']
+    return data["%h"], data["%t"], data["%>s"], data["%r"]
 
 
 def get_method_and_path(request):
     """Extract the method of the request and path of the requested file."""
-    method, ignore, rest = request.partition(' ')
+    method, ignore, rest = request.partition(" ")
     # In the below, the common case is that `first` is the path and `last` is
     # the protocol.
-    first, ignore, last = rest.rpartition(' ')
-    if first == '':
+    first, ignore, last = rest.rpartition(" ")
+    if first == "":
         # HTTP 1.0 requests might omit the HTTP version so we cope with them.
         path = last
-    elif not last.startswith('HTTP'):
+    elif not last.startswith("HTTP"):
         # We cope with HTTP 1.0 protocol without HTTP version *and* a
         # space in the path (see bug 676489 for example).
         path = rest
     else:
         # This is the common case.
         path = first
-    if path.startswith('http://') or path.startswith('https://'):
+    if path.startswith("http://") or path.startswith("https://"):
         try:
             uri = URI(path)
             path = uri.path
diff --git a/lib/lp/services/apachelogparser/interfaces/parsedapachelog.py b/lib/lp/services/apachelogparser/interfaces/parsedapachelog.py
index 6d09815..a4ad8b0 100644
--- a/lib/lp/services/apachelogparser/interfaces/parsedapachelog.py
+++ b/lib/lp/services/apachelogparser/interfaces/parsedapachelog.py
@@ -1,14 +1,10 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['IParsedApacheLog']
+__all__ = ["IParsedApacheLog"]
 
 from zope.interface import Interface
-from zope.schema import (
-    Datetime,
-    Int,
-    TextLine,
-    )
+from zope.schema import Datetime, Int, TextLine
 
 from lp import _
 
@@ -20,9 +16,11 @@ class IParsedApacheLog(Interface):
     """
 
     first_line = TextLine(
-        title=_("The log file's first line"), required=True,
-        readonly=True)
+        title=_("The log file's first line"), required=True, readonly=True
+    )
     bytes_read = Int(
-        title=_('Number of bytes read'), required=True, readonly=False)
+        title=_("Number of bytes read"), required=True, readonly=False
+    )
     date_last_parsed = Datetime(
-        title=_('Date last parsed'), required=False, readonly=False)
+        title=_("Date last parsed"), required=False, readonly=False
+    )
diff --git a/lib/lp/services/apachelogparser/model/parsedapachelog.py b/lib/lp/services/apachelogparser/model/parsedapachelog.py
index 50d8889..0afc4c1 100644
--- a/lib/lp/services/apachelogparser/model/parsedapachelog.py
+++ b/lib/lp/services/apachelogparser/model/parsedapachelog.py
@@ -1,19 +1,15 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['ParsedApacheLog']
+__all__ = ["ParsedApacheLog"]
 
 import six
-from storm.locals import (
-    Int,
-    Storm,
-    Unicode,
-    )
+from storm.locals import Int, Storm, Unicode
 from zope.interface import implementer
 
 from lp.services.apachelogparser.interfaces.parsedapachelog import (
     IParsedApacheLog,
-    )
+)
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.interfaces import IStore
@@ -22,7 +18,8 @@ from lp.services.database.interfaces import IStore
 @implementer(IParsedApacheLog)
 class ParsedApacheLog(Storm):
     """See `IParsedApacheLog`"""
-    __storm_table__ = 'ParsedApacheLog'
+
+    __storm_table__ = "ParsedApacheLog"
 
     id = Int(primary=True)
     first_line = Unicode(allow_none=False)
diff --git a/lib/lp/services/apachelogparser/script.py b/lib/lp/services/apachelogparser/script.py
index c5faf6e..8f89fb0 100644
--- a/lib/lp/services/apachelogparser/script.py
+++ b/lib/lp/services/apachelogparser/script.py
@@ -1,7 +1,7 @@
 # Copyright 2009-2017 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['ParseApacheLogs']
+__all__ = ["ParseApacheLogs"]
 
 import glob
 import os
@@ -13,7 +13,7 @@ from lp.services.apachelogparser.base import (
     create_or_update_parsedlog_entry,
     get_files_to_parse,
     parse_file,
-    )
+)
 from lp.services.config import config
 from lp.services.scripts.base import LaunchpadCronScript
 from lp.services.worlddata.interfaces.country import ICountrySet
@@ -30,7 +30,7 @@ class ParseApacheLogs(LaunchpadCronScript):
     """
 
     # Glob to restrict filenames that are parsed.
-    log_file_glob = '*'
+    log_file_glob = "*"
 
     def setUpUtilities(self):
         """Prepare any utilities that might be used many times."""
@@ -74,20 +74,25 @@ class ParseApacheLogs(LaunchpadCronScript):
         # files. Note that we still error if a file we want to parse
         # disappears before we get around to parsing it, which is
         # desirable behaviour.
-        files_to_parse = list(get_files_to_parse(
-            glob.glob(os.path.join(self.root, self.log_file_glob))))
+        files_to_parse = list(
+            get_files_to_parse(
+                glob.glob(os.path.join(self.root, self.log_file_glob))
+            )
+        )
 
         country_set = getUtility(ICountrySet)
         parsed_lines = 0
         max_parsed_lines = getattr(
-            config.launchpad, 'logparser_max_parsed_lines', None)
+            config.launchpad, "logparser_max_parsed_lines", None
+        )
         max_is_set = max_parsed_lines is not None
         for fd, position in files_to_parse:
             # If we've used up our budget of lines to process, stop.
-            if (max_is_set and parsed_lines >= max_parsed_lines):
+            if max_is_set and parsed_lines >= max_parsed_lines:
                 break
             downloads, parsed_bytes, parsed_lines = parse_file(
-                fd, position, self.logger, self.getDownloadKey)
+                fd, position, self.logger, self.getDownloadKey
+            )
             # Use a while loop here because we want to pop items from the dict
             # in order to free some memory as we go along. This is a good
             # thing here because the downloads dict may get really huge.
@@ -114,7 +119,7 @@ class ParseApacheLogs(LaunchpadCronScript):
             fd.close()
             create_or_update_parsedlog_entry(first_line, parsed_bytes)
             self.txn.commit()
-            name = getattr(fd, 'name', fd)
-            self.logger.info('Finished parsing %s' % name)
+            name = getattr(fd, "name", fd)
+            self.logger.info("Finished parsing %s" % name)
 
-        self.logger.info('Done parsing apache log files')
+        self.logger.info("Done parsing apache log files")
diff --git a/lib/lp/services/apachelogparser/tests/test_apachelogparser.py b/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
index 48af8bd..b820eb1 100644
--- a/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
+++ b/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
@@ -1,12 +1,12 @@
 # Copyright 2009-2019 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from datetime import datetime
 import gzip
 import io
 import os
 import tempfile
 import time
+from datetime import datetime
 
 from fixtures import TempDir
 
@@ -18,7 +18,7 @@ from lp.services.apachelogparser.base import (
     get_host_date_status_and_request,
     get_method_and_path,
     parse_file,
-    )
+)
 from lp.services.apachelogparser.model.parsedapachelog import ParsedApacheLog
 from lp.services.config import config
 from lp.services.database.interfaces import IStore
@@ -27,11 +27,7 @@ from lp.services.log.logger import BufferLogger
 from lp.services.osutils import write_file
 from lp.testing import TestCase
 from lp.testing.dbuser import switch_dbuser
-from lp.testing.layers import (
-    LaunchpadZopelessLayer,
-    ZopelessLayer,
-    )
-
+from lp.testing.layers import LaunchpadZopelessLayer, ZopelessLayer
 
 here = os.path.dirname(__file__)
 
@@ -41,104 +37,120 @@ class TestLineParsing(TestCase):
 
     def test_return_value(self):
         fd = open(
-            os.path.join(here, 'apache-log-files', 'librarian-oneline.log'))
+            os.path.join(here, "apache-log-files", "librarian-oneline.log")
+        )
         host, date, status, request = get_host_date_status_and_request(
-            fd.readline())
-        self.assertEqual(host, '201.158.154.121')
-        self.assertEqual(date, '[13/Jun/2008:18:38:57 +0100]')
-        self.assertEqual(status, '200')
+            fd.readline()
+        )
+        self.assertEqual(host, "201.158.154.121")
+        self.assertEqual(date, "[13/Jun/2008:18:38:57 +0100]")
+        self.assertEqual(status, "200")
         self.assertEqual(
-            request, 'GET /15166065/gnome-do-0.5.0.1.tar.gz HTTP/1.1')
+            request, "GET /15166065/gnome-do-0.5.0.1.tar.gz HTTP/1.1"
+        )
 
     def test_parsing_line_with_quotes_inside_user_agent_and_referrer(self):
         # Some lines have quotes as part of the referrer and/or user agent,
         # and they are parsed just fine too.
-        line = (r'84.113.215.193 - - [25/Jan/2009:15:48:07 +0000] "GET '
-                r'/10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
-                r'"http://foo.bar/?baz=\"bang\"" '
-                r'"\"Nokia2630/2.0 (05.20) Profile/MIDP-2.1 '
-                r'Configuration/CLDC-1.1\""')
+        line = (
+            r'84.113.215.193 - - [25/Jan/2009:15:48:07 +0000] "GET '
+            r'/10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
+            r'"http://foo.bar/?baz=\"bang\"" '
+            r'"\"Nokia2630/2.0 (05.20) Profile/MIDP-2.1 '
+            r'Configuration/CLDC-1.1\""'
+        )
         host, date, status, request = get_host_date_status_and_request(line)
-        self.assertEqual(host, '84.113.215.193')
-        self.assertEqual(date, '[25/Jan/2009:15:48:07 +0000]')
-        self.assertEqual(status, '200')
+        self.assertEqual(host, "84.113.215.193")
+        self.assertEqual(date, "[25/Jan/2009:15:48:07 +0000]")
+        self.assertEqual(status, "200")
         self.assertEqual(
-            request, 'GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0')
+            request, "GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0"
+        )
 
     def test_parsing_line_with_spaces_in_username(self):
         # Some lines have spaces in the username, left unquoted by
         # Apache. They can still be parsed OK, since no other fields
         # have similar issues.
-        line = (r'1.1.1.1 - Some User [25/Jan/2009:15:48:07 +0000] "GET '
-                r'/10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
-                r'"http://foo.bar/?baz=\"bang\"" '
-                r'"\"Nokia2630/2.0 (05.20) Profile/MIDP-2.1 '
-                r'Configuration/CLDC-1.1\""')
+        line = (
+            r'1.1.1.1 - Some User [25/Jan/2009:15:48:07 +0000] "GET '
+            r'/10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
+            r'"http://foo.bar/?baz=\"bang\"" '
+            r'"\"Nokia2630/2.0 (05.20) Profile/MIDP-2.1 '
+            r'Configuration/CLDC-1.1\""'
+        )
         host, date, status, request = get_host_date_status_and_request(line)
-        self.assertEqual(host, '1.1.1.1')
-        self.assertEqual(date, '[25/Jan/2009:15:48:07 +0000]')
-        self.assertEqual(status, '200')
+        self.assertEqual(host, "1.1.1.1")
+        self.assertEqual(date, "[25/Jan/2009:15:48:07 +0000]")
+        self.assertEqual(status, "200")
         self.assertEqual(
-            request, 'GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0')
+            request, "GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0"
+        )
 
     def test_parsing_line_with_ipv6_address(self):
         # IPv6 addresses in the hostname field are parsed.
-        line = (r'2001:67c:1560:8003::8003 - - [25/Jan/2009:15:48:07 +0000] '
-                r'"GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
-                r'"http://foo.bar/baz" "Mozilla/5.0"')
+        line = (
+            r"2001:67c:1560:8003::8003 - - [25/Jan/2009:15:48:07 +0000] "
+            r'"GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0" 200 12341 '
+            r'"http://foo.bar/baz" "Mozilla/5.0"'
+        )
         host, date, status, request = get_host_date_status_and_request(line)
-        self.assertEqual(host, '2001:67c:1560:8003::8003')
-        self.assertEqual(date, '[25/Jan/2009:15:48:07 +0000]')
-        self.assertEqual(status, '200')
+        self.assertEqual(host, "2001:67c:1560:8003::8003")
+        self.assertEqual(date, "[25/Jan/2009:15:48:07 +0000]")
+        self.assertEqual(status, "200")
         self.assertEqual(
-            request, 'GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0')
+            request, "GET /10133748/cramfsswap_1.4.1.tar.gz HTTP/1.0"
+        )
 
     def test_day_extraction(self):
-        date = '[13/Jun/2008:18:38:57 +0100]'
+        date = "[13/Jun/2008:18:38:57 +0100]"
         self.assertEqual(get_day(date), datetime(2008, 6, 13))
 
     def test_parsing_path_with_missing_protocol(self):
-        request = (r'GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?'
-                   r'N\x1f\x9b')
+        request = (
+            r"GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?" r"N\x1f\x9b"
+        )
         method, path = get_method_and_path(request)
-        self.assertEqual(method, 'GET')
+        self.assertEqual(method, "GET")
         self.assertEqual(
-            path,
-            r'/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b')
+            path, r"/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b"
+        )
 
     def test_parsing_path_with_space(self):
         # See bug 676489.
-        request = (r'GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?'
-                   r'N\x1f\x9b Z%7B... HTTP/1.0')
+        request = (
+            r"GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?"
+            r"N\x1f\x9b Z%7B... HTTP/1.0"
+        )
         method, path = get_method_and_path(request)
-        self.assertEqual(method, 'GET')
+        self.assertEqual(method, "GET")
         self.assertEqual(
             path,
-            r'/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b Z%7B...')
+            r"/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b Z%7B...",
+        )
 
     def test_parsing_path_with_space_and_missing_protocol(self):
         # This is a variation of bug 676489.
-        request = (r'GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?'
-                   r'N\x1f\x9b Z%7B...')
+        request = (
+            r"GET /56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?"
+            r"N\x1f\x9b Z%7B..."
+        )
         method, path = get_method_and_path(request)
-        self.assertEqual(method, 'GET')
+        self.assertEqual(method, "GET")
         self.assertEqual(
             path,
-            r'/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b Z%7B...')
+            r"/56222647/deluge-gtk_1.3.0-0ubuntu1_all.deb?N\x1f\x9b Z%7B...",
+        )
 
     def test_parsing_invalid_url(self):
         # An invalid URL should just be treated as a path, not cause an
         # exception.
-        request = r'GET http://blah/1234/fewfwfw GET http://blah HTTP/1.0'
+        request = r"GET http://blah/1234/fewfwfw GET http://blah HTTP/1.0"
         method, path = get_method_and_path(request)
-        self.assertEqual(method, 'GET')
-        self.assertEqual(
-            path,
-            r'http://blah/1234/fewfwfw GET http://blah')
+        self.assertEqual(method, "GET")
+        self.assertEqual(path, r"http://blah/1234/fewfwfw GET http://blah")
 
 
 class Test_get_fd_and_file_size(TestCase):
-
     def _ensureFileSizeIsCorrect(self, file_path):
         """Ensure the file size returned is correct.
 
@@ -151,13 +163,14 @@ class Test_get_fd_and_file_size(TestCase):
 
     def test_regular_file(self):
         file_path = os.path.join(
-            here, 'apache-log-files', 'librarian-oneline.log')
+            here, "apache-log-files", "librarian-oneline.log"
+        )
         self._ensureFileSizeIsCorrect(file_path)
 
     def test_gzip_file(self):
         file_path = os.path.join(
-            here, 'apache-log-files',
-            'launchpadlibrarian.net.access-log.1.gz')
+            here, "apache-log-files", "launchpadlibrarian.net.access-log.1.gz"
+        )
         self._ensureFileSizeIsCorrect(file_path)
 
 
@@ -173,7 +186,8 @@ class TestLogFileParsing(TestCase):
         '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "%(method)s '
         '/15018215/ul_logo_64x64.png HTTP/1.1" %(status)s 2261 '
         '"https://launchpad.net/~ubuntulite/+archive" "Mozilla/5.0 (X11; '
-        'U; Linux i686; en-US; rv:1.9b5) Gecko/2008041514 Firefox/3.0b5"')
+        'U; Linux i686; en-US; rv:1.9b5) Gecko/2008041514 Firefox/3.0b5"'
+    )
 
     def setUp(self):
         TestCase.setUp(self)
@@ -200,20 +214,29 @@ class TestLogFileParsing(TestCase):
         # it doesn't show up in the dict returned.
         fd = open(
             os.path.join(
-                here, 'apache-log-files', 'launchpadlibrarian.net.access-log'),
-            'rb')
+                here, "apache-log-files", "launchpadlibrarian.net.access-log"
+            ),
+            "rb",
+        )
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertEqual(
             self.logger.getLogBuffer().strip(),
-            'INFO Parsed 5 lines resulting in 3 download stats.')
+            "INFO Parsed 5 lines resulting in 3 download stats.",
+        )
         date = datetime(2008, 6, 13)
         self.assertContentEqual(
             downloads.items(),
-            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
-             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1, 'JP': 1}}),
-             ('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
+            [
+                ("/12060796/me-tv-icon-64x64.png", {date: {"AU": 1}}),
+                ("/8196569/mediumubuntulogo.png", {date: {"AR": 1, "JP": 1}}),
+                ("/9096290/me-tv-icon-14x14.png", {date: {"AU": 1}}),
+            ],
+        )
 
         # The last line is skipped, so we'll record that the file has been
         # parsed until the beginning of the last line.
@@ -226,93 +249,128 @@ class TestLogFileParsing(TestCase):
         # line without worrying about whether or not it's been truncated.
         fd = open(
             os.path.join(
-                here, 'apache-log-files', 'launchpadlibrarian.net.access-log'),
-            'rb')
+                here, "apache-log-files", "launchpadlibrarian.net.access-log"
+            ),
+            "rb",
+        )
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=self._getLastLineStart(fd), logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=self._getLastLineStart(fd),
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertEqual(
             self.logger.getLogBuffer().strip(),
-            'INFO Parsed 1 lines resulting in 1 download stats.')
+            "INFO Parsed 1 lines resulting in 1 download stats.",
+        )
         self.assertEqual(parsed_bytes, fd.tell())
 
         self.assertContentEqual(
             downloads.items(),
-            [('/15018215/ul_logo_64x64.png',
-              {datetime(2008, 6, 13): {'US': 1}})])
+            [
+                (
+                    "/15018215/ul_logo_64x64.png",
+                    {datetime(2008, 6, 13): {"US": 1}},
+                )
+            ],
+        )
 
     def test_unexpected_error_while_parsing(self):
         # When there's an unexpected error, we log it and return as if we had
         # parsed up to the line before the one where the failure occurred.
         # Here we force an unexpected error on the first line.
-        fd = io.BytesIO(b'Not a log')
+        fd = io.BytesIO(b"Not a log")
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
-        self.assertIn('Error', self.logger.getLogBuffer())
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
+        self.assertIn("Error", self.logger.getLogBuffer())
         self.assertEqual(downloads, {})
         self.assertEqual(parsed_bytes, 0)
 
     def _assertResponseWithGivenStatusIsIgnored(self, status):
         """Assert that responses with the given status are ignored."""
         fd = io.BytesIO(
-            (self.sample_line % dict(status=status, method='GET')).encode(
-                'UTF-8'))
+            (self.sample_line % dict(status=status, method="GET")).encode(
+                "UTF-8"
+            )
+        )
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertEqual(
             self.logger.getLogBuffer().strip(),
-            'INFO Parsed 1 lines resulting in 0 download stats.')
+            "INFO Parsed 1 lines resulting in 0 download stats.",
+        )
         self.assertEqual(downloads, {})
         self.assertEqual(parsed_bytes, fd.tell())
 
     def test_responses_with_404_status_are_ignored(self):
-        self._assertResponseWithGivenStatusIsIgnored('404')
+        self._assertResponseWithGivenStatusIsIgnored("404")
 
     def test_responses_with_206_status_are_ignored(self):
-        self._assertResponseWithGivenStatusIsIgnored('206')
+        self._assertResponseWithGivenStatusIsIgnored("206")
 
     def test_responses_with_304_status_are_ignored(self):
-        self._assertResponseWithGivenStatusIsIgnored('304')
+        self._assertResponseWithGivenStatusIsIgnored("304")
 
     def test_responses_with_503_status_are_ignored(self):
-        self._assertResponseWithGivenStatusIsIgnored('503')
+        self._assertResponseWithGivenStatusIsIgnored("503")
 
     def _assertRequestWithGivenMethodIsIgnored(self, method):
         """Assert that requests with the given method are ignored."""
         fd = io.BytesIO(
-            (self.sample_line % dict(status='200', method=method)).encode(
-                'UTF-8'))
+            (self.sample_line % dict(status="200", method=method)).encode(
+                "UTF-8"
+            )
+        )
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertEqual(
             self.logger.getLogBuffer().strip(),
-            'INFO Parsed 1 lines resulting in 0 download stats.')
+            "INFO Parsed 1 lines resulting in 0 download stats.",
+        )
         self.assertEqual(downloads, {})
         self.assertEqual(parsed_bytes, fd.tell())
 
     def test_HEAD_request_is_ignored(self):
-        self._assertRequestWithGivenMethodIsIgnored('HEAD')
+        self._assertRequestWithGivenMethodIsIgnored("HEAD")
 
     def test_POST_request_is_ignored(self):
-        self._assertRequestWithGivenMethodIsIgnored('POST')
+        self._assertRequestWithGivenMethodIsIgnored("POST")
 
     def test_normal_request_is_not_ignored(self):
         fd = io.BytesIO(
-            (self.sample_line % dict(status=200, method='GET')).encode(
-                'UTF-8'))
+            (self.sample_line % dict(status=200, method="GET")).encode("UTF-8")
+        )
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertEqual(
             self.logger.getLogBuffer().strip(),
-            'INFO Parsed 1 lines resulting in 1 download stats.')
+            "INFO Parsed 1 lines resulting in 1 download stats.",
+        )
 
-        self.assertEqual(downloads,
-            {'/15018215/ul_logo_64x64.png':
-                {datetime(2008, 6, 13): {'US': 1}}})
+        self.assertEqual(
+            downloads,
+            {
+                "/15018215/ul_logo_64x64.png": {
+                    datetime(2008, 6, 13): {"US": 1}
+                }
+            },
+        )
 
         self.assertEqual(parsed_bytes, fd.tell())
 
@@ -320,18 +378,23 @@ class TestLogFileParsing(TestCase):
         # The max_parsed_lines config option limits the number of parsed
         # lines.
         config.push(
-            'log_parser config',
-            '[launchpad]\nlogparser_max_parsed_lines: 2')
-        self.addCleanup(config.pop, 'log_parser config')
+            "log_parser config", "[launchpad]\nlogparser_max_parsed_lines: 2"
+        )
+        self.addCleanup(config.pop, "log_parser config")
         fd = open(
             os.path.join(
-                here, 'apache-log-files', 'launchpadlibrarian.net.access-log'),
-            'rb')
+                here, "apache-log-files", "launchpadlibrarian.net.access-log"
+            ),
+            "rb",
+        )
         self.addCleanup(fd.close)
 
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=0, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=0,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
 
         # We have initially parsed only the first two lines of data,
         # corresponding to one download (the first line is a 404 and
@@ -340,7 +403,8 @@ class TestLogFileParsing(TestCase):
         date = datetime(2008, 6, 13)
         self.assertContentEqual(
             downloads.items(),
-            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
+            [("/9096290/me-tv-icon-14x14.png", {date: {"AU": 1}})],
+        )
         fd.seek(0)
         lines = fd.readlines()
         line_lengths = [len(line) for line in lines]
@@ -349,12 +413,18 @@ class TestLogFileParsing(TestCase):
         # And the subsequent parse will be for the 3rd and 4th lines,
         # corresponding to two downloads of the same file.
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=parsed_bytes, logger=self.logger,
-            get_download_key=get_path_download_key)
+            fd,
+            start_position=parsed_bytes,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+        )
         self.assertContentEqual(
             downloads.items(),
-            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
-             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1}})])
+            [
+                ("/12060796/me-tv-icon-64x64.png", {date: {"AU": 1}}),
+                ("/8196569/mediumubuntulogo.png", {date: {"AR": 1}}),
+            ],
+        )
         self.assertEqual(parsed_bytes, sum(line_lengths[:4]))
 
     def test_max_parsed_lines_exceeded(self):
@@ -364,13 +434,15 @@ class TestLogFileParsing(TestCase):
         # The max_parsed_lines config option limits the number of parsed
         # lines.
         config.push(
-            'log_parser config',
-            '[launchpad]\nlogparser_max_parsed_lines: 2')
-        self.addCleanup(config.pop, 'log_parser config')
+            "log_parser config", "[launchpad]\nlogparser_max_parsed_lines: 2"
+        )
+        self.addCleanup(config.pop, "log_parser config")
         fd = open(
             os.path.join(
-                here, 'apache-log-files', 'launchpadlibrarian.net.access-log'),
-            'rb')
+                here, "apache-log-files", "launchpadlibrarian.net.access-log"
+            ),
+            "rb",
+        )
         self.addCleanup(fd.close)
 
         # We want to start parsing on line 2 so we will have a value in
@@ -384,8 +456,12 @@ class TestLogFileParsing(TestCase):
         # of lines to parse has been reached.
         parsed_lines = 1
         downloads, parsed_bytes, parsed_lines = parse_file(
-            fd, start_position=start_position, logger=self.logger,
-            get_download_key=get_path_download_key, parsed_lines=parsed_lines)
+            fd,
+            start_position=start_position,
+            logger=self.logger,
+            get_download_key=get_path_download_key,
+            parsed_lines=parsed_lines,
+        )
 
         # The total number of lines parsed during the run (1 line) plus the
         # number of lines parsed previously (1 line, as passed in via
@@ -396,7 +472,8 @@ class TestLogFileParsing(TestCase):
         date = datetime(2008, 6, 13)
         self.assertContentEqual(
             downloads.items(),
-            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
+            [("/9096290/me-tv-icon-14x14.png", {date: {"AU": 1}})],
+        )
 
 
 class TestParsedFilesDetection(TestCase):
@@ -404,8 +481,8 @@ class TestParsedFilesDetection(TestCase):
 
     layer = LaunchpadZopelessLayer
     # The directory in which the sample log files live.
-    root = os.path.join(here, 'apache-log-files')
-    file_path = os.path.join(root, 'launchpadlibrarian.net.access-log')
+    root = os.path.join(here, "apache-log-files")
+    file_path = os.path.join(root, "launchpadlibrarian.net.access-log")
 
     def setUp(self):
         super().setUp()
@@ -417,14 +494,14 @@ class TestParsedFilesDetection(TestCase):
         file_paths = [root.join(str(name)) for name in range(3)]
         now = time.time()
         for i, path in enumerate(file_paths):
-            write_file(path, ('%s\n' % i).encode('UTF-8'))
+            write_file(path, ("%s\n" % i).encode("UTF-8"))
             os.utime(path, (now - i, now - i))
         contents = []
         for fd, _ in get_files_to_parse(file_paths):
             fd.seek(0)
             contents.append(fd.read())
             fd.close()
-        self.assertEqual([b'2\n', b'1\n', b'0\n'], contents)
+        self.assertEqual([b"2\n", b"1\n", b"0\n"], contents)
 
     def test_not_parsed_file(self):
         # A file that has never been parsed will have to be parsed from the
@@ -466,14 +543,14 @@ class TestParsedFilesDetection(TestCase):
         # with a name matching that of an already parsed file but with content
         # differing from the last file with that name parsed, we know we need
         # to parse the file from the start.
-        ParsedApacheLog('First line', bytes_read=1000)
+        ParsedApacheLog("First line", bytes_read=1000)
 
         # This file has the same name of the previous one (which has been
         # parsed already), but its first line is different, so we'll have to
         # parse it from the start.
         fd, new_path = tempfile.mkstemp()
-        content2 = 'Different First Line\nSecond Line'
-        with open(new_path, 'w') as fd:
+        content2 = "Different First Line\nSecond Line"
+        with open(new_path, "w") as fd:
             fd.write(content2)
         files_to_parse = get_files_to_parse([new_path])
         positions = []
@@ -485,7 +562,7 @@ class TestParsedFilesDetection(TestCase):
     def test_fresh_gzipped_file(self):
         # get_files_to_parse() handles gzipped files just like uncompressed
         # ones.  The first time we see one, we'll parse from the beginning.
-        gz_name = 'launchpadlibrarian.net.access-log.1.gz'
+        gz_name = "launchpadlibrarian.net.access-log.1.gz"
         gz_path = os.path.join(self.root, gz_name)
         files_to_parse = get_files_to_parse([gz_path])
         positions = []
@@ -497,7 +574,7 @@ class TestParsedFilesDetection(TestCase):
     def test_resumed_gzipped_file(self):
         # In subsequent runs of the script we will resume from where we
         # stopped last time. (Here we pretend we parsed only the first line)
-        gz_name = 'launchpadlibrarian.net.access-log.1.gz'
+        gz_name = "launchpadlibrarian.net.access-log.1.gz"
         gz_path = os.path.join(self.root, gz_name)
         first_line = gzip.open(gz_path).readline()
         ParsedApacheLog(first_line, len(first_line))
@@ -522,12 +599,16 @@ class Test_create_or_update_parsedlog_entry(TestCase):
         # When given a first_line that doesn't exist in the ParsedApacheLog
         # table, create_or_update_parsedlog_entry() will create a new entry
         # with the given number of bytes read.
-        first_line = 'First line'
+        first_line = "First line"
         create_or_update_parsedlog_entry(
-            first_line, parsed_bytes=len(first_line))
-
-        entry = IStore(ParsedApacheLog).find(
-            ParsedApacheLog, first_line=first_line).one()
+            first_line, parsed_bytes=len(first_line)
+        )
+
+        entry = (
+            IStore(ParsedApacheLog)
+            .find(ParsedApacheLog, first_line=first_line)
+            .one()
+        )
         self.assertIsNot(None, entry)
         self.assertEqual(entry.bytes_read, len(first_line))
 
@@ -535,7 +616,7 @@ class Test_create_or_update_parsedlog_entry(TestCase):
         # When given a first_line that already exists in the ParsedApacheLog
         # table, create_or_update_parsedlog_entry() will update that entry
         # with the given number of bytes read.
-        first_line = 'First line'
+        first_line = "First line"
         create_or_update_parsedlog_entry(first_line, parsed_bytes=2)
         store = IStore(ParsedApacheLog)
         entry = store.find(ParsedApacheLog, first_line=first_line).one()
@@ -545,7 +626,8 @@ class Test_create_or_update_parsedlog_entry(TestCase):
         self.assertEqual(entry.bytes_read, 2)
 
         create_or_update_parsedlog_entry(
-            first_line, parsed_bytes=len(first_line))
+            first_line, parsed_bytes=len(first_line)
+        )
 
         # And here we see that same entry was updated by the second call to
         # create_or_update_parsedlog_entry().
diff --git a/lib/lp/services/auth/browser.py b/lib/lp/services/auth/browser.py
index 5c9a35d..c704026 100644
--- a/lib/lp/services/auth/browser.py
+++ b/lib/lp/services/auth/browser.py
@@ -5,27 +5,18 @@
 
 __all__ = [
     "AccessTokensView",
-    ]
+]
 
-from lazr.restful.interface import (
-    copy_field,
-    use_template,
-    )
+from lazr.restful.interface import copy_field, use_template
 from zope.component import getUtility
 from zope.interface import Interface
 
 from lp import _
-from lp.app.browser.launchpadform import (
-    action,
-    LaunchpadFormView,
-    )
+from lp.app.browser.launchpadform import LaunchpadFormView, action
 from lp.app.errors import UnexpectedFormData
 from lp.app.widgets.date import DateTimeWidget
 from lp.app.widgets.itemswidgets import LabeledMultiCheckBoxWidget
-from lp.services.auth.interfaces import (
-    IAccessToken,
-    IAccessTokenSet,
-    )
+from lp.services.auth.interfaces import IAccessToken, IAccessTokenSet
 from lp.services.propertycache import cachedproperty
 from lp.services.webapp.publisher import canonical_url
 
@@ -33,14 +24,18 @@ from lp.services.webapp.publisher import canonical_url
 class IAccessTokenCreateSchema(Interface):
     """Schema for creating a personal access token."""
 
-    use_template(IAccessToken, include=[
-        "description",
-        "scopes",
-        ])
+    use_template(
+        IAccessToken,
+        include=[
+            "description",
+            "scopes",
+        ],
+    )
 
     date_expires = copy_field(
         IAccessToken["date_expires"],
-        description=_("When the token should expire."))
+        description=_("When the token should expire."),
+    )
 
 
 class AccessTokensView(LaunchpadFormView):
@@ -57,8 +52,11 @@ class AccessTokensView(LaunchpadFormView):
 
     @cachedproperty
     def access_tokens(self):
-        return list(getUtility(IAccessTokenSet).findByTarget(
-            self.context, visible_by_user=self.user))
+        return list(
+            getUtility(IAccessTokenSet).findByTarget(
+                self.context, visible_by_user=self.user
+            )
+        )
 
     @action("Revoke", name="revoke")
     def revoke_action(self, action, data):
@@ -71,10 +69,13 @@ class AccessTokensView(LaunchpadFormView):
         except ValueError:
             raise UnexpectedFormData("token_id is not an integer")
         token = getUtility(IAccessTokenSet).getByTargetAndID(
-            self.context, token_id, visible_by_user=self.user)
+            self.context, token_id, visible_by_user=self.user
+        )
         if token is not None:
             token.revoke(self.user)
             self.request.response.addInfoNotification(
-                "Token revoked successfully.")
+                "Token revoked successfully."
+            )
         self.request.response.redirect(
-            canonical_url(self.context, view_name="+access-tokens"))
+            canonical_url(self.context, view_name="+access-tokens")
+        )
diff --git a/lib/lp/services/auth/enums.py b/lib/lp/services/auth/enums.py
index c590856..9ce4cbb 100644
--- a/lib/lp/services/auth/enums.py
+++ b/lib/lp/services/auth/enums.py
@@ -5,25 +5,26 @@
 
 __all__ = [
     "AccessTokenScope",
-    ]
+]
 
-from lazr.enum import (
-    EnumeratedType,
-    Item,
-    )
+from lazr.enum import EnumeratedType, Item
 
 
 class AccessTokenScope(EnumeratedType):
     """A scope specifying the capabilities of an access token."""
 
-    REPOSITORY_BUILD_STATUS = Item("""
+    REPOSITORY_BUILD_STATUS = Item(
+        """
         repository:build_status
 
         Can see and update the build status for all commits in a repository.
-        """)
+        """
+    )
 
-    REPOSITORY_PUSH = Item("""
+    REPOSITORY_PUSH = Item(
+        """
         repository:push
 
         Can push to a repository.
-        """)
+        """
+    )
diff --git a/lib/lp/services/auth/interfaces.py b/lib/lp/services/auth/interfaces.py
index 7a9927a..471d458 100644
--- a/lib/lp/services/auth/interfaces.py
+++ b/lib/lp/services/auth/interfaces.py
@@ -8,9 +8,10 @@ __all__ = [
     "IAccessTokenSet",
     "IAccessTokenTarget",
     "IAccessTokenVerifiedRequest",
-    ]
+]
 
 from lazr.restful.declarations import (
+    REQUEST_USER,
     call_with,
     export_read_operation,
     export_write_operation,
@@ -18,18 +19,10 @@ from lazr.restful.declarations import (
     exported_as_webservice_entry,
     operation_for_version,
     operation_returns_collection_of,
-    REQUEST_USER,
-    )
+)
 from lazr.restful.fields import Reference
 from zope.interface import Interface
-from zope.schema import (
-    Bool,
-    Choice,
-    Datetime,
-    Int,
-    List,
-    TextLine,
-    )
+from zope.schema import Bool, Choice, Datetime, Int, List, TextLine
 
 from lp import _
 from lp.services.auth.enums import AccessTokenScope
@@ -46,56 +39,96 @@ class IAccessToken(Interface):
 
     id = Int(title=_("ID"), required=True, readonly=True)
 
-    date_created = exported(Datetime(
-        title=_("Creation date"),
-        description=_("When the token was created."),
-        required=True, readonly=True))
+    date_created = exported(
+        Datetime(
+            title=_("Creation date"),
+            description=_("When the token was created."),
+            required=True,
+            readonly=True,
+        )
+    )
 
-    owner = exported(PublicPersonChoice(
-        title=_("Owner"),
-        description=_("The person who created the token."),
-        vocabulary="ValidPersonOrTeam", required=True, readonly=True))
+    owner = exported(
+        PublicPersonChoice(
+            title=_("Owner"),
+            description=_("The person who created the token."),
+            vocabulary="ValidPersonOrTeam",
+            required=True,
+            readonly=True,
+        )
+    )
 
-    description = exported(TextLine(
-        title=_("Description"),
-        description=_("A short description of the token."), required=True))
+    description = exported(
+        TextLine(
+            title=_("Description"),
+            description=_("A short description of the token."),
+            required=True,
+        )
+    )
 
     git_repository = Reference(
         title=_("Git repository"),
         description=_("The Git repository for which the token was issued."),
         # Really IGitRepository, patched in _schema_circular_imports.py.
-        schema=Interface, required=True, readonly=True)
-
-    target = exported(Reference(
-        title=_("Target"),
-        description=_("The target for which the token was issued."),
-        # Really IAccessTokenTarget, patched in _schema_circular_imports.py.
-        schema=Interface, required=True, readonly=True))
-
-    scopes = exported(List(
-        value_type=Choice(vocabulary=AccessTokenScope),
-        title=_("Scopes"),
-        description=_("A list of scopes granted by the token."),
-        required=True, readonly=True))
-
-    date_last_used = exported(Datetime(
-        title=_("Date last used"),
-        description=_("When the token was last used."),
-        required=False, readonly=True))
-
-    date_expires = exported(Datetime(
-        title=_("Expiry date"),
-        description=_("When the token should expire or was revoked."),
-        required=False, readonly=True))
+        schema=Interface,
+        required=True,
+        readonly=True,
+    )
+
+    target = exported(
+        Reference(
+            title=_("Target"),
+            description=_("The target for which the token was issued."),
+            # Really IAccessTokenTarget, patched below.
+            schema=Interface,
+            required=True,
+            readonly=True,
+        )
+    )
+
+    scopes = exported(
+        List(
+            value_type=Choice(vocabulary=AccessTokenScope),
+            title=_("Scopes"),
+            description=_("A list of scopes granted by the token."),
+            required=True,
+            readonly=True,
+        )
+    )
+
+    date_last_used = exported(
+        Datetime(
+            title=_("Date last used"),
+            description=_("When the token was last used."),
+            required=False,
+            readonly=True,
+        )
+    )
+
+    date_expires = exported(
+        Datetime(
+            title=_("Expiry date"),
+            description=_("When the token should expire or was revoked."),
+            required=False,
+            readonly=True,
+        )
+    )
 
     is_expired = Bool(
         description=_("Whether this token has expired."),
-        required=False, readonly=True)
+        required=False,
+        readonly=True,
+    )
 
-    revoked_by = exported(PublicPersonChoice(
-        title=_("Revoked by"),
-        description=_("The person who revoked the token, if any."),
-        vocabulary="ValidPersonOrTeam", required=False, readonly=True))
+    revoked_by = exported(
+        PublicPersonChoice(
+            title=_("Revoked by"),
+            description=_("The person who revoked the token, if any."),
+            vocabulary="ValidPersonOrTeam",
+            required=False,
+            readonly=True,
+        )
+    )
 
     def updateLastUsed():
         """Update this token's last-used date, if possible."""
diff --git a/lib/lp/services/auth/model.py b/lib/lp/services/auth/model.py
index aa1ff11..3cb6cb2 100644
--- a/lib/lp/services/auth/model.py
+++ b/lib/lp/services/auth/model.py
@@ -6,30 +6,15 @@
 __all__ = [
     "AccessToken",
     "AccessTokenTargetMixin",
-    ]
+]
 
-from datetime import (
-    datetime,
-    timedelta,
-    )
 import hashlib
+from datetime import datetime, timedelta
 
 import pytz
 from storm.databases.postgres import JSON
-from storm.expr import (
-    And,
-    Cast,
-    Or,
-    Select,
-    SQL,
-    Update,
-    )
-from storm.locals import (
-    DateTime,
-    Int,
-    Reference,
-    Unicode,
-    )
+from storm.expr import SQL, And, Cast, Or, Select, Update
+from storm.locals import DateTime, Int, Reference, Unicode
 from zope.component import getUtility
 from zope.interface import implementer
 from zope.security.proxy import removeSecurityProxy
@@ -38,15 +23,9 @@ from lp.code.interfaces.gitcollection import IAllGitRepositories
 from lp.code.interfaces.gitrepository import IGitRepository
 from lp.registry.model.teammembership import TeamParticipation
 from lp.services.auth.enums import AccessTokenScope
-from lp.services.auth.interfaces import (
-    IAccessToken,
-    IAccessTokenSet,
-    )
+from lp.services.auth.interfaces import IAccessToken, IAccessTokenSet
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.interfaces import (
-    IMasterStore,
-    IStore,
-    )
+from lp.services.database.interfaces import IMasterStore, IStore
 from lp.services.database.stormbase import StormBase
 
 
@@ -59,7 +38,8 @@ class AccessToken(StormBase):
     id = Int(primary=True)
 
     date_created = DateTime(
-        name="date_created", tzinfo=pytz.UTC, allow_none=False)
+        name="date_created", tzinfo=pytz.UTC, allow_none=False
+    )
 
     _token_sha256 = Unicode(name="token_sha256", allow_none=False)
 
@@ -74,17 +54,20 @@ class AccessToken(StormBase):
     _scopes = JSON(name="scopes", allow_none=False)
 
     date_last_used = DateTime(
-        name="date_last_used", tzinfo=pytz.UTC, allow_none=True)
+        name="date_last_used", tzinfo=pytz.UTC, allow_none=True
+    )
     date_expires = DateTime(
-        name="date_expires", tzinfo=pytz.UTC, allow_none=True)
+        name="date_expires", tzinfo=pytz.UTC, allow_none=True
+    )
 
     revoked_by_id = Int(name="revoked_by", allow_none=True)
     revoked_by = Reference(revoked_by_id, "Person.id")
 
     resolution = timedelta(minutes=10)
 
-    def __init__(self, secret, owner, description, target, scopes,
-                 date_expires=None):
+    def __init__(
+        self, secret, owner, description, target, scopes, date_expires=None
+    ):
         """Construct an `AccessToken`."""
         self._token_sha256 = hashlib.sha256(secret.encode()).hexdigest()
         self.owner = owner
@@ -107,7 +90,8 @@ class AccessToken(StormBase):
         """See `IAccessToken`."""
         return [
             AccessTokenScope.getTermByToken(scope).value
-            for scope in self._scopes]
+            for scope in self._scopes
+        ]
 
     @scopes.setter
     def scopes(self, scopes):
@@ -117,21 +101,30 @@ class AccessToken(StormBase):
     def updateLastUsed(self):
         """See `IAccessToken`."""
         store = IMasterStore(AccessToken)
-        store.execute(Update(
-            {AccessToken.date_last_used: UTC_NOW},
-            where=And(
-                # Skip the update if the AccessToken row is already locked,
-                # for example by a concurrent request using the same token.
-                AccessToken.id.is_in(SQL(
-                    "SELECT id FROM AccessToken WHERE id = ? "
-                    "FOR UPDATE SKIP LOCKED", params=(self.id,))),
-                # Only update the last-used date every so often, to avoid
-                # bloat.
-                Or(
-                    AccessToken.date_last_used == None,
-                    AccessToken.date_last_used <
-                        UTC_NOW - Cast(self.resolution, 'interval'))),
-            table=AccessToken))
+        store.execute(
+            Update(
+                {AccessToken.date_last_used: UTC_NOW},
+                where=And(
+                    # Skip the update if the AccessToken row is already locked,
+                    # for example by a concurrent request using the same token.
+                    AccessToken.id.is_in(
+                        SQL(
+                            "SELECT id FROM AccessToken WHERE id = ? "
+                            "FOR UPDATE SKIP LOCKED",
+                            params=(self.id,),
+                        )
+                    ),
+                    # Only update the last-used date every so often, to avoid
+                    # bloat.
+                    Or(
+                        AccessToken.date_last_used == None,
+                        AccessToken.date_last_used
+                        < UTC_NOW - Cast(self.resolution, "interval"),
+                    ),
+                ),
+                table=AccessToken,
+            )
+        )
         store.invalidate(self)
 
     @property
@@ -147,22 +140,32 @@ class AccessToken(StormBase):
 
 @implementer(IAccessTokenSet)
 class AccessTokenSet:
-
-    def new(self, secret, owner, description, target, scopes,
-            date_expires=None):
+    def new(
+        self, secret, owner, description, target, scopes, date_expires=None
+    ):
         """See `IAccessTokenSet`."""
         store = IStore(AccessToken)
         token = AccessToken(
-            secret, owner, description, target, scopes,
-            date_expires=date_expires)
+            secret,
+            owner,
+            description,
+            target,
+            scopes,
+            date_expires=date_expires,
+        )
         store.add(token)
         return token
 
     def getBySecret(self, secret):
         """See `IAccessTokenSet`."""
-        return IStore(AccessToken).find(
-            AccessToken,
-            _token_sha256=hashlib.sha256(secret.encode()).hexdigest()).one()
+        return (
+            IStore(AccessToken)
+            .find(
+                AccessToken,
+                _token_sha256=hashlib.sha256(secret.encode()).hexdigest(),
+            )
+            .one()
+        )
 
     def findByOwner(self, owner):
         """See `IAccessTokenSet`."""
@@ -174,27 +177,47 @@ class AccessTokenSet:
         if IGitRepository.providedBy(target):
             clauses.append(AccessToken.git_repository == target)
             if visible_by_user is not None:
-                collection = getUtility(IAllGitRepositories).visibleByUser(
-                    visible_by_user).ownedByTeamMember(visible_by_user)
+                collection = (
+                    getUtility(IAllGitRepositories)
+                    .visibleByUser(visible_by_user)
+                    .ownedByTeamMember(visible_by_user)
+                )
                 ids = collection.getRepositoryIds()
-                clauses.append(Or(
-                    AccessToken.owner_id.is_in(Select(
-                        TeamParticipation.teamID,
-                        where=TeamParticipation.person == visible_by_user.id)),
-                    AccessToken.git_repository_id.is_in(
-                        removeSecurityProxy(ids)._get_select())))
+                clauses.append(
+                    Or(
+                        AccessToken.owner_id.is_in(
+                            Select(
+                                TeamParticipation.teamID,
+                                where=TeamParticipation.person
+                                == visible_by_user.id,
+                            )
+                        ),
+                        AccessToken.git_repository_id.is_in(
+                            removeSecurityProxy(ids)._get_select()
+                        ),
+                    )
+                )
         else:
             raise TypeError("Unsupported target: {!r}".format(target))
-        clauses.append(Or(
-            AccessToken.date_expires == None,
-            AccessToken.date_expires > UTC_NOW))
-        return IStore(AccessToken).find(AccessToken, *clauses).order_by(
-            AccessToken.date_created)
+        clauses.append(
+            Or(
+                AccessToken.date_expires == None,
+                AccessToken.date_expires > UTC_NOW,
+            )
+        )
+        return (
+            IStore(AccessToken)
+            .find(AccessToken, *clauses)
+            .order_by(AccessToken.date_created)
+        )
 
     def getByTargetAndID(self, target, token_id, visible_by_user=None):
         """See `IAccessTokenSet`."""
-        return self.findByTarget(target, visible_by_user=visible_by_user).find(
-            id=token_id).one()
+        return (
+            self.findByTarget(target, visible_by_user=visible_by_user)
+            .find(id=token_id)
+            .one()
+        )
 
 
 class AccessTokenTargetMixin:
@@ -203,4 +226,5 @@ class AccessTokenTargetMixin:
     def getAccessTokens(self, visible_by_user=None):
         """See `IAccessTokenTarget`."""
         return getUtility(IAccessTokenSet).findByTarget(
-            self, visible_by_user=visible_by_user)
+            self, visible_by_user=visible_by_user
+        )
diff --git a/lib/lp/services/auth/security.py b/lib/lp/services/auth/security.py
index 6edf291..a12eb20 100644
--- a/lib/lp/services/auth/security.py
+++ b/lib/lp/services/auth/security.py
@@ -13,7 +13,7 @@ from lp.services.auth.interfaces import IAccessToken
 
 
 class EditAccessToken(AuthorizationBase):
-    permission = 'launchpad.Edit'
+    permission = "launchpad.Edit"
     usedfor = IAccessToken
 
     def checkAuthenticated(self, user):
@@ -24,7 +24,8 @@ class EditAccessToken(AuthorizationBase):
         # allows target owners to exercise some control over access to their
         # object.
         adapter = queryAdapter(
-            self.obj.target, IAuthorization, 'launchpad.Edit')
+            self.obj.target, IAuthorization, "launchpad.Edit"
+        )
         if adapter is not None and adapter.checkAuthenticated(user):
             return True
         return False
diff --git a/lib/lp/services/auth/tests/test_browser.py b/lib/lp/services/auth/tests/test_browser.py
index c98de78..9921188 100644
--- a/lib/lp/services/auth/tests/test_browser.py
+++ b/lib/lp/services/auth/tests/test_browser.py
@@ -13,28 +13,28 @@ from testtools.matchers import (
     MatchesListwise,
     MatchesStructure,
     Not,
-    )
+)
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.services.webapp.interfaces import IPlacelessAuthUtility
 from lp.services.webapp.publisher import canonical_url
-from lp.testing import (
-    login_person,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, login_person
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.views import create_view
 
-
 breadcrumbs_tag = soupmatchers.Tag(
-    "breadcrumbs", "ol", attrs={"class": "breadcrumbs"})
+    "breadcrumbs", "ol", attrs={"class": "breadcrumbs"}
+)
 tokens_page_crumb_tag = soupmatchers.Tag(
-    "tokens page breadcrumb", "li", text=re.compile(r"Personal access tokens"))
+    "tokens page breadcrumb", "li", text=re.compile(r"Personal access tokens")
+)
 token_listing_constants = soupmatchers.HTMLContains(
-    soupmatchers.Within(breadcrumbs_tag, tokens_page_crumb_tag))
+    soupmatchers.Within(breadcrumbs_tag, tokens_page_crumb_tag)
+)
 token_listing_tag = soupmatchers.Tag(
-    "tokens table", "table", attrs={"class": "listing"})
+    "tokens table", "table", attrs={"class": "listing"}
+)
 
 
 class TestAccessTokenViewBase:
@@ -55,13 +55,19 @@ class TestAccessTokenViewBase:
         # in create_view instead, but that approach needs care to avoid
         # adding an extra query to tests that might be sensitive to that.
         principal = getUtility(IPlacelessAuthUtility).getPrincipal(
-            self.owner.accountID)
+            self.owner.accountID
+        )
         view = create_view(
-            self.target, name, principal=principal, current_request=True,
-            **kwargs)
+            self.target,
+            name,
+            principal=principal,
+            current_request=True,
+            **kwargs,
+        )
         # To test the breadcrumbs we need a correct traversal stack.
-        view.request.traversed_objects = (
-            self.getTraversalStack(self.target) + [view])
+        view.request.traversed_objects = self.getTraversalStack(
+            self.target
+        ) + [view]
         # The navigation menu machinery needs this to find the view from the
         # request.
         view.request._last_obj_traversed = view
@@ -71,7 +77,8 @@ class TestAccessTokenViewBase:
     def test_access_tokens_link(self):
         target_url = canonical_url(self.target, rootsite="code")
         expected_tokens_url = canonical_url(
-            self.target, view_name="+access-tokens", rootsite="code")
+            self.target, view_name="+access-tokens", rootsite="code"
+        )
         browser = self.getUserBrowser(target_url, user=self.owner)
         tokens_link = browser.getLink("Manage access tokens")
         self.assertEqual(expected_tokens_url, tokens_link.url)
@@ -79,23 +86,30 @@ class TestAccessTokenViewBase:
     def makeTokensAndMatchers(self, count):
         tokens = [
             self.factory.makeAccessToken(target=self.target)[1]
-            for _ in range(count)]
+            for _ in range(count)
+        ]
         # There is a row for each token.
         matchers = []
         for token in tokens:
             row_tag = soupmatchers.Tag(
-                "token row", "tr",
-                attrs={"token-id": removeSecurityProxy(token).id})
+                "token row",
+                "tr",
+                attrs={"token-id": removeSecurityProxy(token).id},
+            )
             column_tags = [
+                soupmatchers.Tag("description", "td", text=token.description),
                 soupmatchers.Tag(
-                    "description", "td", text=token.description),
-                soupmatchers.Tag(
-                    "scopes", "td",
-                    text=", ".join(scope.title for scope in token.scopes)),
+                    "scopes",
+                    "td",
+                    text=", ".join(scope.title for scope in token.scopes),
+                ),
+            ]
+            matchers.extend(
+                [
+                    soupmatchers.Within(row_tag, column_tag)
+                    for column_tag in column_tags
                 ]
-            matchers.extend([
-                soupmatchers.Within(row_tag, column_tag)
-                for column_tag in column_tags])
+            )
         return matchers
 
     def test_empty(self):
@@ -103,7 +117,9 @@ class TestAccessTokenViewBase:
             self.makeView("+access-tokens")(),
             MatchesAll(
                 token_listing_constants,
-                soupmatchers.HTMLContains(token_listing_tag)))
+                soupmatchers.HTMLContains(token_listing_tag),
+            ),
+        )
 
     def test_existing_tokens(self):
         token_matchers = self.makeTokensAndMatchers(10)
@@ -111,47 +127,70 @@ class TestAccessTokenViewBase:
             self.makeView("+access-tokens")(),
             MatchesAll(
                 token_listing_constants,
-                soupmatchers.HTMLContains(token_listing_tag, *token_matchers)))
+                soupmatchers.HTMLContains(token_listing_tag, *token_matchers),
+            ),
+        )
 
     def test_revoke(self):
         tokens = [
             self.factory.makeAccessToken(target=self.target)[1]
-            for _ in range(3)]
+            for _ in range(3)
+        ]
         token_ids = [token.id for token in tokens]
         access_tokens_url = canonical_url(
-            self.target, view_name="+access-tokens")
+            self.target, view_name="+access-tokens"
+        )
         browser = self.getUserBrowser(access_tokens_url, user=self.owner)
         for token_id in token_ids:
             self.assertThat(
                 browser.getForm(name="revoke-%s" % token_id).controls,
-                MatchesListwise([
-                    MatchesStructure.byEquality(
-                        type="hidden", name="token_id", value=str(token_id)),
-                    MatchesStructure.byEquality(
-                        type="submit", name="field.actions.revoke",
-                        value="Revoke"),
-                    ]))
+                MatchesListwise(
+                    [
+                        MatchesStructure.byEquality(
+                            type="hidden", name="token_id", value=str(token_id)
+                        ),
+                        MatchesStructure.byEquality(
+                            type="submit",
+                            name="field.actions.revoke",
+                            value="Revoke",
+                        ),
+                    ]
+                ),
+            )
         browser.getForm(name="revoke-%s" % token_ids[1]).getControl(
-            "Revoke").click()
+            "Revoke"
+        ).click()
         login_person(self.owner)
         self.assertEqual(access_tokens_url, browser.url)
-        self.assertThat(tokens[0], MatchesStructure(
-            id=Equals(token_ids[0]),
-            date_expires=Is(None),
-            revoked_by=Is(None)))
-        self.assertThat(tokens[1], MatchesStructure(
-            id=Equals(token_ids[1]),
-            date_expires=Not(Is(None)),
-            revoked_by=Equals(self.owner)))
-        self.assertThat(tokens[2], MatchesStructure(
-            id=Equals(token_ids[2]),
-            date_expires=Is(None),
-            revoked_by=Is(None)))
+        self.assertThat(
+            tokens[0],
+            MatchesStructure(
+                id=Equals(token_ids[0]),
+                date_expires=Is(None),
+                revoked_by=Is(None),
+            ),
+        )
+        self.assertThat(
+            tokens[1],
+            MatchesStructure(
+                id=Equals(token_ids[1]),
+                date_expires=Not(Is(None)),
+                revoked_by=Equals(self.owner),
+            ),
+        )
+        self.assertThat(
+            tokens[2],
+            MatchesStructure(
+                id=Equals(token_ids[2]),
+                date_expires=Is(None),
+                revoked_by=Is(None),
+            ),
+        )
 
 
 class TestAccessTokenViewGitRepository(
-        TestAccessTokenViewBase, TestCaseWithFactory):
-
+    TestAccessTokenViewBase, TestCaseWithFactory
+):
     def makeTarget(self):
         return self.factory.makeGitRepository()
 
diff --git a/lib/lp/services/auth/tests/test_model.py b/lib/lp/services/auth/tests/test_model.py
index 82d44b0..14f32cc 100644
--- a/lib/lp/services/auth/tests/test_model.py
+++ b/lib/lp/services/auth/tests/test_model.py
@@ -3,21 +3,15 @@
 
 """Test personal access tokens."""
 
-from datetime import (
-    datetime,
-    timedelta,
-    )
 import hashlib
 import os
 import signal
+from datetime import datetime, timedelta
 
 import pytz
-from storm.store import Store
-from testtools.matchers import (
-    Is,
-    MatchesStructure,
-    )
 import transaction
+from storm.store import Store
+from testtools.matchers import Is, MatchesStructure
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
@@ -27,17 +21,17 @@ from lp.services.auth.utils import create_access_token_secret
 from lp.services.database.sqlbase import (
     disconnect_stores,
     get_transaction_timestamp,
-    )
+)
 from lp.services.webapp.authorization import check_permission
 from lp.services.webapp.interfaces import OAuthPermission
 from lp.testing import (
+    TestCaseWithFactory,
     api_url,
     login,
     login_person,
     person_logged_in,
     record_two_runs,
-    TestCaseWithFactory,
-    )
+)
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 from lp.testing.pages import webservice_for_person
@@ -151,21 +145,28 @@ class TestAccessToken(TestCaseWithFactory):
         _, current_token = self.factory.makeAccessToken(owner=owner)
         _, expired_token = self.factory.makeAccessToken(
             owner=owner,
-            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1))
+            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1),
+        )
         self.assertFalse(current_token.is_expired)
         self.assertTrue(expired_token.is_expired)
 
     def test_revoke(self):
         owner = self.factory.makePerson()
         _, token = self.factory.makeAccessToken(
-            owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS])
+            owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS]
+        )
         login_person(owner)
-        self.assertThat(token, MatchesStructure(
-            date_expires=Is(None), revoked_by=Is(None)))
+        self.assertThat(
+            token, MatchesStructure(date_expires=Is(None), revoked_by=Is(None))
+        )
         token.revoke(token.owner)
         now = get_transaction_timestamp(Store.of(token))
-        self.assertThat(token, MatchesStructure.byEquality(
-            date_expires=now, revoked_by=token.owner))
+        self.assertThat(
+            token,
+            MatchesStructure.byEquality(
+                date_expires=now, revoked_by=token.owner
+            ),
+        )
 
 
 class TestAccessTokenSet(TestCaseWithFactory):
@@ -180,21 +181,33 @@ class TestAccessTokenSet(TestCaseWithFactory):
         target = self.factory.makeGitRepository()
         scopes = [AccessTokenScope.REPOSITORY_BUILD_STATUS]
         _, token = self.factory.makeAccessToken(
-            secret=secret, owner=owner, description=description, target=target,
-            scopes=scopes)
+            secret=secret,
+            owner=owner,
+            description=description,
+            target=target,
+            scopes=scopes,
+        )
         self.assertThat(
-            removeSecurityProxy(token), MatchesStructure.byEquality(
+            removeSecurityProxy(token),
+            MatchesStructure.byEquality(
                 _token_sha256=hashlib.sha256(secret.encode()).hexdigest(),
-                owner=owner, description=description, target=target,
-                scopes=scopes))
+                owner=owner,
+                description=description,
+                target=target,
+                scopes=scopes,
+            ),
+        )
 
     def test_getBySecret(self):
         secret, token = self.factory.makeAccessToken()
         self.assertEqual(
-            token, getUtility(IAccessTokenSet).getBySecret(secret))
+            token, getUtility(IAccessTokenSet).getBySecret(secret)
+        )
         self.assertIsNone(
             getUtility(IAccessTokenSet).getBySecret(
-                create_access_token_secret()))
+                create_access_token_secret()
+            )
+        )
 
     def test_findByOwner(self):
         owners = [self.factory.makePerson() for _ in range(3)]
@@ -202,13 +215,16 @@ class TestAccessTokenSet(TestCaseWithFactory):
             self.factory.makeAccessToken(owner=owners[0])[1],
             self.factory.makeAccessToken(owner=owners[0])[1],
             self.factory.makeAccessToken(owner=owners[1])[1],
-            ]
+        ]
         self.assertContentEqual(
-            tokens[:2], getUtility(IAccessTokenSet).findByOwner(owners[0]))
+            tokens[:2], getUtility(IAccessTokenSet).findByOwner(owners[0])
+        )
         self.assertContentEqual(
-            [tokens[2]], getUtility(IAccessTokenSet).findByOwner(owners[1]))
+            [tokens[2]], getUtility(IAccessTokenSet).findByOwner(owners[1])
+        )
         self.assertContentEqual(
-            [], getUtility(IAccessTokenSet).findByOwner(owners[2]))
+            [], getUtility(IAccessTokenSet).findByOwner(owners[2])
+        )
 
     def test_findByTarget(self):
         targets = [self.factory.makeGitRepository() for _ in range(3)]
@@ -216,51 +232,65 @@ class TestAccessTokenSet(TestCaseWithFactory):
             self.factory.makeAccessToken(target=targets[0])[1],
             self.factory.makeAccessToken(target=targets[0])[1],
             self.factory.makeAccessToken(target=targets[1])[1],
-            ]
+        ]
         self.assertContentEqual(
-            tokens[:2], getUtility(IAccessTokenSet).findByTarget(targets[0]))
+            tokens[:2], getUtility(IAccessTokenSet).findByTarget(targets[0])
+        )
         self.assertContentEqual(
-            [tokens[2]], getUtility(IAccessTokenSet).findByTarget(targets[1]))
+            [tokens[2]], getUtility(IAccessTokenSet).findByTarget(targets[1])
+        )
         self.assertContentEqual(
-            [], getUtility(IAccessTokenSet).findByTarget(targets[2]))
+            [], getUtility(IAccessTokenSet).findByTarget(targets[2])
+        )
 
     def test_findByTarget_visible_by_user(self):
         targets = [self.factory.makeGitRepository() for _ in range(3)]
         owners = [self.factory.makePerson() for _ in range(3)]
         tokens = [
             self.factory.makeAccessToken(
-                owner=owners[owner_index], target=targets[target_index])[1]
+                owner=owners[owner_index], target=targets[target_index]
+            )[1]
             for owner_index, target_index in (
-                (0, 0), (0, 0), (1, 0), (1, 1), (2, 1))]
+                (0, 0),
+                (0, 0),
+                (1, 0),
+                (1, 1),
+                (2, 1),
+            )
+        ]
         for owner_index, target_index, expected_tokens in (
-                (0, 0, tokens[:2]),
-                (0, 1, []),
-                (0, 2, []),
-                (1, 0, [tokens[2]]),
-                (1, 1, [tokens[3]]),
-                (1, 2, []),
-                (2, 0, []),
-                (2, 1, [tokens[4]]),
-                (2, 2, []),
-                ):
+            (0, 0, tokens[:2]),
+            (0, 1, []),
+            (0, 2, []),
+            (1, 0, [tokens[2]]),
+            (1, 1, [tokens[3]]),
+            (1, 2, []),
+            (2, 0, []),
+            (2, 1, [tokens[4]]),
+            (2, 2, []),
+        ):
             self.assertContentEqual(
                 expected_tokens,
                 getUtility(IAccessTokenSet).findByTarget(
-                    targets[target_index],
-                    visible_by_user=owners[owner_index]))
+                    targets[target_index], visible_by_user=owners[owner_index]
+                ),
+            )
 
     def test_findByTarget_excludes_expired(self):
         target = self.factory.makeGitRepository()
         _, current_token = self.factory.makeAccessToken(target=target)
         _, expires_soon_token = self.factory.makeAccessToken(
             target=target,
-            date_expires=datetime.now(pytz.UTC) + timedelta(hours=1))
+            date_expires=datetime.now(pytz.UTC) + timedelta(hours=1),
+        )
         _, expired_token = self.factory.makeAccessToken(
             target=target,
-            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1))
+            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1),
+        )
         self.assertContentEqual(
             [current_token, expires_soon_token],
-            getUtility(IAccessTokenSet).findByTarget(target))
+            getUtility(IAccessTokenSet).findByTarget(target),
+        )
 
     def test_getByTargetAndID(self):
         targets = [self.factory.makeGitRepository() for _ in range(3)]
@@ -268,42 +298,57 @@ class TestAccessTokenSet(TestCaseWithFactory):
             self.factory.makeAccessToken(target=targets[0])[1],
             self.factory.makeAccessToken(target=targets[0])[1],
             self.factory.makeAccessToken(target=targets[1])[1],
-            ]
+        ]
         self.assertEqual(
             tokens[0],
             getUtility(IAccessTokenSet).getByTargetAndID(
-                targets[0], removeSecurityProxy(tokens[0]).id))
+                targets[0], removeSecurityProxy(tokens[0]).id
+            ),
+        )
         self.assertEqual(
             tokens[1],
             getUtility(IAccessTokenSet).getByTargetAndID(
-                targets[0], removeSecurityProxy(tokens[1]).id))
+                targets[0], removeSecurityProxy(tokens[1]).id
+            ),
+        )
         self.assertIsNone(
             getUtility(IAccessTokenSet).getByTargetAndID(
-                targets[0], removeSecurityProxy(tokens[2]).id))
+                targets[0], removeSecurityProxy(tokens[2]).id
+            )
+        )
 
     def test_getByTargetAndID_visible_by_user(self):
         targets = [self.factory.makeGitRepository() for _ in range(3)]
         owners = [self.factory.makePerson() for _ in range(3)]
         tokens = [
             self.factory.makeAccessToken(
-                owner=owners[owner_index], target=targets[target_index])[1]
+                owner=owners[owner_index], target=targets[target_index]
+            )[1]
             for owner_index, target_index in (
-                (0, 0), (0, 0), (1, 0), (1, 1), (2, 1))]
+                (0, 0),
+                (0, 0),
+                (1, 0),
+                (1, 1),
+                (2, 1),
+            )
+        ]
         for owner_index, target_index, expected_tokens in (
-                (0, 0, tokens[:2]),
-                (0, 1, []),
-                (0, 2, []),
-                (1, 0, [tokens[2]]),
-                (1, 1, [tokens[3]]),
-                (1, 2, []),
-                (2, 0, []),
-                (2, 1, [tokens[4]]),
-                (2, 2, []),
-                ):
+            (0, 0, tokens[:2]),
+            (0, 1, []),
+            (0, 2, []),
+            (1, 0, [tokens[2]]),
+            (1, 1, [tokens[3]]),
+            (1, 2, []),
+            (2, 0, []),
+            (2, 1, [tokens[4]]),
+            (2, 2, []),
+        ):
             for token in tokens:
                 fetched_token = getUtility(IAccessTokenSet).getByTargetAndID(
-                    targets[target_index], removeSecurityProxy(token).id,
-                    visible_by_user=owners[owner_index])
+                    targets[target_index],
+                    removeSecurityProxy(token).id,
+                    visible_by_user=owners[owner_index],
+                )
                 if token in expected_tokens:
                     self.assertEqual(token, fetched_token)
                 else:
@@ -314,21 +359,29 @@ class TestAccessTokenSet(TestCaseWithFactory):
         _, current_token = self.factory.makeAccessToken(target=target)
         _, expires_soon_token = self.factory.makeAccessToken(
             target=target,
-            date_expires=datetime.now(pytz.UTC) + timedelta(hours=1))
+            date_expires=datetime.now(pytz.UTC) + timedelta(hours=1),
+        )
         _, expired_token = self.factory.makeAccessToken(
             target=target,
-            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1))
+            date_expires=datetime.now(pytz.UTC) - timedelta(minutes=1),
+        )
         self.assertEqual(
             current_token,
             getUtility(IAccessTokenSet).getByTargetAndID(
-                target, removeSecurityProxy(current_token).id))
+                target, removeSecurityProxy(current_token).id
+            ),
+        )
         self.assertEqual(
             expires_soon_token,
             getUtility(IAccessTokenSet).getByTargetAndID(
-                target, removeSecurityProxy(expires_soon_token).id))
+                target, removeSecurityProxy(expires_soon_token).id
+            ),
+        )
         self.assertIsNone(
             getUtility(IAccessTokenSet).getByTargetAndID(
-                target, removeSecurityProxy(expired_token).id))
+                target, removeSecurityProxy(expired_token).id
+            )
+        )
 
 
 class TestAccessTokenTargetBase:
@@ -341,39 +394,47 @@ class TestAccessTokenTargetBase:
         self.owner = self.target.owner
         self.target_url = api_url(self.target)
         self.webservice = webservice_for_person(
-            self.owner, permission=OAuthPermission.WRITE_PRIVATE)
+            self.owner, permission=OAuthPermission.WRITE_PRIVATE
+        )
 
     def test_getAccessTokens(self):
         with person_logged_in(self.owner):
             for description in ("Test token 1", "Test token 2"):
                 self.factory.makeAccessToken(
-                    owner=self.owner, description=description,
-                    target=self.target)
+                    owner=self.owner,
+                    description=description,
+                    target=self.target,
+                )
         response = self.webservice.named_get(
-            self.target_url, "getAccessTokens", api_version="devel")
+            self.target_url, "getAccessTokens", api_version="devel"
+        )
         self.assertEqual(200, response.status)
         self.assertContentEqual(
             ["Test token 1", "Test token 2"],
-            [entry["description"] for entry in response.jsonBody()["entries"]])
+            [entry["description"] for entry in response.jsonBody()["entries"]],
+        )
 
     def test_getAccessTokens_permissions(self):
         webservice = webservice_for_person(None)
         response = webservice.named_get(
-            self.target_url, "getAccessTokens", api_version="devel")
+            self.target_url, "getAccessTokens", api_version="devel"
+        )
         self.assertEqual(401, response.status)
         self.assertIn(b"launchpad.Edit", response.body)
 
     def test_getAccessTokens_query_count(self):
         def get_tokens():
             response = self.webservice.named_get(
-                self.target_url, "getAccessTokens", api_version="devel")
+                self.target_url, "getAccessTokens", api_version="devel"
+            )
             self.assertEqual(200, response.status)
             self.assertIn(len(response.jsonBody()["entries"]), {0, 2, 4})
 
         def create_token():
             with person_logged_in(self.owner):
                 self.factory.makeAccessToken(
-                    owner=self.owner, target=self.target)
+                    owner=self.owner, target=self.target
+                )
 
         get_tokens()
         recorder1, recorder2 = record_two_runs(get_tokens, create_token, 2)
@@ -381,7 +442,7 @@ class TestAccessTokenTargetBase:
 
 
 class TestAccessTokenTargetGitRepository(
-        TestAccessTokenTargetBase, TestCaseWithFactory):
-
+    TestAccessTokenTargetBase, TestCaseWithFactory
+):
     def makeTarget(self):
         return self.factory.makeGitRepository()
diff --git a/lib/lp/services/auth/tests/test_yuitests.py b/lib/lp/services/auth/tests/test_yuitests.py
index 069cc9d..be22651 100644
--- a/lib/lp/services/auth/tests/test_yuitests.py
+++ b/lib/lp/services/auth/tests/test_yuitests.py
@@ -5,10 +5,7 @@
 
 __all__ = []
 
-from lp.testing import (
-    build_yui_unittest_suite,
-    YUIUnitTestCase,
-    )
+from lp.testing import YUIUnitTestCase, build_yui_unittest_suite
 from lp.testing.layers import YUITestLayer
 
 
diff --git a/lib/lp/services/auth/utils.py b/lib/lp/services/auth/utils.py
index a3b956d..f4c997b 100644
--- a/lib/lp/services/auth/utils.py
+++ b/lib/lp/services/auth/utils.py
@@ -5,7 +5,7 @@
 
 __all__ = [
     "create_access_token_secret",
-    ]
+]
 
 import binascii
 import os
diff --git a/lib/lp/services/auth/webservice.py b/lib/lp/services/auth/webservice.py
index 08e7ca1..ceb378a 100644
--- a/lib/lp/services/auth/webservice.py
+++ b/lib/lp/services/auth/webservice.py
@@ -6,9 +6,6 @@
 __all__ = [
     "IAccessToken",
     "IAccessTokenTarget",
-    ]
+]
 
-from lp.services.auth.interfaces import (
-    IAccessToken,
-    IAccessTokenTarget,
-    )
+from lp.services.auth.interfaces import IAccessToken, IAccessTokenTarget
diff --git a/lib/lp/services/authserver/interfaces.py b/lib/lp/services/authserver/interfaces.py
index d94755f..ece3436 100644
--- a/lib/lp/services/authserver/interfaces.py
+++ b/lib/lp/services/authserver/interfaces.py
@@ -4,9 +4,9 @@
 """Interface for the XML-RPC authentication server."""
 
 __all__ = [
-    'IAuthServer',
-    'IAuthServerApplication',
-    ]
+    "IAuthServer",
+    "IAuthServerApplication",
+]
 
 
 from zope.interface import Interface
diff --git a/lib/lp/services/authserver/testing.py b/lib/lp/services/authserver/testing.py
index 5dba0c9..34f1cd5 100644
--- a/lib/lp/services/authserver/testing.py
+++ b/lib/lp/services/authserver/testing.py
@@ -4,17 +4,14 @@
 """In-process authserver fixture."""
 
 __all__ = [
-    'InProcessAuthServerFixture',
-    ]
+    "InProcessAuthServerFixture",
+]
 
 from textwrap import dedent
 
 import fixtures
 from twisted.internet import reactor
-from twisted.web import (
-    server,
-    xmlrpc,
-    )
+from twisted.web import server, xmlrpc
 from zope.component import getUtility
 from zope.publisher.xmlrpc import TestRequest
 
@@ -24,16 +21,16 @@ from lp.xmlrpc.interfaces import IPrivateApplication
 
 
 class InProcessAuthServer(xmlrpc.XMLRPC):
-
     def __init__(self, *args, **kwargs):
         xmlrpc.XMLRPC.__init__(self, *args, **kwargs)
         private_root = getUtility(IPrivateApplication)
         self.authserver = AuthServerAPIView(
-            private_root.authserver, TestRequest())
+            private_root.authserver, TestRequest()
+        )
 
     def __getattr__(self, name):
         if name.startswith("xmlrpc_"):
-            return getattr(self.authserver, name[len("xmlrpc_"):])
+            return getattr(self.authserver, name[len("xmlrpc_") :])
         else:
             raise AttributeError("%r has no attribute '%s'" % name)
 
@@ -44,7 +41,10 @@ class InProcessAuthServerFixture(fixtures.Fixture, xmlrpc.XMLRPC):
     def _setUp(self):
         listener = reactor.listenTCP(0, server.Site(InProcessAuthServer()))
         self.addCleanup(listener.stopListening)
-        config.push("in-process-auth-server-fixture", dedent("""
+        config.push(
+            "in-process-auth-server-fixture",
+            dedent(
+                """
             [builddmaster]
             authentication_endpoint: http://localhost:{port}/
 
@@ -53,5 +53,7 @@ class InProcessAuthServerFixture(fixtures.Fixture, xmlrpc.XMLRPC):
 
             [librarian]
             authentication_endpoint: http://localhost:{port}/
-            """).format(port=listener.getHost().port))
+            """
+            ).format(port=listener.getHost().port),
+        )
         self.addCleanup(config.pop, "in-process-auth-server-fixture")
diff --git a/lib/lp/services/authserver/tests/test_authserver.py b/lib/lp/services/authserver/tests/test_authserver.py
index 272f02b..2093a92 100644
--- a/lib/lp/services/authserver/tests/test_authserver.py
+++ b/lib/lp/services/authserver/tests/test_authserver.py
@@ -7,12 +7,7 @@ import xmlrpc.client
 
 from pymacaroons import Macaroon
 from storm.sqlobject import SQLObjectNotFound
-from testtools.matchers import (
-    Equals,
-    Is,
-    MatchesListwise,
-    MatchesStructure,
-    )
+from testtools.matchers import Equals, Is, MatchesListwise, MatchesStructure
 from zope.component import getUtility
 from zope.interface import implementer
 from zope.publisher.xmlrpc import TestRequest
@@ -20,30 +15,23 @@ from zope.publisher.xmlrpc import TestRequest
 from lp.services.authserver.interfaces import (
     IAuthServer,
     IAuthServerApplication,
-    )
+)
 from lp.services.authserver.xmlrpc import AuthServerAPIView
 from lp.services.config import config
 from lp.services.identity.interfaces.account import AccountStatus
 from lp.services.librarian.interfaces import (
     ILibraryFileAlias,
     ILibraryFileAliasSet,
-    )
+)
 from lp.services.macaroons.interfaces import (
+    NO_USER,
     BadMacaroonContext,
     IMacaroonIssuer,
-    NO_USER,
-    )
+)
 from lp.services.macaroons.model import MacaroonIssuerBase
-from lp.testing import (
-    person_logged_in,
-    TestCaseWithFactory,
-    verifyObject,
-    )
+from lp.testing import TestCaseWithFactory, person_logged_in, verifyObject
 from lp.testing.fixture import ZopeUtilityFixture
-from lp.testing.layers import (
-    DatabaseFunctionalLayer,
-    ZopelessDatabaseLayer,
-    )
+from lp.testing.layers import DatabaseFunctionalLayer, ZopelessDatabaseLayer
 from lp.testing.xmlrpc import XMLRPCTestTransport
 from lp.xmlrpc import faults
 from lp.xmlrpc.interfaces import IPrivateApplication
@@ -58,19 +46,20 @@ class TestAuthServerInterfaces(TestCaseWithFactory):
         # of our private XML-RPC instance.
         private_root = getUtility(IPrivateApplication)
         self.assertTrue(
-            verifyObject(IAuthServerApplication, private_root.authserver))
+            verifyObject(IAuthServerApplication, private_root.authserver)
+        )
 
     def test_api_interface(self):
         # The AuthServerAPIView provides the IAuthServer XML-RPC API.
         private_root = getUtility(IPrivateApplication)
         authserver_api = AuthServerAPIView(
-            private_root.authserver, TestRequest())
+            private_root.authserver, TestRequest()
+        )
         self.assertTrue(verifyObject(IAuthServer, authserver_api))
 
 
 class GetUserAndSSHKeysTests(TestCaseWithFactory):
-    """Tests for the implementation of `IAuthServer.getUserAndSSHKeys`.
-    """
+    """Tests for the implementation of `IAuthServer.getUserAndSSHKeys`."""
 
     layer = DatabaseFunctionalLayer
 
@@ -78,14 +67,16 @@ class GetUserAndSSHKeysTests(TestCaseWithFactory):
         TestCaseWithFactory.setUp(self)
         private_root = getUtility(IPrivateApplication)
         self.authserver = AuthServerAPIView(
-            private_root.authserver, TestRequest())
+            private_root.authserver, TestRequest()
+        )
 
     def test_user_not_found(self):
         # getUserAndSSHKeys returns the NoSuchPersonWithName fault if there is
         # no Person of the given name.
         self.assertEqual(
-            faults.NoSuchPersonWithName('no-one'),
-            self.authserver.getUserAndSSHKeys('no-one'))
+            faults.NoSuchPersonWithName("no-one"),
+            self.authserver.getUserAndSSHKeys("no-one"),
+        )
 
     def test_user_no_keys(self):
         # getUserAndSSHKeys returns a dict with keys ['id', 'name', 'keys'].
@@ -94,7 +85,8 @@ class GetUserAndSSHKeysTests(TestCaseWithFactory):
         new_person = self.factory.makePerson()
         self.assertEqual(
             dict(id=new_person.id, name=new_person.name, keys=[]),
-            self.authserver.getUserAndSSHKeys(new_person.name))
+            self.authserver.getUserAndSSHKeys(new_person.name),
+        )
 
     def test_user_with_keys(self):
         # For a user with registered SSH keys, getUserAndSSHKeys returns the
@@ -104,40 +96,51 @@ class GetUserAndSSHKeysTests(TestCaseWithFactory):
         with person_logged_in(new_person):
             key = self.factory.makeSSHKey(person=new_person)
             self.assertEqual(
-                dict(id=new_person.id, name=new_person.name,
-                     keys=[(key.keytype.title, key.keytext)]),
-                self.authserver.getUserAndSSHKeys(new_person.name))
+                dict(
+                    id=new_person.id,
+                    name=new_person.name,
+                    keys=[(key.keytype.title, key.keytext)],
+                ),
+                self.authserver.getUserAndSSHKeys(new_person.name),
+            )
 
     def test_inactive_user_with_keys(self):
         # getUserAndSSHKeys returns the InactiveAccount fault if the given
         # name refers to an inactive account.
         new_person = self.factory.makePerson(
-            account_status=AccountStatus.SUSPENDED)
+            account_status=AccountStatus.SUSPENDED
+        )
         with person_logged_in(new_person):
             self.factory.makeSSHKey(person=new_person)
             self.assertEqual(
                 faults.InactiveAccount(new_person.name),
-                self.authserver.getUserAndSSHKeys(new_person.name))
+                self.authserver.getUserAndSSHKeys(new_person.name),
+            )
 
     def test_via_xmlrpc(self):
         new_person = self.factory.makePerson()
         with person_logged_in(new_person):
             key = self.factory.makeSSHKey(person=new_person)
         authserver = xmlrpc.client.ServerProxy(
-            'http://xmlrpc-private.launchpad.test:8087/authserver',
-            transport=XMLRPCTestTransport())
+            "http://xmlrpc-private.launchpad.test:8087/authserver",
+            transport=XMLRPCTestTransport(),
+        )
         self.assertEqual(
-            {'id': new_person.id, 'name': new_person.name,
-             'keys': [[key.keytype.title, key.keytext]]},
-            authserver.getUserAndSSHKeys(new_person.name))
+            {
+                "id": new_person.id,
+                "name": new_person.name,
+                "keys": [[key.keytype.title, key.keytext]],
+            },
+            authserver.getUserAndSSHKeys(new_person.name),
+        )
 
 
 @implementer(IMacaroonIssuer)
 class DummyMacaroonIssuer(MacaroonIssuerBase):
 
-    identifier = 'test'
+    identifier = "test"
     issuable_via_authserver = True
-    _root_secret = 'test'
+    _root_secret = "test"
     _verified_user = NO_USER
 
     def checkIssuingContext(self, context, **kwargs):
@@ -167,59 +170,77 @@ class MacaroonTests(TestCaseWithFactory):
     def setUp(self):
         super().setUp()
         self.issuer = DummyMacaroonIssuer()
-        self.useFixture(ZopeUtilityFixture(
-            self.issuer, IMacaroonIssuer, name='test'))
+        self.useFixture(
+            ZopeUtilityFixture(self.issuer, IMacaroonIssuer, name="test")
+        )
         private_root = getUtility(IPrivateApplication)
         self.authserver = AuthServerAPIView(
-            private_root.authserver, TestRequest())
+            private_root.authserver, TestRequest()
+        )
 
     def test_issue_unknown_issuer(self):
         self.assertEqual(
             faults.PermissionDenied(),
             self.authserver.issueMacaroon(
-                'unknown-issuer', 'LibraryFileAlias', 1))
+                "unknown-issuer", "LibraryFileAlias", 1
+            ),
+        )
 
     def test_issue_wrong_context_type(self):
         self.assertEqual(
             faults.PermissionDenied(),
-            self.authserver.issueMacaroon(
-                'unknown-issuer', 'nonsense', 1))
+            self.authserver.issueMacaroon("unknown-issuer", "nonsense", 1),
+        )
 
     def test_issue_not_issuable_via_authserver(self):
         self.issuer.issuable_via_authserver = False
         self.assertEqual(
             faults.PermissionDenied(),
-            self.authserver.issueMacaroon('test', 'LibraryFileAlias', 1))
+            self.authserver.issueMacaroon("test", "LibraryFileAlias", 1),
+        )
 
     def test_issue_bad_context(self):
         build = self.factory.makeSnapBuild()
         self.assertEqual(
             faults.PermissionDenied(),
-            self.authserver.issueMacaroon('test', 'SnapBuild', build.id))
+            self.authserver.issueMacaroon("test", "SnapBuild", build.id),
+        )
 
     def test_issue_success(self):
         macaroon = Macaroon.deserialize(
-            self.authserver.issueMacaroon('test', 'LibraryFileAlias', 1))
-        self.assertThat(macaroon, MatchesStructure(
-            location=Equals(config.vhost.mainsite.hostname),
-            identifier=Equals('test'),
-            caveats=MatchesListwise([
-                MatchesStructure.byEquality(caveat_id='lp.test 1'),
-                ])))
+            self.authserver.issueMacaroon("test", "LibraryFileAlias", 1)
+        )
+        self.assertThat(
+            macaroon,
+            MatchesStructure(
+                location=Equals(config.vhost.mainsite.hostname),
+                identifier=Equals("test"),
+                caveats=MatchesListwise(
+                    [
+                        MatchesStructure.byEquality(caveat_id="lp.test 1"),
+                    ]
+                ),
+            ),
+        )
 
     def test_verify_nonsense_macaroon(self):
         self.assertEqual(
             faults.Unauthorized(),
-            self.authserver.verifyMacaroon('nonsense', 'LibraryFileAlias', 1))
+            self.authserver.verifyMacaroon("nonsense", "LibraryFileAlias", 1),
+        )
 
     def test_verify_unknown_issuer(self):
         macaroon = Macaroon(
             location=config.vhost.mainsite.hostname,
-            identifier='unknown-issuer', key='test')
+            identifier="unknown-issuer",
+            key="test",
+        )
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', 1))
+                macaroon.serialize(), "LibraryFileAlias", 1
+            ),
+        )
 
     def test_verify_wrong_context_type(self):
         lfa = getUtility(ILibraryFileAliasSet)[1]
@@ -227,7 +248,9 @@ class MacaroonTests(TestCaseWithFactory):
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'nonsense', lfa.id))
+                macaroon.serialize(), "nonsense", lfa.id
+            ),
+        )
 
     def test_verify_wrong_context(self):
         lfa = getUtility(ILibraryFileAliasSet)[1]
@@ -235,20 +258,27 @@ class MacaroonTests(TestCaseWithFactory):
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', 2))
+                macaroon.serialize(), "LibraryFileAlias", 2
+            ),
+        )
 
     def test_verify_nonexistent_lfa(self):
         macaroon = self.issuer.issueMacaroon(
-            getUtility(ILibraryFileAliasSet)[1])
+            getUtility(ILibraryFileAliasSet)[1]
+        )
         # Pick a large ID that doesn't exist in sampledata.
         lfa_id = 1000000
         self.assertRaises(
-            SQLObjectNotFound, getUtility(ILibraryFileAliasSet).__getitem__,
-            lfa_id)
+            SQLObjectNotFound,
+            getUtility(ILibraryFileAliasSet).__getitem__,
+            lfa_id,
+        )
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', lfa_id))
+                macaroon.serialize(), "LibraryFileAlias", lfa_id
+            ),
+        )
 
     def test_verify_unverified_user(self):
         # The authserver refuses macaroons whose issuer doesn't explicitly
@@ -259,7 +289,9 @@ class MacaroonTests(TestCaseWithFactory):
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', lfa.id))
+                macaroon.serialize(), "LibraryFileAlias", lfa.id
+            ),
+        )
 
     def test_verify_specific_user(self):
         # The authserver refuses macaroons that were issued on behalf of a
@@ -270,12 +302,16 @@ class MacaroonTests(TestCaseWithFactory):
         self.assertEqual(
             faults.Unauthorized(),
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', lfa.id))
+                macaroon.serialize(), "LibraryFileAlias", lfa.id
+            ),
+        )
 
     def test_verify_success(self):
         lfa = getUtility(ILibraryFileAliasSet)[1]
         macaroon = self.issuer.issueMacaroon(lfa)
         self.assertThat(
             self.authserver.verifyMacaroon(
-                macaroon.serialize(), 'LibraryFileAlias', lfa.id),
-            Is(True))
+                macaroon.serialize(), "LibraryFileAlias", lfa.id
+            ),
+            Is(True),
+        )
diff --git a/lib/lp/services/authserver/xmlrpc.py b/lib/lp/services/authserver/xmlrpc.py
index 1c91bdf..54312d7 100644
--- a/lib/lp/services/authserver/xmlrpc.py
+++ b/lib/lp/services/authserver/xmlrpc.py
@@ -4,9 +4,9 @@
 """Auth-Server XML-RPC API ."""
 
 __all__ = [
-    'AuthServerApplication',
-    'AuthServerAPIView',
-    ]
+    "AuthServerApplication",
+    "AuthServerAPIView",
+]
 
 from pymacaroons import Macaroon
 from storm.sqlobject import SQLObjectNotFound
@@ -21,14 +21,14 @@ from lp.registry.interfaces.person import IPersonSet
 from lp.services.authserver.interfaces import (
     IAuthServer,
     IAuthServerApplication,
-    )
+)
 from lp.services.identity.interfaces.account import AccountStatus
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
 from lp.services.macaroons.interfaces import (
+    NO_USER,
     BadMacaroonContext,
     IMacaroonIssuer,
-    NO_USER,
-    )
+)
 from lp.services.webapp import LaunchpadXMLRPCView
 from lp.snappy.interfaces.snapbuild import ISnapBuildSet
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
@@ -48,11 +48,12 @@ class AuthServerAPIView(LaunchpadXMLRPCView):
         if person.account_status != AccountStatus.ACTIVE:
             return faults.InactiveAccount(name)
         return {
-            'id': person.id,
-            'name': person.name,
-            'keys': [(key.keytype.title, key.keytext)
-                     for key in person.sshkeys],
-            }
+            "id": person.id,
+            "name": person.name,
+            "keys": [
+                (key.keytype.title, key.keytext) for key in person.sshkeys
+            ],
+        }
 
     def _resolveContext(self, context_type, context):
         """Resolve a serialised context.
@@ -64,25 +65,25 @@ class AuthServerAPIView(LaunchpadXMLRPCView):
         :param context: The context as plain data (e.g. an ID).
         :return: The resolved context, or None.
         """
-        if context_type == 'LibraryFileAlias':
+        if context_type == "LibraryFileAlias":
             # The context is a `LibraryFileAlias` ID.
             try:
                 return getUtility(ILibraryFileAliasSet)[context]
             except SQLObjectNotFound:
                 return None
-        elif context_type == 'BinaryPackageBuild':
+        elif context_type == "BinaryPackageBuild":
             # The context is a `BinaryPackageBuild` ID.
             return getUtility(IBinaryPackageBuildSet).getByID(context)
-        elif context_type == 'LiveFSBuild':
+        elif context_type == "LiveFSBuild":
             # The context is a `LiveFSBuild` ID.
             return getUtility(ILiveFSBuildSet).getByID(context)
-        elif context_type == 'SnapBuild':
+        elif context_type == "SnapBuild":
             # The context is a `SnapBuild` ID.
             return getUtility(ISnapBuildSet).getByID(context)
-        elif context_type == 'OCIRecipeBuild':
+        elif context_type == "OCIRecipeBuild":
             # The context is an `OCIRecipeBuild` ID.
             return getUtility(IOCIRecipeBuildSet).getByID(context)
-        elif context_type == 'CIBuild':
+        elif context_type == "CIBuild":
             # The context is a `CIBuild` ID.
             return getUtility(ICIBuildSet).getByID(context)
         else:
diff --git a/lib/lp/services/beautifulsoup.py b/lib/lp/services/beautifulsoup.py
index 6a39229..8c32655 100644
--- a/lib/lp/services/beautifulsoup.py
+++ b/lib/lp/services/beautifulsoup.py
@@ -4,9 +4,9 @@
 """Beautiful Soup wrapper for Launchpad."""
 
 __all__ = [
-    'BeautifulSoup',
-    'SoupStrainer',
-    ]
+    "BeautifulSoup",
+    "SoupStrainer",
+]
 
 
 from bs4 import BeautifulSoup as _BeautifulSoup
@@ -14,9 +14,7 @@ from bs4.element import SoupStrainer
 
 
 class BeautifulSoup(_BeautifulSoup):
-
     def __init__(self, markup="", features="html.parser", **kwargs):
-        if (not isinstance(markup, str) and
-                "from_encoding" not in kwargs):
+        if not isinstance(markup, str) and "from_encoding" not in kwargs:
             kwargs["from_encoding"] = "UTF-8"
         super().__init__(markup=markup, features=features, **kwargs)
diff --git a/lib/lp/services/browser_helpers.py b/lib/lp/services/browser_helpers.py
index e4a1902..c87168f 100644
--- a/lib/lp/services/browser_helpers.py
+++ b/lib/lp/services/browser_helpers.py
@@ -4,9 +4,9 @@
 """Helpers for examining the browser user_agent header."""
 
 __all__ = [
-    'get_user_agent_distroseries',
-    'get_plural_text',
-    ]
+    "get_user_agent_distroseries",
+    "get_plural_text",
+]
 
 import re
 
@@ -18,13 +18,13 @@ def get_user_agent_distroseries(user_agent_string):
         return None
 
     # We're matching on the Ubuntu/10.09 section of the user-agent string.
-    pattern = r'Ubuntu/(?P<version>\d*\.\d*)'
+    pattern = r"Ubuntu/(?P<version>\d*\.\d*)"
     match = re.search(pattern, user_agent_string)
 
     if match is not None:
         # Great, the browser is telling us the platform is Ubuntu.
         # Now grab the Ubuntu series/version number:
-        return match.groupdict()['version']
+        return match.groupdict()["version"]
     else:
         return None
 
diff --git a/lib/lp/services/channels.py b/lib/lp/services/channels.py
index 7f0d467..2d36856 100644
--- a/lib/lp/services/channels.py
+++ b/lib/lp/services/channels.py
@@ -7,13 +7,12 @@ __all__ = [
     "CHANNEL_COMPONENTS_DELIMITER",
     "channel_list_to_string",
     "channel_string_to_list",
-    ]
+]
 
 from lp.registry.enums import StoreRisk
 
-
 # delimiter separating channel components
-CHANNEL_COMPONENTS_DELIMITER = '/'
+CHANNEL_COMPONENTS_DELIMITER = "/"
 
 
 def _is_risk(component):
@@ -42,7 +41,8 @@ def channel_string_to_list(channel):
         if _is_risk(components[0]):
             if _is_risk(components[1]):
                 raise ValueError(
-                    "Branch name cannot match a risk name: %r" % channel)
+                    "Branch name cannot match a risk name: %r" % channel
+                )
             track = None
             risk, branch = components
         elif _is_risk(components[1]):
@@ -70,4 +70,5 @@ def channel_list_to_string(track, risk, branch):
     if track == "latest":
         track = None
     return CHANNEL_COMPONENTS_DELIMITER.join(
-        [c for c in (track, risk, branch) if c is not None])
+        [c for c in (track, risk, branch) if c is not None]
+    )
diff --git a/lib/lp/services/command_spawner.py b/lib/lp/services/command_spawner.py
index 31b8940..8bf3ccc 100644
--- a/lib/lp/services/command_spawner.py
+++ b/lib/lp/services/command_spawner.py
@@ -4,20 +4,16 @@
 """Execute commands in parallel sub-processes."""
 
 __all__ = [
-    'CommandSpawner',
-    'OutputLineHandler',
-    'ReturnCodeReceiver',
-    ]
+    "CommandSpawner",
+    "OutputLineHandler",
+    "ReturnCodeReceiver",
+]
 
 import errno
-from fcntl import (
-    F_GETFL,
-    F_SETFL,
-    fcntl,
-    )
-from os import O_NONBLOCK
 import select
 import subprocess
+from fcntl import F_GETFL, F_SETFL, fcntl
+from os import O_NONBLOCK
 
 
 def get_process_output_files(process):
@@ -25,7 +21,7 @@ def get_process_output_files(process):
     return [
         process.stdout,
         process.stderr,
-        ]
+    ]
 
 
 def make_files_nonblocking(files):
@@ -40,13 +36,13 @@ def make_files_nonblocking(files):
 
 def has_pending_output(poll_event):
     """Does the given event mask from `poll` indicate there's data to read?"""
-    input_mask = (select.POLLIN | select.POLLPRI)
+    input_mask = select.POLLIN | select.POLLPRI
     return (poll_event & input_mask) != 0
 
 
 def has_terminated(poll_event):
     """Does the given event mask from `poll` indicate process death?"""
-    death_mask = (select.POLLERR | select.POLLHUP | select.POLLNVAL)
+    death_mask = select.POLLERR | select.POLLHUP | select.POLLNVAL
     return (poll_event & death_mask) != 0
 
 
@@ -81,8 +77,13 @@ class CommandSpawner:
         self.running_processes = {}
         self.poll = select.poll()
 
-    def start(self, command, stdout_handler=None, stderr_handler=None,
-              completion_handler=None):
+    def start(
+        self,
+        command,
+        stdout_handler=None,
+        stderr_handler=None,
+        completion_handler=None,
+    ):
         """Run `command` in a sub-process.
 
         This starts the command, but does not wait for it to complete.
@@ -156,8 +157,12 @@ class CommandSpawner:
     def _spawn(self, command):
         """Spawn a sub-process for `command`.  Overridable in tests."""
         return subprocess.Popen(
-            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-            close_fds=True, universal_newlines=True)
+            command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+            universal_newlines=True,
+        )
 
     def _handle(self, process, event, *args):
         """If we have a handler for `event` on `process`, call it."""
diff --git a/lib/lp/services/comments/browser/comment.py b/lib/lp/services/comments/browser/comment.py
index e745444..8c2bbb7 100644
--- a/lib/lp/services/comments/browser/comment.py
+++ b/lib/lp/services/comments/browser/comment.py
@@ -2,9 +2,9 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'download_body',
-    'MAX_RENDERABLE',
-    ]
+    "download_body",
+    "MAX_RENDERABLE",
+]
 
 
 from lp.app.browser.tales import download_link
@@ -13,8 +13,7 @@ from lp.services.webapp.publisher import (
     DataDownloadView,
     LaunchpadView,
     UserAttributeCache,
-    )
-
+)
 
 MAX_RENDERABLE = 10000
 
@@ -22,11 +21,11 @@ MAX_RENDERABLE = 10000
 class CommentBodyDownloadView(DataDownloadView, UserAttributeCache):
     """Download the body text of a comment."""
 
-    content_type = 'text/plain'
+    content_type = "text/plain"
 
     @property
     def filename(self):
-        return 'comment-%d.txt' % self.context.index
+        return "comment-%d.txt" % self.context.index
 
     def getBody(self):
         """The body of the HTTP response is the message body."""
diff --git a/lib/lp/services/comments/browser/messagecomment.py b/lib/lp/services/comments/browser/messagecomment.py
index 9824042..93eb4a5 100644
--- a/lib/lp/services/comments/browser/messagecomment.py
+++ b/lib/lp/services/comments/browser/messagecomment.py
@@ -1,7 +1,7 @@
 # Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['MessageComment']
+__all__ = ["MessageComment"]
 
 
 from lp.services.comments.browser.comment import MAX_RENDERABLE
@@ -12,7 +12,7 @@ from lp.services.propertycache import cachedproperty
 class MessageComment:
     """Mixin to partially implement IComment in terms of IMessage."""
 
-    extra_css_class = ''
+    extra_css_class = ""
 
     has_footer = False
 
@@ -24,9 +24,9 @@ class MessageComment:
         if not self.visible:
             # If a comment that isn't visible is being rendered, it's being
             # rendered for an admin or registry_expert.
-            return 'adminHiddenComment'
+            return "adminHiddenComment"
         else:
-            return ''
+            return ""
 
     @property
     def display_attachments(self):
@@ -70,6 +70,6 @@ class MessageComment:
         # the ellipsis, this breaks down when the comment limit is
         # less than 3 (which can happen in a testcase) and it makes
         # counting the strings harder.
-        return "%s..." % self.body_text[:self.comment_limit]
+        return "%s..." % self.body_text[: self.comment_limit]
 
     show_spam_controls = False
diff --git a/lib/lp/services/comments/browser/tests/test_comment.py b/lib/lp/services/comments/browser/tests/test_comment.py
index 0d593bc..b786a08 100644
--- a/lib/lp/services/comments/browser/tests/test_comment.py
+++ b/lib/lp/services/comments/browser/tests/test_comment.py
@@ -3,10 +3,7 @@
 
 from lp.services.comments.browser.comment import CommentBodyDownloadView
 from lp.services.webapp.servers import LaunchpadTestRequest
-from lp.testing import (
-    person_logged_in,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, person_logged_in
 from lp.testing.layers import DatabaseFunctionalLayer
 
 
@@ -31,13 +28,13 @@ class TestCommentBodyDownloadView(TestCaseWithFactory):
 
     def test_anonymous_body_obfuscated(self):
         """For anonymous users, email addresses are obfuscated."""
-        output = self.view('example@xxxxxxxxxxx')
-        self.assertNotIn(output, 'example@xxxxxxxxxxx')
-        self.assertIn(output, '<email address hidden>')
+        output = self.view("example@xxxxxxxxxxx")
+        self.assertNotIn(output, "example@xxxxxxxxxxx")
+        self.assertIn(output, "<email address hidden>")
 
     def test_logged_in_not_obfuscated(self):
         """For logged-in users, email addresses are not obfuscated."""
         with person_logged_in(self.factory.makePerson()):
-            output = self.view('example@xxxxxxxxxxx')
-            self.assertIn(output, 'example@xxxxxxxxxxx')
-            self.assertNotIn(output, '<email address hidden>')
+            output = self.view("example@xxxxxxxxxxx")
+            self.assertIn(output, "example@xxxxxxxxxxx")
+            self.assertNotIn(output, "<email address hidden>")
diff --git a/lib/lp/services/comments/interfaces/conversation.py b/lib/lp/services/comments/interfaces/conversation.py
index aa5ff8b..5bccf9a 100644
--- a/lib/lp/services/comments/interfaces/conversation.py
+++ b/lib/lp/services/comments/interfaces/conversation.py
@@ -4,23 +4,14 @@
 """Interfaces to do with conversations on Launchpad entities."""
 
 __all__ = [
-    'IComment',
-    'IConversation',
-    ]
+    "IComment",
+    "IConversation",
+]
 
 
-from lazr.restful.fields import (
-    CollectionField,
-    Reference,
-    )
+from lazr.restful.fields import CollectionField, Reference
 from zope.interface import Interface
-from zope.schema import (
-    Bool,
-    Datetime,
-    Int,
-    Text,
-    TextLine,
-    )
+from zope.schema import Bool, Datetime, Int, Text, TextLine
 
 from lp import _
 
@@ -28,54 +19,64 @@ from lp import _
 class IComment(Interface):
     """A comment which may have a body or footer."""
 
-    index = Int(title='The comment number', required=True, readonly=True)
+    index = Int(title="The comment number", required=True, readonly=True)
 
     extra_css_class = TextLine(
-        description=_("A css class to apply to the comment's outer div."))
+        description=_("A css class to apply to the comment's outer div.")
+    )
 
     has_body = Bool(
-        description=_("Does the comment have body text?"),
-        readonly=True)
+        description=_("Does the comment have body text?"), readonly=True
+    )
 
     has_footer = Bool(
-        description=_("Does the comment have a footer?"),
-        readonly=True)
+        description=_("Does the comment have a footer?"), readonly=True
+    )
 
     too_long = Bool(
-        title='Whether the comment body is too long to display in full.',
-        readonly=True)
+        title="Whether the comment body is too long to display in full.",
+        readonly=True,
+    )
 
     too_long_to_render = Bool(
-        title=('Whether the comment body is so long that rendering is'
-        ' inappropriate.'), readonly=True)
+        title=(
+            "Whether the comment body is so long that rendering is"
+            " inappropriate."
+        ),
+        readonly=True,
+    )
 
     text_for_display = Text(
-        title='The comment text to be displayed in the UI.', readonly=True)
+        title="The comment text to be displayed in the UI.", readonly=True
+    )
 
     body_text = Text(
-        description=_("The body text of the comment."),
-        readonly=True)
+        description=_("The body text of the comment."), readonly=True
+    )
 
     download_url = Text(
-        description=_("URL for downloading full text."),
-        readonly=True)
+        description=_("URL for downloading full text."), readonly=True
+    )
 
     comment_author = Reference(
         # Really IPerson.
-        Interface, title=_("The author of the comment."),
-        readonly=True)
+        Interface,
+        title=_("The author of the comment."),
+        readonly=True,
+    )
 
-    comment_date = Datetime(
-        title=_('Comment date.'), readonly=True)
+    comment_date = Datetime(title=_("Comment date."), readonly=True)
 
     display_attachments = Bool(
         description=_("Should attachments be displayed for this comment."),
-        readonly=True)
+        readonly=True,
+    )
 
     visible = Bool(title=_("Whether this comment is visible."))
 
     show_spam_controls = Bool(
-        title=_("Whether to show spam controls for this comment."))
+        title=_("Whether to show spam controls for this comment.")
+    )
 
 
 class IConversation(Interface):
@@ -83,4 +84,5 @@ class IConversation(Interface):
 
     comments = CollectionField(
         value_type=Reference(schema=IComment),
-        title=_('The comments in the conversation'))
+        title=_("The comments in the conversation"),
+    )
diff --git a/lib/lp/services/compat.py b/lib/lp/services/compat.py
index d364a4e..15a0f4c 100644
--- a/lib/lp/services/compat.py
+++ b/lib/lp/services/compat.py
@@ -7,8 +7,8 @@ Use this for things that six doesn't provide.
 """
 
 __all__ = [
-    'message_as_bytes',
-    ]
+    "message_as_bytes",
+]
 
 import io
 
@@ -18,7 +18,6 @@ def message_as_bytes(message):
     from email.policy import compat32
 
     fp = io.BytesIO()
-    g = BytesGenerator(
-        fp, mangle_from_=False, maxheaderlen=0, policy=compat32)
+    g = BytesGenerator(fp, mangle_from_=False, maxheaderlen=0, policy=compat32)
     g.flatten(message)
     return fp.getvalue()
diff --git a/lib/lp/services/config/__init__.py b/lib/lp/services/config/__init__.py
index 87b9b62..3924ca1 100644
--- a/lib/lp/services/config/__init__.py
+++ b/lib/lp/services/config/__init__.py
@@ -1,12 +1,12 @@
 # Copyright 2009-2021 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-'''
+"""
 Configuration information pulled from launchpad-lazr.conf.
 
 The configuration section used is specified using the LPCONFIG
 environment variable, and defaults to 'development'
-'''
+"""
 
 import glob
 import os
@@ -17,43 +17,42 @@ from lazr.config import ImplicitTypeSchema
 from lazr.config.interfaces import ConfigErrors
 
 from lp.services.osutils import open_for_writing
-from lp.services.propertycache import (
-    cachedproperty,
-    get_property_cache,
-    )
-
+from lp.services.propertycache import cachedproperty, get_property_cache
 
 __all__ = [
-    'dbconfig',
-    'config',
-    ]
+    "dbconfig",
+    "config",
+]
 
 
 # The config to use can be specified in one of these files.
-CONFIG_LOOKUP_FILES = ['/etc/launchpad/config']
-if os.environ.get('HOME'):
+CONFIG_LOOKUP_FILES = ["/etc/launchpad/config"]
+if os.environ.get("HOME"):
     CONFIG_LOOKUP_FILES.insert(
-        0, os.path.join(os.environ['HOME'], '.lpconfig'))
+        0, os.path.join(os.environ["HOME"], ".lpconfig")
+    )
 
 # LPCONFIG specifies the config to use, which corresponds to a subdirectory
 # of configs. It overrides any setting in the CONFIG_LOOKUP_FILES.
-LPCONFIG = 'LPCONFIG'
+LPCONFIG = "LPCONFIG"
 
 # If no CONFIG_LOOKUP_FILE is found and there is no LPCONFIG environment
 # variable, we have a fallback. This is what developers normally use.
-DEFAULT_CONFIG = 'development'
+DEFAULT_CONFIG = "development"
 
 PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
 
 # Root of the launchpad tree so code can stop jumping through hoops
 # with __file__.
 TREE_ROOT = os.path.abspath(
-    os.path.join(PACKAGE_DIR, os.pardir, os.pardir, os.pardir, os.pardir))
+    os.path.join(PACKAGE_DIR, os.pardir, os.pardir, os.pardir, os.pardir)
+)
 
 # The directories containing instances configuration directories.
 CONFIG_ROOT_DIRS = [
-    os.path.join(TREE_ROOT, 'configs'),
-    os.path.join(TREE_ROOT, 'production-configs')]
+    os.path.join(TREE_ROOT, "configs"),
+    os.path.join(TREE_ROOT, "production-configs"),
+]
 
 
 def find_instance_name():
@@ -83,7 +82,8 @@ def find_config_dir(instance_name):
         if os.path.isdir(config_dir):
             return config_dir
     raise ValueError(
-        "Can't find %s in %s" % (instance_name, ", ".join(CONFIG_ROOT_DIRS)))
+        "Can't find %s in %s" % (instance_name, ", ".join(CONFIG_ROOT_DIRS))
+    )
 
 
 class LaunchpadConfig:
@@ -116,7 +116,7 @@ class LaunchpadConfig:
         self.root = TREE_ROOT
 
     def _make_process_name(self):
-        if getattr(sys, 'argv', None) is None:
+        if getattr(sys, "argv", None) is None:
             return None
         basename = os.path.basename(sys.argv[0])
         return os.path.splitext(basename)[0]
@@ -166,8 +166,10 @@ class LaunchpadConfig:
         but not if its the testrunner-appserver, development or production
         config.
         """
-        return (self.instance_name == 'testrunner' or
-                self.instance_name.startswith('testrunner_'))
+        return (
+            self.instance_name == "testrunner"
+            or self.instance_name.startswith("testrunner_")
+        )
 
     @property
     def process_name(self):
@@ -201,29 +203,31 @@ class LaunchpadConfig:
         if self._config is not None:
             return
 
-        schema_file = os.path.join(PACKAGE_DIR, 'schema-lazr.conf')
+        schema_file = os.path.join(PACKAGE_DIR, "schema-lazr.conf")
         config_dir = self.config_dir
         config_file = os.path.join(
-            config_dir, '%s-lazr.conf' % self.process_name)
+            config_dir, "%s-lazr.conf" % self.process_name
+        )
         if not os.path.isfile(config_file):
-            config_file = os.path.join(config_dir, 'launchpad-lazr.conf')
+            config_file = os.path.join(config_dir, "launchpad-lazr.conf")
         schema = ImplicitTypeSchema(schema_file)
         self._config = schema.load(config_file)
         self._loadConfigOverlays(config_file)
         try:
             self._config.validate()
         except ConfigErrors as error:
-            message = '\n'.join([str(e) for e in error.errors])
+            message = "\n".join([str(e) for e in error.errors])
             raise ConfigErrors(message)
 
     def _loadConfigOverlays(self, config_file):
         """Apply config overlays from the launchpad.config_overlay_dir."""
-        rel_dir = self._config['launchpad']['config_overlay_dir']
+        rel_dir = self._config["launchpad"]["config_overlay_dir"]
         if not rel_dir:
             return
         dir = os.path.join(
-            os.path.dirname(os.path.abspath(config_file)), rel_dir)
-        for path in sorted(glob.glob(os.path.join(dir, '*-lazr.conf'))):
+            os.path.dirname(os.path.abspath(config_file)), rel_dir
+        )
+        for path in sorted(glob.glob(os.path.join(dir, "*-lazr.conf"))):
             with open(path) as f:
                 text = f.read()
             self._config.push(path, text)
@@ -233,25 +237,29 @@ class LaunchpadConfig:
 
         Call this method before letting any ZCML processing occur.
         """
-        loader_file = os.path.join(self.root, 'zcml/+config-overrides.zcml')
-        loader = open_for_writing(loader_file, 'w')
+        loader_file = os.path.join(self.root, "zcml/+config-overrides.zcml")
+        loader = open_for_writing(loader_file, "w")
 
-        print("""
+        print(
+            """
             <configure xmlns="http://namespaces.zope.org/zope">
                 <!-- This file automatically generated using
                      lp.services.config.LaunchpadConfig.generate_overrides.
                      DO NOT EDIT. -->
                 <include files="%s/*.zcml" />
-                </configure>""" % self.config_dir, file=loader)
+                </configure>"""
+            % self.config_dir,
+            file=loader,
+        )
         loader.close()
 
-    def appserver_root_url(self, facet='mainsite', ensureSlash=False):
+    def appserver_root_url(self, facet="mainsite", ensureSlash=False):
         """Return the correct app server root url for the given facet."""
         root_url = str(getattr(self.vhost, facet).rooturl)
         if not ensureSlash:
-            return root_url.rstrip('/')
-        if not root_url.endswith('/'):
-            return root_url + '/'
+            return root_url.rstrip("/")
+        if not root_url.endswith("/"):
+            return root_url + "/"
         return root_url
 
     def __getattr__(self, name):
@@ -293,15 +301,24 @@ class DatabaseConfigOverrides:
 
 class DatabaseConfig:
     """A class to provide the Launchpad database configuration."""
+
     _config_section = None
-    _db_config_attrs = frozenset([
-        'dbuser',
-        'rw_main_primary', 'rw_main_standby',
-        'db_statement_timeout', 'db_statement_timeout_precision',
-        'isolation_level', 'soft_request_timeout',
-        'storm_cache', 'storm_cache_size'])
-    _db_config_required_attrs = frozenset([
-        'dbuser', 'rw_main_primary', 'rw_main_standby'])
+    _db_config_attrs = frozenset(
+        [
+            "dbuser",
+            "rw_main_primary",
+            "rw_main_standby",
+            "db_statement_timeout",
+            "db_statement_timeout_precision",
+            "isolation_level",
+            "soft_request_timeout",
+            "storm_cache",
+            "storm_cache_size",
+        ]
+    )
+    _db_config_required_attrs = frozenset(
+        ["dbuser", "rw_main_primary", "rw_main_standby"]
+    )
 
     def __init__(self):
         self.reset()
@@ -312,7 +329,7 @@ class DatabaseConfig:
 
     @cachedproperty
     def main_standby(self):
-        return random.choice(self.rw_main_standby.split(','))
+        return random.choice(self.rw_main_standby.split(","))
 
     # XXX cjwatson 2021-10-01: Remove these once Launchpad's store flavors
     # have been renamed.
@@ -331,13 +348,14 @@ class DatabaseConfig:
         """
         for attr, value in kwargs.items():
             assert attr in self._db_config_attrs, (
-                "%s cannot be overridden" % attr)
+                "%s cannot be overridden" % attr
+            )
             if value is None:
                 if hasattr(self.overrides, attr):
                     delattr(self.overrides, attr)
             else:
                 setattr(self.overrides, attr, value)
-                if attr == 'rw_main_standby':
+                if attr == "rw_main_standby":
                     del get_property_cache(self).main_standby
 
     def reset(self):
@@ -365,7 +383,7 @@ class DatabaseConfig:
                 break
         # Some values must be provided by the config
         if value is None and name in self._db_config_required_attrs:
-            raise ValueError('%s must be set' % name)
+            raise ValueError("%s must be set" % name)
         return value
 
 
diff --git a/lib/lp/services/config/fixture.py b/lib/lp/services/config/fixture.py
index 1f4fc21..ef9e03f 100644
--- a/lib/lp/services/config/fixture.py
+++ b/lib/lp/services/config/fixture.py
@@ -4,15 +4,15 @@
 """Fixtures related to configs."""
 
 __all__ = [
-    'ConfigFixture',
-    'ConfigMismatchError',
-    'ConfigUseFixture',
-    ]
+    "ConfigFixture",
+    "ConfigMismatchError",
+    "ConfigUseFixture",
+]
 
-from configparser import RawConfigParser
 import io
 import os.path
 import shutil
+from configparser import RawConfigParser
 from textwrap import dedent
 
 from fixtures import Fixture
@@ -27,10 +27,12 @@ class ConfigMismatchError(Exception):
 class ConfigFixture(Fixture):
     """Create a unique launchpad config."""
 
-    _extend_str = dedent("""\
+    _extend_str = dedent(
+        """\
         [meta]
         extends: ../%s/launchpad-lazr.conf
-        """)
+        """
+    )
 
     def __init__(self, instance_name, copy_from_instance):
         """Create a ConfigFixture.
@@ -54,19 +56,20 @@ class ConfigFixture(Fixture):
             with open(conf_filename) as conf_file:
                 conf_data = conf_file.read()
         else:
-            conf_data = ''
+            conf_data = ""
         return self._parseConfigData(conf_data, conf_filename)
 
     def _writeConfigFile(self, parser, conf_filename):
         """Write a parsed config to a file."""
-        with open(conf_filename, 'w') as conf_file:
+        with open(conf_filename, "w") as conf_file:
             for i, section in enumerate(parser.sections()):
                 if i:
-                    conf_file.write('\n')
-                conf_file.write('[%s]\n' % section)
+                    conf_file.write("\n")
+                conf_file.write("[%s]\n" % section)
                 for key, value in parser.items(section):
                     conf_file.write(
-                        '%s: %s\n' % (key, str(value).replace('\n', '\n\t')))
+                        "%s: %s\n" % (key, str(value).replace("\n", "\n\t"))
+                    )
 
     def _refresh(self):
         """Trigger a config refresh if necessary.
@@ -79,10 +82,11 @@ class ConfigFixture(Fixture):
 
     def add_section(self, sectioncontent):
         """Add sectioncontent to the lazr config."""
-        conf_filename = os.path.join(self.absroot, 'launchpad-lazr.conf')
+        conf_filename = os.path.join(self.absroot, "launchpad-lazr.conf")
         parser = self._parseConfigFile(conf_filename)
         add_parser = self._parseConfigData(
-            sectioncontent, '<configuration to add>')
+            sectioncontent, "<configuration to add>"
+        )
         for section in add_parser.sections():
             if not parser.has_section(section):
                 parser.add_section(section)
@@ -93,10 +97,11 @@ class ConfigFixture(Fixture):
 
     def remove_section(self, sectioncontent):
         """Remove sectioncontent from the lazr config."""
-        conf_filename = os.path.join(self.absroot, 'launchpad-lazr.conf')
+        conf_filename = os.path.join(self.absroot, "launchpad-lazr.conf")
         parser = self._parseConfigFile(conf_filename)
         remove_parser = self._parseConfigData(
-            sectioncontent, '<configuration to remove>')
+            sectioncontent, "<configuration to remove>"
+        )
         for section in remove_parser.sections():
             if not parser.has_section(section):
                 continue
@@ -107,9 +112,9 @@ class ConfigFixture(Fixture):
                 if value != current_value:
                     raise ConfigMismatchError(
                         "Can't remove %s.%s option from %s: "
-                        "expected value '%s', current value '%s'" % (
-                            section, name, conf_filename,
-                            value, current_value))
+                        "expected value '%s', current value '%s'"
+                        % (section, name, conf_filename, value, current_value)
+                    )
                 parser.remove_option(section, name)
             if not parser.options(section):
                 parser.remove_section(section)
@@ -117,17 +122,17 @@ class ConfigFixture(Fixture):
         self._refresh()
 
     def _setUp(self):
-        root = os.path.join(config.root, 'configs', self.instance_name)
+        root = os.path.join(config.root, "configs", self.instance_name)
         os.mkdir(root)
         self.absroot = os.path.abspath(root)
         self.addCleanup(shutil.rmtree, self.absroot)
-        source = os.path.join(config.root, 'configs', self.copy_from_instance)
+        source = os.path.join(config.root, "configs", self.copy_from_instance)
         for entry in os.scandir(source):
-            if entry.name == 'launchpad-lazr.conf':
+            if entry.name == "launchpad-lazr.conf":
                 self.add_section(self._extend_str % self.copy_from_instance)
                 continue
             with open(entry.path) as input:
-                with open(os.path.join(root, entry.name), 'w') as out:
+                with open(os.path.join(root, entry.name), "w") as out:
                     out.write(input.read())
 
 
diff --git a/lib/lp/services/config/tests/test_config.py b/lib/lp/services/config/tests/test_config.py
index 4161eb8..bf990dc 100644
--- a/lib/lp/services/config/tests/test_config.py
+++ b/lib/lp/services/config/tests/test_config.py
@@ -4,29 +4,24 @@
 # We know we are not using root and handlers.
 """Test lp.services.config."""
 
-from doctest import (
-    DocTestSuite,
-    ELLIPSIS,
-    NORMALIZE_WHITESPACE,
-    )
 import os
 import unittest
+from doctest import ELLIPSIS, NORMALIZE_WHITESPACE, DocTestSuite
 
+import testtools
 from fixtures import TempDir
 from lazr.config import ConfigSchema
 from lazr.config.interfaces import ConfigErrors
-import testtools
 
 import lp.services.config
 from lp.services.config.fixture import ConfigUseFixture
 
-
 # Configs that shouldn't be tested.
-EXCLUDED_CONFIGS = ['lpnet-template']
+EXCLUDED_CONFIGS = ["lpnet-template"]
 
 # Calculate some landmark paths.
 here = os.path.dirname(lp.services.config.__file__)
-lazr_schema_file = os.path.join(here, 'schema-lazr.conf')
+lazr_schema_file = os.path.join(here, "schema-lazr.conf")
 
 
 def make_config_test(config_file, description):
@@ -35,8 +30,10 @@ def make_config_test(config_file, description):
     The config file name is shown in the output of test.py -vv. eg.
     (lp.services.config.tests.test_config.../configs/schema.lazr.conf)
     """
+
     class LAZRConfigTestCase(unittest.TestCase):
         """Test a lazr.config."""
+
         def testConfig(self):
             """Validate the config against the schema.
 
@@ -47,16 +44,16 @@ def make_config_test(config_file, description):
             try:
                 config.validate()
             except ConfigErrors as error:
-                message = '\n'.join([str(e) for e in error.errors])
+                message = "\n".join([str(e) for e in error.errors])
                 self.fail(message)
+
     # Hack the config file name into the class name.
-    LAZRConfigTestCase.__name__ = '../' + description
+    LAZRConfigTestCase.__name__ = "../" + description
     LAZRConfigTestCase.__qualname__ = LAZRConfigTestCase.__name__
     return LAZRConfigTestCase
 
 
 class TestLaunchpadConfig(testtools.TestCase):
-
     def test_dir(self):
         # dir(config) returns methods, variables and section names.
         config = lp.services.config.config
@@ -79,56 +76,65 @@ class TestLaunchpadConfig(testtools.TestCase):
         # The launchpad.config_overlay_dir setting can be used to load
         # extra config files over the top. This is useful for overlaying
         # non-version-controlled secrets.
-        config_dir = self.useFixture(TempDir(rootdir='configs'))
+        config_dir = self.useFixture(TempDir(rootdir="configs"))
         config_name = os.path.basename(config_dir.path)
-        overlay_dir = self.useFixture(TempDir(rootdir='configs'))
-        with open(config_dir.join('launchpad-lazr.conf'), 'w') as f:
-            f.write("""
+        overlay_dir = self.useFixture(TempDir(rootdir="configs"))
+        with open(config_dir.join("launchpad-lazr.conf"), "w") as f:
+            f.write(
+                """
                 [meta]
                 extends: ../testrunner/launchpad-lazr.conf
 
                 [launchpad]
                 config_overlay_dir: ../%s
-                """ % os.path.basename(overlay_dir.path))
+                """
+                % os.path.basename(overlay_dir.path)
+            )
 
         config = lp.services.config.config
 
         with ConfigUseFixture(config_name):
-            self.assertEqual('launchpad_main', config.launchpad.dbuser)
-            self.assertEqual('', config.launchpad.site_message)
+            self.assertEqual("launchpad_main", config.launchpad.dbuser)
+            self.assertEqual("", config.launchpad.site_message)
 
-        with open(overlay_dir.join('00-test-lazr.conf'), 'w') as f:
-            f.write("""
+        with open(overlay_dir.join("00-test-lazr.conf"), "w") as f:
+            f.write(
+                """
                 [launchpad]
                 dbuser: overlay-user
                 site_message: An overlay!
-                """)
+                """
+            )
         with ConfigUseFixture(config_name):
-            self.assertEqual('overlay-user', config.launchpad.dbuser)
-            self.assertEqual('An overlay!', config.launchpad.site_message)
+            self.assertEqual("overlay-user", config.launchpad.dbuser)
+            self.assertEqual("An overlay!", config.launchpad.site_message)
 
-        with open(overlay_dir.join('01-test-lazr.conf'), 'w') as f:
-            f.write("""
+        with open(overlay_dir.join("01-test-lazr.conf"), "w") as f:
+            f.write(
+                """
                 [launchpad]
                 site_message: Another overlay!
-                """)
+                """
+            )
         with ConfigUseFixture(config_name):
-            self.assertEqual('overlay-user', config.launchpad.dbuser)
-            self.assertEqual('Another overlay!', config.launchpad.site_message)
+            self.assertEqual("overlay-user", config.launchpad.dbuser)
+            self.assertEqual("Another overlay!", config.launchpad.site_message)
 
-        os.unlink(overlay_dir.join('00-test-lazr.conf'))
+        os.unlink(overlay_dir.join("00-test-lazr.conf"))
         with ConfigUseFixture(config_name):
-            self.assertEqual('launchpad_main', config.launchpad.dbuser)
-            self.assertEqual('Another overlay!', config.launchpad.site_message)
+            self.assertEqual("launchpad_main", config.launchpad.dbuser)
+            self.assertEqual("Another overlay!", config.launchpad.site_message)
 
 
 def test_suite():
     """Return a suite of canonical.conf and all conf files."""
     suite = unittest.TestSuite()
-    suite.addTest(DocTestSuite(
-        'lp.services.config',
-        optionflags=NORMALIZE_WHITESPACE | ELLIPSIS,
-        ))
+    suite.addTest(
+        DocTestSuite(
+            "lp.services.config",
+            optionflags=NORMALIZE_WHITESPACE | ELLIPSIS,
+        )
+    )
     load_testcase = unittest.defaultTestLoader.loadTestsFromTestCase
     # Add a test for every launchpad[.lazr].conf file in our tree.
     for config_dir in lp.services.config.CONFIG_ROOT_DIRS:
@@ -137,7 +143,7 @@ def test_suite():
                 del dirnames[:]  # Don't look in subdirectories.
                 continue
             for filename in filenames:
-                if filename.endswith('-lazr.conf'):
+                if filename.endswith("-lazr.conf"):
                     # Test the lazr.config conf files.
                     config_file = os.path.join(dirpath, filename)
                     description = os.path.relpath(config_file, config_dir)
diff --git a/lib/lp/services/config/tests/test_config_lookup.py b/lib/lp/services/config/tests/test_config_lookup.py
index d0b4bcf..4f04561 100644
--- a/lib/lp/services/config/tests/test_config_lookup.py
+++ b/lib/lp/services/config/tests/test_config_lookup.py
@@ -7,69 +7,66 @@ __all__ = []
 
 import os
 import shutil
-from tempfile import (
-    mkdtemp,
-    NamedTemporaryFile,
-    )
+from tempfile import NamedTemporaryFile, mkdtemp
 
 from lp.services import config
 from lp.testing import TestCase
 
 
 class TestConfigLookup(TestCase):
-
     def setUp(self):
         super().setUp()
         self.temp_lookup_file = None
         self.original_CONFIG_LOOKUP_FILES = config.CONFIG_LOOKUP_FILES
-        self.original_LPCONFIG = os.environ['LPCONFIG']
+        self.original_LPCONFIG = os.environ["LPCONFIG"]
 
     def tearDown(self):
         del self.temp_lookup_file
         config.CONFIG_LOOKUP_FILES = self.original_CONFIG_LOOKUP_FILES
-        os.environ['LPCONFIG'] = self.original_LPCONFIG
+        os.environ["LPCONFIG"] = self.original_LPCONFIG
         super().tearDown()
 
     def makeLookupFile(self):
         self.temp_lookup_file = NamedTemporaryFile()
-        self.temp_lookup_file.write(b'\nfrom_disk \n')
+        self.temp_lookup_file.write(b"\nfrom_disk \n")
         self.temp_lookup_file.flush()
         config.CONFIG_LOOKUP_FILES = [
-            NamedTemporaryFile().name, self.temp_lookup_file.name]
+            NamedTemporaryFile().name,
+            self.temp_lookup_file.name,
+        ]
 
     def testByEnvironment(self):
         # Create the lookup file to demonstrate it is overridden.
         self.makeLookupFile()
 
-        os.environ['LPCONFIG'] = 'from_env'
+        os.environ["LPCONFIG"] = "from_env"
 
-        self.assertEqual(config.find_instance_name(), 'from_env')
+        self.assertEqual(config.find_instance_name(), "from_env")
 
     def testByFile(self):
         # Create the lookup file.
         self.makeLookupFile()
 
         # Trash the environment variable so it doesn't override.
-        del os.environ['LPCONFIG']
+        del os.environ["LPCONFIG"]
 
-        self.assertEqual(config.find_instance_name(), 'from_disk')
+        self.assertEqual(config.find_instance_name(), "from_disk")
 
     def testByDefault(self):
         # Trash the environment variable so it doesn't override.
-        del os.environ['LPCONFIG']
+        del os.environ["LPCONFIG"]
 
         self.assertEqual(config.find_instance_name(), config.DEFAULT_CONFIG)
 
 
 class ConfigTestCase(TestCase):
-    """Base test case that provides fixtures for testing configuration.
-    """
+    """Base test case that provides fixtures for testing configuration."""
 
     def setUpConfigRoots(self):
         """Create an alternate config roots."""
-        if hasattr(self, 'temp_config_root_dir'):
+        if hasattr(self, "temp_config_root_dir"):
             return
-        self.temp_config_root_dir = mkdtemp('configs')
+        self.temp_config_root_dir = mkdtemp("configs")
         self.original_root_dirs = config.CONFIG_ROOT_DIRS
         config.CONFIG_ROOT_DIRS = [self.temp_config_root_dir]
         self.addCleanup(self.tearDownConfigRoots)
@@ -86,13 +83,14 @@ class ConfigTestCase(TestCase):
         """
         self.setUpConfigRoots()
         instance_config_dir = os.path.join(
-            self.temp_config_root_dir, instance_name)
+            self.temp_config_root_dir, instance_name
+        )
         os.mkdir(instance_config_dir)
 
         # Create an empty config file.
         open(
-            os.path.join(instance_config_dir, 'launchpad-lazr.conf'),
-            'w').close()
+            os.path.join(instance_config_dir, "launchpad-lazr.conf"), "w"
+        ).close()
         return instance_config_dir
 
 
@@ -104,21 +102,22 @@ class TestInstanceConfigDirLookup(ConfigTestCase):
         self.setUpConfigRoots()
 
     def test_find_config_dir_raises_ValueError(self):
-        self.assertRaises(
-            ValueError, config.find_config_dir, 'no_instance')
+        self.assertRaises(ValueError, config.find_config_dir, "no_instance")
 
     def test_find_config_dir(self):
-        instance_config_dir = self.setUpInstanceConfig('an_instance')
+        instance_config_dir = self.setUpInstanceConfig("an_instance")
         self.assertEqual(
-            instance_config_dir, config.find_config_dir('an_instance'))
+            instance_config_dir, config.find_config_dir("an_instance")
+        )
 
     def test_Config_uses_find_config_dir(self):
-        instance_config_dir = self.setUpInstanceConfig('an_instance')
+        instance_config_dir = self.setUpInstanceConfig("an_instance")
         # Create a very simple config file.
-        cfg = config.LaunchpadConfig('an_instance')
+        cfg = config.LaunchpadConfig("an_instance")
         config_file = open(
-            os.path.join(instance_config_dir, 'launchpad-lazr.conf'), 'w')
-        config_file.write('[launchpad]\ndefault_batch_size=2323')
+            os.path.join(instance_config_dir, "launchpad-lazr.conf"), "w"
+        )
+        config_file.write("[launchpad]\ndefault_batch_size=2323")
         config_file.close()
 
         self.assertEqual(2323, cfg.launchpad.default_batch_size)
@@ -128,15 +127,16 @@ class TestGenerateOverrides(ConfigTestCase):
     """Test the generate_overrides method of LaunchpadConfig."""
 
     def test_generate_overrides(self):
-        instance_dir = self.setUpInstanceConfig('zcmltest')
-        cfg = config.LaunchpadConfig('zcmltest')
+        instance_dir = self.setUpInstanceConfig("zcmltest")
+        cfg = config.LaunchpadConfig("zcmltest")
         # The ZCML override file is generated in the root of the tree.
         # Set that root to the temporary directory.
         cfg.root = self.temp_config_root_dir
         cfg.generate_overrides()
-        override_file = os.path.join(cfg.root, 'zcml/+config-overrides.zcml')
+        override_file = os.path.join(cfg.root, "zcml/+config-overrides.zcml")
         self.assertTrue(
-            os.path.isfile(override_file), "Overrides file wasn't created.")
+            os.path.isfile(override_file), "Overrides file wasn't created."
+        )
 
         fh = open(override_file)
         overrides = fh.read()
@@ -145,5 +145,6 @@ class TestGenerateOverrides(ConfigTestCase):
         magic_line = '<include files="%s/*.zcml" />' % instance_dir
         self.assertTrue(
             magic_line in overrides,
-            "Overrides doesn't contain the magic include line (%s):\n%s" %
-            (magic_line, overrides))
+            "Overrides doesn't contain the magic include line (%s):\n%s"
+            % (magic_line, overrides),
+        )
diff --git a/lib/lp/services/config/tests/test_database_config.py b/lib/lp/services/config/tests/test_database_config.py
index e600b77..84b4dd3 100644
--- a/lib/lp/services/config/tests/test_database_config.py
+++ b/lib/lp/services/config/tests/test_database_config.py
@@ -14,32 +14,32 @@ class TestDatabaseConfig(TestCase):
     def test_override(self):
         # dbuser and isolation_level can be overridden at runtime.
         dbc = DatabaseConfig()
-        self.assertEqual('launchpad_main', dbc.dbuser)
-        self.assertEqual('repeatable_read', dbc.isolation_level)
+        self.assertEqual("launchpad_main", dbc.dbuser)
+        self.assertEqual("repeatable_read", dbc.isolation_level)
 
         # dbuser and isolation_level overrides both work.
-        dbc.override(dbuser='not_launchpad', isolation_level='autocommit')
-        self.assertEqual('not_launchpad', dbc.dbuser)
-        self.assertEqual('autocommit', dbc.isolation_level)
+        dbc.override(dbuser="not_launchpad", isolation_level="autocommit")
+        self.assertEqual("not_launchpad", dbc.dbuser)
+        self.assertEqual("autocommit", dbc.isolation_level)
 
         # Overriding dbuser again preserves the isolation_level override.
-        dbc.override(dbuser='also_not_launchpad')
-        self.assertEqual('also_not_launchpad', dbc.dbuser)
-        self.assertEqual('autocommit', dbc.isolation_level)
+        dbc.override(dbuser="also_not_launchpad")
+        self.assertEqual("also_not_launchpad", dbc.dbuser)
+        self.assertEqual("autocommit", dbc.isolation_level)
 
         # Overriding with None removes the override.
         dbc.override(dbuser=None, isolation_level=None)
-        self.assertEqual('launchpad_main', dbc.dbuser)
-        self.assertEqual('repeatable_read', dbc.isolation_level)
+        self.assertEqual("launchpad_main", dbc.dbuser)
+        self.assertEqual("repeatable_read", dbc.isolation_level)
 
     def test_reset(self):
         # reset() removes any overrides.
         dbc = DatabaseConfig()
-        self.assertEqual('launchpad_main', dbc.dbuser)
-        dbc.override(dbuser='not_launchpad')
-        self.assertEqual('not_launchpad', dbc.dbuser)
+        self.assertEqual("launchpad_main", dbc.dbuser)
+        dbc.override(dbuser="not_launchpad")
+        self.assertEqual("not_launchpad", dbc.dbuser)
         dbc.reset()
-        self.assertEqual('launchpad_main', dbc.dbuser)
+        self.assertEqual("launchpad_main", dbc.dbuser)
 
     def test_main_standby(self):
         # If rw_main_standby is a comma-separated list, then the
@@ -48,13 +48,14 @@ class TestDatabaseConfig(TestCase):
         dbc = DatabaseConfig()
         original_standby = dbc.main_standby
         standbys = [
-            'dbname=launchpad_standby1 port=5433',
-            'dbname=launchpad_standby2 port=5433',
-            ]
-        dbc.override(rw_main_standby=','.join(standbys))
+            "dbname=launchpad_standby1 port=5433",
+            "dbname=launchpad_standby2 port=5433",
+        ]
+        dbc.override(rw_main_standby=",".join(standbys))
         selected_standby = dbc.main_standby
         self.assertIn(selected_standby, standbys)
         self.assertEqual(
-            selected_standby, get_property_cache(dbc).main_standby)
+            selected_standby, get_property_cache(dbc).main_standby
+        )
         dbc.reset()
         self.assertEqual(original_standby, dbc.main_standby)
diff --git a/lib/lp/services/config/tests/test_doc.py b/lib/lp/services/config/tests/test_doc.py
index 942d84e..708bdba 100644
--- a/lib/lp/services/config/tests/test_doc.py
+++ b/lib/lp/services/config/tests/test_doc.py
@@ -10,7 +10,6 @@ import os
 from lp.services.testing import build_test_suite
 from lp.testing.layers import LaunchpadFunctionalLayer
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 
diff --git a/lib/lp/services/config/tests/test_fixture.py b/lib/lp/services/config/tests/test_fixture.py
index f83acc9..8c16fcd 100644
--- a/lib/lp/services/config/tests/test_fixture.py
+++ b/lib/lp/services/config/tests/test_fixture.py
@@ -11,83 +11,101 @@ from lp.services.config.fixture import (
     ConfigFixture,
     ConfigMismatchError,
     ConfigUseFixture,
-    )
+)
 from lp.testing import TestCase
 
 
 class TestConfigUseFixture(TestCase):
-
     def test_sets_restores_instance(self):
-        fixture = ConfigUseFixture('foo')
+        fixture = ConfigUseFixture("foo")
         orig_instance = config.instance_name
         fixture.setUp()
         try:
-            self.assertEqual('foo', config.instance_name)
+            self.assertEqual("foo", config.instance_name)
         finally:
             fixture.cleanUp()
         self.assertEqual(orig_instance, config.instance_name)
 
 
 class TestConfigFixture(TestCase):
-
     def test_copies_and_derives(self):
-        fixture = ConfigFixture('testtestconfig', 'testrunner')
+        fixture = ConfigFixture("testtestconfig", "testrunner")
         to_copy = [
-            'test-process-lazr.conf',
-            ]
+            "test-process-lazr.conf",
+        ]
         fixture.setUp()
         try:
             for base in to_copy:
-                path = 'configs/testtestconfig/' + base
-                source = 'configs/testrunner/' + base
-                with open(source, 'rb') as f:
+                path = "configs/testtestconfig/" + base
+                source = "configs/testrunner/" + base
+                with open(source, "rb") as f:
                     old = f.read()
-                with open(path, 'rb') as f:
+                with open(path, "rb") as f:
                     new = f.read()
                 self.assertEqual(old, new)
-            confpath = 'configs/testtestconfig/launchpad-lazr.conf'
+            confpath = "configs/testtestconfig/launchpad-lazr.conf"
             with open(confpath) as f:
                 lazr_config = f.read()
             self.assertEqual(
-                "[meta]\n"
-                "extends: ../testrunner/launchpad-lazr.conf",
-                lazr_config.strip())
+                "[meta]\n" "extends: ../testrunner/launchpad-lazr.conf",
+                lazr_config.strip(),
+            )
         finally:
             fixture.cleanUp()
 
     def test_add_and_remove_section(self):
-        fixture = ConfigFixture('testtestconfig', 'testrunner')
+        fixture = ConfigFixture("testtestconfig", "testrunner")
         fixture.setUp()
         try:
-            confpath = 'configs/testtestconfig/launchpad-lazr.conf'
+            confpath = "configs/testtestconfig/launchpad-lazr.conf"
             with open(confpath) as f:
                 lazr_config = f.read()
-            self.assertEqual(dedent("""\
+            self.assertEqual(
+                dedent(
+                    """\
                 [meta]
                 extends: ../testrunner/launchpad-lazr.conf
-                """), lazr_config)
-
-            fixture.add_section(dedent("""\
+                """
+                ),
+                lazr_config,
+            )
+
+            fixture.add_section(
+                dedent(
+                    """\
                 [test1]
                 key: false
-                """))
+                """
+                )
+            )
             with open(confpath) as f:
                 lazr_config = f.read()
-            self.assertEqual(dedent("""\
+            self.assertEqual(
+                dedent(
+                    """\
                 [meta]
                 extends: ../testrunner/launchpad-lazr.conf
 
                 [test1]
                 key: false
-                """), lazr_config)
-
-            fixture.add_section(dedent("""\
+                """
+                ),
+                lazr_config,
+            )
+
+            fixture.add_section(
+                dedent(
+                    """\
                 [test2]
                 key: true
-                """))
+                """
+                )
+            )
             with open(confpath) as f:
                 lazr_config = f.read()
-            self.assertEqual(dedent("""\
+            self.assertEqual(
+                dedent(
+                    """\
                 [meta]
                 extends: ../testrunner/launchpad-lazr.conf
 
@@ -96,44 +114,64 @@ class TestConfigFixture(TestCase):
 
                 [test2]
                 key: true
-                """), lazr_config)
-
-            fixture.remove_section(dedent("""\
+                """
+                ),
+                lazr_config,
+            )
+
+            fixture.remove_section(
+                dedent(
+                    """\
                 [test1]
                 key: false
-                """))
+                """
+                )
+            )
             with open(confpath) as f:
                 lazr_config = f.read()
-            self.assertEqual(dedent("""\
+            self.assertEqual(
+                dedent(
+                    """\
                 [meta]
                 extends: ../testrunner/launchpad-lazr.conf
 
                 [test2]
                 key: true
-                """), lazr_config)
+                """
+                ),
+                lazr_config,
+            )
         finally:
             fixture.cleanUp()
 
     def test_remove_section_unexpected_value(self):
-        fixture = ConfigFixture('testtestconfig', 'testrunner')
+        fixture = ConfigFixture("testtestconfig", "testrunner")
         fixture.setUp()
         try:
             confpath = os.path.abspath(
-                'configs/testtestconfig/launchpad-lazr.conf')
+                "configs/testtestconfig/launchpad-lazr.conf"
+            )
 
-            fixture.add_section(dedent("""\
+            fixture.add_section(
+                dedent(
+                    """\
                 [test1]
                 key: false
-                """))
+                """
+                )
+            )
 
             self.assertRaisesWithContent(
                 ConfigMismatchError,
                 "Can't remove test1.key option from %s: "
                 "expected value 'true', current value 'false'" % confpath,
                 fixture.remove_section,
-                dedent("""\
+                dedent(
+                    """\
                     [test1]
                     key: true
-                    """))
+                    """
+                ),
+            )
         finally:
             fixture.cleanUp()
diff --git a/lib/lp/services/crypto/interfaces.py b/lib/lp/services/crypto/interfaces.py
index ea251ec..a9d4ac7 100644
--- a/lib/lp/services/crypto/interfaces.py
+++ b/lib/lp/services/crypto/interfaces.py
@@ -4,14 +4,11 @@
 """Interface to data encrypted at rest using configured keys."""
 
 __all__ = [
-    'CryptoError',
-    'IEncryptedContainer',
-    ]
+    "CryptoError",
+    "IEncryptedContainer",
+]
 
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
+from zope.interface import Attribute, Interface
 
 
 class CryptoError(Exception):
@@ -23,7 +20,8 @@ class IEncryptedContainer(Interface):
 
     can_encrypt = Attribute(
         "True iff this container has the configuration it needs to encrypt "
-        "data.")
+        "data."
+    )
 
     def encrypt(data):
         """Encrypt a blob of data to a JSON-serialisable form.
@@ -39,7 +37,8 @@ class IEncryptedContainer(Interface):
 
     can_decrypt = Attribute(
         "True iff this container has the configuration it needs to decrypt "
-        "data.")
+        "data."
+    )
 
     def decrypt(data):
         """Decrypt data that was encrypted by L{encrypt}.
diff --git a/lib/lp/services/crypto/model.py b/lib/lp/services/crypto/model.py
index e2467c2..4db7840 100644
--- a/lib/lp/services/crypto/model.py
+++ b/lib/lp/services/crypto/model.py
@@ -4,23 +4,16 @@
 """A container for data encrypted at rest using configured keys."""
 
 __all__ = [
-    'NaClEncryptedContainerBase',
-    ]
+    "NaClEncryptedContainerBase",
+]
 
 import base64
 
 from nacl.exceptions import CryptoError as NaClCryptoError
-from nacl.public import (
-    PrivateKey,
-    PublicKey,
-    SealedBox,
-    )
+from nacl.public import PrivateKey, PublicKey, SealedBox
 from zope.interface import implementer
 
-from lp.services.crypto.interfaces import (
-    CryptoError,
-    IEncryptedContainer,
-    )
+from lp.services.crypto.interfaces import CryptoError, IEncryptedContainer
 
 
 @implementer(IEncryptedContainer)
@@ -66,7 +59,8 @@ class NaClEncryptedContainerBase:
             raise CryptoError(str(e)) from e
         return (
             base64.b64encode(self.public_key_bytes).decode("UTF-8"),
-            base64.b64encode(data_encrypted).decode("UTF-8"))
+            base64.b64encode(data_encrypted).decode("UTF-8"),
+        )
 
     @property
     def private_key_bytes(self):
@@ -104,8 +98,9 @@ class NaClEncryptedContainerBase:
             raise CryptoError(str(e)) from e
         if public_key_bytes != self.public_key_bytes:
             raise ValueError(
-                "Public key %r does not match configured public key %r" %
-                (public_key_bytes, self.public_key_bytes))
+                "Public key %r does not match configured public key %r"
+                % (public_key_bytes, self.public_key_bytes)
+            )
         if self.private_key is None:
             raise ValueError("No private key configured")
         try:
diff --git a/lib/lp/services/crypto/scripts/generatekeypair.py b/lib/lp/services/crypto/scripts/generatekeypair.py
index fa8e57a..aab9f44 100644
--- a/lib/lp/services/crypto/scripts/generatekeypair.py
+++ b/lib/lp/services/crypto/scripts/generatekeypair.py
@@ -8,7 +8,7 @@ in Launchpad configuration files.  The private key should only be stored in
 secret overlays on systems that need it.
 """
 
-__all__ = ['main']
+__all__ = ["main"]
 
 import argparse
 import base64
@@ -17,15 +17,16 @@ from nacl.public import PrivateKey
 
 
 def encode_key(key):
-    return base64.b64encode(key.encode()).decode('ASCII')
+    return base64.b64encode(key.encode()).decode("ASCII")
 
 
 def main():
     parser = argparse.ArgumentParser(
         description=__doc__,
-        formatter_class=argparse.RawDescriptionHelpFormatter)
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
     parser.parse_args()
 
     key = PrivateKey.generate()
-    print('Private: ' + encode_key(key))
-    print('Public:  ' + encode_key(key.public_key))
+    print("Private: " + encode_key(key))
+    print("Public:  " + encode_key(key.public_key))
diff --git a/lib/lp/services/crypto/scripts/tests/test_generatekeypair.py b/lib/lp/services/crypto/scripts/tests/test_generatekeypair.py
index 6c322f3..5543fef 100644
--- a/lib/lp/services/crypto/scripts/tests/test_generatekeypair.py
+++ b/lib/lp/services/crypto/scripts/tests/test_generatekeypair.py
@@ -6,15 +6,9 @@
 import base64
 
 from fixtures import MockPatch
-from nacl.public import (
-    PrivateKey,
-    PublicKey,
-    )
+from nacl.public import PrivateKey, PublicKey
 from testtools.content import text_content
-from testtools.matchers import (
-    MatchesListwise,
-    StartsWith,
-    )
+from testtools.matchers import MatchesListwise, StartsWith
 
 from lp.services.crypto.scripts.generatekeypair import main as gkp_main
 from lp.testing import TestCase
@@ -22,14 +16,13 @@ from lp.testing.fixture import CapturedOutput
 
 
 def decode_key(factory, data):
-    return factory(base64.b64decode(data.encode('ASCII')))
+    return factory(base64.b64decode(data.encode("ASCII")))
 
 
 class TestGenerateKeyPair(TestCase):
-
     def runScript(self, args, expect_exit=False):
         try:
-            with MockPatch('sys.argv', ['version-info'] + args):
+            with MockPatch("sys.argv", ["version-info"] + args):
                 with CapturedOutput() as captured:
                     gkp_main()
         except SystemExit:
@@ -38,27 +31,33 @@ class TestGenerateKeyPair(TestCase):
             exited = False
         stdout = captured.stdout.getvalue()
         stderr = captured.stderr.getvalue()
-        self.addDetail('stdout', text_content(stdout))
-        self.addDetail('stderr', text_content(stderr))
+        self.addDetail("stdout", text_content(stdout))
+        self.addDetail("stderr", text_content(stderr))
         if expect_exit:
             if not exited:
-                raise AssertionError('Script unexpectedly exited successfully')
+                raise AssertionError("Script unexpectedly exited successfully")
         else:
             if exited:
                 raise AssertionError(
-                    'Script unexpectedly exited unsuccessfully')
-            self.assertEqual('', stderr)
+                    "Script unexpectedly exited unsuccessfully"
+                )
+            self.assertEqual("", stderr)
         return stdout
 
     def test_bad_arguments(self):
-        self.runScript(['--nonsense'], expect_exit=True)
+        self.runScript(["--nonsense"], expect_exit=True)
 
     def test_generates_key_pair(self):
         lines = self.runScript([]).splitlines()
-        self.assertThat(lines, MatchesListwise([
-            StartsWith('Private: '),
-            StartsWith('Public:  '),
-            ]))
-        private_key = decode_key(PrivateKey, lines[0][len('Private: '):])
-        public_key = decode_key(PublicKey, lines[1][len('Public:  '):])
+        self.assertThat(
+            lines,
+            MatchesListwise(
+                [
+                    StartsWith("Private: "),
+                    StartsWith("Public:  "),
+                ]
+            ),
+        )
+        private_key = decode_key(PrivateKey, lines[0][len("Private: ") :])
+        public_key = decode_key(PublicKey, lines[1][len("Public:  ") :])
         self.assertEqual(public_key, private_key.public_key)
diff --git a/lib/lp/services/crypto/tests/test_model.py b/lib/lp/services/crypto/tests/test_model.py
index 3368797..5b6921d 100644
--- a/lib/lp/services/crypto/tests/test_model.py
+++ b/lib/lp/services/crypto/tests/test_model.py
@@ -12,7 +12,6 @@ from lp.testing.layers import ZopelessLayer
 
 
 class FakeEncryptedContainer(NaClEncryptedContainerBase):
-
     def __init__(self, public_key_bytes, private_key_bytes=None):
         self._public_key_bytes = public_key_bytes
         self._private_key_bytes = private_key_bytes
@@ -55,7 +54,8 @@ class TestNaClEncryptedContainerBase(TestCase):
     def test_private_key_valid(self):
         private_key = PrivateKey.generate()
         container = FakeEncryptedContainer(
-            bytes(private_key.public_key), bytes(private_key))
+            bytes(private_key.public_key), bytes(private_key)
+        )
         self.assertEqual(private_key, container.private_key)
         self.assertTrue(container.can_decrypt)
 
@@ -74,6 +74,8 @@ class TestNaClEncryptedContainerBase(TestCase):
     def test_encrypt_decrypt(self):
         private_key = PrivateKey.generate()
         container = FakeEncryptedContainer(
-            bytes(private_key.public_key), bytes(private_key))
+            bytes(private_key.public_key), bytes(private_key)
+        )
         self.assertEqual(
-            b"plaintext", container.decrypt(container.encrypt(b"plaintext")))
+            b"plaintext", container.decrypt(container.encrypt(b"plaintext"))
+        )
diff --git a/lib/lp/services/daemons/readyservice.py b/lib/lp/services/daemons/readyservice.py
index 67af435..534bb22 100644
--- a/lib/lp/services/daemons/readyservice.py
+++ b/lib/lp/services/daemons/readyservice.py
@@ -9,14 +9,13 @@ with the launchpad-buildd deployment.
 """
 
 __all__ = [
-    'ReadyService',
-    ]
+    "ReadyService",
+]
 
 from twisted.application import service
 from twisted.python import log
 
-
-LOG_MAGIC = 'daemon ready!'
+LOG_MAGIC = "daemon ready!"
 
 
 class ReadyService(service.Service):
@@ -24,5 +23,6 @@ class ReadyService(service.Service):
 
     def startService(self):
         from twisted.internet import reactor
-        reactor.addSystemEventTrigger('after', 'startup', log.msg, LOG_MAGIC)
+
+        reactor.addSystemEventTrigger("after", "startup", log.msg, LOG_MAGIC)
         service.Service.startService(self)
diff --git a/lib/lp/services/daemons/tachandler.py b/lib/lp/services/daemons/tachandler.py
index 4ab805e..8ebfdc2 100644
--- a/lib/lp/services/daemons/tachandler.py
+++ b/lib/lp/services/daemons/tachandler.py
@@ -4,30 +4,31 @@
 """Test harness for TAC (Twisted Application Configuration) files."""
 
 __all__ = [
-    'TacTestSetup',
-    'TacException',
-    ]
+    "TacTestSetup",
+    "TacException",
+]
 
 
 import os
 import sys
 
-from txfixtures.tachandler import (
-    TacException,
-    TacTestFixture,
-    )
+from txfixtures.tachandler import TacException, TacTestFixture
 
 import lp
 from lp.services.daemons import readyservice
-from lp.services.osutils import (
-    override_environ,
-    remove_if_exists,
+from lp.services.osutils import override_environ, remove_if_exists
+
+twistd_script = os.path.abspath(
+    os.path.join(
+        os.path.dirname(__file__),
+        os.pardir,
+        os.pardir,
+        os.pardir,
+        os.pardir,
+        "bin",
+        "twistd",
     )
-
-
-twistd_script = os.path.abspath(os.path.join(
-    os.path.dirname(__file__),
-    os.pardir, os.pardir, os.pardir, os.pardir, 'bin', 'twistd'))
+)
 
 
 class TacTestSetup(TacTestFixture):
@@ -44,9 +45,9 @@ class TacTestSetup(TacTestFixture):
         # this does not happen.
         self.removeLog()
         with override_environ(LP_DEBUG_SQL=None, LP_DEBUG_SQL_EXTRA=None):
-            TacTestFixture.setUp(self,
-                python_path=sys.executable,
-                twistd_script=twistd_script)
+            TacTestFixture.setUp(
+                self, python_path=sys.executable, twistd_script=twistd_script
+            )
 
     def _hasDaemonStarted(self):
         """Has the daemon started?
@@ -55,8 +56,8 @@ class TacTestSetup(TacTestFixture):
         the log file.
         """
         if os.path.exists(self.logfile):
-            with open(self.logfile, 'rb') as logfile:
-                return readyservice.LOG_MAGIC.encode('UTF-8') in logfile.read()
+            with open(self.logfile, "rb") as logfile:
+                return readyservice.LOG_MAGIC.encode("UTF-8") in logfile.read()
         else:
             return False
 
@@ -71,7 +72,7 @@ class TacTestSetup(TacTestFixture):
         0 bytes.
         """
         if os.path.exists(self.logfile):
-            log_magic_bytes = readyservice.LOG_MAGIC.encode('UTF-8')
+            log_magic_bytes = readyservice.LOG_MAGIC.encode("UTF-8")
             with open(self.logfile, "r+b") as logfile:
                 position = 0
                 for line in logfile:
@@ -85,8 +86,10 @@ class TacTestSetup(TacTestFixture):
     @property
     def daemon_directory(self):
         return os.path.abspath(
-            os.path.join(os.path.dirname(lp.__file__), os.pardir, os.pardir,
-            'daemons'))
+            os.path.join(
+                os.path.dirname(lp.__file__), os.pardir, os.pardir, "daemons"
+            )
+        )
 
     def setUpRoot(self):
         """Override this.
diff --git a/lib/lp/services/daemons/tests/cannotlisten.tac b/lib/lp/services/daemons/tests/cannotlisten.tac
index 3904cbc..c25c3b9 100644
--- a/lib/lp/services/daemons/tests/cannotlisten.tac
+++ b/lib/lp/services/daemons/tests/cannotlisten.tac
@@ -6,16 +6,12 @@ This TAC is used for the TacTestSetupTestCase.test_couldNotListenTac test case
 in test_tachandler.py.  It fails with a CannotListenError.
 """
 
-from twisted.application import (
-    internet,
-    service,
-    )
+from twisted.application import internet, service
 from twisted.internet import protocol
 
 from lp.services.daemons import readyservice
 
-
-application = service.Application('CannotListen')
+application = service.Application("CannotListen")
 serviceCollection = service.IServiceCollection(application)
 
 # Service that announces when the daemon is ready
diff --git a/lib/lp/services/daemons/tests/okay.tac b/lib/lp/services/daemons/tests/okay.tac
index fe940cc..dec08f0 100644
--- a/lib/lp/services/daemons/tests/okay.tac
+++ b/lib/lp/services/daemons/tests/okay.tac
@@ -12,15 +12,14 @@ from zope.component import getUtility
 from lp.services.daemons import readyservice
 from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
-    IStoreSelector,
     MAIN_STORE,
-    )
+    IStoreSelector,
+)
 from lp.services.scripts import execute_zcml_for_scripts
 
-
 execute_zcml_for_scripts()
 
-application = service.Application('Okay')
+application = service.Application("Okay")
 
 # Service that announces when the daemon is ready
 readyservice.ReadyService().setServiceParent(application)
diff --git a/lib/lp/services/daemons/tests/test_tachandler.py b/lib/lp/services/daemons/tests/test_tachandler.py
index 0eabfb8..8f1cab7 100644
--- a/lib/lp/services/daemons/tests/test_tachandler.py
+++ b/lib/lp/services/daemons/tests/test_tachandler.py
@@ -3,36 +3,21 @@
 
 """Tests for lp.services.daemons.tachandler"""
 
-from os.path import (
-    dirname,
-    exists,
-    join,
-    )
 import subprocess
 import warnings
+from os.path import dirname, exists, join
 
-from fixtures import TempDir
 import testtools
-from testtools.matchers import (
-    Matcher,
-    Mismatch,
-    Not,
-    )
+from fixtures import TempDir
+from testtools.matchers import Matcher, Mismatch, Not
 
 from lp.services.daemons.readyservice import LOG_MAGIC
-from lp.services.daemons.tachandler import (
-    TacException,
-    TacTestSetup,
-    )
-from lp.services.osutils import (
-    get_pid_from_file,
-    override_environ,
-    )
+from lp.services.daemons.tachandler import TacException, TacTestSetup
+from lp.services.osutils import get_pid_from_file, override_environ
 from lp.testing.layers import DatabaseLayer
 
 
 class SimpleTac(TacTestSetup):
-
     def __init__(self, name, tempdir):
         super().__init__()
         self.name, self.tempdir = name, tempdir
@@ -43,15 +28,15 @@ class SimpleTac(TacTestSetup):
 
     @property
     def tacfile(self):
-        return join(self.root, '%s.tac' % self.name)
+        return join(self.root, "%s.tac" % self.name)
 
     @property
     def pidfile(self):
-        return join(self.tempdir, '%s.pid' % self.name)
+        return join(self.tempdir, "%s.pid" % self.name)
 
     @property
     def logfile(self):
-        return join(self.tempdir, '%s.log' % self.name)
+        return join(self.tempdir, "%s.log" % self.name)
 
     def setUpRoot(self):
         pass
@@ -154,7 +139,8 @@ class TacTestSetupTestCase(testtools.TestCase):
 
         # One deprecation warning is emitted.
         self.assertEqual(
-            [UserWarning], [item.category for item in warnings_log])
+            [UserWarning], [item.category for item in warnings_log]
+        )
 
     def test_truncateLog(self):
         """
@@ -191,4 +177,5 @@ class TacTestSetupTestCase(testtools.TestCase):
         with open(fixture.logfile, "rb") as logfile:
             self.assertEqual(
                 ("One\nTwo\nThree, %s\n" % LOG_MAGIC).encode("UTF-8"),
-                logfile.read())
+                logfile.read(),
+            )
diff --git a/lib/lp/services/database/__init__.py b/lib/lp/services/database/__init__.py
index af113c6..015a6f4 100644
--- a/lib/lp/services/database/__init__.py
+++ b/lib/lp/services/database/__init__.py
@@ -4,28 +4,24 @@
 """The lp.services.database package."""
 
 __all__ = [
-    'activity_cols',
-    'read_transaction',
-    'write_transaction',
-    ]
+    "activity_cols",
+    "read_transaction",
+    "write_transaction",
+]
 
-from psycopg2.extensions import TransactionRollbackError
-from storm.exceptions import (
-    DisconnectionError,
-    IntegrityError,
-    )
 import transaction
+from psycopg2.extensions import TransactionRollbackError
+from storm.exceptions import DisconnectionError, IntegrityError
 from twisted.python.util import mergeFunctionMetadata
 
 from lp.services.database.sqlbase import reset_store
 
-
 RETRY_ATTEMPTS = 3
 
 
 def activity_cols(cur):
     """Adapt pg_stat_activity column names for the current DB server."""
-    return {'query': 'query', 'pid': 'pid'}
+    return {"query": "query", "pid": "pid"}
 
 
 def retry_transaction(func):
@@ -34,16 +30,21 @@ def retry_transaction(func):
     The function being decorated should not have side effects outside
     of the transaction.
     """
+
     def retry_transaction_decorator(*args, **kwargs):
         attempt = 0
         while True:
             attempt += 1
             try:
                 return func(*args, **kwargs)
-            except (DisconnectionError, IntegrityError,
-                    TransactionRollbackError):
+            except (
+                DisconnectionError,
+                IntegrityError,
+                TransactionRollbackError,
+            ):
                 if attempt >= RETRY_ATTEMPTS:
                     raise  # tried too many times
+
     return mergeFunctionMetadata(func, retry_transaction_decorator)
 
 
@@ -53,6 +54,7 @@ def read_transaction(func):
     The transaction will be aborted on successful completion of the
     function.  The transaction will be retried if appropriate.
     """
+
     @reset_store
     def read_transaction_decorator(*args, **kwargs):
         transaction.begin()
@@ -60,8 +62,10 @@ def read_transaction(func):
             return func(*args, **kwargs)
         finally:
             transaction.abort()
-    return retry_transaction(mergeFunctionMetadata(
-        func, read_transaction_decorator))
+
+    return retry_transaction(
+        mergeFunctionMetadata(func, read_transaction_decorator)
+    )
 
 
 def write_transaction(func):
@@ -71,6 +75,7 @@ def write_transaction(func):
     function, and aborted on failure.  The transaction will be retried
     if appropriate.
     """
+
     @reset_store
     def write_transaction_decorator(*args, **kwargs):
         transaction.begin()
@@ -81,5 +86,7 @@ def write_transaction(func):
             raise
         transaction.commit()
         return ret
-    return retry_transaction(mergeFunctionMetadata(
-        func, write_transaction_decorator))
+
+    return retry_transaction(
+        mergeFunctionMetadata(func, write_transaction_decorator)
+    )
diff --git a/lib/lp/services/database/bulk.py b/lib/lp/services/database/bulk.py
index 9f94220..bec5499 100644
--- a/lib/lp/services/database/bulk.py
+++ b/lib/lp/services/database/bulk.py
@@ -4,37 +4,23 @@
 """Optimized bulk operations against the database."""
 
 __all__ = [
-    'create',
-    'dbify_value',
-    'load',
-    'load_referencing',
-    'load_related',
-    'reload',
-    ]
+    "create",
+    "dbify_value",
+    "load",
+    "load_referencing",
+    "load_related",
+    "reload",
+]
 
 
 from collections import defaultdict
 from functools import partial
-from itertools import (
-    chain,
-    groupby,
-    )
-from operator import (
-    attrgetter,
-    itemgetter,
-    )
+from itertools import chain, groupby
+from operator import attrgetter, itemgetter
 
 from storm.databases.postgres import Returning
-from storm.expr import (
-    And,
-    Insert,
-    Or,
-    SQL,
-    )
-from storm.info import (
-    get_cls_info,
-    get_obj_info,
-    )
+from storm.expr import SQL, And, Insert, Or
+from storm.info import get_cls_info, get_obj_info
 from storm.references import Reference
 from storm.store import Store
 from zope.security.proxy import removeSecurityProxy
@@ -69,8 +55,9 @@ def gen_reload_queries(objects):
         primary_key = get_cls_info(object_type).primary_key
         if len(primary_key) != 1:
             raise AssertionError(
-                "Compound primary keys are not supported: %s." %
-                object_type.__name__)
+                "Compound primary keys are not supported: %s."
+                % object_type.__name__
+            )
         primary_key_column = primary_key[0]
         primary_key_column_getter = primary_key_column.__get__
         for store, objects in collate(objects, Store.of):
@@ -96,8 +83,9 @@ def _primary_key(object_type, allow_compound=False):
     else:
         if not allow_compound:
             raise AssertionError(
-                "Compound primary keys are not supported: %s." %
-                object_type.__name__)
+                "Compound primary keys are not supported: %s."
+                % object_type.__name__
+            )
         return primary_key
 
 
@@ -117,12 +105,17 @@ def _make_compound_load_clause(primary_key, values_list):
         usually more efficient to sort the whole sequence in one go).
     """
     if len(primary_key) > 1:
-        return Or(*(
-            And(
-                primary_key[0] == leading_value,
-                _make_compound_load_clause(
-                    primary_key[1:], [values[1:] for values in group]))
-            for leading_value, group in groupby(values_list, itemgetter(0))))
+        return Or(
+            *(
+                And(
+                    primary_key[0] == leading_value,
+                    _make_compound_load_clause(
+                        primary_key[1:], [values[1:] for values in group]
+                    ),
+                )
+                for leading_value, group in groupby(values_list, itemgetter(0))
+            )
+        )
     else:
         return primary_key[0].is_in([values[0] for values in values_list])
 
@@ -136,7 +129,8 @@ def load(object_type, primary_keys, store=None):
         return []
     if isinstance(primary_key, tuple):
         condition = _make_compound_load_clause(
-            primary_key, sorted(primary_keys))
+            primary_key, sorted(primary_keys)
+        )
     else:
         condition = primary_key.is_in(primary_keys)
     if store is None:
@@ -144,8 +138,9 @@ def load(object_type, primary_keys, store=None):
     return list(store.find(object_type, condition))
 
 
-def load_referencing(object_type, owning_objects, reference_keys,
-                     extra_conditions=[]):
+def load_referencing(
+    object_type, owning_objects, reference_keys, extra_conditions=[]
+):
     """Load objects of object_type that reference owning_objects.
 
     Note that complex types like Person are best loaded through dedicated
@@ -210,7 +205,8 @@ def dbify_value(col, val):
             return (None,) * len(col._relation._get_local_columns(col._cls))
         else:
             return col._relation.get_remote_variables(
-                get_obj_info(val).get_obj())
+                get_obj_info(val).get_obj()
+            )
     else:
         return (col.variable_factory(value=val),)
 
@@ -226,8 +222,7 @@ def dbify_column(col):
         return (col,)
 
 
-def create(columns, values, get_objects=False,
-           get_primary_keys=False):
+def create(columns, values, get_objects=False, get_primary_keys=False):
     """Create a large number of objects efficiently.
 
     :param columns: The Storm columns to insert values into. Must be from a
@@ -243,10 +238,12 @@ def create(columns, values, get_objects=False,
     if len(clses) != 1:
         raise ValueError(
             "The Storm columns to insert values into must be from a single "
-            "class.")
+            "class."
+        )
     if get_objects and get_primary_keys:
         raise ValueError(
-            "get_objects and get_primary_keys are mutually exclusive.")
+            "get_objects and get_primary_keys are mutually exclusive."
+        )
 
     if len(values) == 0:
         return [] if (get_objects or get_primary_keys) else None
@@ -258,14 +255,20 @@ def create(columns, values, get_objects=False,
     # get passed through the variable factory, while References get
     # squashed into primary key variables.
     db_values = [
-        list(chain.from_iterable(
-            dbify_value(col, val) for col, val in zip(columns, value)))
-        for value in values]
+        list(
+            chain.from_iterable(
+                dbify_value(col, val) for col, val in zip(columns, value)
+            )
+        )
+        for value in values
+    ]
 
     if get_objects or get_primary_keys:
         result = IStore(cls).execute(
-            Returning(Insert(
-                db_cols, values=db_values, primary_columns=primary_key)))
+            Returning(
+                Insert(db_cols, values=db_values, primary_columns=primary_key)
+            )
+        )
         keys = map(itemgetter(0), result) if len(primary_key) == 1 else result
         if get_objects:
             return load(cls, keys)
diff --git a/lib/lp/services/database/collection.py b/lib/lp/services/database/collection.py
index f580f0d..e6d97c5 100644
--- a/lib/lp/services/database/collection.py
+++ b/lib/lp/services/database/collection.py
@@ -4,13 +4,10 @@
 """A generic collection of database objects."""
 
 __all__ = [
-    'Collection',
-    ]
+    "Collection",
+]
 
-from storm.expr import (
-    Join,
-    LeftJoin,
-    )
+from storm.expr import Join, LeftJoin
 
 from lp.services.database.interfaces import IStore
 
@@ -59,21 +56,24 @@ class Collection:
         self.base = base
 
         if base is None:
-            base_conditions = (True, )
+            base_conditions = (True,)
             base_tables = []
         else:
             self.store = base.store
             base_conditions = base.conditions
             base_tables = list(base.tables)
 
-        self.store = kwargs.get('store')
+        self.store = kwargs.get("store")
         if self.store is None:
             from lp.services.librarian.model import LibraryFileAlias
+
             self.store = IStore(LibraryFileAlias)
 
         self.tables = (
-            starting_tables + base_tables +
-            self._parseTablesArg(kwargs.get('tables', [])))
+            starting_tables
+            + base_tables
+            + self._parseTablesArg(kwargs.get("tables", []))
+        )
 
         self.conditions = base_conditions + conditions
 
@@ -131,8 +131,9 @@ class Collection:
         else:
             # Select the starting table by default.
             assert self.starting_table is not None, (
-                "Collection %s does not define a starting table." %
-                    self.__class__.__name__)
+                "Collection %s does not define a starting table."
+                % self.__class__.__name__
+            )
             values = self.starting_table
 
         return source.find(values, *self.conditions)
diff --git a/lib/lp/services/database/constants.py b/lib/lp/services/database/constants.py
index ae710c9..f30599a 100644
--- a/lib/lp/services/database/constants.py
+++ b/lib/lp/services/database/constants.py
@@ -5,22 +5,22 @@
 
 from storm.expr import SQL
 
-
 UTC_NOW = SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'")
 
 DEFAULT = SQL("DEFAULT")
 
 # We can't use infinity, as psycopg doesn't know how to handle it. And
 # neither does Python I guess.
-#NEVER_EXPIRES = SQL("'infinity'::TIMESTAMP")
+# NEVER_EXPIRES = SQL("'infinity'::TIMESTAMP")
 
 NEVER_EXPIRES = SQL("'3000-01-01'::TIMESTAMP WITHOUT TIME ZONE")
 
 THIRTY_DAYS_AGO = SQL(
-    "CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '30 days'")
+    "CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '30 days'"
+)
 
 SEVEN_DAYS_AGO = SQL(
-    "CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '7 days'")
+    "CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '7 days'"
+)
 
-ONE_DAY_AGO = SQL(
-    "CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '1 day'")
+ONE_DAY_AGO = SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - interval '1 day'")
diff --git a/lib/lp/services/database/datetimecol.py b/lib/lp/services/database/datetimecol.py
index 8a742fa..69a5725 100644
--- a/lib/lp/services/database/datetimecol.py
+++ b/lib/lp/services/database/datetimecol.py
@@ -1,13 +1,13 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-'''UtcDateTimeCol for SQLObject'''
+"""UtcDateTimeCol for SQLObject"""
 
-__all__ = ['UtcDateTimeCol']
+__all__ = ["UtcDateTimeCol"]
 
 import pytz
 import storm.sqlobject
 
 
 class UtcDateTimeCol(storm.sqlobject.UtcDateTimeCol):
-    _kwargs = {'tzinfo': pytz.timezone('UTC')}
+    _kwargs = {"tzinfo": pytz.timezone("UTC")}
diff --git a/lib/lp/services/database/debug.py b/lib/lp/services/database/debug.py
index 5fa2da1..acb41b2 100644
--- a/lib/lp/services/database/debug.py
+++ b/lib/lp/services/database/debug.py
@@ -1,9 +1,9 @@
 # Copyright 2009-2016 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-'''
+"""
 Replace the psycopg connect method with one that returns a wrapped connection.
-'''
+"""
 
 import logging
 import textwrap
@@ -39,12 +39,12 @@ def LN(*args, **kwargs):
     out = []
     for obj in args:
         out.append(str(obj))
-    text = ' '.join(out)
-    if 'name' in kwargs:
-        text = 'L%s %s: %s' % (b, kwargs['name'], text)
+    text = " ".join(out)
+    if "name" in kwargs:
+        text = "L%s %s: %s" % (b, kwargs["name"], text)
     else:
-        text = 'L%s %s: %s' % (b, c, text)
-    if 'wrap' in kwargs and kwargs['wrap']:
+        text = "L%s %s: %s" % (b, c, text)
+    if "wrap" in kwargs and kwargs["wrap"]:
         text = textwrap.fill(text)
     return text
 
@@ -54,19 +54,20 @@ class ConnectionWrapper:
     _real_con = None
 
     def __init__(self, real_con):
-        self.__dict__['_log'] = \
-                logging.getLogger('lp.services.database.debug').debug
-        self.__dict__['_real_con'] = real_con
+        self.__dict__["_log"] = logging.getLogger(
+            "lp.services.database.debug"
+        ).debug
+        self.__dict__["_real_con"] = real_con
 
     def __getattr__(self, key):
-        if key in ('rollback', 'close', 'commit'):
-            print('%s %r.__getattr__(%r)' % (LN(), self, key))
-            self.__dict__['_log']('__getattr__(%r)', key)
+        if key in ("rollback", "close", "commit"):
+            print("%s %r.__getattr__(%r)" % (LN(), self, key))
+            self.__dict__["_log"]("__getattr__(%r)", key)
         return getattr(self._real_con, key)
 
     def __setattr__(self, key, val):
-        print('%s %r.__setattr__(%r, %r)' % (LN(), self, key, val))
-        self.__dict__['_log']('__setattr__(%r, %r)', key, val)
+        print("%s %r.__setattr__(%r, %r)" % (LN(), self, key, val))
+        self.__dict__["_log"]("__setattr__(%r, %r)", key, val)
         return setattr(self._real_con, key, val)
 
 
@@ -76,22 +77,22 @@ _org_connect = None
 def debug_connect(*args, **kw):
     global _org_connect
     con = ConnectionWrapper(_org_connect(*args, **kw))
-    logging.getLogger('lp.services.database.debug').debug(
-            'connect(*%r, **%r) == %r', args, kw, con
-            )
-    print('%s connect(*%r, **%r) == %r' % (LN(), args, kw, con))
+    logging.getLogger("lp.services.database.debug").debug(
+        "connect(*%r, **%r) == %r", args, kw, con
+    )
+    print("%s connect(*%r, **%r) == %r" % (LN(), args, kw, con))
     return con
 
 
 def install():
     global _org_connect
-    assert _org_connect is None, 'Already installed'
+    assert _org_connect is None, "Already installed"
     _org_connect = psycopg.connect
     psycopg.connect = debug_connect
 
 
 def uninstall():
     global _org_connect
-    assert _org_connect is not None, 'Not installed'
+    assert _org_connect is not None, "Not installed"
     psycopg.connect = _org_connect
     _org_connect = None
diff --git a/lib/lp/services/database/decoratedresultset.py b/lib/lp/services/database/decoratedresultset.py
index c524b69..89ad74d 100644
--- a/lib/lp/services/database/decoratedresultset.py
+++ b/lib/lp/services/database/decoratedresultset.py
@@ -2,20 +2,18 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DecoratedResultSet',
-    ]
+    "DecoratedResultSet",
+]
 
 from lazr.delegates import delegate_to
 from storm import Undef
 from storm.zope.interfaces import IResultSet
-from zope.security.proxy import (
-    isinstance as zope_isinstance,
-    ProxyFactory,
-    removeSecurityProxy,
-    )
+from zope.security.proxy import ProxyFactory
+from zope.security.proxy import isinstance as zope_isinstance
+from zope.security.proxy import removeSecurityProxy
 
 
-@delegate_to(IResultSet, context='result_set')
+@delegate_to(IResultSet, context="result_set")
 class DecoratedResultSet:
     """A decorated Storm ResultSet for 'Magic' (presenter) classes.
 
@@ -35,8 +33,15 @@ class DecoratedResultSet:
     DistroArchSeries), hence a generalised solution.
     """
 
-    def __init__(self, result_set, result_decorator=None, pre_iter_hook=None,
-                 bulk_decorator=None, slice_info=False, return_both=False):
+    def __init__(
+        self,
+        result_set,
+        result_decorator=None,
+        pre_iter_hook=None,
+        bulk_decorator=None,
+        slice_info=False,
+        return_both=False,
+    ):
         """
         Wrap `result_set` in a decorator.
 
@@ -59,11 +64,13 @@ class DecoratedResultSet:
         :param return_both: If True return both the plain and decorated
             values as a tuple.
         """
-        if (bulk_decorator is not None and
-            (result_decorator is not None or pre_iter_hook is not None)):
+        if bulk_decorator is not None and (
+            result_decorator is not None or pre_iter_hook is not None
+        ):
             raise TypeError(
                 "bulk_decorator cannot be used with result_decorator or "
-                "pre_iter_hook")
+                "pre_iter_hook"
+            )
         self.result_set = result_set
         self.result_decorator = result_decorator
         self.pre_iter_hook = pre_iter_hook
@@ -80,11 +87,14 @@ class DecoratedResultSet:
         """
         if not results:
             return [], []
-        elif (zope_isinstance(self.result_set, DecoratedResultSet)
-              and self.return_both):
+        elif (
+            zope_isinstance(self.result_set, DecoratedResultSet)
+            and self.return_both
+        ):
             assert (
                 removeSecurityProxy(self.result_set).return_both
-                    == self.return_both)
+                == self.return_both
+            )
             return tuple(zip(*results))
         else:
             return results, results
@@ -106,7 +116,8 @@ class DecoratedResultSet:
             elif self.bulk_decorator is not None:
                 if self.slice_info:
                     [decorated] = self.bulk_decorator(
-                        [result], slice(row_index, row_index + 1))
+                        [result], slice(row_index, row_index + 1)
+                    )
                 else:
                     [decorated] = self.bulk_decorator([result])
             else:
@@ -123,15 +134,20 @@ class DecoratedResultSet:
         """
         new_result_set = self.result_set.copy(*args, **kwargs)
         return DecoratedResultSet(
-            new_result_set, self.result_decorator, self.pre_iter_hook,
-            self.bulk_decorator, self.slice_info, self.return_both)
+            new_result_set,
+            self.result_decorator,
+            self.pre_iter_hook,
+            self.bulk_decorator,
+            self.slice_info,
+            self.return_both,
+        )
 
     def config(self, *args, **kwargs):
         """See `IResultSet`.
 
         :return: The decorated result set.after updating the config.
         """
-        return_both = kwargs.pop('return_both', None)
+        return_both = kwargs.pop("return_both", None)
         if return_both is not None:
             self.return_both = return_both
             if zope_isinstance(self.result_set, DecoratedResultSet):
@@ -187,8 +203,12 @@ class DecoratedResultSet:
         naked_value = removeSecurityProxy(value)
         if IResultSet.providedBy(naked_value):
             return DecoratedResultSet(
-                value, self.result_decorator, self.pre_iter_hook,
-                self.bulk_decorator, self.slice_info)
+                value,
+                self.result_decorator,
+                self.pre_iter_hook,
+                self.bulk_decorator,
+                self.slice_info,
+            )
         else:
             return self.decorate_single(value)
 
@@ -239,8 +259,13 @@ class DecoratedResultSet:
         """
         new_result_set = self.result_set.order_by(*args, **kwargs)
         return DecoratedResultSet(
-            new_result_set, self.result_decorator, self.pre_iter_hook,
-            self.bulk_decorator, self.slice_info, self.return_both)
+            new_result_set,
+            self.result_decorator,
+            self.pre_iter_hook,
+            self.bulk_decorator,
+            self.slice_info,
+            self.return_both,
+        )
 
     def get_plain_result_set(self):
         """Return the plain Storm result set."""
@@ -261,5 +286,10 @@ class DecoratedResultSet:
         else:
             new_result_set = self.result_set.find(*args, **kwargs)
         return DecoratedResultSet(
-            new_result_set, self.result_decorator, self.pre_iter_hook,
-            self.bulk_decorator, self.slice_info, self.return_both)
+            new_result_set,
+            self.result_decorator,
+            self.pre_iter_hook,
+            self.bulk_decorator,
+            self.slice_info,
+            self.return_both,
+        )
diff --git a/lib/lp/services/database/enumcol.py b/lib/lp/services/database/enumcol.py
index 3da2741..768a08a 100644
--- a/lib/lp/services/database/enumcol.py
+++ b/lib/lp/services/database/enumcol.py
@@ -1,24 +1,21 @@
 # Copyright 2009-2021 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from lazr.enum import (
-    DBEnumeratedType,
-    DBItem,
-    )
+from lazr.enum import DBEnumeratedType, DBItem
 from storm.properties import SimpleProperty
 from storm.variables import Variable
 from zope.security.proxy import isinstance as zope_isinstance
 
-
 __all__ = [
-    'DBEnum',
-    ]
+    "DBEnum",
+]
 
 
 def check_enum_type(enum):
     if not issubclass(enum, DBEnumeratedType):
         raise TypeError(
-            '%r must be a DBEnumeratedType: %r' % (enum, type(enum)))
+            "%r must be a DBEnumeratedType: %r" % (enum, type(enum))
+        )
 
 
 def check_type(enum):
@@ -31,6 +28,7 @@ def check_type(enum):
 
 class DBEnumVariable(Variable):
     """A Storm variable class representing a DBEnumeratedType."""
+
     __slots__ = ("_enum",)
 
     def __init__(self, *args, **kwargs):
@@ -44,14 +42,17 @@ class DBEnumVariable(Variable):
                     return enum.items[value]
                 except KeyError:
                     pass
-            raise KeyError('%r not in present in any of %r' % (
-                value, self._enum))
+            raise KeyError(
+                "%r not in present in any of %r" % (value, self._enum)
+            )
         else:
             if not zope_isinstance(value, DBItem):
                 raise TypeError("Not a DBItem: %r" % (value,))
             if value.enum not in self._enum:
-                raise TypeError("DBItem from unknown enum, %r not in %r" % (
-                        value.enum.name, self._enum))
+                raise TypeError(
+                    "DBItem from unknown enum, %r not in %r"
+                    % (value.enum.name, self._enum)
+                )
             return value
 
     def parse_get(self, value, to_db):
diff --git a/lib/lp/services/database/interfaces.py b/lib/lp/services/database/interfaces.py
index aae6350..3f9a9af 100644
--- a/lib/lp/services/database/interfaces.py
+++ b/lib/lp/services/database/interfaces.py
@@ -2,21 +2,21 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'DEFAULT_FLAVOR',
-    'DisallowedStore',
-    'IDatabasePolicy',
-    'IDBObject',
-    'IMasterObject',
-    'IMasterStore',
-    'IRequestExpired',
-    'ISQLBase',
-    'IStandbyStore',
-    'IStore',
-    'IStoreSelector',
-    'MAIN_STORE',
-    'PRIMARY_FLAVOR',
-    'STANDBY_FLAVOR',
-    ]
+    "DEFAULT_FLAVOR",
+    "DisallowedStore",
+    "IDatabasePolicy",
+    "IDBObject",
+    "IMasterObject",
+    "IMasterStore",
+    "IRequestExpired",
+    "ISQLBase",
+    "IStandbyStore",
+    "IStore",
+    "IStoreSelector",
+    "MAIN_STORE",
+    "PRIMARY_FLAVOR",
+    "STANDBY_FLAVOR",
+]
 
 
 from zope.interface import Interface
@@ -35,6 +35,7 @@ class IRequestExpired(IRuntimeError):
 # think it is ever used though ...
 class ISQLBase(Interface):
     """An extension of ISQLObject that provides an ID."""
+
     id = Int(title="The integer ID for the instance")
 
 
@@ -42,12 +43,12 @@ class ISQLBase(Interface):
 # Database policies
 #
 
-MAIN_STORE = 'main'  # The main database.
+MAIN_STORE = "main"  # The main database.
 ALL_STORES = frozenset([MAIN_STORE])
 
-DEFAULT_FLAVOR = 'default'  # Default flavor for current state.
-PRIMARY_FLAVOR = 'primary'  # The primary database.
-STANDBY_FLAVOR = 'standby'  # A standby database.
+DEFAULT_FLAVOR = "default"  # Default flavor for current state.
+PRIMARY_FLAVOR = "primary"  # The primary database.
+STANDBY_FLAVOR = "standby"  # A standby database.
 
 
 class IDatabasePolicy(Interface):
@@ -56,6 +57,7 @@ class IDatabasePolicy(Interface):
     The publisher adapts the request to `IDatabasePolicy` to
     instantiate the policy for the current request.
     """
+
     def __enter__():
         """Standard Python context manager interface.
 
@@ -106,6 +108,7 @@ class IStoreSelector(Interface):
     databases as we are prepared to pay for, so they will perform better
     because they are less loaded.
     """
+
     def push(dbpolicy):
         """Install an `IDatabasePolicy` as the default for this thread."""
 
@@ -145,6 +148,7 @@ class IStoreSelector(Interface):
 
 class IStore(Interface):
     """A storm.store.Store."""
+
     def get(cls, key):
         """See storm.store.Store."""
 
diff --git a/lib/lp/services/database/isolation.py b/lib/lp/services/database/isolation.py
index dfbde85..4ff9530 100644
--- a/lib/lp/services/database/isolation.py
+++ b/lib/lp/services/database/isolation.py
@@ -4,11 +4,11 @@
 """Ensure that some operations happen outside of transactions."""
 
 __all__ = [
-    'check_no_transaction',
-    'ensure_no_transaction',
-    'is_transaction_in_progress',
-    'TransactionInProgress',
-    ]
+    "check_no_transaction",
+    "ensure_no_transaction",
+    "is_transaction_in_progress",
+    "TransactionInProgress",
+]
 
 from functools import wraps
 
@@ -16,13 +16,12 @@ import psycopg2.extensions
 from storm.zope.interfaces import IZStorm
 from zope.component import getUtility
 
-
 TRANSACTION_IN_PROGRESS_STATUSES = {
-    psycopg2.extensions.TRANSACTION_STATUS_ACTIVE: 'is active',
-    psycopg2.extensions.TRANSACTION_STATUS_INTRANS: 'has started',
-    psycopg2.extensions.TRANSACTION_STATUS_INERROR: 'has errored',
-    psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN: 'is in an unknown state',
-    }
+    psycopg2.extensions.TRANSACTION_STATUS_ACTIVE: "is active",
+    psycopg2.extensions.TRANSACTION_STATUS_INTRANS: "has started",
+    psycopg2.extensions.TRANSACTION_STATUS_INERROR: "has errored",
+    psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN: "is in an unknown state",
+}
 
 
 class TransactionInProgress(Exception):
@@ -44,7 +43,8 @@ def is_transaction_in_progress():
     """Return True if a transaction is in progress for any store."""
     return any(
         status in TRANSACTION_IN_PROGRESS_STATUSES
-        for name, status in gen_store_statuses())
+        for name, status in gen_store_statuses()
+    )
 
 
 def check_no_transaction():
@@ -53,13 +53,16 @@ def check_no_transaction():
         if status in TRANSACTION_IN_PROGRESS_STATUSES:
             desc = TRANSACTION_IN_PROGRESS_STATUSES[status]
             raise TransactionInProgress(
-                "Transaction %s in %s store." % (desc, name))
+                "Transaction %s in %s store." % (desc, name)
+            )
 
 
 def ensure_no_transaction(func):
     """Decorator that calls check_no_transaction before function."""
+
     @wraps(func)
     def wrapper(*args, **kwargs):
         check_no_transaction()
         return func(*args, **kwargs)
+
     return wrapper
diff --git a/lib/lp/services/database/locking.py b/lib/lp/services/database/locking.py
index b1ed93d..8e313a5 100644
--- a/lib/lp/services/database/locking.py
+++ b/lib/lp/services/database/locking.py
@@ -2,23 +2,17 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'AdvisoryLockHeld',
-    'LockType',
-    'try_advisory_lock',
+    "AdvisoryLockHeld",
+    "LockType",
+    "try_advisory_lock",
 ]
 
 from contextlib import contextmanager
 
-from lazr.enum import (
-    DBEnumeratedType,
-    DBItem,
-    )
+from lazr.enum import DBEnumeratedType, DBItem
 from storm.locals import Select
 
-from lp.services.database.stormexpr import (
-    AdvisoryUnlock,
-    TryAdvisoryLock,
-    )
+from lp.services.database.stormexpr import AdvisoryUnlock, TryAdvisoryLock
 
 
 class AdvisoryLockHeld(Exception):
@@ -27,25 +21,36 @@ class AdvisoryLockHeld(Exception):
 
 class LockType(DBEnumeratedType):
 
-    BRANCH_SCAN = DBItem(0, """Branch scan.
+    BRANCH_SCAN = DBItem(
+        0,
+        """Branch scan.
 
         Branch scan.
-        """)
+        """,
+    )
 
-    GIT_REF_SCAN = DBItem(1, """Git repository reference scan.
+    GIT_REF_SCAN = DBItem(
+        1,
+        """Git repository reference scan.
 
         Git repository reference scan.
-        """)
+        """,
+    )
 
-    PACKAGE_COPY = DBItem(2, """Package copy.
+    PACKAGE_COPY = DBItem(
+        2,
+        """Package copy.
 
         Package copy.
-        """)
+        """,
+    )
 
-    REGISTRY_UPLOAD = DBItem(3, """OCI Registry upload.
+    REGISTRY_UPLOAD = DBItem(
+        3,
+        """OCI Registry upload.
 
         OCI Registry upload.
-        """
+        """,
     )
 
 
diff --git a/lib/lp/services/database/multitablecopy.py b/lib/lp/services/database/multitablecopy.py
index a79e8ca..8d512f6 100644
--- a/lib/lp/services/database/multitablecopy.py
+++ b/lib/lp/services/database/multitablecopy.py
@@ -1,7 +1,7 @@
 # Copyright 2009 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-__all__ = ['MultiTableCopy']
+__all__ = ["MultiTableCopy"]
 
 import logging
 import re
@@ -10,15 +10,8 @@ import time
 from zope.interface import implementer
 
 from lp.services.database import postgresql
-from lp.services.database.sqlbase import (
-    cursor,
-    quote,
-    quoteIdentifier,
-    )
-from lp.services.looptuner import (
-    DBLoopTuner,
-    ITunableLoop,
-    )
+from lp.services.database.sqlbase import cursor, quote, quoteIdentifier
+from lp.services.looptuner import DBLoopTuner, ITunableLoop
 
 
 @implementer(ITunableLoop)
@@ -28,8 +21,14 @@ class PouringLoop:
     Used by MultiTableCopy internally to tell DBLoopTuner what to do.
     """
 
-    def __init__(self, from_table, to_table, transaction_manager, logger,
-        batch_pouring_callback=None):
+    def __init__(
+        self,
+        from_table,
+        to_table,
+        transaction_manager,
+        logger,
+        batch_pouring_callback=None,
+    ):
 
         self.from_table = str(from_table)
         self.to_table = str(to_table)
@@ -46,8 +45,10 @@ class PouringLoop:
             self.lowest_id = 1
             self.highest_id = 0
 
-        self.logger.debug("Up to %d rows in holding table"
-                         % (self.highest_id + 1 - self.lowest_id))
+        self.logger.debug(
+            "Up to %d rows in holding table"
+            % (self.highest_id + 1 - self.lowest_id)
+        )
 
     def isDone(self):
         """See `ITunableLoop`."""
@@ -60,14 +61,17 @@ class PouringLoop:
         batch_size = int(batch_size)
 
         # Figure out what id lies exactly batch_size rows ahead.
-        self.cur.execute("""
+        self.cur.execute(
+            """
             SELECT id
             FROM %s
             WHERE id >= %s
             ORDER BY id
             OFFSET %s
             LIMIT 1
-            """ % (self.from_table, quote(self.lowest_id), quote(batch_size)))
+            """
+            % (self.from_table, quote(self.lowest_id), quote(batch_size))
+        )
         end_id = self.cur.fetchone()
 
         if end_id is not None:
@@ -78,16 +82,22 @@ class PouringLoop:
         next += 1
 
         self.prepareBatch(
-            self.from_table, self.to_table, batch_size, self.lowest_id, next)
+            self.from_table, self.to_table, batch_size, self.lowest_id, next
+        )
 
-        self.logger.debug("pouring %s: %d rows (%d-%d)" % (
-            self.from_table, batch_size, self.lowest_id, next))
+        self.logger.debug(
+            "pouring %s: %d rows (%d-%d)"
+            % (self.from_table, batch_size, self.lowest_id, next)
+        )
 
-        self.cur.execute("INSERT INTO %s (SELECT * FROM %s WHERE id < %d)"
-                         % (self.to_table, self.from_table, next))
+        self.cur.execute(
+            "INSERT INTO %s (SELECT * FROM %s WHERE id < %d)"
+            % (self.to_table, self.from_table, next)
+        )
 
-        self.cur.execute("DELETE FROM %s WHERE id < %d"
-                         % (self.from_table, next))
+        self.cur.execute(
+            "DELETE FROM %s WHERE id < %d" % (self.from_table, next)
+        )
 
         self.lowest_id = next
         self._commit()
@@ -108,12 +118,12 @@ class PouringLoop:
         # just renews our setting after the connection is reset.
         postgresql.allow_sequential_scans(self.cur, False)
 
-    def prepareBatch(
-        self, from_table, to_table, batch_size, begin_id, end_id):
+    def prepareBatch(self, from_table, to_table, batch_size, begin_id, end_id):
         """If batch_pouring_callback is defined, call it."""
         if self.batch_pouring_callback is not None:
             self.batch_pouring_callback(
-                from_table, to_table, batch_size, begin_id, end_id)
+                from_table, to_table, batch_size, begin_id, end_id
+            )
 
 
 class MultiTableCopy:
@@ -204,8 +214,15 @@ class MultiTableCopy:
     while the multi-table copy is running, for instance.
     """
 
-    def __init__(self, name, tables, seconds_per_batch=2.0,
-            minimum_batch_size=500, restartable=True, logger=None):
+    def __init__(
+        self,
+        name,
+        tables,
+        seconds_per_batch=2.0,
+        minimum_batch_size=500,
+        restartable=True,
+        logger=None,
+    ):
         """Define a MultiTableCopy, including an in-order list of tables.
 
         :param name: a unique identifier for this MultiTableCopy operation,
@@ -251,24 +268,29 @@ class MultiTableCopy:
 
     def dropHoldingTables(self):
         """Drop any holding tables that may exist for this MultiTableCopy."""
-        holding_tables = [self.getHoldingTableName(table)
-                          for table in self.tables]
+        holding_tables = [
+            self.getHoldingTableName(table) for table in self.tables
+        ]
         postgresql.drop_tables(cursor(), holding_tables)
 
-    def getRawHoldingTableName(self, tablename, suffix=''):
+    def getRawHoldingTableName(self, tablename, suffix=""):
         """Name for a holding table, but without quotes.  Use with care."""
         if suffix:
-            suffix = '_%s' % suffix
+            suffix = "_%s" % suffix
 
-        assert re.search(r'[^a-z_]', tablename + suffix) is None, (
-            'Unsupported characters in table name per Bug #179821')
+        assert (
+            re.search(r"[^a-z_]", tablename + suffix) is None
+        ), "Unsupported characters in table name per Bug #179821"
 
         raw_name = "temp_%s_holding_%s%s" % (
-            str(tablename), self.name, str(suffix))
+            str(tablename),
+            self.name,
+            str(suffix),
+        )
 
         return raw_name
 
-    def getHoldingTableName(self, tablename, suffix=''):
+    def getHoldingTableName(self, tablename, suffix=""):
         """Name for a holding table to hold data being copied in tablename.
 
         Return value is properly quoted for use as an SQL identifier.
@@ -287,9 +309,17 @@ class MultiTableCopy:
         """
         return foreign_key
 
-    def extract(self, source_table, joins=None, where_clause=None,
-        id_sequence=None, inert_where=None, pre_pouring_callback=None,
-        batch_pouring_callback=None, external_joins=None):
+    def extract(
+        self,
+        source_table,
+        joins=None,
+        where_clause=None,
+        id_sequence=None,
+        inert_where=None,
+        pre_pouring_callback=None,
+        batch_pouring_callback=None,
+        external_joins=None,
+    ):
         """Extract (selected) rows from source_table into a holding table.
 
         The holding table gets an additional new_id column with identifiers
@@ -382,13 +412,21 @@ class MultiTableCopy:
 
         holding_table = self.getHoldingTableName(source_table)
 
-        self.logger.info('Extracting from %s into %s...' % (
-            source_table, holding_table))
+        self.logger.info(
+            "Extracting from %s into %s..." % (source_table, holding_table)
+        )
 
         starttime = time.time()
 
-        cur = self._selectToHolding(source_table, joins, external_joins,
-            where_clause, holding_table, id_sequence, inert_where)
+        cur = self._selectToHolding(
+            source_table,
+            joins,
+            external_joins,
+            where_clause,
+            holding_table,
+            id_sequence,
+            inert_where,
+        )
 
         if len(joins) > 0:
             self._retargetForeignKeys(holding_table, joins, cur)
@@ -397,10 +435,19 @@ class MultiTableCopy:
         self._indexIdColumn(holding_table, source_table, cur)
 
         self.logger.debug(
-            '...Extracted in %.3f seconds' % (time.time() - starttime))
-
-    def _selectToHolding(self, source_table, joins, external_joins,
-            where_clause, holding_table, id_sequence, inert_where):
+            "...Extracted in %.3f seconds" % (time.time() - starttime)
+        )
+
+    def _selectToHolding(
+        self,
+        source_table,
+        joins,
+        external_joins,
+        where_clause,
+        holding_table,
+        id_sequence,
+        inert_where,
+    ):
         """Create holding table based on data from source table.
 
         We don't need to know what's in the source table exactly; we just
@@ -409,7 +456,7 @@ class MultiTableCopy:
         new_id column.
         """
         source_table = str(source_table)
-        select = ['source.*']
+        select = ["source.*"]
         from_list = ["%s source" % source_table]
         where = []
 
@@ -421,35 +468,34 @@ class MultiTableCopy:
 
             self._checkForeignKeyOrder(column, referenced_table)
 
-            select.append('%s.new_id AS new_%s' % (
-                referenced_holding, column))
+            select.append("%s.new_id AS new_%s" % (referenced_holding, column))
             from_list.append(referenced_holding)
-            where.append('%s = %s.id' % (column, referenced_holding))
+            where.append("%s = %s.id" % (column, referenced_holding))
 
         from_list.extend(external_joins)
 
         if where_clause is not None:
-            where.append('(%s)' % where_clause)
+            where.append("(%s)" % where_clause)
 
-        where_text = ''
+        where_text = ""
         if len(where) > 0:
-            where_text = 'WHERE %s' % ' AND '.join(where)
+            where_text = "WHERE %s" % " AND ".join(where)
 
         # For each row we append at the end any new foreign key values, and
         # finally a "new_id" holding its future id field.  This new_id value
         # is allocated from the original table's id sequence, so it will be
         # unique in the original table.
         table_creation_parameters = {
-            'columns': ','.join(select),
-            'holding_table': holding_table,
-            'id_sequence': "nextval('%s'::regclass)" % id_sequence,
-            'inert_where': inert_where,
-            'source_tables': ','.join(from_list),
-            'where': where_text,
-            'temp': '',
-            }
+            "columns": ",".join(select),
+            "holding_table": holding_table,
+            "id_sequence": "nextval('%s'::regclass)" % id_sequence,
+            "inert_where": inert_where,
+            "source_tables": ",".join(from_list),
+            "where": where_text,
+            "temp": "",
+        }
         if not self.restartable:
-            table_creation_parameters['temp'] = 'TEMP'
+            table_creation_parameters["temp"] = "TEMP"
 
         cur = cursor()
 
@@ -458,30 +504,39 @@ class MultiTableCopy:
             # We'll be pouring all rows from this table.  To avoid a costly
             # second write pass (which would rewrite all records in the
             # holding table), we assign new_ids right in the same query.
-            cur.execute('''
+            cur.execute(
+                """
                 CREATE %(temp)s TABLE %(holding_table)s AS
                 SELECT DISTINCT ON (source.id)
                     %(columns)s, %(id_sequence)s AS new_id
                 FROM %(source_tables)s
                 %(where)s
-                ORDER BY id''' % table_creation_parameters)
+                ORDER BY id"""
+                % table_creation_parameters
+            )
         else:
             # Some of the rows may have to have null new_ids.  To avoid
             # wasting "address space" on the sequence, we populate the entire
             # holding table with null new_ids, then fill in new_id only for
             # rows that do not match the "inert_where" condition.
-            cur.execute('''
+            cur.execute(
+                """
                 CREATE %(temp)s TABLE %(holding_table)s AS
                 SELECT DISTINCT ON (source.id)
                     %(columns)s, NULL::integer AS new_id
                 FROM %(source_tables)s
                 %(where)s
-                ORDER BY id''' % table_creation_parameters)
-            cur.execute('''
+                ORDER BY id"""
+                % table_creation_parameters
+            )
+            cur.execute(
+                """
                 UPDATE %(holding_table)s AS holding
                 SET new_id = %(id_sequence)s
                 WHERE NOT (%(inert_where)s)
-                ''' % table_creation_parameters)
+                """
+                % table_creation_parameters
+            )
 
         return cur
 
@@ -493,10 +548,13 @@ class MultiTableCopy:
         """
         source_table = str(source_table)
         self.logger.debug("Indexing %s" % holding_table)
-        cur.execute('''
+        cur.execute(
+            """
             CREATE UNIQUE INDEX %s
             ON %s (id)
-        ''' % (self.getHoldingTableName(source_table, 'id'), holding_table))
+        """
+            % (self.getHoldingTableName(source_table, "id"), holding_table)
+        )
 
     def _retargetForeignKeys(self, holding_table, joins, cur):
         """Replace foreign keys in new holding table.
@@ -506,14 +564,15 @@ class MultiTableCopy:
         added from the holding table.
         """
         columns = [join.lower() for join in joins]
-        fk_updates = ['%s = new_%s' % (column, column) for column in columns]
-        updates = ', '.join(fk_updates)
+        fk_updates = ["%s = new_%s" % (column, column) for column in columns]
+        updates = ", ".join(fk_updates)
         self.logger.debug("Redirecting foreign keys: %s" % updates)
         cur.execute("UPDATE %s SET %s" % (holding_table, updates))
         for column in columns:
             self.logger.debug("Dropping foreign-key column %s" % column)
-            cur.execute("ALTER TABLE %s DROP COLUMN new_%s"
-                        % (holding_table, column))
+            cur.execute(
+                "ALTER TABLE %s DROP COLUMN new_%s" % (holding_table, column)
+            )
 
     def needsRecovery(self):
         """Do we have holding tables with recoverable data from previous run?
@@ -534,9 +593,10 @@ class MultiTableCopy:
         # its new_id column, then the pouring process had not begun yet.
         # Assume the data was not ready for pouring.
         first_holding_table = self.getRawHoldingTableName(self.tables[0])
-        if postgresql.table_has_column(cur, first_holding_table, 'new_id'):
+        if postgresql.table_has_column(cur, first_holding_table, "new_id"):
             self.logger.info(
-                "Previous run aborted too early for recovery; redo all")
+                "Previous run aborted too early for recovery; redo all"
+            )
             return False
 
         self.logger.info("Recoverable data found")
@@ -559,7 +619,8 @@ class MultiTableCopy:
         elif self.last_extracted_table != len(self.tables) - 1:
             raise AssertionError(
                 "Not safe to pour: last table '%s' was not extracted"
-                % self.tables[-1])
+                % self.tables[-1]
+            )
 
         cur = self._commit(transaction_manager)
 
@@ -582,16 +643,19 @@ class MultiTableCopy:
                 continue
 
             holding_table = self.getHoldingTableName(table)
-            self.logger.info("Pouring %s back into %s..."
-                         % (holding_table, table))
+            self.logger.info(
+                "Pouring %s back into %s..." % (holding_table, table)
+            )
 
             tablestarttime = time.time()
 
             has_new_id = postgresql.table_has_column(
-                cur, holding_table_unquoted, 'new_id')
+                cur, holding_table_unquoted, "new_id"
+            )
 
             self._pourTable(
-                holding_table, table, has_new_id, transaction_manager)
+                holding_table, table, has_new_id, transaction_manager
+            )
 
             # Drop holding table.  It may still contain rows with id set to
             # null.  Those must not be poured.
@@ -599,7 +663,8 @@ class MultiTableCopy:
 
             self.logger.debug(
                 "Pouring %s took %.3f seconds."
-                % (holding_table, time.time() - tablestarttime))
+                % (holding_table, time.time() - tablestarttime)
+            )
 
             cur = self._commit(transaction_manager)
 
@@ -608,7 +673,8 @@ class MultiTableCopy:
         postgresql.allow_sequential_scans(cur, True)
 
     def _pourTable(
-        self, holding_table, table, has_new_id, transaction_manager):
+        self, holding_table, table, has_new_id, transaction_manager
+    ):
         """Pour contents of a holding table back into its source table.
 
         This will commit transaction_manager, typically multiple times.
@@ -636,10 +702,15 @@ class MultiTableCopy:
         # five seconds or so each; we aim for four just to be sure.
 
         pourer = PouringLoop(
-            holding_table, table, transaction_manager, self.logger,
-            self.batch_pouring_callbacks.get(table))
+            holding_table,
+            table,
+            transaction_manager,
+            self.logger,
+            self.batch_pouring_callbacks.get(table),
+        )
         DBLoopTuner(
-            pourer, self.seconds_per_batch, self.minimum_batch_size).run()
+            pourer, self.seconds_per_batch, self.minimum_batch_size
+        ).run()
 
     def _checkExtractionOrder(self, source_table):
         """Verify order in which tables are extracted against tables list.
@@ -651,24 +722,28 @@ class MultiTableCopy:
             table_number = self.tables.index(source_table)
         except ValueError:
             raise AssertionError(
-                "Can't extract '%s': not in list of tables" % source_table)
+                "Can't extract '%s': not in list of tables" % source_table
+            )
 
         if self.last_extracted_table is None:
             # Can't skip the first table!
             if table_number > 0:
                 raise AssertionError(
-                    "Can't extract: skipped first table '%s'"
-                    % self.tables[0])
+                    "Can't extract: skipped first table '%s'" % self.tables[0]
+                )
         else:
             if table_number < self.last_extracted_table:
                 raise AssertionError(
-                    "Table '%s' extracted after its turn" % source_table)
+                    "Table '%s' extracted after its turn" % source_table
+                )
             if table_number > self.last_extracted_table + 1:
                 raise AssertionError(
-                    "Table '%s' extracted before its turn" % source_table)
+                    "Table '%s' extracted before its turn" % source_table
+                )
             if table_number == self.last_extracted_table:
                 raise AssertionError(
-                    "Table '%s' extracted again" % source_table)
+                    "Table '%s' extracted again" % source_table
+                )
 
         self.last_extracted_table = table_number
 
@@ -685,16 +760,19 @@ class MultiTableCopy:
         except ValueError:
             raise AssertionError(
                 "Foreign key '%s' refers to table '%s' "
-                "which is not being copied" % (fk, referenced_table))
+                "which is not being copied" % (fk, referenced_table)
+            )
 
         if target_number > self.last_extracted_table:
             raise AssertionError(
                 "Foreign key '%s' refers to table '%s' "
-                "which is to be copied later" % (fk, referenced_table))
+                "which is to be copied later" % (fk, referenced_table)
+            )
         if target_number == self.last_extracted_table:
             raise AssertionError(
                 "Foreign key '%s' in table '%s' "
-                "is a self-reference" % (fk, referenced_table))
+                "is a self-reference" % (fk, referenced_table)
+            )
 
     def _commit(self, transaction_manager):
         """Commit our transaction and create replacement cursor.
diff --git a/lib/lp/services/database/namedrow.py b/lib/lp/services/database/namedrow.py
index 3f40183..9341845 100644
--- a/lib/lp/services/database/namedrow.py
+++ b/lib/lp/services/database/namedrow.py
@@ -10,7 +10,7 @@ from collections import namedtuple
 
 def named_fetchall(cur):
     row_type = namedtuple(
-        'DatabaseRow',
-        (description[0] for description in cur.description))
+        "DatabaseRow", (description[0] for description in cur.description)
+    )
     for row in cur.fetchall():
         yield row_type(*row)
diff --git a/lib/lp/services/database/nl_search.py b/lib/lp/services/database/nl_search.py
index a4fbdf0..2463b2d 100644
--- a/lib/lp/services/database/nl_search.py
+++ b/lib/lp/services/database/nl_search.py
@@ -5,28 +5,23 @@
 full text index.
 """
 
-__all__ = ['nl_phrase_search']
+__all__ = ["nl_phrase_search"]
 
 import re
 
 import six
 from storm.databases.postgres import Case
-from storm.locals import (
-    Count,
-    Select,
-    SQL,
-    )
+from storm.locals import SQL, Count, Select
 from zope.component import getUtility
 
 from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
+    MAIN_STORE,
     IStore,
     IStoreSelector,
-    MAIN_STORE,
-    )
+)
 from lp.services.database.stormexpr import fti_search
 
-
 # Regular expression to extract terms from the printout of a ts_query
 TS_QUERY_TERM_RE = re.compile(r"'([^']+)'")
 
@@ -49,8 +44,9 @@ def nl_term_candidates(phrase):
     return TS_QUERY_TERM_RE.findall(terms)
 
 
-def nl_phrase_search(phrase, table, constraint_clauses=None,
-                     fast_enabled=True):
+def nl_phrase_search(
+    phrase, table, constraint_clauses=None, fast_enabled=True
+):
     """Return the tsearch2 query that should be used to do a phrase search.
 
     The precise heuristics applied by this function will vary as we tune
@@ -72,7 +68,7 @@ def nl_phrase_search(phrase, table, constraint_clauses=None,
     """
     terms = nl_term_candidates(phrase)
     if len(terms) == 0:
-        return ''
+        return ""
     if fast_enabled:
         return _nl_phrase_search(terms, table, constraint_clauses)
     else:
@@ -92,7 +88,7 @@ def _nl_phrase_search(terms, table, constraint_clauses):
     # a more complex rank & search function.
     # sorted for doctesting convenience - should have no impact on tsearch2.
     if len(terms) < 3:
-        return '|'.join(sorted(terms))
+        return "|".join(sorted(terms))
     # Expand
     and_groups = [None] * (len(terms) + 1)
     for pos in range(len(terms) + 1):
@@ -101,9 +97,8 @@ def _nl_phrase_search(terms, table, constraint_clauses):
     for pos, term in enumerate(sorted(terms)):
         and_groups[pos + 1].discard(term)
     # sorted for doctesting convenience - should have no impact on tsearch2.
-    and_clauses = ['(' + '&'.join(sorted(group)) + ')'
-        for group in and_groups]
-    return '|'.join(and_clauses)
+    and_clauses = ["(" + "&".join(sorted(group)) + ")" for group in and_groups]
+    return "|".join(and_clauses)
 
 
 def _slow_nl_phrase_search(terms, table, constraint_clauses):
@@ -142,7 +137,7 @@ def _slow_nl_phrase_search(terms, table, constraint_clauses):
     total = store.find(table, *constraint_clauses).count()
     term_candidates = terms
     if total < 5:
-        return '|'.join(term_candidates)
+        return "|".join(term_candidates)
 
     # Build the query to get all the counts. We get all the counts in
     # one query, using COUNT(CASE ...), since issuing separate queries
@@ -150,12 +145,15 @@ def _slow_nl_phrase_search(terms, table, constraint_clauses):
     counts = store.find(
         tuple(
             Count(Case([(fti_search(table, term), True)], default=None))
-            for term in term_candidates),
-        *constraint_clauses).one()
+            for term in term_candidates
+        ),
+        *constraint_clauses,
+    ).one()
 
     # Remove words that are too common.
     terms = [
-        term for count, term in zip(counts, term_candidates)
+        term
+        for count, term in zip(counts, term_candidates)
         if float(count) / total < 0.5
-        ]
-    return '|'.join(terms)
+    ]
+    return "|".join(terms)
diff --git a/lib/lp/services/database/policy.py b/lib/lp/services/database/policy.py
index f1f1b41..855933e 100644
--- a/lib/lp/services/database/policy.py
+++ b/lib/lp/services/database/policy.py
@@ -4,53 +4,38 @@
 """Launchpad database policies."""
 
 __all__ = [
-    'BaseDatabasePolicy',
-    'DatabaseBlockedPolicy',
-    'LaunchpadDatabasePolicy',
-    'PrimaryDatabasePolicy',
-    'StandbyDatabasePolicy',
-    'StandbyOnlyDatabasePolicy',
-    ]
-
-from datetime import (
-    datetime,
-    timedelta,
-    )
+    "BaseDatabasePolicy",
+    "DatabaseBlockedPolicy",
+    "LaunchpadDatabasePolicy",
+    "PrimaryDatabasePolicy",
+    "StandbyDatabasePolicy",
+    "StandbyOnlyDatabasePolicy",
+]
+
+from datetime import datetime, timedelta
 
 import psycopg2
-from storm.cache import (
-    Cache,
-    GenerationalCache,
-    )
+from storm.cache import Cache, GenerationalCache
 from storm.exceptions import DisconnectionError
 from storm.zope.interfaces import IZStorm
 from zope.authentication.interfaces import IUnauthenticatedPrincipal
 from zope.component import getUtility
-from zope.interface import (
-    alsoProvides,
-    implementer,
-    )
-
-from lp.services.config import (
-    config,
-    dbconfig,
-    )
+from zope.interface import alsoProvides, implementer
+
+from lp.services.config import config, dbconfig
 from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
+    MAIN_STORE,
+    PRIMARY_FLAVOR,
+    STANDBY_FLAVOR,
     DisallowedStore,
     IDatabasePolicy,
     IMasterStore,
     IStandbyStore,
     IStoreSelector,
-    MAIN_STORE,
-    PRIMARY_FLAVOR,
-    STANDBY_FLAVOR,
-    )
+)
 from lp.services.database.sqlbase import StupidCache
-from lp.services.webapp.interfaces import (
-    IClientIdManager,
-    ISession,
-    )
+from lp.services.webapp.interfaces import IClientIdManager, ISession
 
 
 def _now():
@@ -67,11 +52,11 @@ _test_lag = None
 
 def storm_cache_factory():
     """Return a Storm Cache of the type and size specified in dbconfig."""
-    if dbconfig.storm_cache == 'generational':
+    if dbconfig.storm_cache == "generational":
         return GenerationalCache(int(dbconfig.storm_cache_size))
-    elif dbconfig.storm_cache == 'stupid':
+    elif dbconfig.storm_cache == "stupid":
         return StupidCache(int(dbconfig.storm_cache_size))
-    elif dbconfig.storm_cache == 'default':
+    elif dbconfig.storm_cache == "default":
         return Cache(int(dbconfig.storm_cache_size))
     else:
         assert False, "Unknown storm_cache %s." % dbconfig.storm_cache
@@ -82,10 +67,11 @@ def get_connected_store(name, flavor):
 
     :raises storm.exceptions.DisconnectionError: On failures.
     """
-    store_name = '%s-%s' % (name, flavor)
+    store_name = "%s-%s" % (name, flavor)
     try:
         store = getUtility(IZStorm).get(
-            store_name, 'launchpad:%s' % store_name)
+            store_name, "launchpad:%s" % store_name
+        )
         store._connection._ensure_connected()
         return store
     except DisconnectionError:
@@ -93,7 +79,7 @@ def get_connected_store(name, flavor):
         # registered with the transaction manager. Otherwise, if
         # _ensure_connected() caused the disconnected state it may not
         # be put into reconnect state at the end of the transaction.
-        store._connection._event.emit('register-transaction')
+        store._connection._event.emit("register-transaction")
         raise
     except psycopg2.OperationalError as exc:
         # Per Bug #1025264, Storm emits psycopg2 errors when we
@@ -145,7 +131,7 @@ class BaseDatabasePolicy:
             if store is None:
                 raise
 
-        if not getattr(store, '_lp_store_initialized', False):
+        if not getattr(store, "_lp_store_initialized", False):
             # No existing Store. Create a new one and tweak its defaults.
 
             # XXX stub 2009-06-25 bug=391996: The default Storm
@@ -179,9 +165,11 @@ class BaseDatabasePolicy:
     def __exit__(self, exc_type, exc_value, traceback):
         """See `IDatabasePolicy`."""
         policy = getUtility(IStoreSelector).pop()
-        assert policy is self, (
-            "Unexpected database policy %s returned by store selector"
-            % repr(policy))
+        assert (
+            policy is self
+        ), "Unexpected database policy %s returned by store selector" % repr(
+            policy
+        )
 
 
 class DatabaseBlockedPolicy(BaseDatabasePolicy):
@@ -201,6 +189,7 @@ class PrimaryDatabasePolicy(BaseDatabasePolicy):
     support session cookies. It is also used when no policy has been
     installed.
     """
+
     default_flavor = PRIMARY_FLAVOR
 
 
@@ -209,6 +198,7 @@ class StandbyDatabasePolicy(BaseDatabasePolicy):
 
     Access to the primary can still be made if requested explicitly.
     """
+
     default_flavor = STANDBY_FLAVOR
 
 
@@ -217,6 +207,7 @@ class StandbyOnlyDatabasePolicy(BaseDatabasePolicy):
 
     This policy is used for Feeds requests and other always-read only request.
     """
+
     default_flavor = STANDBY_FLAVOR
 
     def getStore(self, name, flavor):
@@ -227,15 +218,14 @@ class StandbyOnlyDatabasePolicy(BaseDatabasePolicy):
 
 
 def LaunchpadDatabasePolicyFactory(request):
-    """Return the Launchpad IDatabasePolicy for the current appserver state.
-    """
+    """Return the Launchpad IDatabasePolicy for the current appserver state."""
     # We need to select a non-load balancing DB policy for some status URLs so
     # it doesn't query the DB for lag information (this page should not
     # hit the database at all). We haven't traversed yet, so we have
     # to sniff the request this way.  Even though PATH_INFO is always
     # present in real requests, we need to tread carefully (``get``) because
     # of test requests in our automated tests.
-    if request.get('PATH_INFO') in ['/+opstats', '/+haproxy']:
+    if request.get("PATH_INFO") in ["/+opstats", "/+haproxy"]:
         return DatabaseBlockedPolicy(request)
     else:
         return LaunchpadDatabasePolicy(request)
@@ -250,21 +240,22 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
     def __init__(self, request):
         self.request = request
         # Detect if this is a read only request or not.
-        self.read_only = self.request.method in ['GET', 'HEAD']
+        self.read_only = self.request.method in ["GET", "HEAD"]
 
     def _hasSession(self):
         "Is there is already a session cookie hanging around?"
         cookie_name = getUtility(IClientIdManager).namespace
         return (
-            cookie_name in self.request.cookies or
-            self.request.response.getCookie(cookie_name) is not None)
+            cookie_name in self.request.cookies
+            or self.request.response.getCookie(cookie_name) is not None
+        )
 
     def install(self):
         """See `IDatabasePolicy`."""
         default_flavor = None
 
         # If this is a Retry attempt, force use of the primary database.
-        if getattr(self.request, '_retry_count', 0) > 0:
+        if getattr(self.request, "_retry_count", 0) > 0:
             default_flavor = PRIMARY_FLAVOR
 
         # Select if the DEFAULT_FLAVOR Store will be the primary or a
@@ -275,8 +266,9 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
         # those changes to propagate to the standby databases.
         elif self.read_only:
             lag = self.getReplicationLag()
-            if (lag is not None
-                and lag > timedelta(seconds=config.database.max_usable_lag)):
+            if lag is not None and lag > timedelta(
+                seconds=config.database.max_usable_lag
+            ):
                 # Don't use the standby at all if lag is greater than the
                 # configured threshold. This reduces replication oddities
                 # noticed by users, as well as reducing load on the
@@ -288,8 +280,8 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
                 # important for fast and reliable performance for pages like
                 # +opstats.
                 if self._hasSession():
-                    session_data = ISession(self.request)['lp.dbpolicy']
-                    last_write = session_data.get('last_write', None)
+                    session_data = ISession(self.request)["lp.dbpolicy"]
+                    last_write = session_data.get("last_write", None)
                 else:
                     last_write = None
                 now = _now()
@@ -306,7 +298,7 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
         else:
             default_flavor = PRIMARY_FLAVOR
 
-        assert default_flavor is not None, 'default_flavor not set!'
+        assert default_flavor is not None, "default_flavor not set!"
 
         self.default_flavor = default_flavor
 
@@ -323,8 +315,12 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
             # to the session. This will be true if the principal is
             # authenticated or if there is already a session cookie
             # hanging around.
-            if not IUnauthenticatedPrincipal.providedBy(
-                self.request.principal) or self._hasSession():
+            if (
+                not IUnauthenticatedPrincipal.providedBy(
+                    self.request.principal
+                )
+                or self._hasSession()
+            ):
                 # A non-readonly request has been made. Store this fact
                 # in the session. Precision is hard coded at 1 minute
                 # (so we don't update the timestamp if it is no more
@@ -334,13 +330,14 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
                 # send their session key that was set over https, so we
                 # don't want to access the session which will overwrite
                 # the cookie and log the user out.
-                session_data = ISession(self.request)['lp.dbpolicy']
-                last_write = session_data.get('last_write', None)
+                session_data = ISession(self.request)["lp.dbpolicy"]
+                last_write = session_data.get("last_write", None)
                 now = _now()
-                if (last_write is None or
-                    last_write < now - timedelta(minutes=1)):
+                if last_write is None or last_write < now - timedelta(
+                    minutes=1
+                ):
                     # set value
-                    session_data['last_write'] = now
+                    session_data["last_write"] = now
 
     def getReplicationLag(self):
         """Return the replication lag between the primary and our hot standby.
@@ -354,11 +351,13 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
         # Attempt to retrieve PostgreSQL streaming replication lag
         # from the standby.
         standby_store = self.getStore(MAIN_STORE, STANDBY_FLAVOR)
-        hot_standby, streaming_lag = standby_store.execute("""
+        hot_standby, streaming_lag = standby_store.execute(
+            """
             SELECT
                 pg_is_in_recovery(),
                 now() - pg_last_xact_replay_timestamp()
-            """).get_one()
+            """
+        ).get_one()
         if hot_standby and streaming_lag is not None:
             # standby is a PG 9.1 streaming replication hot standby.
             # Return the lag.
@@ -370,8 +369,7 @@ class LaunchpadDatabasePolicy(BaseDatabasePolicy):
 
 
 def WebServiceDatabasePolicyFactory(request):
-    """Return the Launchpad IDatabasePolicy for the current appserver state.
-    """
+    """Return the Launchpad IDatabasePolicy for the current appserver state."""
     # If a session cookie was sent with the request, use the
     # standard Launchpad database policy for load balancing to
     # the standby databases. The javascript web service libraries
diff --git a/lib/lp/services/database/postgresql.py b/lib/lp/services/database/postgresql.py
index cec01cf..ec32201 100644
--- a/lib/lp/services/database/postgresql.py
+++ b/lib/lp/services/database/postgresql.py
@@ -8,11 +8,7 @@ and table manipulation
 
 import re
 
-from lp.services.database.sqlbase import (
-    quote,
-    quoteIdentifier,
-    sqlvalues,
-    )
+from lp.services.database.sqlbase import quote, quoteIdentifier, sqlvalues
 
 
 def listReferences(cur, table, column, indirect=True, _state=None):
@@ -100,7 +96,7 @@ def listReferences(cur, table, column, indirect=True, _state=None):
 
 
 def listIndexes(cur, table, column, only_unique=False):
-    '''Return a list of indexes on `table` that include the `column`
+    """Return a list of indexes on `table` that include the `column`
 
     `cur` must be an open DB-API cursor.
 
@@ -142,11 +138,11 @@ def listIndexes(cur, table, column, only_unique=False):
     >>> listIndexes(cur, 'a', 'selfref')
     []
 
-    '''
+    """
 
     # Retrieve the attributes for the table
     attributes = {}
-    sql = '''
+    sql = """
         SELECT
             a.attnum,
             a.attname
@@ -155,7 +151,7 @@ def listIndexes(cur, table, column, only_unique=False):
         WHERE
             t.relname = %(table)s
             AND a.attnum > 0
-        '''
+        """
     cur.execute(sql, dict(table=table))
     for num, name in cur.fetchall():
         attributes[int(num)] = name
@@ -164,33 +160,29 @@ def listIndexes(cur, table, column, only_unique=False):
     rv = []
 
     # Retrieve the indexes.
-    sql = '''
+    sql = """
         SELECT
             i.indkey
         FROM
             pg_class AS t JOIN pg_index AS i ON i.indrelid = t.oid
         WHERE
             t.relname = %(table)s
-        '''
+        """
     if only_unique:
-        sql += ' AND i.indisunique = true'
+        sql += " AND i.indisunique = true"
     cur.execute(sql, dict(table=table))
-    for indkey, in cur.fetchall():
+    for (indkey,) in cur.fetchall():
         # We have a space separated list of integer keys into the attribute
         # mapping. Ignore the 0's, as they indicate a function and we don't
         # handle them.
-        keys = [
-            attributes[int(key)]
-                for key in indkey.split()
-                    if int(key) > 0
-            ]
+        keys = [attributes[int(key)] for key in indkey.split() if int(key) > 0]
         if column in keys:
             rv.append(tuple(keys))
     return rv
 
 
 def listUniques(cur, table, column):
-    '''Return a list of unique indexes on `table` that include the `column`
+    """Return a list of unique indexes on `table` that include the `column`
 
     `cur` must be an open DB-API cursor.
 
@@ -232,7 +224,7 @@ def listUniques(cur, table, column):
     >>> listUniques(cur, 'c', 'description')
     []
 
-    '''
+    """
     return listIndexes(cur, table, column, only_unique=True)
 
 
@@ -265,7 +257,7 @@ def listSequences(cur):
     rv = []
     cur.execute(sql)
     for schema, sequence in list(cur.fetchall()):
-        match = re.search(r'^(\w+)_(\w+)_seq$', sequence)
+        match = re.search(r"^(\w+)_(\w+)_seq$", sequence)
         if match is None:
             rv.append((schema, sequence, None, None))
         else:
@@ -299,39 +291,42 @@ def check_indirect_references(references):
     for src_tab, src_col, ref_tab, ref_col, updact, delact in references:
         # If the ref_tab and ref_col is not Person.id, then we have
         # an indirect reference. Ensure the update action is 'CASCADE'
-        if ref_tab != 'person' and ref_col != 'id':
-            if updact != 'c':
+        if ref_tab != "person" and ref_col != "id":
+            if updact != "c":
                 raise RuntimeError(
-                    '%s.%s reference to %s.%s must be ON UPDATE CASCADE'
-                    % (src_tab, src_col, ref_tab, ref_col))
+                    "%s.%s reference to %s.%s must be ON UPDATE CASCADE"
+                    % (src_tab, src_col, ref_tab, ref_col)
+                )
 
 
 def generateResetSequencesSQL(cur):
-    """Return SQL that will reset table sequences to match the data in them.
-    """
+    """Return SQL that will reset table sequences to match the data in them."""
     stmt = []
     for schema, sequence, table, column in listSequences(cur):
         if table is None or column is None:
             continue
         sql = "SELECT max(%s) FROM %s" % (
-                quoteIdentifier(column), quoteIdentifier(table)
-                )
+            quoteIdentifier(column),
+            quoteIdentifier(table),
+        )
         cur.execute(sql)
         last_value = cur.fetchone()[0]
         if last_value is None:
             last_value = 1
-            flag = 'false'
+            flag = "false"
         else:
-            flag = 'true'
+            flag = "true"
         sql = "setval(%s, %d, %s)" % (
-                quote('%s.%s' % (schema, sequence)), int(last_value), flag
-                )
+            quote("%s.%s" % (schema, sequence)),
+            int(last_value),
+            flag,
+        )
         stmt.append(sql)
     if stmt:
-        stmt = 'SELECT ' + ', '.join(stmt)
+        stmt = "SELECT " + ", ".join(stmt)
         return stmt
     else:
-        return ''
+        return ""
 
 
 def resetSequences(cur):
@@ -355,6 +350,7 @@ def resetSequences(cur):
     if sql:
         cur.execute(sql)
 
+
 # Regular expression used to parse row count estimate from EXPLAIN output
 _rows_re = re.compile(r"rows=(\d+)\swidth=")
 
@@ -399,12 +395,15 @@ def have_table(cur, table):
     >>> have_table(cur, 'atesttable')
     False
     """
-    cur.execute('''
+    cur.execute(
+        """
         SELECT count(*) > 0
         FROM pg_tables
         WHERE tablename=%s
-    ''' % str(quote(table)))
-    return (cur.fetchall()[0][0] != 0)
+    """
+        % str(quote(table))
+    )
+    return cur.fetchall()[0][0] != 0
 
 
 def table_has_column(cur, table, column):
@@ -423,14 +422,17 @@ def table_has_column(cur, table, column):
     >>> table_has_column(cur, 'atesttable', 'x')
     False
     """
-    cur.execute('''
+    cur.execute(
+        """
         SELECT count(*) > 0
         FROM pg_attribute
         JOIN pg_class ON pg_class.oid = attrelid
         WHERE relname=%s
             AND attname=%s
-    ''' % sqlvalues(table, column))
-    return (cur.fetchall()[0][0] != 0)
+    """
+        % sqlvalues(table, column)
+    )
+    return cur.fetchall()[0][0] != 0
 
 
 def drop_tables(cur, tables):
@@ -460,7 +462,7 @@ def drop_tables(cur, tables):
         tables = [tables]
 
     # This syntax requires postgres 8.2 or better
-    cur.execute("DROP TABLE IF EXISTS %s" % ','.join(tables))
+    cur.execute("DROP TABLE IF EXISTS %s" % ",".join(tables))
 
 
 def allow_sequential_scans(cur, permission):
@@ -494,9 +496,9 @@ def allow_sequential_scans(cur, permission):
     >>> print(cur.fetchall()[0][0])
     off
     """
-    permission_value = 'false'
+    permission_value = "false"
     if permission:
-        permission_value = 'true'
+        permission_value = "true"
 
     cur.execute("SET enable_seqscan=%s" % permission_value)
 
@@ -506,17 +508,20 @@ def all_tables_in_schema(cur, schema):
 
     :returns: A set of quoted, fully qualified table names.
     """
-    cur.execute("""
+    cur.execute(
+        """
         SELECT nspname, relname
         FROM pg_class, pg_namespace
         WHERE
             pg_class.relnamespace = pg_namespace.oid
             AND pg_namespace.nspname = %s
             AND pg_class.relkind = 'r'
-        """ % sqlvalues(schema))
+        """
+        % sqlvalues(schema)
+    )
     return {
-            fqn(namespace, tablename)
-            for namespace, tablename in cur.fetchall()}
+        fqn(namespace, tablename) for namespace, tablename in cur.fetchall()
+    }
 
 
 def all_sequences_in_schema(cur, schema):
@@ -524,17 +529,18 @@ def all_sequences_in_schema(cur, schema):
 
     :returns: A set of quoted, fully qualified table names.
     """
-    cur.execute("""
+    cur.execute(
+        """
         SELECT nspname, relname
         FROM pg_class, pg_namespace
         WHERE
             pg_class.relnamespace = pg_namespace.oid
             AND pg_namespace.nspname = %s
             AND pg_class.relkind = 'S'
-        """ % sqlvalues(schema))
-    return {
-            fqn(namespace, sequence)
-            for namespace, sequence in cur.fetchall()}
+        """
+        % sqlvalues(schema)
+    )
+    return {fqn(namespace, sequence) for namespace, sequence in cur.fetchall()}
 
 
 def fqn(namespace, name):
@@ -573,14 +579,21 @@ class ConnectionString:
     >>> repr(cs)
     'dbname=launchpad_dev user=foo'
     """
+
     CONNECTION_KEYS = [
-        'dbname', 'user', 'host', 'port', 'connect_timeout', 'sslmode']
+        "dbname",
+        "user",
+        "host",
+        "port",
+        "connect_timeout",
+        "sslmode",
+    ]
 
     def __init__(self, conn_str):
         if "'" in conn_str or "\\" in conn_str:
             raise AssertionError("quoted or escaped values are not supported")
 
-        if '=' not in conn_str:
+        if "=" not in conn_str:
             # Just a dbname
             for key in self.CONNECTION_KEYS:
                 setattr(self, key, None)
@@ -591,7 +604,7 @@ class ConnectionString:
             # be added after construction or not actually required
             # at all in some instances.
             for key in self.CONNECTION_KEYS:
-                match = re.search(r'%s=([^ ]+)' % key, conn_str)
+                match = re.search(r"%s=([^ ]+)" % key, conn_str)
                 if match is None:
                     setattr(self, key, None)
                 else:
@@ -602,20 +615,22 @@ class ConnectionString:
         for key in self.CONNECTION_KEYS:
             val = getattr(self, key, None)
             if val is not None:
-                params.append('%s=%s' % (key, val))
-        return ' '.join(params)
+                params.append("%s=%s" % (key, val))
+        return " ".join(params)
 
     def __eq__(self, other):
         return isinstance(other, ConnectionString) and all(
             getattr(self, key, None) == getattr(other, key, None)
-            for key in self.CONNECTION_KEYS)
+            for key in self.CONNECTION_KEYS
+        )
 
     def __ne__(self, other):
         return not self == other
 
     def __hash__(self):
         return hash(
-            tuple(getattr(self, key, None) for key in self.CONNECTION_KEYS))
+            tuple(getattr(self, key, None) for key in self.CONNECTION_KEYS)
+        )
 
     def asPGCommandLineArgs(self):
         """Return a string suitable for the PostgreSQL standard tools
@@ -638,7 +653,7 @@ class ConnectionString:
             params.append("--username=%s" % self.user)
         if self.dbname is not None:
             params.append(self.dbname)
-        return ' '.join(params)
+        return " ".join(params)
 
     def asLPCommandLineArgs(self):
         """Return a string suitable for use by the LP tools using
@@ -655,13 +670,14 @@ class ConnectionString:
             params.append("--user=%s" % self.user)
         if self.dbname is not None:
             params.append("--dbname=%s" % self.dbname)
-        return ' '.join(params)
+        return " ".join(params)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     import psycopg
-    con = psycopg.connect('dbname=launchpad_dev user=launchpad')
+
+    con = psycopg.connect("dbname=launchpad_dev user=launchpad")
     cur = con.cursor()
 
-    for table, column in listReferences(cur, 'person', 'id'):
-        print('%32s %32s' % (table, column))
+    for table, column in listReferences(cur, "person", "id"):
+        print("%32s %32s" % (table, column))
diff --git a/lib/lp/services/database/sort_sql.py b/lib/lp/services/database/sort_sql.py
index 30148d5..ddabd86 100644
--- a/lib/lp/services/database/sort_sql.py
+++ b/lib/lp/services/database/sort_sql.py
@@ -28,8 +28,8 @@ class Parser:
 
     def __init__(self):
         self.lines = []
-        self.buffer = ''
-        self.line = ''
+        self.buffer = ""
+        self.line = ""
 
     def parse_quoted_string(self, string):
         """Parse strings enclosed in single quote marks.
@@ -55,9 +55,12 @@ class Parser:
         ValueError: Couldn't parse quoted string
         """
 
-        quoted_pattern = re.compile('''
+        quoted_pattern = re.compile(
+            """
             ' (?: [^'] | '' )* '
-            ''', re.X | re.S)
+            """,
+            re.X | re.S,
+        )
 
         match = quoted_pattern.match(string)
 
@@ -72,7 +75,7 @@ class Parser:
         statement."""
 
         while statement:
-            if statement == ');\n':
+            if statement == ");\n":
                 return True
             elif statement[0] == "'":
                 string, statement = self.parse_quoted_string(statement)
@@ -114,15 +117,18 @@ class Parser:
         "INSERT INTO foo (name)\nVALUES ('Foo');\n")
         """
 
-        if not line.startswith('INSERT '):
+        if not line.startswith("INSERT "):
             return (0, None), line
 
         if not self.is_complete_insert_statement(line):
             raise ValueError("Incomplete line")
 
-        insert_pattern = re.compile(r'''
+        insert_pattern = re.compile(
+            r"""
             ^INSERT \s+ INTO \s+ \S+ \s+ \([^)]+\) \s+ VALUES \s+ \((\d+)
-            ''', re.X)
+            """,
+            re.X,
+        )
         match = insert_pattern.match(line)
 
         if match:
@@ -135,9 +141,9 @@ class Parser:
 
         self.buffer += s
 
-        while '\n' in self.buffer:
-            line, self.buffer = self.buffer.split('\n', 1)
-            self.line += line + '\n'
+        while "\n" in self.buffer:
+            line, self.buffer = self.buffer.split("\n", 1)
+            self.line += line + "\n"
 
             try:
                 value, line = self.parse_line(self.line)
@@ -145,7 +151,7 @@ class Parser:
                 pass
             else:
                 self.lines.append((value, self.line[:-1]))
-                self.line = ''
+                self.line = ""
 
 
 def print_lines_sorted(file, lines):
@@ -192,12 +198,12 @@ def print_lines_sorted(file, lines):
     for line in lines:
         sort_value, string = line
 
-        if string == '':
+        if string == "":
             if block:
                 print_block(block)
                 block = []
 
-            file.write('\n')
+            file.write("\n")
         else:
             block.append(line)
 
diff --git a/lib/lp/services/database/sqlbase.py b/lib/lp/services/database/sqlbase.py
index ba90a83..085dc29 100644
--- a/lib/lp/services/database/sqlbase.py
+++ b/lib/lp/services/database/sqlbase.py
@@ -2,53 +2,48 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'block_implicit_flushes',
-    'clear_current_connection_cache',
-    'connect',
-    'convert_storm_clause_to_string',
-    'cursor',
-    'disconnect_stores',
-    'flush_database_caches',
-    'flush_database_updates',
-    'get_transaction_timestamp',
-    'ISOLATION_LEVEL_AUTOCOMMIT',
-    'ISOLATION_LEVEL_DEFAULT',
-    'ISOLATION_LEVEL_READ_COMMITTED',
-    'ISOLATION_LEVEL_REPEATABLE_READ',
-    'ISOLATION_LEVEL_SERIALIZABLE',
-    'quote',
-    'quoteIdentifier',
-    'quote_identifier',
-    'reset_store',
-    'session_store',
-    'SQLBase',
-    'sqlvalues',
-    'StupidCache',
-    ]
+    "block_implicit_flushes",
+    "clear_current_connection_cache",
+    "connect",
+    "convert_storm_clause_to_string",
+    "cursor",
+    "disconnect_stores",
+    "flush_database_caches",
+    "flush_database_updates",
+    "get_transaction_timestamp",
+    "ISOLATION_LEVEL_AUTOCOMMIT",
+    "ISOLATION_LEVEL_DEFAULT",
+    "ISOLATION_LEVEL_READ_COMMITTED",
+    "ISOLATION_LEVEL_REPEATABLE_READ",
+    "ISOLATION_LEVEL_SERIALIZABLE",
+    "quote",
+    "quoteIdentifier",
+    "quote_identifier",
+    "reset_store",
+    "session_store",
+    "SQLBase",
+    "sqlvalues",
+    "StupidCache",
+]
 
 
 from datetime import datetime
 
 import psycopg2
+import pytz
+import storm
+import transaction
 from psycopg2.extensions import (
     ISOLATION_LEVEL_AUTOCOMMIT,
     ISOLATION_LEVEL_READ_COMMITTED,
     ISOLATION_LEVEL_REPEATABLE_READ,
     ISOLATION_LEVEL_SERIALIZABLE,
-    )
-import pytz
-import storm
+)
 from storm.databases.postgres import compile as postgres_compile
-from storm.expr import (
-    compile as storm_compile,
-    State,
-    )
-from storm.locals import (
-    Store,
-    Storm,
-    )
+from storm.expr import State
+from storm.expr import compile as storm_compile
+from storm.locals import Store, Storm
 from storm.zope.interfaces import IZStorm
-import transaction
 from twisted.python.util import mergeFunctionMetadata
 from zope.component import getUtility
 from zope.interface import implementer
@@ -57,18 +52,17 @@ from zope.security.proxy import removeSecurityProxy
 from lp.services.config import dbconfig
 from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
+    MAIN_STORE,
     DisallowedStore,
     IMasterObject,
     IMasterStore,
     ISQLBase,
     IStore,
     IStoreSelector,
-    MAIN_STORE,
-    )
+)
 from lp.services.database.sqlobject import sqlrepr
 from lp.services.propertycache import clear_property_cache
 
-
 # Default we want for scripts, and the PostgreSQL default. Note psycopg1 will
 # use SERIALIZABLE unless we override, but psycopg2 will not.
 ISOLATION_LEVEL_DEFAULT = ISOLATION_LEVEL_READ_COMMITTED
@@ -80,7 +74,7 @@ ISOLATION_LEVEL_DEFAULT = ISOLATION_LEVEL_READ_COMMITTED
 # automatically quotes, which includes a few of our table names.  We
 # remove them here due to case mismatches between the DB and Launchpad
 # code.
-postgres_compile.remove_reserved_words(['language', 'section'])
+postgres_compile.remove_reserved_words(["language", "section"])
 
 
 class StupidCache:
@@ -143,7 +137,7 @@ class LaunchpadStyle(storm.sqlobject.SQLObjectStyle):
         return table
 
     def idForTable(self, table):
-        return 'id'
+        return "id"
 
     def pythonClassToAttr(self, className):
         return className.lower()
@@ -159,8 +153,8 @@ class LaunchpadStyle(storm.sqlobject.SQLObjectStyle):
 
 @implementer(ISQLBase)
 class SQLBase(storm.sqlobject.SQLObjectBase):
-    """Base class emulating SQLObject for legacy database classes.
-    """
+    """Base class emulating SQLObject for legacy database classes."""
+
     _style = LaunchpadStyle()
 
     # Silence warnings in linter script, which complains about all
@@ -197,9 +191,11 @@ class SQLBase(storm.sqlobject.SQLObjectBase):
             argument_store = Store.of(argument)
             if argument_store is not store:
                 new_argument = store.find(
-                    argument.__class__, id=argument.id).one()
-                assert new_argument is not None, (
-                    '%s not yet synced to this store' % repr(argument))
+                    argument.__class__, id=argument.id
+                ).one()
+                assert (
+                    new_argument is not None
+                ), "%s not yet synced to this store" % repr(argument)
                 kwargs[key] = new_argument
 
         store.add(self)
@@ -217,7 +213,7 @@ class SQLBase(storm.sqlobject.SQLObjectBase):
         # XXX jamesh 2008-05-09:
         # This matches the repr() output for the sqlos.SQLOS class.
         # A number of the doctests rely on this formatting.
-        return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
+        return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
 
     def destroySelf(self):
         my_master = IMasterObject(self)
@@ -283,16 +279,16 @@ class SQLBase(storm.sqlobject.SQLObjectBase):
 
 
 def clear_current_connection_cache():
-    """Clear SQLObject's object cache. SQLObject compatibility - DEPRECATED.
-    """
+    """Clear SQLObject's object cache. SQLObject compatibility - DEPRECATED."""
     _get_sqlobject_store().invalidate()
 
 
 def get_transaction_timestamp(store):
     """Get the timestamp for the current transaction on `store`."""
     timestamp = store.execute(
-        "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'").get_one()[0]
-    return timestamp.replace(tzinfo=pytz.timezone('UTC'))
+        "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
+    ).get_one()[0]
+    return timestamp.replace(tzinfo=pytz.timezone("UTC"))
 
 
 def quote(x):
@@ -360,7 +356,7 @@ def quote(x):
         # SQLObject can't cope with sets, so convert to a list, which it
         # /does/ know how to handle.
         x = list(x)
-    return sqlrepr(x, 'postgres')
+    return sqlrepr(x, "postgres")
 
 
 def sqlvalues(*values, **kwvalues):
@@ -404,7 +400,8 @@ def sqlvalues(*values, **kwvalues):
     """
     if (values and kwvalues) or (not values and not kwvalues):
         raise TypeError(
-            "Use either positional or keyword values with sqlvalue.")
+            "Use either positional or keyword values with sqlvalue."
+        )
     if values:
         return tuple(quote(item) for item in values)
     elif kwvalues:
@@ -476,7 +473,7 @@ def convert_storm_clause_to_string(storm_clause):
     clause = storm_compile(storm_clause, state)
     if len(state.parameters):
         parameters = [param.get(to_db=True) for param in state.parameters]
-        clause = clause.replace('?', '%s') % sqlvalues(*parameters)
+        clause = clause.replace("?", "%s") % sqlvalues(*parameters)
     return clause
 
 
@@ -539,6 +536,7 @@ def block_implicit_flushes(func):
             return func(*args, **kwargs)
         finally:
             store.unblock_implicit_flushes()
+
     return mergeFunctionMetadata(func, block_implicit_flushes_decorator)
 
 
@@ -550,6 +548,7 @@ def reset_store(func):
             return func(*args, **kwargs)
         finally:
             _get_sqlobject_store().reset()
+
     return mergeFunctionMetadata(func, reset_store_decorator)
 
 
@@ -575,6 +574,7 @@ def connect_string(user=None, dbname=None):
     # We must connect to the read-write DB here, so we use rw_main_primary
     # directly.
     from lp.services.database.postgresql import ConnectionString
+
     con_str = ConnectionString(dbconfig.rw_main_primary)
     if user is not None:
         con_str.user = user
@@ -626,7 +626,7 @@ class cursor:
 
 def session_store():
     """Return a store connected to the session DB."""
-    return getUtility(IZStorm).get('session', 'launchpad-session:')
+    return getUtility(IZStorm).get("session", "launchpad-session:")
 
 
 def disconnect_stores():
@@ -639,7 +639,8 @@ def disconnect_stores():
     """
     zstorm = getUtility(IZStorm)
     stores = [
-        store for name, store in zstorm.iterstores() if name != 'session']
+        store for name, store in zstorm.iterstores() if name != "session"
+    ]
 
     # If we have any stores, abort the transaction and close them.
     if stores:
diff --git a/lib/lp/services/database/sqlobject/__init__.py b/lib/lp/services/database/sqlobject/__init__.py
index 381c7d2..940ce94 100644
--- a/lib/lp/services/database/sqlobject/__init__.py
+++ b/lib/lp/services/database/sqlobject/__init__.py
@@ -9,25 +9,24 @@ import datetime
 from storm.expr import SQL
 from storm.sqlobject import *  # noqa: F401,F403
 
-
 _sqlStringReplace = [
-    ('\\', '\\\\'),
+    ("\\", "\\\\"),
     ("'", "''"),
-    ('\000', '\\0'),
-    ('\b', '\\b'),
-    ('\n', '\\n'),
-    ('\r', '\\r'),
-    ('\t', '\\t'),
-    ]
+    ("\000", "\\0"),
+    ("\b", "\\b"),
+    ("\n", "\\n"),
+    ("\r", "\\r"),
+    ("\t", "\\t"),
+]
 
 # XXX 2007-03-07 jamesh:
 # This is a cut down version of sqlobject's sqlrepr() method.  Ideally
 # we can get rid of this as code is converted to use store.execute().
 def sqlrepr(value, dbname=None):
-    assert dbname in [None, 'postgres']
-    if hasattr(value, '__sqlrepr__'):
+    assert dbname in [None, "postgres"]
+    if hasattr(value, "__sqlrepr__"):
         return value.__sqlrepr__(dbname)
-    elif hasattr(value, 'getquoted'):
+    elif hasattr(value, "getquoted"):
         return value.getquoted()
     elif isinstance(value, SQL):
         return value.expr
@@ -56,6 +55,9 @@ def sqlrepr(value, dbname=None):
         return value.strftime("'%Y-%m-%d'")
     elif isinstance(value, datetime.timedelta):
         return "INTERVAL '%d DAYS %d SECONDS %d MICROSECONDS'" % (
-            value.days, value.seconds, value.microseconds)
+            value.days,
+            value.seconds,
+            value.microseconds,
+        )
     else:
         raise AssertionError("Unhandled type: %r" % type(value))
diff --git a/lib/lp/services/database/stormbase.py b/lib/lp/services/database/stormbase.py
index 20ed118..2bd27a3 100644
--- a/lib/lp/services/database/stormbase.py
+++ b/lib/lp/services/database/stormbase.py
@@ -2,8 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'StormBase',
-    ]
+    "StormBase",
+]
 
 from storm.info import get_obj_info
 from storm.locals import Storm
diff --git a/lib/lp/services/database/stormexpr.py b/lib/lp/services/database/stormexpr.py
index c40171d..b8c9163 100644
--- a/lib/lp/services/database/stormexpr.py
+++ b/lib/lp/services/database/stormexpr.py
@@ -2,54 +2,51 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'AdvisoryUnlock',
-    'Array',
-    'ArrayAgg',
-    'ArrayContains',
-    'ArrayIntersects',
-    'BulkUpdate',
-    'ColumnSelect',
-    'Concatenate',
-    'CountDistinct',
-    'fti_search',
-    'Greatest',
-    'get_where_for_reference',
-    'IsDistinctFrom',
-    'IsFalse',
-    'IsTrue',
-    'JSONExtract',
-    'NullCount',
-    'NullsFirst',
-    'NullsLast',
-    'RegexpMatch',
-    'rank_by_fti',
-    'TryAdvisoryLock',
-    'Unnest',
-    'Values',
-    ]
+    "AdvisoryUnlock",
+    "Array",
+    "ArrayAgg",
+    "ArrayContains",
+    "ArrayIntersects",
+    "BulkUpdate",
+    "ColumnSelect",
+    "Concatenate",
+    "CountDistinct",
+    "fti_search",
+    "Greatest",
+    "get_where_for_reference",
+    "IsDistinctFrom",
+    "IsFalse",
+    "IsTrue",
+    "JSONExtract",
+    "NullCount",
+    "NullsFirst",
+    "NullsLast",
+    "RegexpMatch",
+    "rank_by_fti",
+    "TryAdvisoryLock",
+    "Unnest",
+    "Values",
+]
 
 from storm import Undef
 from storm.exceptions import ClassInfoError
 from storm.expr import (
-    BinaryOper,
     COLUMN_NAME,
+    EXPR,
+    SQL,
+    TABLE,
+    BinaryOper,
     ComparableExpr,
-    compile,
     CompoundOper,
-    EXPR,
     Expr,
     In,
     Like,
     NamedFunc,
     Or,
-    SQL,
     SuffixExpr,
-    TABLE,
-    )
-from storm.info import (
-    get_cls_info,
-    get_obj_info,
-    )
+    compile,
+)
+from storm.info import get_cls_info, get_obj_info
 
 
 class BulkUpdate(Expr):
@@ -73,8 +70,13 @@ def compile_bulkupdate(compile, update, state):
     col_values = [compile(val, state) for col, val in pairs]
     sets = ["%s=%s" % (col, val) for col, val in zip(col_names, col_values)]
     state.context = TABLE
-    tokens = ["UPDATE ", compile(update.table, state, token=True), " SET ",
-              ", ".join(sets), " FROM "]
+    tokens = [
+        "UPDATE ",
+        compile(update.table, state, token=True),
+        " SET ",
+        ", ".join(sets),
+        " FROM ",
+    ]
     state.context = EXPR
     # We don't want the values expression wrapped in parenthesis.
     state.precedence = 0
@@ -100,16 +102,20 @@ def compile_values(compile, expr, state):
     col_names, col_types = zip(*expr.cols)
     first_row = ", ".join(
         "%s::%s" % (compile(value, state), type)
-        for value, type in zip(expr.values[0], col_types))
+        for value, type in zip(expr.values[0], col_types)
+    )
     rows = [first_row] + [compile(value, state) for value in expr.values[1:]]
     return "(VALUES (%s)) AS %s(%s)" % (
-        "), (".join(rows), expr.name, ', '.join(col_names))
+        "), (".join(rows),
+        expr.name,
+        ", ".join(col_names),
+    )
 
 
 class ColumnSelect(Expr):
     # Wrap a select statement in braces so that it can be used as a column
     # expression in another query.
-    __slots__ = ("select")
+    __slots__ = "select"
 
     def __init__(self, select):
         self.select = select
@@ -136,7 +142,7 @@ class CountDistinct(Expr):
     # storm's Count() implementation is broken for distinct with > 1
     # column.
 
-    __slots__ = ("columns")
+    __slots__ = "columns"
 
     def __init__(self, columns):
         self.columns = columns
@@ -152,6 +158,7 @@ def compile_countdistinct(compile, countselect, state):
 
 class Concatenate(BinaryOper):
     """Storm operator for string concatenation."""
+
     __slots__ = ()
     oper = " || "
 
@@ -172,14 +179,14 @@ class TryAdvisoryLock(NamedFunc):
 
     __slots__ = ()
 
-    name = 'PG_TRY_ADVISORY_LOCK'
+    name = "PG_TRY_ADVISORY_LOCK"
 
 
 class AdvisoryUnlock(NamedFunc):
 
     __slots__ = ()
 
-    name = 'PG_ADVISORY_UNLOCK'
+    name = "PG_ADVISORY_UNLOCK"
 
 
 @compile.when(Array)
@@ -192,24 +199,28 @@ def compile_array(compile, array, state):
 
 class ArrayAgg(NamedFunc):
     """Aggregate values (within a GROUP BY) into an array."""
+
     __slots__ = ()
     name = "ARRAY_AGG"
 
 
 class Unnest(NamedFunc):
     """Expand an array to a set of rows."""
+
     __slots__ = ()
     name = "unnest"
 
 
 class ArrayContains(CompoundOper):
     """True iff the left side is a superset of the right side."""
+
     __slots__ = ()
     oper = "@>"
 
 
 class ArrayIntersects(CompoundOper):
     """True iff the arrays have at least one element in common."""
+
     __slots__ = ()
     oper = "&&"
 
@@ -220,6 +231,7 @@ class IsTrue(SuffixExpr):
     Unlike `expr` or `expr == True`, this returns `FALSE` when
     `expr IS NULL`.
     """
+
     __slots__ = ()
     suffix = "IS TRUE"
 
@@ -230,24 +242,28 @@ class IsFalse(SuffixExpr):
     Unlike `Not(expr)` or `expr == False`, this returns `FALSE` when
     `expr IS NULL`.
     """
+
     __slots__ = ()
     suffix = "IS FALSE"
 
 
 class IsDistinctFrom(CompoundOper):
     """True iff the left side is distinct from the right side."""
+
     __slots__ = ()
     oper = " IS DISTINCT FROM "
 
 
 class NullsFirst(SuffixExpr):
     """Order null values before non-null values."""
+
     __slots__ = ()
     suffix = "NULLS FIRST"
 
 
 class NullsLast(SuffixExpr):
     """Order null values after non-null values."""
+
     __slots__ = ()
     suffix = "NULLS LAST"
 
@@ -277,15 +293,21 @@ def get_where_for_reference(reference, other):
     more efficient for large collections of values.
     """
     relation = reference._relation
-    if isinstance(other, (list, set, tuple,)):
+    if isinstance(
+        other,
+        (
+            list,
+            set,
+            tuple,
+        ),
+    ):
         return _get_where_for_local_many(relation, other)
     else:
         return relation.get_where_for_local(other)
 
 
 def _remote_variables(relation, obj):
-    """A helper function to extract the foreign key values of an object.
-    """
+    """A helper function to extract the foreign key values of an object."""
     try:
         get_obj_info(obj)
     except ClassInfoError:
@@ -312,7 +334,8 @@ def _get_where_for_local_many(relation, others):
     if len(relation.local_key) == 1:
         return In(
             relation.local_key[0],
-            [_remote_variables(relation, value) for value in others])
+            [_remote_variables(relation, value) for value in others],
+        )
     else:
         return Or(*[relation.get_where_for_local(value) for value in others])
 
@@ -330,13 +353,17 @@ def fti_search(table, text, ftq=True):
     """An expression ensuring that table rows match the specified text."""
     table, query_fragment = determine_table_and_fragment(table, ftq)
     return SQL(
-        '%s.fti @@ %s' % (table.name, query_fragment), params=(text,),
-        tables=(table,))
+        "%s.fti @@ %s" % (table.name, query_fragment),
+        params=(text,),
+        tables=(table,),
+    )
 
 
 def rank_by_fti(table, text, ftq=True, desc=True):
     table, query_fragment = determine_table_and_fragment(table, ftq)
     return SQL(
-        '%sts_rank(%s.fti, %s)' % (
-            '-' if desc else '', table.name, query_fragment),
-        params=(text,), tables=(table,))
+        "%sts_rank(%s.fti, %s)"
+        % ("-" if desc else "", table.name, query_fragment),
+        params=(text,),
+        tables=(table,),
+    )
diff --git a/lib/lp/services/database/tests/enumeration.py b/lib/lp/services/database/tests/enumeration.py
index 8761b50..099ff80 100644
--- a/lib/lp/services/database/tests/enumeration.py
+++ b/lib/lp/services/database/tests/enumeration.py
@@ -4,34 +4,30 @@
 """Test enumerations used for enumcol doctests."""
 
 __all__ = [
-    'DBTestEnumeration',
-    'InheritedTestEnumeration',
-    'ExtendedTestEnumeration',
-    'UnrelatedTestEnumeration',
-    ]
+    "DBTestEnumeration",
+    "InheritedTestEnumeration",
+    "ExtendedTestEnumeration",
+    "UnrelatedTestEnumeration",
+]
 
 
-from lazr.enum import (
-    DBEnumeratedType,
-    DBItem,
-    use_template,
-    )
+from lazr.enum import DBEnumeratedType, DBItem, use_template
 
 
 class DBTestEnumeration(DBEnumeratedType):
-    VALUE1 = DBItem(1, 'Some value')
-    VALUE2 = DBItem(2, 'Some other value')
+    VALUE1 = DBItem(1, "Some value")
+    VALUE2 = DBItem(2, "Some other value")
 
 
 class InheritedTestEnumeration(DBTestEnumeration):
-    VALUE3 = DBItem(3, 'Yet another item')
+    VALUE3 = DBItem(3, "Yet another item")
 
 
 class ExtendedTestEnumeration(DBEnumeratedType):
     use_template(DBTestEnumeration)
-    VALUE3 = DBItem(3, 'Yet another item')
+    VALUE3 = DBItem(3, "Yet another item")
 
 
 class UnrelatedTestEnumeration(DBEnumeratedType):
-    VALUE1 = DBItem(1, 'Some value')
-    VALUE2 = DBItem(2, 'Some other value')
+    VALUE1 = DBItem(1, "Some value")
+    VALUE2 = DBItem(2, "Some other value")
diff --git a/lib/lp/services/database/tests/script_isolation.py b/lib/lp/services/database/tests/script_isolation.py
index fa46ffb..1440ce2 100644
--- a/lib/lp/services/database/tests/script_isolation.py
+++ b/lib/lp/services/database/tests/script_isolation.py
@@ -10,13 +10,9 @@ __all__ = []
 import transaction
 
 from lp.services.config import dbconfig
-from lp.services.database.sqlbase import (
-    cursor,
-    disconnect_stores,
-    )
+from lp.services.database.sqlbase import cursor, disconnect_stores
 from lp.services.scripts import execute_zcml_for_scripts
 
-
 execute_zcml_for_scripts()
 
 
@@ -34,10 +30,11 @@ def check():
     cur.execute("SHOW transaction_isolation")
     print(cur.fetchone()[0])
 
-dbconfig.override(dbuser='launchpad_main', isolation_level='read_committed')
+
+dbconfig.override(dbuser="launchpad_main", isolation_level="read_committed")
 disconnect_stores()
 check()
 
-dbconfig.override(isolation_level='repeatable_read')
+dbconfig.override(isolation_level="repeatable_read")
 disconnect_stores()
 check()
diff --git a/lib/lp/services/database/tests/test_bulk.py b/lib/lp/services/database/tests/test_bulk.py
index 78981ae..975250a 100644
--- a/lib/lp/services/database/tests/test_bulk.py
+++ b/lib/lp/services/database/tests/test_bulk.py
@@ -5,17 +5,14 @@
 
 import datetime
 
+import transaction
 from pytz import UTC
 from storm.exceptions import ClassInfoError
 from storm.expr import SQL
 from storm.info import get_obj_info
 from storm.store import Store
 from testtools.matchers import Equals
-import transaction
-from zope.security import (
-    checker,
-    proxy,
-    )
+from zope.security import checker, proxy
 
 from lp.bugs.enums import BugNotificationLevel
 from lp.bugs.model.bug import BugAffectsPerson
@@ -24,60 +21,47 @@ from lp.code.model.branchjob import (
     BranchJob,
     BranchJobType,
     ReclaimBranchSpaceJob,
-    )
+)
 from lp.code.model.branchsubscription import BranchSubscription
 from lp.registry.model.person import Person
 from lp.services.database import bulk
-from lp.services.database.interfaces import (
-    IMasterStore,
-    IStandbyStore,
-    IStore,
-    )
+from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
 from lp.services.database.sqlbase import (
     convert_storm_clause_to_string,
     get_transaction_timestamp,
-    )
-from lp.services.features.model import (
-    FeatureFlag,
-    getFeatureStore,
-    )
+)
+from lp.services.features.model import FeatureFlag, getFeatureStore
 from lp.services.job.model.job import Job
 from lp.soyuz.model.component import Component
-from lp.testing import (
-    StormStatementRecorder,
-    TestCase,
-    TestCaseWithFactory,
-    )
+from lp.testing import StormStatementRecorder, TestCase, TestCaseWithFactory
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import HasQueryCount
 
-
 object_is_key = lambda thing: thing
 
 
 class TestBasicFunctions(TestCase):
-
     def test_collate_empty_list(self):
         self.assertEqual([], list(bulk.collate([], object_is_key)))
 
     def test_collate_when_object_is_key(self):
-        self.assertEqual(
-            [(1, [1])],
-            list(bulk.collate([1], object_is_key)))
+        self.assertEqual([(1, [1])], list(bulk.collate([1], object_is_key)))
         self.assertEqual(
             [(1, [1]), (2, [2, 2])],
-            sorted(bulk.collate([1, 2, 2], object_is_key)))
+            sorted(bulk.collate([1, 2, 2], object_is_key)),
+        )
 
     def test_collate_with_key_function(self):
         self.assertEqual(
-            [(4, ['fred', 'joss']), (6, ['barney'])],
-            sorted(bulk.collate(['fred', 'barney', 'joss'], len)))
+            [(4, ["fred", "joss"]), (6, ["barney"])],
+            sorted(bulk.collate(["fred", "barney", "joss"], len)),
+        )
 
     def test_get_type(self):
         self.assertEqual(object, bulk.get_type(object()))
 
     def test_get_type_with_proxied_object(self):
-        proxied_object = proxy.Proxy('fred', checker.Checker({}))
+        proxied_object = proxy.Proxy("fred", checker.Checker({}))
         self.assertEqual(str, bulk.get_type(proxied_object))
 
 
@@ -100,8 +84,7 @@ class TestLoaders(TestCaseWithFactory):
     def test_gen_reload_queries_with_multiple_similar_objects(self):
         # gen_reload_queries() should generate a single query to load
         # multiple objects of the same type.
-        db_objects = {
-            self.factory.makeSourcePackageName() for i in range(5)}
+        db_objects = {self.factory.makeSourcePackageName() for i in range(5)}
         db_queries = list(bulk.gen_reload_queries(db_objects))
         self.assertEqual(1, len(db_queries))
         db_query = db_queries[0]
@@ -110,18 +93,15 @@ class TestLoaders(TestCaseWithFactory):
     def test_gen_reload_queries_with_mixed_objects(self):
         # gen_reload_queries() should return one query for each
         # distinct object type in the given objects.
-        db_objects = {
-            self.factory.makeSourcePackageName() for i in range(5)}
-        db_objects.update(
-            self.factory.makeComponent() for i in range(5))
+        db_objects = {self.factory.makeSourcePackageName() for i in range(5)}
+        db_objects.update(self.factory.makeComponent() for i in range(5))
         db_queries = list(bulk.gen_reload_queries(db_objects))
         self.assertEqual(2, len(db_queries))
         db_objects_loaded = set()
         for db_query in db_queries:
             objects = set(db_query)
             # None of these objects should have been loaded before.
-            self.assertEqual(
-                set(), objects.intersection(db_objects_loaded))
+            self.assertEqual(set(), objects.intersection(db_objects_loaded))
             db_objects_loaded.update(objects)
         self.assertEqual(db_objects, db_objects_loaded)
 
@@ -138,7 +118,7 @@ class TestLoaders(TestCaseWithFactory):
         db_objects = [
             IMasterStore(db_object).get(db_object_type, db_object.id),
             IStandbyStore(db_object).get(db_object_type, db_object.id),
-            ]
+        ]
         db_object_ids = {id(obj) for obj in db_objects}
         db_queries = list(bulk.gen_reload_queries(db_objects))
         self.assertEqual(2, len(db_queries))
@@ -147,24 +127,29 @@ class TestLoaders(TestCaseWithFactory):
             object_ids = {id(obj) for obj in db_query}
             # None of these objects should have been loaded before.
             self.assertEqual(
-                set(), object_ids.intersection(db_object_ids_loaded))
+                set(), object_ids.intersection(db_object_ids_loaded)
+            )
             db_object_ids_loaded.update(object_ids)
         self.assertEqual(db_object_ids, db_object_ids_loaded)
 
     def test_gen_reload_queries_with_non_Storm_objects(self):
         # gen_reload_queries() does not like non-Storm objects.
         self.assertRaises(
-            ClassInfoError, list, bulk.gen_reload_queries(['bogus']))
+            ClassInfoError, list, bulk.gen_reload_queries(["bogus"])
+        )
 
     def test_gen_reload_queries_with_compound_primary_keys(self):
         # gen_reload_queries() does not like compound primary keys.
         bap = BugAffectsPerson(
-            bug=self.factory.makeBug(), person=self.factory.makePerson())
+            bug=self.factory.makeBug(), person=self.factory.makePerson()
+        )
         db_queries = bulk.gen_reload_queries([bap])
         self.assertRaisesWithContent(
             AssertionError,
-            'Compound primary keys are not supported: BugAffectsPerson.',
-            list, db_queries)
+            "Compound primary keys are not supported: BugAffectsPerson.",
+            list,
+            db_queries,
+        )
 
     def test_reload(self):
         # reload() loads the given objects using queries generated by
@@ -173,11 +158,11 @@ class TestLoaders(TestCaseWithFactory):
         db_object_naked = proxy.removeSecurityProxy(db_object)
         db_object_info = get_obj_info(db_object_naked)
         IStore(db_object).flush()
-        self.assertIsNone(db_object_info.get('invalidated'))
+        self.assertIsNone(db_object_info.get("invalidated"))
         IStore(db_object).invalidate(db_object)
-        self.assertEqual(True, db_object_info.get('invalidated'))
+        self.assertEqual(True, db_object_info.get("invalidated"))
         bulk.reload([db_object])
-        self.assertIsNone(db_object_info.get('invalidated'))
+        self.assertIsNone(db_object_info.get("invalidated"))
 
     def test__make_compound_load_clause(self):
         # The query constructed by _make_compound_load_clause has the
@@ -187,9 +172,15 @@ class TestLoaders(TestCaseWithFactory):
             # test.
             (FeatureFlag.scope, FeatureFlag.priority, FeatureFlag.flag),
             sorted(
-                [('foo', 0, 'bar'), ('foo', 0, 'baz'),
-                 ('foo', 1, 'bar'), ('foo', 1, 'quux'),
-                 ('bar', 0, 'foo')]))
+                [
+                    ("foo", 0, "bar"),
+                    ("foo", 0, "baz"),
+                    ("foo", 1, "bar"),
+                    ("foo", 1, "quux"),
+                    ("bar", 0, "foo"),
+                ]
+            ),
+        )
         self.assertEqual(
             "FeatureFlag.scope = E'bar' AND ("
             "FeatureFlag.priority = 0 AND FeatureFlag.flag IN (E'foo')) OR "
@@ -198,37 +189,38 @@ class TestLoaders(TestCaseWithFactory):
             "FeatureFlag.flag IN (E'bar', E'baz') OR "
             "FeatureFlag.priority = 1 AND "
             "FeatureFlag.flag IN (E'bar', E'quux'))",
-            convert_storm_clause_to_string(clause))
+            convert_storm_clause_to_string(clause),
+        )
 
     def test_load(self):
         # load() loads objects of the given type by their primary keys.
         db_objects = [
             self.factory.makeComponent(),
             self.factory.makeComponent(),
-            ]
+        ]
         db_object_ids = [db_object.id for db_object in db_objects]
         self.assertEqual(
-            set(bulk.load(Component, db_object_ids)),
-            set(db_objects))
+            set(bulk.load(Component, db_object_ids)), set(db_objects)
+        )
 
     def test_load_with_non_Storm_objects(self):
         # load() does not like non-Storm objects.
-        self.assertRaises(
-            ClassInfoError, bulk.load, str, [])
+        self.assertRaises(ClassInfoError, bulk.load, str, [])
 
     def test_load_with_compound_primary_keys(self):
         # load() can load objects with compound primary keys.
         flags = [
-            FeatureFlag('foo', 0, 'bar', 'true'),
-            FeatureFlag('foo', 0, 'baz', 'false'),
-            ]
-        other_flag = FeatureFlag('notfoo', 0, 'notbar', 'true')
+            FeatureFlag("foo", 0, "bar", "true"),
+            FeatureFlag("foo", 0, "baz", "false"),
+        ]
+        other_flag = FeatureFlag("notfoo", 0, "notbar", "true")
         for flag in flags + [other_flag]:
             getFeatureStore().add(flag)
 
         self.assertContentEqual(
             flags,
-            bulk.load(FeatureFlag, [(ff.scope, ff.flag) for ff in flags]))
+            bulk.load(FeatureFlag, [(ff.scope, ff.flag) for ff in flags]),
+        )
 
     def test_load_with_store(self):
         # load() can use an alternative store.
@@ -239,36 +231,45 @@ class TestLoaders(TestCaseWithFactory):
         # Master store.
         master_store = IMasterStore(db_object)
         [db_object_from_master] = bulk.load(
-            Component, [db_object.id], store=master_store)
-        self.assertEqual(
-            Store.of(db_object_from_master), master_store)
+            Component, [db_object.id], store=master_store
+        )
+        self.assertEqual(Store.of(db_object_from_master), master_store)
         # Standby store.
         standby_store = IStandbyStore(db_object)
         [db_object_from_standby] = bulk.load(
-            Component, [db_object.id], store=standby_store)
-        self.assertEqual(
-            Store.of(db_object_from_standby), standby_store)
+            Component, [db_object.id], store=standby_store
+        )
+        self.assertEqual(Store.of(db_object_from_standby), standby_store)
 
     def test_load_related(self):
         owning_objects = [
             self.factory.makeBug(),
             self.factory.makeBug(),
-            ]
+        ]
         expected = {bug.owner for bug in owning_objects}
-        self.assertEqual(expected,
-            set(bulk.load_related(Person, owning_objects, ['ownerID'])))
+        self.assertEqual(
+            expected,
+            set(bulk.load_related(Person, owning_objects, ["ownerID"])),
+        )
 
     def test_load_referencing(self):
         owned_objects = [
             self.factory.makeBranch(),
             self.factory.makeBranch(),
-            ]
-        expected = set(list(owned_objects[0].subscriptions) +
-            list(owned_objects[1].subscriptions))
+        ]
+        expected = set(
+            list(owned_objects[0].subscriptions)
+            + list(owned_objects[1].subscriptions)
+        )
         self.assertNotEqual(0, len(expected))
-        self.assertEqual(expected,
-            set(bulk.load_referencing(BranchSubscription, owned_objects,
-                ['branch_id'])))
+        self.assertEqual(
+            expected,
+            set(
+                bulk.load_referencing(
+                    BranchSubscription, owned_objects, ["branch_id"]
+                )
+            ),
+        )
 
 
 class TestCreate(TestCaseWithFactory):
@@ -281,22 +282,43 @@ class TestCreate(TestCaseWithFactory):
         people = [self.factory.makePerson() for i in range(5)]
 
         wanted = [
-            (bug, person, person, datetime.datetime.now(UTC),
-             BugNotificationLevel.LIFECYCLE)
-            for person in people]
+            (
+                bug,
+                person,
+                person,
+                datetime.datetime.now(UTC),
+                BugNotificationLevel.LIFECYCLE,
+            )
+            for person in people
+        ]
 
         with StormStatementRecorder() as recorder:
             subs = bulk.create(
-                (BugSubscription.bug, BugSubscription.person,
-                 BugSubscription.subscribed_by, BugSubscription.date_created,
-                 BugSubscription.bug_notification_level),
-                wanted, get_objects=True)
+                (
+                    BugSubscription.bug,
+                    BugSubscription.person,
+                    BugSubscription.subscribed_by,
+                    BugSubscription.date_created,
+                    BugSubscription.bug_notification_level,
+                ),
+                wanted,
+                get_objects=True,
+            )
 
         self.assertThat(recorder, HasQueryCount(Equals(2)))
         self.assertContentEqual(
             wanted,
-            ((sub.bug, sub.person, sub.subscribed_by, sub.date_created,
-              sub.bug_notification_level) for sub in subs))
+            (
+                (
+                    sub.bug,
+                    sub.person,
+                    sub.subscribed_by,
+                    sub.date_created,
+                    sub.bug_notification_level,
+                )
+                for sub in subs
+            ),
+        )
 
     def test_null_reference(self):
         # create() handles None as a Reference value.
@@ -304,29 +326,38 @@ class TestCreate(TestCaseWithFactory):
         wanted = [(None, job, BranchJobType.RECLAIM_BRANCH_SPACE)]
         [branchjob] = bulk.create(
             (BranchJob.branch, BranchJob.job, BranchJob.job_type),
-            wanted, get_objects=True)
+            wanted,
+            get_objects=True,
+        )
         self.assertEqual(
-            wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
+            wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)]
+        )
 
     def test_fails_on_multiple_classes(self):
         # create() only inserts into columns on a single class.
         self.assertRaises(
             ValueError,
-            bulk.create, (BugSubscription.bug, BranchSubscription.branch), [])
+            bulk.create,
+            (BugSubscription.bug, BranchSubscription.branch),
+            [],
+        )
 
     def test_fails_on_reference_mismatch(self):
         # create() handles Reference columns in a typesafe manner.
         self.assertRaisesWithContent(
-            RuntimeError, "Property used in an unknown class",
-            bulk.create, (BugSubscription.bug,),
-            [[self.factory.makeBranch()]])
+            RuntimeError,
+            "Property used in an unknown class",
+            bulk.create,
+            (BugSubscription.bug,),
+            [[self.factory.makeBranch()]],
+        )
 
     def test_zero_values_is_noop(self):
         # create()ing 0 rows is a no-op.
         with StormStatementRecorder() as recorder:
             self.assertEqual(
-                [],
-                bulk.create((BugSubscription.bug,), [], get_objects=True))
+                [], bulk.create((BugSubscription.bug,), [], get_objects=True)
+            )
         self.assertThat(recorder, HasQueryCount(Equals(0)))
 
     def test_can_return_ids(self):
@@ -337,7 +368,9 @@ class TestCreate(TestCaseWithFactory):
         with StormStatementRecorder() as recorder:
             [created_id] = bulk.create(
                 (BranchJob.branch, BranchJob.job, BranchJob.job_type),
-                wanted, get_primary_keys=True)
+                wanted,
+                get_primary_keys=True,
+            )
         self.assertThat(recorder, HasQueryCount(Equals(1)))
         [reclaimjob] = ReclaimBranchSpaceJob.iterReady()
         self.assertEqual(created_id, reclaimjob.context.id)
@@ -352,12 +385,16 @@ class TestCreate(TestCaseWithFactory):
                 None,
                 bulk.create(
                     (BranchJob.branch, BranchJob.job, BranchJob.job_type),
-                    wanted, get_objects=False))
+                    wanted,
+                    get_objects=False,
+                ),
+            )
         self.assertThat(recorder, HasQueryCount(Equals(1)))
         [reclaimjob] = ReclaimBranchSpaceJob.iterReady()
         branchjob = reclaimjob.context
         self.assertEqual(
-            wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
+            wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)]
+        )
 
     def test_sql_passed_through(self):
         # create() passes SQL() expressions through untouched.
@@ -365,12 +402,25 @@ class TestCreate(TestCaseWithFactory):
         person = self.factory.makePerson()
 
         [sub] = bulk.create(
-            (BugSubscription.bug, BugSubscription.person,
-             BugSubscription.subscribed_by, BugSubscription.date_created,
-             BugSubscription.bug_notification_level),
-            [(bug, person, person,
-              SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"),
-              BugNotificationLevel.LIFECYCLE)], get_objects=True)
+            (
+                BugSubscription.bug,
+                BugSubscription.person,
+                BugSubscription.subscribed_by,
+                BugSubscription.date_created,
+                BugSubscription.bug_notification_level,
+            ),
+            [
+                (
+                    bug,
+                    person,
+                    person,
+                    SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"),
+                    BugNotificationLevel.LIFECYCLE,
+                )
+            ],
+            get_objects=True,
+        )
         self.assertEqual(
             get_transaction_timestamp(IStore(BugSubscription)),
-            sub.date_created)
+            sub.date_created,
+        )
diff --git a/lib/lp/services/database/tests/test_collection.py b/lib/lp/services/database/tests/test_collection.py
index 5be65aa..35f967b 100644
--- a/lib/lp/services/database/tests/test_collection.py
+++ b/lib/lp/services/database/tests/test_collection.py
@@ -26,14 +26,18 @@ def make_table(range_start, range_end, table_name=None):
     assert range_start < range_end, "Invalid range."
     if table_name is None:
         table_name = "TestTable"
-    IStore(Person).execute("""
+    IStore(Person).execute(
+        """
        CREATE TEMP TABLE %s AS
        SELECT generate_series AS id
        FROM generate_series(%d, %d)
-       """ % (table_name, range_start, range_end - 1))
+       """
+        % (table_name, range_start, range_end - 1)
+    )
 
     class TestTable(StormBase):
         """A test class/table generated on the fly for testing purposes."""
+
         __storm_table__ = table_name
         id = Int(primary=True)
 
@@ -79,7 +83,8 @@ class CollectionTest(TestCaseWithFactory):
     def test_select_conditions(self):
         TestTable = make_table(1, 5)
         collection = Collection(
-            TestTable.id > 2, TestTable.id < 4, tables=TestTable)
+            TestTable.id > 2, TestTable.id < 4, tables=TestTable
+        )
         result = collection.select(TestTable)
         self.assertContentEqual([3], get_ids(result))
 
@@ -105,63 +110,65 @@ class CollectionTest(TestCaseWithFactory):
 
     def test_add_tables(self):
         # The list of tables to select from carries across copies.
-        TestTable1 = make_table(1, 2, 'TestTable1')
-        TestTable2 = make_table(2, 3, 'TestTable2')
+        TestTable1 = make_table(1, 2, "TestTable1")
+        TestTable2 = make_table(2, 3, "TestTable2")
         collection = Collection(tables=TestTable1)
         collection = Collection(collection, tables=TestTable2)
         result = collection.select(TestTable1.id, TestTable2.id)
         self.assertEqual([(1, 2)], list(result))
 
     def test_add_tables_and_conditions(self):
-        TestTable1 = make_table(1, 2, 'TestTable1')
-        TestTable2 = make_table(2, 3, 'TestTable2')
+        TestTable1 = make_table(1, 2, "TestTable1")
+        TestTable2 = make_table(2, 3, "TestTable2")
         collection = Collection(TestTable1.id == 1, tables=TestTable1)
         collection = Collection(
-            collection, TestTable2.id == 2, tables=TestTable2)
+            collection, TestTable2.id == 2, tables=TestTable2
+        )
         result = collection.select(TestTable1.id, TestTable2.id)
         self.assertEqual([(1, 2)], list(result))
 
     def test_select_join(self):
-        TestTable1 = make_table(1, 2, 'TestTable1')
-        TestTable2 = make_table(2, 3, 'TestTable2')
+        TestTable1 = make_table(1, 2, "TestTable1")
+        TestTable2 = make_table(2, 3, "TestTable2")
         collection = Collection(tables=(TestTable1, TestTable2))
         result = collection.select(TestTable1, TestTable2)
-        self.assertEqual(
-            [(TestTable1(id=1), TestTable2(id=2))], list(result))
+        self.assertEqual([(TestTable1(id=1), TestTable2(id=2))], list(result))
 
     def test_select_join_column(self):
-        TestTable1 = make_table(1, 2, 'TestTable1')
-        TestTable2 = make_table(2, 3, 'TestTable2')
+        TestTable1 = make_table(1, 2, "TestTable1")
+        TestTable2 = make_table(2, 3, "TestTable2")
         collection = Collection(tables=(TestTable1, TestTable2))
         result = collection.select(TestTable1.id, TestTable2.id)
         self.assertEqual([(1, 2)], list(result))
 
     def test_select_partial_join(self):
-        TestTable1 = make_table(1, 2, 'TestTable1')
-        TestTable2 = make_table(2, 3, 'TestTable2')
+        TestTable1 = make_table(1, 2, "TestTable1")
+        TestTable2 = make_table(2, 3, "TestTable2")
         collection = Collection(
-            TestTable2.id == TestTable1.id + 1,
-            tables=(TestTable1, TestTable2))
+            TestTable2.id == TestTable1.id + 1, tables=(TestTable1, TestTable2)
+        )
         result = collection.select(TestTable1.id)
         self.assertEqual([1], list(result))
 
     def test_joinInner(self):
-        TestTable1 = make_table(1, 3, 'TestTable1')
-        TestTable2 = make_table(2, 4, 'TestTable2')
+        TestTable1 = make_table(1, 3, "TestTable1")
+        TestTable2 = make_table(2, 4, "TestTable2")
 
         # Add a join table to the collection.
         collection = Collection(tables=TestTable1).joinInner(
-            TestTable2, TestTable2.id == TestTable1.id)
+            TestTable2, TestTable2.id == TestTable1.id
+        )
         result = collection.select(TestTable1.id, TestTable2.id)
         self.assertContentEqual([(2, 2)], list(result))
 
     def test_joinOuter(self):
-        TestTable1 = make_table(1, 3, 'TestTable1')
-        TestTable2 = make_table(2, 4, 'TestTable2')
+        TestTable1 = make_table(1, 3, "TestTable1")
+        TestTable2 = make_table(2, 4, "TestTable2")
 
         # Add an outer-join table to the collection.
         collection = Collection(tables=TestTable1).joinOuter(
-            TestTable2, TestTable2.id == TestTable1.id)
+            TestTable2, TestTable2.id == TestTable1.id
+        )
         result = collection.select(TestTable1.id, TestTable2.id)
         self.assertContentEqual([(1, None), (2, 2)], list(result))
 
diff --git a/lib/lp/services/database/tests/test_connectionstring.py b/lib/lp/services/database/tests/test_connectionstring.py
index 1af7205..0d4735e 100644
--- a/lib/lp/services/database/tests/test_connectionstring.py
+++ b/lib/lp/services/database/tests/test_connectionstring.py
@@ -6,43 +6,45 @@ from lp.testing import TestCase
 
 
 class TestConnectionString(TestCase):
-
     def test_relevant_fields_parsed(self):
-        s = ('dbname=dbname user=user host=host port=port '
-             'connect_timeout=timeout sslmode=mode')
+        s = (
+            "dbname=dbname user=user host=host port=port "
+            "connect_timeout=timeout sslmode=mode"
+        )
         cs = ConnectionString(s)
-        self.assertEqual('dbname', cs.dbname)
-        self.assertEqual('user', cs.user)
-        self.assertEqual('host', cs.host)
-        self.assertEqual('port', cs.port)
-        self.assertEqual('timeout', cs.connect_timeout)
-        self.assertEqual('mode', cs.sslmode)
+        self.assertEqual("dbname", cs.dbname)
+        self.assertEqual("user", cs.user)
+        self.assertEqual("host", cs.host)
+        self.assertEqual("port", cs.port)
+        self.assertEqual("timeout", cs.connect_timeout)
+        self.assertEqual("mode", cs.sslmode)
 
         # and check that str/repr have the same keys and values.
         self.assertContentEqual(s.split(), str(cs).split())
         self.assertContentEqual(s.split(), repr(cs).split())
 
     def test_hyphens_in_values(self):
-        cs = ConnectionString('user=foo-bar host=foo.bar-baz.quux')
-        self.assertEqual('foo-bar', cs.user)
-        self.assertEqual('foo.bar-baz.quux', cs.host)
+        cs = ConnectionString("user=foo-bar host=foo.bar-baz.quux")
+        self.assertEqual("foo-bar", cs.user)
+        self.assertEqual("foo.bar-baz.quux", cs.host)
 
     def test_str_with_changes(self):
-        initial = 'dbname=foo host=bar'
-        expected = 'dbname=foo user=baz host=blah'
+        initial = "dbname=foo host=bar"
+        expected = "dbname=foo user=baz host=blah"
         cs = ConnectionString(initial)
-        cs.host = 'blah'
-        cs.user = 'baz'
+        cs.host = "blah"
+        cs.user = "baz"
         self.assertEqual(expected, str(cs))
 
     def test_rejects_quoted_strings(self):
         self.assertRaises(
-            AssertionError, ConnectionString, "dbname='quoted string'")
+            AssertionError, ConnectionString, "dbname='quoted string'"
+        )
 
     def test_equality(self):
-        cs1 = ConnectionString('dbname=foo host=bar')
-        cs2 = ConnectionString('dbname=foo host=bar')
-        cs3 = ConnectionString('dbname=foo host=baz')
+        cs1 = ConnectionString("dbname=foo host=bar")
+        cs2 = ConnectionString("dbname=foo host=bar")
+        cs3 = ConnectionString("dbname=foo host=baz")
         self.assertEqual(cs1, cs2)
         self.assertNotEqual(cs1, cs3)
         self.assertNotEqual(cs2, cs3)
diff --git a/lib/lp/services/database/tests/test_decoratedresultset.py b/lib/lp/services/database/tests/test_decoratedresultset.py
index 0b26ba7..a2494cc 100644
--- a/lib/lp/services/database/tests/test_decoratedresultset.py
+++ b/lib/lp/services/database/tests/test_decoratedresultset.py
@@ -8,23 +8,21 @@ __all__ = []
 import unittest
 
 from lp.testing.layers import DatabaseFunctionalLayer
-from lp.testing.systemdocs import (
-    LayeredDocFileSuite,
-    setUp,
-    tearDown,
-    )
+from lp.testing.systemdocs import LayeredDocFileSuite, setUp, tearDown
 
 
 def test_suite():
     suite = unittest.TestSuite()
 
     test = LayeredDocFileSuite(
-        'decoratedresultset.rst',
-        setUp=setUp, tearDown=tearDown,
-        layer=DatabaseFunctionalLayer)
+        "decoratedresultset.rst",
+        setUp=setUp,
+        tearDown=tearDown,
+        layer=DatabaseFunctionalLayer,
+    )
     suite.addTest(test)
     return suite
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/lib/lp/services/database/tests/test_doc.py b/lib/lp/services/database/tests/test_doc.py
index bb85450..0bee367 100644
--- a/lib/lp/services/database/tests/test_doc.py
+++ b/lib/lp/services/database/tests/test_doc.py
@@ -10,7 +10,6 @@ import os
 from lp.services.testing import build_test_suite
 from lp.testing.layers import DatabaseFunctionalLayer
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 
diff --git a/lib/lp/services/database/tests/test_indexes.py b/lib/lp/services/database/tests/test_indexes.py
index 086bb8e..015b27d 100644
--- a/lib/lp/services/database/tests/test_indexes.py
+++ b/lib/lp/services/database/tests/test_indexes.py
@@ -3,15 +3,9 @@
 
 """Test database index correctness."""
 
-from testscenarios import (
-    load_tests_apply_scenarios,
-    WithScenarios,
-    )
-
-from lp.services.database.postgresql import (
-    listIndexes,
-    listReferences,
-    )
+from testscenarios import WithScenarios, load_tests_apply_scenarios
+
+from lp.services.database.postgresql import listIndexes, listReferences
 from lp.services.database.sqlbase import cursor
 from lp.testing import TestCase
 from lp.testing.layers import ZopelessDatabaseLayer
@@ -28,13 +22,14 @@ class TestIndexedReferences(WithScenarios, TestCase):
     scenarios = [
         ("Archive", {"table": "archive", "column": "id"}),
         ("Job", {"table": "job", "column": "id"}),
-        ]
+    ]
 
     def test_references_are_indexed(self):
         cur = cursor()
         self.addCleanup(cur.close)
         references = list(
-            listReferences(cur, self.table, self.column, indirect=False))
+            listReferences(cur, self.table, self.column, indirect=False)
+        )
         missing = []
         for src_tab, src_col, _, _, _, _ in references:
             for index in listIndexes(cur, src_tab, src_col):
diff --git a/lib/lp/services/database/tests/test_isolation.py b/lib/lp/services/database/tests/test_isolation.py
index add0786..629f450 100644
--- a/lib/lp/services/database/tests/test_isolation.py
+++ b/lib/lp/services/database/tests/test_isolation.py
@@ -3,9 +3,9 @@
 
 """Tests of the isolation module."""
 
+import transaction
 from psycopg2.extensions import TRANSACTION_STATUS_IDLE
 from storm.zope.interfaces import IZStorm
-import transaction
 from zope.component import getUtility
 
 from lp.services.database import isolation
@@ -25,7 +25,7 @@ class TestIsolation(TestCase):
         # for these tests, so execute a query in every store; one of them will
         # have a transactional state.
         for store in stores:
-            store.execute('SELECT 1')
+            store.execute("SELECT 1")
 
     def test_gen_store_statuses(self):
         # All stores are either disconnected or idle when all
@@ -38,8 +38,11 @@ class TestIsolation(TestCase):
         # begun.
         self.createTransaction()
         self.assertTrue(
-            any(status not in (None, TRANSACTION_STATUS_IDLE)
-                for _, status in isolation.gen_store_statuses()))
+            any(
+                status not in (None, TRANSACTION_STATUS_IDLE)
+                for _, status in isolation.gen_store_statuses()
+            )
+        )
 
     def test_is_transaction_in_progress(self):
         # is_transaction_in_progress() returns False when all
@@ -60,8 +63,8 @@ class TestIsolation(TestCase):
         # transaction has begun.
         self.createTransaction()
         self.assertRaises(
-            isolation.TransactionInProgress,
-            isolation.check_no_transaction)
+            isolation.TransactionInProgress, isolation.check_no_transaction
+        )
 
     def test_ensure_no_transaction(self):
         # ensure_no_transaction() is a decorator that raises
@@ -70,12 +73,13 @@ class TestIsolation(TestCase):
         @isolation.ensure_no_transaction
         def echo(*args, **kwargs):
             return args, kwargs
+
         # echo() will just return the given args no transaction is in
         # progress.
         transaction.abort()
         self.assertEqual(
-            ((1, 2, 3), {'a': 4, 'b': 5, 'c': 6}),
-            echo(1, 2, 3, a=4, b=5, c=6))
+            ((1, 2, 3), {"a": 4, "b": 5, "c": 6}), echo(1, 2, 3, a=4, b=5, c=6)
+        )
         # echo() will break with TransactionInProgress when a
         # transaction has begun.
         self.createTransaction()
diff --git a/lib/lp/services/database/tests/test_isolation_changes.py b/lib/lp/services/database/tests/test_isolation_changes.py
index 6cfefdb..cddd36c 100644
--- a/lib/lp/services/database/tests/test_isolation_changes.py
+++ b/lib/lp/services/database/tests/test_isolation_changes.py
@@ -6,24 +6,20 @@
 __all__ = []
 
 import os.path
-from subprocess import (
-    PIPE,
-    Popen,
-    STDOUT,
-    )
 import sys
-from textwrap import dedent
 import unittest
+from subprocess import PIPE, STDOUT, Popen
+from textwrap import dedent
 
 import transaction
 
 from lp.services.config import dbconfig
 from lp.services.database.sqlbase import (
+    ISOLATION_LEVEL_SERIALIZABLE,
     connect,
     cursor,
     disconnect_stores,
-    ISOLATION_LEVEL_SERIALIZABLE,
-    )
+)
 from lp.testing.layers import LaunchpadZopelessLayer
 
 
@@ -45,98 +41,108 @@ class TestIsolation(unittest.TestCase):
         return cur.fetchone()[0]
 
     def test_default(self):
-        self.assertEqual(self.getCurrentIsolation(), 'read committed')
+        self.assertEqual(self.getCurrentIsolation(), "read committed")
 
     def test_autocommit(self):
-        set_isolation_level('autocommit')
+        set_isolation_level("autocommit")
         # There is no actual 'autocommit' mode in PostgreSQL. psycopg
         # implements this feature by using read committed isolation and
         # issuing commit() statements after every query.
-        self.assertEqual(self.getCurrentIsolation(), 'read committed')
+        self.assertEqual(self.getCurrentIsolation(), "read committed")
 
         # So we need to confirm we are actually in autocommit mode
         # by seeing if we an roll back
         cur = cursor()
         cur.execute(
-            "SELECT COUNT(*) FROM Person WHERE homepage_content IS NULL")
+            "SELECT COUNT(*) FROM Person WHERE homepage_content IS NULL"
+        )
         self.assertNotEqual(cur.fetchone()[0], 0)
         cur.execute("UPDATE Person SET homepage_content=NULL")
         transaction.abort()
         cur = cursor()
         cur.execute(
-            "SELECT COUNT(*) FROM Person WHERE homepage_content IS NOT NULL")
+            "SELECT COUNT(*) FROM Person WHERE homepage_content IS NOT NULL"
+        )
         self.assertEqual(cur.fetchone()[0], 0)
 
     def test_readCommitted(self):
-        set_isolation_level('read_committed')
-        self.assertEqual(self.getCurrentIsolation(), 'read committed')
+        set_isolation_level("read_committed")
+        self.assertEqual(self.getCurrentIsolation(), "read committed")
 
     def test_repeatableRead(self):
-        set_isolation_level('repeatable_read')
-        self.assertEqual(self.getCurrentIsolation(), 'repeatable read')
+        set_isolation_level("repeatable_read")
+        self.assertEqual(self.getCurrentIsolation(), "repeatable read")
 
     def test_serializable(self):
-        set_isolation_level('serializable')
-        self.assertEqual(self.getCurrentIsolation(), 'serializable')
+        set_isolation_level("serializable")
+        self.assertEqual(self.getCurrentIsolation(), "serializable")
 
     def test_commit(self):
         # Change the isolation level
-        self.assertEqual(self.getCurrentIsolation(), 'read committed')
-        set_isolation_level('serializable')
-        self.assertEqual(self.getCurrentIsolation(), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(), "read committed")
+        set_isolation_level("serializable")
+        self.assertEqual(self.getCurrentIsolation(), "serializable")
 
         cur = cursor()
         cur.execute("UPDATE Person SET homepage_content=NULL")
         transaction.commit()
         cur.execute("UPDATE Person SET homepage_content='foo'")
-        self.assertEqual(self.getCurrentIsolation(), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(), "serializable")
 
     def test_rollback(self):
         # Change the isolation level
-        self.assertEqual(self.getCurrentIsolation(), 'read committed')
-        set_isolation_level('serializable')
-        self.assertEqual(self.getCurrentIsolation(), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(), "read committed")
+        set_isolation_level("serializable")
+        self.assertEqual(self.getCurrentIsolation(), "serializable")
 
         cur = cursor()
         cur.execute("UPDATE Person SET homepage_content=NULL")
         transaction.abort()
-        self.assertEqual(self.getCurrentIsolation(), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(), "serializable")
 
     def test_script(self):
         # Ensure that things work in stand alone scripts too, in case out
         # test infrustructure is faking something.
-        script = os.path.join(
-                os.path.dirname(__file__), 'script_isolation.py')
+        script = os.path.join(os.path.dirname(__file__), "script_isolation.py")
         cmd = [sys.executable, script]
         process = Popen(
-            cmd, stdout=PIPE, stderr=STDOUT, stdin=PIPE,
-            universal_newlines=True)
+            cmd,
+            stdout=PIPE,
+            stderr=STDOUT,
+            stdin=PIPE,
+            universal_newlines=True,
+        )
         (script_output, _empty) = process.communicate()
-        self.assertEqual(process.returncode, 0, 'Error: ' + script_output)
-        self.assertEqual(script_output, dedent("""\
+        self.assertEqual(process.returncode, 0, "Error: " + script_output)
+        self.assertEqual(
+            script_output,
+            dedent(
+                """\
                 read committed
                 read committed
                 repeatable read
                 repeatable read
-                """))
+                """
+            ),
+        )
 
     def test_connect(self):
         # Ensure connect() method returns a connection with the correct
         # default isolation
         con = connect()
-        self.assertEqual(self.getCurrentIsolation(con), 'read committed')
+        self.assertEqual(self.getCurrentIsolation(con), "read committed")
         con.rollback()
-        self.assertEqual(self.getCurrentIsolation(con), 'read committed')
+        self.assertEqual(self.getCurrentIsolation(con), "read committed")
 
         # Ensure that changing the isolation sticks.
         con = connect(isolation=ISOLATION_LEVEL_SERIALIZABLE)
-        self.assertEqual(self.getCurrentIsolation(con), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(con), "serializable")
         con.rollback()
-        self.assertEqual(self.getCurrentIsolation(con), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(con), "serializable")
 
         # But on a fresh connection, it works just fine.
         con = connect()
         con.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
-        self.assertEqual(self.getCurrentIsolation(con), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(con), "serializable")
         con.rollback()
-        self.assertEqual(self.getCurrentIsolation(con), 'serializable')
+        self.assertEqual(self.getCurrentIsolation(con), "serializable")
diff --git a/lib/lp/services/database/tests/test_postgresql.py b/lib/lp/services/database/tests/test_postgresql.py
index 7e4c5c7..e280065 100644
--- a/lib/lp/services/database/tests/test_postgresql.py
+++ b/lib/lp/services/database/tests/test_postgresql.py
@@ -15,24 +15,31 @@ def setUp(test):
 
     # Create a test schema demonstrating the edge cases
     cur = con.cursor()
-    cur.execute("""
+    cur.execute(
+        """
         CREATE TABLE A (
             aid     serial PRIMARY KEY,
             selfref integer CONSTRAINT a_selfref_fk REFERENCES A(aid),
             name    text
             )
-        """)
-    cur.execute("""
+        """
+    )
+    cur.execute(
+        """
         CREATE INDEX a__name__idx ON A(name)
-        """)
-    cur.execute("""
+        """
+    )
+    cur.execute(
+        """
         CREATE TABLE B (
             bid integer PRIMARY KEY,
             aid integer UNIQUE CONSTRAINT b_aid_fk REFERENCES A(aid)
                 ON DELETE CASCADE ON UPDATE CASCADE
             )
-        """)
-    cur.execute("""
+        """
+    )
+    cur.execute(
+        """
         CREATE TABLE C (
             cid integer PRIMARY KEY,
             aid integer CONSTRAINT c_aid_fk REFERENCES B(aid),
@@ -41,37 +48,41 @@ def setUp(test):
             name text,
             description text
             )
-        """)
-    cur.execute("""
+        """
+    )
+    cur.execute(
+        """
         CREATE INDEX c__name__description__idx ON C(name, description)
-        """)
-    cur.execute("""
+        """
+    )
+    cur.execute(
+        """
         CREATE TABLE D (
             did integer PRIMARY KEY,
             aid integer UNIQUE CONSTRAINT d_aid_fk REFERENCES B(aid),
             bid integer CONSTRAINT d_bid_fk REFERENCES B(bid),
             CONSTRAINT d_aid_bid_key UNIQUE (aid, bid)
             )
-        """)
+        """
+    )
     cur.execute("CREATE SEQUENCE standalone")
     con.commit()
 
     # Store the connection and a cursor for the tests to use
     cur = con.cursor()
-    test.globs['con'] = con
-    test.globs['cur'] = cur
+    test.globs["con"] = con
+    test.globs["cur"] = cur
 
 
 def tearDown(test):
-    test.globs['con'].close()
+    test.globs["con"].close()
     test._db_fixture.tearDown()
     del test._db_fixture
 
 
 def test_suite():
     suite = DocTestSuite(
-            "lp.services.database.postgresql",
-            setUp=setUp, tearDown=tearDown
-            )
+        "lp.services.database.postgresql", setUp=setUp, tearDown=tearDown
+    )
     suite.layer = BaseLayer
     return suite
diff --git a/lib/lp/services/database/tests/test_rundoctests.py b/lib/lp/services/database/tests/test_rundoctests.py
index f46a5a6..902f80a 100644
--- a/lib/lp/services/database/tests/test_rundoctests.py
+++ b/lib/lp/services/database/tests/test_rundoctests.py
@@ -1,11 +1,11 @@
 # Copyright 2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-from doctest import DocTestSuite
 import unittest
+from doctest import DocTestSuite
 
 
 def test_suite():
     suite = unittest.TestSuite()
-    suite.addTest(DocTestSuite('lp.services.database.sort_sql'))
+    suite.addTest(DocTestSuite("lp.services.database.sort_sql"))
     return suite
diff --git a/lib/lp/services/database/tests/test_sqlbase.py b/lib/lp/services/database/tests/test_sqlbase.py
index 9048602..35f12a7 100644
--- a/lib/lp/services/database/tests/test_sqlbase.py
+++ b/lib/lp/services/database/tests/test_sqlbase.py
@@ -2,12 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 import doctest
-from doctest import (
-    ELLIPSIS,
-    NORMALIZE_WHITESPACE,
-    REPORT_NDIFF,
-    )
 import unittest
+from doctest import ELLIPSIS, NORMALIZE_WHITESPACE, REPORT_NDIFF
 
 from lp.services.database import sqlbase
 
@@ -17,6 +13,7 @@ def test_suite():
     dt_suite = doctest.DocTestSuite(sqlbase, optionflags=optionflags)
     return unittest.TestSuite((dt_suite,))
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     runner = unittest.TextTestRunner()
     runner.run(test_suite())
diff --git a/lib/lp/services/database/tests/test_storm.py b/lib/lp/services/database/tests/test_storm.py
index 4cee575..9ef704b 100644
--- a/lib/lp/services/database/tests/test_storm.py
+++ b/lib/lp/services/database/tests/test_storm.py
@@ -12,5 +12,5 @@ class TestStorm(TestCase):
     def test_has_cextensions(self):
         """Ensure Storm C extensions are being used."""
         self.assertTrue(
-            storm.has_cextensions,
-            'Storm not running with C extensions')
+            storm.has_cextensions, "Storm not running with C extensions"
+        )
diff --git a/lib/lp/services/database/tests/test_stormbase.py b/lib/lp/services/database/tests/test_stormbase.py
index 86b106d..a6d369f 100644
--- a/lib/lp/services/database/tests/test_stormbase.py
+++ b/lib/lp/services/database/tests/test_stormbase.py
@@ -3,16 +3,16 @@
 
 """StormBase tests."""
 
-from storm.locals import Int
 import transaction
+from storm.locals import Int
 from zope.component import getUtility
 
 from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
+    MAIN_STORE,
     IStore,
     IStoreSelector,
-    MAIN_STORE,
-    )
+)
 from lp.services.database.stormbase import StormBase
 from lp.testing import TestCase
 from lp.testing.layers import ZopelessDatabaseLayer
diff --git a/lib/lp/services/database/tests/test_transaction_decorators.py b/lib/lp/services/database/tests/test_transaction_decorators.py
index 1370457..feef7c4 100644
--- a/lib/lp/services/database/tests/test_transaction_decorators.py
+++ b/lib/lp/services/database/tests/test_transaction_decorators.py
@@ -5,10 +5,7 @@ import unittest
 
 import transaction
 
-from lp.services.database import (
-    read_transaction,
-    write_transaction,
-    )
+from lp.services.database import read_transaction, write_transaction
 from lp.services.database.interfaces import IStore
 from lp.services.librarian.model import LibraryFileContent
 from lp.services.librarianserver import db
@@ -22,9 +19,9 @@ class TestTransactionDecorators(unittest.TestCase):
     layer = LaunchpadZopelessLayer
 
     def setUp(self):
-        switch_dbuser('librarian')
+        switch_dbuser("librarian")
         self.store = IStore(LibraryFileContent)
-        self.content_id = db.Library().add('deadbeef', 1234, 'abababab', 'ba')
+        self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba")
         self.file_content = self._getTestFileContent()
         transaction.commit()
 
@@ -34,53 +31,71 @@ class TestTransactionDecorators(unittest.TestCase):
 
     def test_read_transaction_reset_store(self):
         """Make sure that the store is reset after the transaction."""
+
         @read_transaction
         def no_op():
             pass
+
         no_op()
         self.assertIsNot(
-            self.file_content, self._getTestFileContent(),
-            "Store wasn't reset properly.")
+            self.file_content,
+            self._getTestFileContent(),
+            "Store wasn't reset properly.",
+        )
 
     def test_write_transaction_reset_store(self):
         """Make sure that the store is reset after the transaction."""
+
         @write_transaction
         def no_op():
             pass
+
         no_op()
         self.assertIsNot(
-            self.file_content, self._getTestFileContent(),
-            "Store wasn't reset properly.")
+            self.file_content,
+            self._getTestFileContent(),
+            "Store wasn't reset properly.",
+        )
 
     def test_write_transaction_reset_store_with_raise(self):
         """Make sure that the store is reset after the transaction."""
+
         @write_transaction
         def no_op():
-            raise RuntimeError('an error occured')
+            raise RuntimeError("an error occured")
+
         self.assertRaises(RuntimeError, no_op)
         self.assertIsNot(
-            self.file_content, self._getTestFileContent(),
-            "Store wasn't reset properly.")
+            self.file_content,
+            self._getTestFileContent(),
+            "Store wasn't reset properly.",
+        )
 
     def test_writing_transaction_reset_store_on_commit_failure(self):
-        """The store should be reset even if committing the transaction fails.
-        """
+        """The store is reset even if committing the transaction fails."""
+
         class TransactionAborter:
             """Make the next commit() fails."""
+
             def newTransaction(self, txn):
                 pass
 
             def beforeCompletion(self, txn):
-                raise RuntimeError('the commit will fail')
+                raise RuntimeError("the commit will fail")
+
         aborter = TransactionAborter()
         transaction.manager.registerSynch(aborter)
         try:
+
             @write_transaction
             def no_op():
                 pass
+
             self.assertRaises(RuntimeError, no_op)
             self.assertIsNot(
-                self.file_content, self._getTestFileContent(),
-                "Store wasn't reset properly.")
+                self.file_content,
+                self._getTestFileContent(),
+                "Store wasn't reset properly.",
+            )
         finally:
             transaction.manager.unregisterSynch(aborter)
diff --git a/lib/lp/services/database/tests/test_transaction_policy.py b/lib/lp/services/database/tests/test_transaction_policy.py
index db19a4f..d947398 100644
--- a/lib/lp/services/database/tests/test_transaction_policy.py
+++ b/lib/lp/services/database/tests/test_transaction_policy.py
@@ -3,15 +3,15 @@
 
 """Test `TransactionPolicy`."""
 
-from psycopg2 import InternalError
 import transaction
+from psycopg2 import InternalError
 
 from lp.registry.model.person import Person
 from lp.services.database.interfaces import IStore
 from lp.services.database.isolation import (
-    check_no_transaction,
     TransactionInProgress,
-    )
+    check_no_transaction,
+)
 from lp.services.database.transaction_policy import DatabaseTransactionPolicy
 from lp.testing import TestCaseWithFactory
 from lp.testing.layers import ZopelessDatabaseLayer
@@ -154,13 +154,15 @@ class TestTransactionPolicy(TestCaseWithFactory):
         # Only the nested policy (the second element of the key tuple)
         # determines whether writes are allowed (the value associated
         # with the key).
-        self.assertEqual({
-            (False, False): False,
-            (False, True): True,
-            (True, False): False,
-            (True, True): True,
+        self.assertEqual(
+            {
+                (False, False): False,
+                (False, True): True,
+                (True, False): False,
+                (True, True): True,
             },
-            effects)
+            effects,
+        )
 
     def test_policy_restores_previous_policy_on_success(self):
         # A transaction policy, once exited, restores the previously
@@ -169,10 +171,10 @@ class TestTransactionPolicy(TestCaseWithFactory):
             with DatabaseTransactionPolicy(read_only=True):
                 self.readFromDatabase()
             self.assertTrue(
-                self.hasDatabaseBeenWrittenTo(self.writeToDatabase()))
+                self.hasDatabaseBeenWrittenTo(self.writeToDatabase())
+            )
             transaction.commit()
-        self.assertTrue(
-            self.hasDatabaseBeenWrittenTo(self.writeToDatabase()))
+        self.assertTrue(self.hasDatabaseBeenWrittenTo(self.writeToDatabase()))
 
     def test_propagates_failure(self):
         # Exceptions raised inside a transaction policy are not
@@ -198,8 +200,7 @@ class TestTransactionPolicy(TestCaseWithFactory):
         except HorribleFailure:
             pass
 
-        self.assertTrue(
-            self.hasDatabaseBeenWrittenTo(self.writeToDatabase()))
+        self.assertTrue(self.hasDatabaseBeenWrittenTo(self.writeToDatabase()))
 
     def test_policy_can_span_transactions(self):
         # It's okay to commit within a policy; the policy will still
diff --git a/lib/lp/services/database/transaction_policy.py b/lib/lp/services/database/transaction_policy.py
index 8766894..14bd41d 100644
--- a/lib/lp/services/database/transaction_policy.py
+++ b/lib/lp/services/database/transaction_policy.py
@@ -4,11 +4,11 @@
 """Policy for database transactions."""
 
 __all__ = [
-    'DatabaseTransactionPolicy',
-    ]
+    "DatabaseTransactionPolicy",
+]
 
-from psycopg2.extensions import TRANSACTION_STATUS_IDLE
 import transaction
+from psycopg2.extensions import TRANSACTION_STATUS_IDLE
 
 from lp.registry.model.person import Person
 from lp.services.database.interfaces import IMasterStore
@@ -87,7 +87,8 @@ class DatabaseTransactionPolicy:
         :raise TransactionInProgress: if a transaction was already ongoing.
         """
         self._checkNoTransaction(
-            "Entered DatabaseTransactionPolicy while in a transaction.")
+            "Entered DatabaseTransactionPolicy while in a transaction."
+        )
         self.previous_policy = self._getCurrentPolicy()
         self._setPolicy(self.read_only)
         # Commit should include the policy itself.  If this breaks
@@ -105,7 +106,7 @@ class DatabaseTransactionPolicy:
         :raise TransactionInProgress: if trying to exit normally from a
             read-write policy without closing its transaction first.
         """
-        successful_exit = (exc_type is None)
+        successful_exit = exc_type is None
         if successful_exit:
             # We're going to abort any ongoing transactions, but flush
             # first to catch out any writes that we might still be
@@ -117,7 +118,8 @@ class DatabaseTransactionPolicy:
             if not self.read_only:
                 self._checkNoTransaction(
                     "Failed to close transaction before leaving read-write "
-                    "DatabaseTransactionPolicy.")
+                    "DatabaseTransactionPolicy."
+                )
 
         transaction.abort()
         self._setPolicy(self.previous_policy)
@@ -162,11 +164,11 @@ class DatabaseTransactionPolicy:
         :return: True for read-only policy, False for read-write policy.
         """
         db_switch_value_to_policy = {
-            'on': True,
-            'off': False,
+            "on": True,
+            "off": False,
         }
         show_command = "SHOW %s" % self.db_switch
-        db_switch_value, = self.store.execute(show_command).get_one()
+        (db_switch_value,) = self.store.execute(show_command).get_one()
         return db_switch_value_to_policy[db_switch_value]
 
     def _setPolicy(self, read_only=True):
@@ -175,5 +177,4 @@ class DatabaseTransactionPolicy:
         :param read_only: True for read-only policy, False for read-write
             policy.
         """
-        self.store.execute(
-            "SET %s TO %s" % (self.db_switch, quote(read_only)))
+        self.store.execute("SET %s TO %s" % (self.db_switch, quote(read_only)))
diff --git a/lib/lp/services/encoding.py b/lib/lp/services/encoding.py
index c8e8f15..070bb10 100644
--- a/lib/lp/services/encoding.py
+++ b/lib/lp/services/encoding.py
@@ -4,28 +4,27 @@
 """Character encoding utilities"""
 
 __all__ = [
-    'escape_nonascii_uniquely',
-    'guess',
-    'is_ascii_only',
-    'wsgi_native_string',
-    ]
+    "escape_nonascii_uniquely",
+    "guess",
+    "is_ascii_only",
+    "wsgi_native_string",
+]
 
 import codecs
 import re
 
 import six
 
-
 _boms = [
-    (codecs.BOM_UTF16_BE, 'utf_16_be'),
-    (codecs.BOM_UTF16_LE, 'utf_16_le'),
-    (codecs.BOM_UTF32_BE, 'utf_32_be'),
-    (codecs.BOM_UTF32_LE, 'utf_32_le'),
-    ]
+    (codecs.BOM_UTF16_BE, "utf_16_be"),
+    (codecs.BOM_UTF16_LE, "utf_16_le"),
+    (codecs.BOM_UTF32_BE, "utf_32_be"),
+    (codecs.BOM_UTF32_LE, "utf_32_le"),
+]
 
 
 def guess(s):
-    r'''
+    r"""
     Attempts to heuristically guess a strings encoding, returning
     a Unicode string.
 
@@ -107,14 +106,12 @@ def guess(s):
     >>> guess(u'hello'.encode('UTF-16be')) == u'\x00h\x00e\x00l\x00l\x00o'
     True
 
-    '''
+    """
 
     # Calling this method with a Unicode argument indicates a hidden bug
     # that will bite you eventually -- StuartBishop 20050709
     if isinstance(s, str):
-        raise TypeError(
-                'encoding.guess called with Unicode string %r' % (s,)
-                )
+        raise TypeError("encoding.guess called with Unicode string %r" % (s,))
 
     # Attempt to use an objects default Unicode conversion, for objects
     # that can encode themselves as ASCII.
@@ -128,32 +125,32 @@ def guess(s):
     try:
         for bom, encoding in _boms:
             if s.startswith(bom):
-                return str(s[len(bom):], encoding)
+                return str(s[len(bom) :], encoding)
     except UnicodeDecodeError:
         pass
 
     # Try preferred encoding
     try:
-        return str(s, 'UTF-8')
+        return str(s, "UTF-8")
     except UnicodeDecodeError:
         pass
 
     # If we have characters in this range, it is probably CP1252
-    if re.search(br"[\x80-\x9f]", s) is not None:
+    if re.search(rb"[\x80-\x9f]", s) is not None:
         try:
-            return str(s, 'CP1252')
+            return str(s, "CP1252")
         except UnicodeDecodeError:
             pass
 
     # If we have characters in this range, it is probably ISO-8859-15
-    if re.search(br"[\xa4\xa6\xa8\xb4\xb8\xbc-\xbe]", s) is not None:
+    if re.search(rb"[\xa4\xa6\xa8\xb4\xb8\xbc-\xbe]", s) is not None:
         try:
-            return str(s, 'ISO-8859-15')
+            return str(s, "ISO-8859-15")
         except UnicodeDecodeError:
             pass
 
     # Otherwise we default to ISO-8859-1
-    return str(s, 'ISO-8859-1', 'replace')
+    return str(s, "ISO-8859-1", "replace")
 
 
 def escape_nonascii_uniquely(bogus_string):
@@ -179,14 +176,14 @@ def escape_nonascii_uniquely(bogus_string):
 
     :type bogus_string: bytes
     """
-    nonascii_regex = re.compile(br'[\200-\377]')
+    nonascii_regex = re.compile(rb"[\200-\377]")
 
     # By encoding the invalid ascii with a backslash, x, and then the
     # hex value, it makes it easy to decode it by pasting into a python
     # interpreter. quopri() is not used, since that could caused the
     # decoding of an email to fail.
     def quote(match):
-        return b'\\x%x' % ord(match.group(0))
+        return b"\\x%x" % ord(match.group(0))
 
     return nonascii_regex.sub(quote, bogus_string)
 
@@ -194,20 +191,20 @@ def escape_nonascii_uniquely(bogus_string):
 def is_ascii_only(string):
     r"""Ensure that the string contains only ASCII characters.
 
-        >>> is_ascii_only(u'ascii only')
-        True
-        >>> is_ascii_only(b'ascii only')
-        True
-        >>> is_ascii_only(b'\xf4')
-        False
-        >>> is_ascii_only(u'\xf4')
-        False
+    >>> is_ascii_only(u'ascii only')
+    True
+    >>> is_ascii_only(b'ascii only')
+    True
+    >>> is_ascii_only(b'\xf4')
+    False
+    >>> is_ascii_only(u'\xf4')
+    False
     """
     try:
         if isinstance(string, bytes):
-            string.decode('ascii')
+            string.decode("ascii")
         else:
-            string.encode('ascii')
+            string.encode("ascii")
     except UnicodeError:
         return False
     else:
@@ -222,8 +219,8 @@ def wsgi_native_string(s):
     porting to Python 3 via an intermediate stage of Unicode literals in
     Python 2, we enforce this here.
     """
-    result = six.ensure_str(s, encoding='ISO-8859-1')
+    result = six.ensure_str(s, encoding="ISO-8859-1")
     if isinstance(s, str):
         # Ensure we're limited to ISO-8859-1.
-        result.encode('ISO-8859-1')
+        result.encode("ISO-8859-1")
     return result
diff --git a/lib/lp/services/features/__init__.py b/lib/lp/services/features/__init__.py
index 85d9949..151e87e 100644
--- a/lib/lp/services/features/__init__.py
+++ b/lib/lp/services/features/__init__.py
@@ -181,15 +181,14 @@ other environments that have no explicit setup and teardown::
 
 import threading
 
-
 __all__ = [
-    'currentScope',
-    'defaultFlagValue',
-    'get_relevant_feature_controller',
-    'getFeatureFlag',
-    'install_feature_controller',
-    'make_script_feature_controller',
-    ]
+    "currentScope",
+    "defaultFlagValue",
+    "get_relevant_feature_controller",
+    "getFeatureFlag",
+    "install_feature_controller",
+    "make_script_feature_controller",
+]
 
 
 per_thread = threading.local()
@@ -210,7 +209,7 @@ def uninstall_feature_controller():
 
     This function is used to create a pristine environment in tests.
     """
-    if hasattr(per_thread, 'features'):
+    if hasattr(per_thread, "features"):
         del per_thread.features
 
 
@@ -219,7 +218,7 @@ def get_relevant_feature_controller():
     # The noncommittal name "relevant" is because this function may change to
     # look things up from the current request or some other mechanism in
     # future.
-    return getattr(per_thread, 'features', None)
+    return getattr(per_thread, "features", None)
 
 
 def getFeatureFlag(flag):
@@ -261,4 +260,5 @@ def make_script_feature_controller(script_name):
     from lp.services.features.scopes import ScopesForScript
 
     return FeatureController(
-        ScopesForScript(script_name).lookup, StormFeatureRuleSource())
+        ScopesForScript(script_name).lookup, StormFeatureRuleSource()
+    )
diff --git a/lib/lp/services/features/browser/changelog.py b/lib/lp/services/features/browser/changelog.py
index ddeb90b..aef753e 100644
--- a/lib/lp/services/features/browser/changelog.py
+++ b/lib/lp/services/features/browser/changelog.py
@@ -4,8 +4,8 @@
 """Classes to view FeatureFlagChange."""
 
 __all__ = [
-    'ChangeLog',
-    ]
+    "ChangeLog",
+]
 
 from lp.services.features.changelog import ChangeLog
 from lp.services.webapp.batching import BatchNavigator
@@ -14,10 +14,10 @@ from lp.services.webapp.publisher import LaunchpadView
 
 class FeatureChangeLogView(LaunchpadView):
 
-    page_title = label = 'Feature flag changelog'
+    page_title = label = "Feature flag changelog"
 
     @property
     def changes(self):
         navigator = BatchNavigator(ChangeLog.get(), self.request, size=10)
-        navigator.setHeadings('change', 'changes')
+        navigator.setHeadings("change", "changes")
         return navigator
diff --git a/lib/lp/services/features/browser/edit.py b/lib/lp/services/features/browser/edit.py
index 25ebdf4..3acc671 100644
--- a/lib/lp/services/features/browser/edit.py
+++ b/lib/lp/services/features/browser/edit.py
@@ -4,23 +4,20 @@
 """View and edit feature rules."""
 
 __all__ = [
-    'FeatureControlView',
-    'IFeatureControlForm',
-    ]
+    "FeatureControlView",
+    "IFeatureControlForm",
+]
 
 
-from difflib import unified_diff
 import logging
+from difflib import unified_diff
 
 from zope.formlib.widget import CustomWidgetFactory
 from zope.formlib.widgets import TextAreaWidget
 from zope.interface import Interface
 from zope.schema import Text
 
-from lp.app.browser.launchpadform import (
-    action,
-    LaunchpadFormView,
-    )
+from lp.app.browser.launchpadform import LaunchpadFormView, action
 from lp.app.browser.stringformatter import FormattersAPI
 from lp.services.features.changelog import ChangeLog
 from lp.services.features.rulesource import DuplicatePriorityError
@@ -39,12 +36,15 @@ class IFeatureControlForm(Interface):
             "Rules to control feature flags on Launchpad.  "
             "On each line: (flag, scope, priority, value), "
             "whitespace-separated.  Numerically higher "
-            "priorities match first."),
-        required=False)
+            "priorities match first."
+        ),
+        required=False,
+    )
     comment = Text(
         title="Comment",
         description=("Who requested this change and why."),
-        required=True)
+        required=True,
+    )
 
 
 class FeatureControlView(LaunchpadFormView):
@@ -55,26 +55,26 @@ class FeatureControlView(LaunchpadFormView):
     """
 
     schema = IFeatureControlForm
-    page_title = label = 'Feature control'
+    page_title = label = "Feature control"
     diff = None
-    logger_name = 'lp.services.features'
+    logger_name = "lp.services.features"
     custom_widget_comment = CustomWidgetFactory(TextAreaWidget, height=2)
 
     @property
     def field_names(self):
         if self.canSubmit(None):
-            return ['feature_rules', 'comment']
+            return ["feature_rules", "comment"]
         else:
             return []
 
     def canSubmit(self, action):
         """Is the user authorized to change the rules?"""
-        return check_permission('launchpad.Admin', self.context)
+        return check_permission("launchpad.Admin", self.context)
 
     @action("Change", name="change", condition=canSubmit)
     def change_action(self, action, data):
         original_rules = self.request.features.rule_source.getAllRulesAsText()
-        rules_text = data.get('feature_rules') or ''
+        rules_text = data.get("feature_rules") or ""
         logger = logging.getLogger(self.logger_name)
         logger.warning("Change feature rules to: %s" % (rules_text,))
         logger.warning("Previous feature rules were: %s" % (original_rules,))
@@ -83,8 +83,8 @@ class FeatureControlView(LaunchpadFormView):
         # (whitespace normalized) and ordered consistently so the diff is
         # minimal.
         new_rules = self.request.features.rule_source.getAllRulesAsText()
-        diff = '\n'.join(self.diff_rules(original_rules, new_rules))
-        comment = data['comment']
+        diff = "\n".join(self.diff_rules(original_rules, new_rules))
+        comment = data["comment"]
         ChangeLog.append(diff, comment, self.user)
         self.diff = FormattersAPI(diff).format_diff()
 
@@ -93,17 +93,17 @@ class FeatureControlView(LaunchpadFormView):
         # Just generate a one-block diff.
         lines_of_context = 999999
         diff = unified_diff(
-            rules1.splitlines(),
-            rules2.splitlines(),
-            n=lines_of_context)
+            rules1.splitlines(), rules2.splitlines(), n=lines_of_context
+        )
         # The three line header is meaningless here.
         return list(diff)[3:]
 
     @property
     def initial_values(self):
         return {
-            'feature_rules':
-                self.request.features.rule_source.getAllRulesAsText(),
+            "feature_rules": (
+                self.request.features.rule_source.getAllRulesAsText()
+            ),
         }
 
     def validate(self, data):
@@ -112,7 +112,12 @@ class FeatureControlView(LaunchpadFormView):
         try:
             # Unfortunately if the field is '', zope leaves it out of data.
             self.request.features.rule_source.parseRules(
-                data.get('feature_rules') or '')
-        except (IndexError, TypeError, ValueError,
-                DuplicatePriorityError) as e:
-            self.setFieldError('feature_rules', 'Invalid rule syntax: %s' % e)
+                data.get("feature_rules") or ""
+            )
+        except (
+            IndexError,
+            TypeError,
+            ValueError,
+            DuplicatePriorityError,
+        ) as e:
+            self.setFieldError("feature_rules", "Invalid rule syntax: %s" % e)
diff --git a/lib/lp/services/features/browser/info.py b/lib/lp/services/features/browser/info.py
index b5f6ab3..1e8a70e 100644
--- a/lib/lp/services/features/browser/info.py
+++ b/lib/lp/services/features/browser/info.py
@@ -4,8 +4,8 @@
 """View and edit feature rules."""
 
 __all__ = [
-    'FeatureInfoView',
-    ]
+    "FeatureInfoView",
+]
 
 
 from collections import namedtuple
@@ -14,26 +14,23 @@ from lp.services.features.flags import (
     flag_info,
     undocumented_flags,
     value_domain_info,
-    )
-from lp.services.features.scopes import (
-    HANDLERS,
-    undocumented_scopes,
-    )
+)
+from lp.services.features.scopes import HANDLERS, undocumented_scopes
 from lp.services.utils import docstring_dedent
 from lp.services.webapp.publisher import LaunchpadView
 
-
 # Named tuples to use when passing flag and scope data to the template.
 Flag = namedtuple(
-    'Flag', ('name', 'domain', 'description', 'default', 'title', 'link'))
-ValueDomain = namedtuple('ValueDomain', ('name', 'description'))
-Scope = namedtuple('Scope', ('regex', 'description'))
+    "Flag", ("name", "domain", "description", "default", "title", "link")
+)
+ValueDomain = namedtuple("ValueDomain", ("name", "description"))
+Scope = namedtuple("Scope", ("regex", "description"))
 
 
 class FeatureInfoView(LaunchpadView):
     """Display feature flag documentation and other info."""
 
-    page_title = label = 'Feature flag info'
+    page_title = label = "Feature flag info"
 
     @property
     def flag_info(self):
@@ -42,9 +39,8 @@ class FeatureInfoView(LaunchpadView):
 
     @property
     def undocumented_flags(self):
-        """Flag names referenced during process lifetime but not documented.
-        """
-        return ', '.join(undocumented_flags)
+        """Flag names referenced during process lifetime but undocumented."""
+        return ", ".join(undocumented_flags)
 
     @property
     def value_domain_info(self):
@@ -53,13 +49,13 @@ class FeatureInfoView(LaunchpadView):
 
     @property
     def undocumented_scopes(self):
-        """Scope names referenced during process lifetime but not documented.
-        """
-        return ', '.join(undocumented_scopes)
+        """Scope names referenced during process lifetime but undocumented."""
+        return ", ".join(undocumented_scopes)
 
     @property
     def scope_info(self):
         """A list of scopes as named tuples, ready to be rendered."""
         return [
             Scope._make((handler.pattern, docstring_dedent(handler.__doc__)))
-            for handler in HANDLERS]
+            for handler in HANDLERS
+        ]
diff --git a/lib/lp/services/features/browser/tests/test_changelog.py b/lib/lp/services/features/browser/tests/test_changelog.py
index 0070832..8b65a81 100644
--- a/lib/lp/services/features/browser/tests/test_changelog.py
+++ b/lib/lp/services/features/browser/tests/test_changelog.py
@@ -8,19 +8,15 @@ from zope.component import getUtility
 from lp.services.features.changelog import ChangeLog
 from lp.services.webapp.authorization import check_permission
 from lp.services.webapp.interfaces import ILaunchpadRoot
-from lp.testing import (
-    login_celebrity,
-    login_person,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, login_celebrity, login_person
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.pages import find_tag_by_id
 from lp.testing.views import create_view
 
-
 diff = (
     "-bugs.feature_%(idx)s team:testers 10 on\n"
-    "+bugs.feature_%(idx)s team:testers 10 off")
+    "+bugs.feature_%(idx)s team:testers 10 off"
+)
 
 
 class TestChangeLogView(TestCaseWithFactory):
@@ -35,54 +31,52 @@ class TestChangeLogView(TestCaseWithFactory):
 
     def makeFeatureFlagChanges(self):
         for i in range(0, 11):
-            ChangeLog.append(
-                diff % dict(idx=i), 'comment %s' % i, self.person)
+            ChangeLog.append(diff % dict(idx=i), "comment %s" % i, self.person)
 
     def test_anonymous_no_access(self):
         # Anonymous users cannot access the view.
-        view = create_view(self.root, name='+feature-changelog')
-        self.assertFalse(check_permission('launchpad.Edit', view))
+        view = create_view(self.root, name="+feature-changelog")
+        self.assertFalse(check_permission("launchpad.Edit", view))
 
     def test_logged_on_user_no_access(self):
         # Login users cannot access the view.
         login_person(self.factory.makePerson())
-        view = create_view(self.root, name='+feature-changelog')
-        self.assertFalse(check_permission('launchpad.Edit', view))
+        view = create_view(self.root, name="+feature-changelog")
+        self.assertFalse(check_permission("launchpad.Edit", view))
 
     def test_registry_experts_access(self):
         # Registry expert members can access the view.
-        login_celebrity('registry_experts')
-        view = create_view(self.root, name='+feature-changelog')
-        self.assertTrue(check_permission('launchpad.Edit', view))
+        login_celebrity("registry_experts")
+        view = create_view(self.root, name="+feature-changelog")
+        self.assertTrue(check_permission("launchpad.Edit", view))
 
     def test_admin_access(self):
         # Admin members can access the view.
-        login_celebrity('admin')
-        view = create_view(self.root, name='+feature-changelog')
-        self.assertTrue(check_permission('launchpad.Edit', view))
+        login_celebrity("admin")
+        view = create_view(self.root, name="+feature-changelog")
+        self.assertTrue(check_permission("launchpad.Edit", view))
 
     def test_batched_page_title(self):
         # The view provides a page_title and label.
-        view = create_view(self.root, name='+feature-changelog')
-        self.assertEqual(
-            view.label, view.page_title)
-        self.assertEqual(
-            'Feature flag changelog', view.page_title)
+        view = create_view(self.root, name="+feature-changelog")
+        self.assertEqual(view.label, view.page_title)
+        self.assertEqual("Feature flag changelog", view.page_title)
 
     def test_batched_changes(self):
         # The view provides a batched iterator of changes.
         self.makeFeatureFlagChanges()
-        view = create_view(self.root, name='+feature-changelog')
+        view = create_view(self.root, name="+feature-changelog")
         batch = view.changes
-        self.assertEqual('change', batch._singular_heading)
-        self.assertEqual('changes', batch._plural_heading)
+        self.assertEqual("change", batch._singular_heading)
+        self.assertEqual("changes", batch._plural_heading)
         self.assertEqual(10, batch.default_size)
         self.assertEqual(None, batch.currentBatch().nextBatch().nextBatch())
 
     def test_page_batched_changes(self):
         self.makeFeatureFlagChanges()
-        member = login_celebrity('admin')
+        member = login_celebrity("admin")
         view = create_view(
-            self.root, name='+feature-changelog', principal=member)
-        tag = find_tag_by_id(view.render(), 'changes')
-        self.assertTrue('table', tag.name)
+            self.root, name="+feature-changelog", principal=member
+        )
+        tag = find_tag_by_id(view.render(), "changes")
+        self.assertTrue("table", tag.name)
diff --git a/lib/lp/services/features/browser/tests/test_feature_editor.py b/lib/lp/services/features/browser/tests/test_feature_editor.py
index 80172fe..a6fb812 100644
--- a/lib/lp/services/features/browser/tests/test_feature_editor.py
+++ b/lib/lp/services/features/browser/tests/test_feature_editor.py
@@ -17,16 +17,10 @@ from lp.services.features.rulesource import StormFeatureRuleSource
 from lp.services.webapp import canonical_url
 from lp.services.webapp.escaping import html_escape
 from lp.services.webapp.interfaces import ILaunchpadRoot
-from lp.testing import (
-    BrowserTestCase,
-    person_logged_in,
-    )
+from lp.testing import BrowserTestCase, person_logged_in
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import Contains
-from lp.testing.pages import (
-    find_main_content,
-    find_tag_by_id,
-    )
+from lp.testing.pages import find_main_content, find_tag_by_id
 
 
 class TestFeatureControlPage(BrowserTestCase):
@@ -55,11 +49,11 @@ class TestFeatureControlPage(BrowserTestCase):
 
     def getFeatureRulesViewURL(self):
         root = getUtility(ILaunchpadRoot)
-        return canonical_url(root, view_name='+feature-rules')
+        return canonical_url(root, view_name="+feature-rules")
 
     def getFeatureRulesEditURL(self):
         root = getUtility(ILaunchpadRoot)
-        return canonical_url(root, view_name='+feature-rules')
+        return canonical_url(root, view_name="+feature-rules")
 
     def test_feature_page_default_value(self):
         """No rules in the sampledata gives no content in the page"""
@@ -68,68 +62,72 @@ class TestFeatureControlPage(BrowserTestCase):
         textarea = browser.getControl(name="field.feature_rules")
         # and by default, since there are no rules in the sample data, it's
         # empty
-        self.assertThat(textarea.value, Equals(''))
+        self.assertThat(textarea.value, Equals(""))
 
     def test_feature_page_from_database(self):
-        StormFeatureRuleSource().setAllRules([
-            ('ui.icing', 'default', 100, '3.0'),
-            ('ui.icing', 'beta_user', 300, '4.0'),
-            ])
+        StormFeatureRuleSource().setAllRules(
+            [
+                ("ui.icing", "default", 100, "3.0"),
+                ("ui.icing", "beta_user", 300, "4.0"),
+            ]
+        )
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesViewURL())
         textarea = browser.getControl(name="field.feature_rules")
         self.assertThat(
-            textarea.value.replace('\r', '').strip(),
+            textarea.value.replace("\r", "").strip(),
             Equals(
-                "ui.icing\tbeta_user\t300\t4.0\n"
-                "ui.icing\tdefault\t100\t3.0"))
+                "ui.icing\tbeta_user\t300\t4.0\n" "ui.icing\tdefault\t100\t3.0"
+            ),
+        )
 
     def test_feature_rules_anonymous_unauthorized(self):
         browser = self.getUserBrowser()
-        self.assertRaises(Unauthorized,
-            browser.open,
-            self.getFeatureRulesViewURL())
+        self.assertRaises(
+            Unauthorized, browser.open, self.getFeatureRulesViewURL()
+        )
 
     def test_feature_rules_plebian_unauthorized(self):
         """Logged in, but not a member of any interesting teams."""
         browser = self.getUserBrowserAsTeamMember([])
-        self.assertRaises(Unauthorized,
-            browser.open,
-            self.getFeatureRulesViewURL())
+        self.assertRaises(
+            Unauthorized, browser.open, self.getFeatureRulesViewURL()
+        )
 
     def test_feature_page_can_view(self):
         """User that can only view the rules do not see the form."""
         browser = self.getUserBrowserAsTeamMember(
-            [getUtility(ILaunchpadCelebrities).registry_experts])
+            [getUtility(ILaunchpadCelebrities).registry_experts]
+        )
         browser.open(self.getFeatureRulesViewURL())
         content = find_main_content(browser.contents)
-        self.assertEqual(
-            None, find_tag_by_id(content, 'field.feature_rules'))
-        self.assertEqual(
-            None, find_tag_by_id(content, 'field.actions.change'))
-        self.assertTrue(
-            find_tag_by_id(content, 'feature-rules'))
+        self.assertEqual(None, find_tag_by_id(content, "field.feature_rules"))
+        self.assertEqual(None, find_tag_by_id(content, "field.actions.change"))
+        self.assertTrue(find_tag_by_id(content, "feature-rules"))
 
     def test_feature_page_submit_changes(self):
         """Submitted changes show up in the db."""
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
-        new_value = 'beta_user some_key 10 some value with spaces'
+        new_value = "beta_user some_key 10 some value with spaces"
         textarea = browser.getControl(name="field.feature_rules")
         textarea.value = new_value
-        browser.getControl(name="field.comment").value = 'Bob is testing.'
+        browser.getControl(name="field.comment").value = "Bob is testing."
         browser.getControl(name="field.actions.change").click()
         self.assertThat(
             list(StormFeatureRuleSource().getAllRulesAsTuples()),
-            Equals([
-                ('beta_user', 'some_key', 10, 'some value with spaces'),
-                ]))
+            Equals(
+                [
+                    ("beta_user", "some_key", 10, "some value with spaces"),
+                ]
+            ),
+        )
         changes = list(ChangeLog.get())
         self.assertEqual(1, len(changes))
         self.assertEqual(
-            '+beta_user\tsome_key\t10\tsome value with spaces',
-            changes[0].diff)
-        self.assertEqual('Bob is testing.', changes[0].comment)
+            "+beta_user\tsome_key\t10\tsome value with spaces", changes[0].diff
+        )
+        self.assertEqual("Bob is testing.", changes[0].comment)
         self.assertEqual(self.user, changes[0].person)
 
     def test_change_message(self):
@@ -137,49 +135,55 @@ class TestFeatureControlPage(BrowserTestCase):
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
         textarea = browser.getControl(name="field.feature_rules")
-        textarea.value = 'beta_user some_key 10 some value with spaces'
-        browser.getControl(name="field.comment").value = 'comment'
+        textarea.value = "beta_user some_key 10 some value with spaces"
+        browser.getControl(name="field.comment").value = "comment"
         browser.getControl(name="field.actions.change").click()
         self.assertThat(
-            browser.contents,
-            Contains('Your changes have been applied'))
+            browser.contents, Contains("Your changes have been applied")
+        )
 
     def test_change_diff(self):
         """Submitting shows a diff of the changes."""
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
-        browser.getControl(name="field.feature_rules").value = (
-            'beta_user some_key 10 some value with spaces')
-        browser.getControl(name="field.comment").value = 'comment'
+        browser.getControl(
+            name="field.feature_rules"
+        ).value = "beta_user some_key 10 some value with spaces"
+        browser.getControl(name="field.comment").value = "comment"
         browser.getControl(name="field.actions.change").click()
-        browser.getControl(name="field.comment").value = 'comment'
-        browser.getControl(name="field.feature_rules").value = (
-            'beta_user some_key 10 another value with spaces')
+        browser.getControl(name="field.comment").value = "comment"
+        browser.getControl(
+            name="field.feature_rules"
+        ).value = "beta_user some_key 10 another value with spaces"
         browser.getControl(name="field.actions.change").click()
         # The diff is formatted nicely using CSS.
         self.assertThat(
-            browser.contents,
-            Contains('<td class="diff-added text">'))
+            browser.contents, Contains('<td class="diff-added text">')
+        )
         # Removed rules are displayed as being removed.
         self.assertThat(
-            browser.contents.replace('\t', ' '),
-            Contains('-beta_user some_key 10 some value with spaces'))
+            browser.contents.replace("\t", " "),
+            Contains("-beta_user some_key 10 some value with spaces"),
+        )
         # Added rules are displayed as being added.
         self.assertThat(
-            browser.contents.replace('\t', ' '),
-            Contains('+beta_user some_key 10 another value with spaces'))
+            browser.contents.replace("\t", " "),
+            Contains("+beta_user some_key 10 another value with spaces"),
+        )
 
     def test_change_logging_note(self):
         """When submitting changes the name of the logger is shown."""
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
-        browser.getControl(name="field.feature_rules").value = (
-            'beta_user some_key 10 some value with spaces')
-        browser.getControl(name="field.comment").value = 'comment'
+        browser.getControl(
+            name="field.feature_rules"
+        ).value = "beta_user some_key 10 some value with spaces"
+        browser.getControl(name="field.comment").value = "comment"
         browser.getControl(name="field.actions.change").click()
         self.assertThat(
             browser.contents,
-            Contains('logged by the lp.services.features logger'))
+            Contains("logged by the lp.services.features logger"),
+        )
 
     def test_feature_page_submit_change_to_empty(self):
         """Correctly handle submitting an empty value."""
@@ -187,14 +191,14 @@ class TestFeatureControlPage(BrowserTestCase):
         # handle it properly.
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
-        new_value = ''
+        new_value = ""
         textarea = browser.getControl(name="field.feature_rules")
         textarea.value = new_value
-        browser.getControl(name="field.comment").value = 'comment'
+        browser.getControl(name="field.comment").value = "comment"
         browser.getControl(name="field.actions.change").click()
         self.assertThat(
-            list(StormFeatureRuleSource().getAllRulesAsTuples()),
-            Equals([]))
+            list(StormFeatureRuleSource().getAllRulesAsTuples()), Equals([])
+        )
 
     def test_feature_page_submit_change_when_unauthorized(self):
         """Correctly handling attempted value changes when not authorized."""
@@ -207,15 +211,20 @@ class TestFeatureControlPage(BrowserTestCase):
         browser = self.getUserBrowserAsAdmin()
         browser.open(self.getFeatureRulesEditURL())
         textarea = browser.getControl(name="field.feature_rules")
-        textarea.value = dedent("""\
+        textarea.value = dedent(
+            """\
             key foo 10 foo
             key bar 10 bar
-            """)
-        browser.getControl(name="field.comment").value = 'comment'
+            """
+        )
+        browser.getControl(name="field.comment").value = "comment"
         browser.getControl(name="field.actions.change").click()
         self.assertThat(
             browser.contents,
             Contains(
                 html_escape(
                     'Invalid rule syntax: duplicate priority for flag "key": '
-                    '10')))
+                    "10"
+                )
+            ),
+        )
diff --git a/lib/lp/services/features/browser/tests/test_feature_info.py b/lib/lp/services/features/browser/tests/test_feature_info.py
index 3a3cf2c..7e713c4 100644
--- a/lib/lp/services/features/browser/tests/test_feature_info.py
+++ b/lib/lp/services/features/browser/tests/test_feature_info.py
@@ -9,23 +9,16 @@ from zope.security.interfaces import Unauthorized
 
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.services.features.flags import (
+    NullFeatureController,
     documented_flags,
     flag_info,
-    NullFeatureController,
     undocumented_flags,
     value_domain_info,
-    )
-from lp.services.features.scopes import (
-    HANDLERS,
-    undocumented_scopes,
-    )
+)
+from lp.services.features.scopes import HANDLERS, undocumented_scopes
 from lp.services.webapp import canonical_url
 from lp.services.webapp.interfaces import ILaunchpadRoot
-from lp.testing import (
-    BrowserTestCase,
-    person_logged_in,
-    TestCase,
-    )
+from lp.testing import BrowserTestCase, TestCase, person_logged_in
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.matchers import Contains
 
@@ -37,7 +30,7 @@ class TestFeatureControlPage(BrowserTestCase):
     def getFeatureInfoUrl(self):
         """Find the URL to the feature info page."""
         root = getUtility(ILaunchpadRoot)
-        return canonical_url(root, view_name='+feature-info')
+        return canonical_url(root, view_name="+feature-info")
 
     def getUserBrowserAsAdmin(self):
         """Make a new TestBrowser logged in as an admin user."""
@@ -81,14 +74,14 @@ class TestFeatureControlPage(BrowserTestCase):
         # Stash away any already encountered undocumented flags.
         saved_undocumented = undocumented_flags.copy()
         undocumented_flags.clear()
-        undocumented_flags.update(['first', 'second'])
+        undocumented_flags.update(["first", "second"])
         browser.open(self.getFeatureInfoUrl())
         # Put the saved undocumented flags back.
         undocumented_flags.clear()
         undocumented_flags.update(saved_undocumented)
         # Are the (injected) undocumented flags shown in the page?
-        self.assertThat(browser.contents, Contains('first'))
-        self.assertThat(browser.contents, Contains('second'))
+        self.assertThat(browser.contents, Contains("first"))
+        self.assertThat(browser.contents, Contains("second"))
 
     def test_undocumented_scope_displayed(self):
         """The undocumented scope names are displayed on the page."""
@@ -96,28 +89,24 @@ class TestFeatureControlPage(BrowserTestCase):
         # Stash away any already encountered undocumented scopes.
         saved_undocumented = undocumented_scopes.copy()
         undocumented_scopes.clear()
-        undocumented_scopes.update(['first', 'second'])
+        undocumented_scopes.update(["first", "second"])
         browser.open(self.getFeatureInfoUrl())
         # Put the saved undocumented scopes back.
         undocumented_scopes.clear()
         undocumented_scopes.update(saved_undocumented)
         # Are the (injected) undocumented scopes shown in the page?
-        self.assertThat(browser.contents, Contains('first'))
-        self.assertThat(browser.contents, Contains('second'))
+        self.assertThat(browser.contents, Contains("first"))
+        self.assertThat(browser.contents, Contains("second"))
 
     def test_feature_info_anonymous_unauthorized(self):
         """Anonymous users can not view the feature flag info page."""
         browser = self.getUserBrowser()
-        self.assertRaises(Unauthorized,
-            browser.open,
-            self.getFeatureInfoUrl())
+        self.assertRaises(Unauthorized, browser.open, self.getFeatureInfoUrl())
 
     def test_feature_rules_plebian_unauthorized(self):
         """Unauthorized logged-in users can't view the info page."""
         browser = self.getUserBrowserAsTeamMember([])
-        self.assertRaises(Unauthorized,
-            browser.open,
-            self.getFeatureInfoUrl())
+        self.assertRaises(Unauthorized, browser.open, self.getFeatureInfoUrl())
 
 
 class TestUndocumentedFeatureFlags(TestCase):
@@ -144,18 +133,18 @@ class TestUndocumentedFeatureFlags(TestCase):
         """Reading undocumented feature flags records them as undocumented."""
         controller = NullFeatureController()
         # This test assumes there is no flag named "does-not-exist".
-        assert 'does-not-exist' not in documented_flags
-        controller.getFlag('does-not-exist')
-        self.assertThat(undocumented_flags, Contains('does-not-exist'))
+        assert "does-not-exist" not in documented_flags
+        controller.getFlag("does-not-exist")
+        self.assertThat(undocumented_flags, Contains("does-not-exist"))
 
     def test_reading_documented_feature_flags(self):
         """Reading documented flags does not record them as undocumented."""
         controller = NullFeatureController()
         # Make sure there is no flag named "documented-flag-name" before we
         # start testing.
-        assert 'documented-flag-name' not in documented_flags
-        documented_flags.update(['documented-flag-name'])
-        controller.getFlag('documented-flag-name')
+        assert "documented-flag-name" not in documented_flags
+        documented_flags.update(["documented-flag-name"])
+        controller.getFlag("documented-flag-name")
         self.assertThat(
-            undocumented_flags,
-            Not(Contains('documented-flag-name')))
+            undocumented_flags, Not(Contains("documented-flag-name"))
+        )
diff --git a/lib/lp/services/features/changelog.py b/lib/lp/services/features/changelog.py
index 3d4e42c..441167f 100644
--- a/lib/lp/services/features/changelog.py
+++ b/lib/lp/services/features/changelog.py
@@ -4,15 +4,15 @@
 """Classes that manage FeatureFlagChangelogEntry items."""
 
 __all__ = [
-    'ChangeLog',
-    ]
+    "ChangeLog",
+]
 
 from storm.locals import Desc
 
 from lp.services.features.model import (
     FeatureFlagChangelogEntry,
     getFeatureStore,
-    )
+)
 
 
 class ChangeLog:
@@ -30,7 +30,6 @@ class ChangeLog:
     def append(diff, comment, person):
         """Append a FeatureFlagChangelogEntry to the ChangeLog."""
         store = getFeatureStore()
-        feature_flag_change = FeatureFlagChangelogEntry(
-            diff, comment, person)
+        feature_flag_change = FeatureFlagChangelogEntry(diff, comment, person)
         store.add(feature_flag_change)
         return feature_flag_change
diff --git a/lib/lp/services/features/flags.py b/lib/lp/services/features/flags.py
index 87f0f7d..27d660c 100644
--- a/lib/lp/services/features/flags.py
+++ b/lib/lp/services/features/flags.py
@@ -2,12 +2,12 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'FeatureController',
-    'flag_info',
-    'NullFeatureController',
-    'undocumented_flags',
-    'value_domain_info',
-    ]
+    "FeatureController",
+    "flag_info",
+    "NullFeatureController",
+    "undocumented_flags",
+    "value_domain_info",
+]
 
 
 import logging
@@ -15,23 +15,19 @@ import logging
 from lp.services.features.rulesource import (
     NullFeatureRuleSource,
     StormFeatureRuleSource,
-    )
-
+)
 
-logger = logging.getLogger('lp.services.features')
+logger = logging.getLogger("lp.services.features")
 
-value_domain_info = sorted([
-    ('boolean',
-     'Any non-empty value is true; an empty value is false.'),
-    ('float',
-     'The flag value is set to the given floating point number.'),
-    ('int',
-     "An integer."),
-    ('space delimited',
-     'Space-delimited strings.'),
-    ('datetime',
-     'ISO 8601 datetime'),
-    ])
+value_domain_info = sorted(
+    [
+        ("boolean", "Any non-empty value is true; an empty value is false."),
+        ("float", "The flag value is set to the given floating point number."),
+        ("int", "An integer."),
+        ("space delimited", "Space-delimited strings."),
+        ("datetime", "ISO 8601 datetime"),
+    ]
+)
 
 # Data for generating web-visible feature flag documentation.
 #
@@ -47,190 +43,268 @@ value_domain_info = sorted([
 #
 # NOTE: "default behaviour" does not specify a default value.  It
 # merely documents the code's behaviour if no value is specified.
-flag_info = sorted([
-    ('baselayout.careers_link.disabled',
-     'boolean',
-     'Hide the link to the Canonical Careers site.',
-     '',
-     '',
-     ''),
-    ('bugs.affected_count_includes_dupes.disabled',
-     'boolean',
-     ("Disable adding up affected users across all duplicate bugs."),
-     '',
-     '',
-     'https://bugs.launchpad.net/launchpad/+bug/678090'),
-    ('bugs.bugtracker_components.enabled',
-     'boolean',
-     ('Enables the display of bugtracker components.'),
-     '',
-     '',
-     ''),
-    ('bugs.dynamic_bug_listings.pre_fetch',
-     'boolean',
-     ('Enables pre-fetching bug listing results.'),
-     '',
-     'Listing pre-fetching',
-     'https://bugs.launchpad.net/launchpad/+bug/888756'),
-    ('bugs.heat_updates.cutoff',
-     'timestamp',
-     ('Set the oldest that a bug\'s heat can be before it is '
-      'considered outdated.'),
-     '',
-     '',
-     ''),
-    ('code.ajax_revision_diffs.enabled',
-     'boolean',
-     ("Offer expandable inline diffs for branch revisions."),
-     '',
-     '',
-     ''),
-    ('code.incremental_diffs.enabled',
-     'boolean',
-     'Shows incremental diffs on merge proposals.',
-     '',
-     '',
-     ''),
-    ('hard_timeout',
-     'float',
-     'Sets the hard request timeout in milliseconds.',
-     '',
-     '',
-     ''),
-    ('jobs.celery.enabled_classes',
-     'space delimited',
-     'Names of Job classes that should be run via celery',
-     'No jobs run via celery',
-     'Celery-enabled job classes',
-     'https://dev.launchpad.net/CeleryJobRunner'),
-    ('js.yui_version',
-     'space delimited',
-     'Allows us to change the YUI version we run against, e.g. yui-3.4.',
-     'As speficied in versions.cfg',
-     '',
-     ''),
-    ('mail.dkim_authentication.disabled',
-     'boolean',
-     'Disable DKIM authentication checks on incoming mail.',
-     '',
-     '',
-     ''),
-    ('markdown.enabled',
-     'boolean',
-     'Interpret selected user content as Markdown.',
-     'disabled',
-     'Markdown',
-     'https://launchpad.net/bugs/391780'),
-    ('memcache',
-     'boolean',
-     'Enables use of memcached where it is supported.',
-     'enabled',
-     '',
-     ''),
-    ('profiling.enabled',
-     'boolean',
-     'Overrides config.profiling.profiling_allowed to permit profiling.',
-     '',
-     '',
-     ''),
-    ('soyuz.derived_series_upgrade.enabled',
-     'boolean',
-     'Enables mass-upgrade of packages on derivative distributions pages.',
-     '',
-     '',
-     ''),
-    ('visible_render_time',
-     'boolean',
-     'Shows the server-side page render time in the login widget.',
-     '',
-     '',
-     ''),
-    ('disclosure.dsp_picker.enabled',
-     'boolean',
-     'Enables the use of the new DistributionSourcePackage vocabulary for '
-     'the source and binary package name pickers.',
-     '',
-     '',
-     ''),
-    ('bugs.autoconfirm.enabled_distribution_names',
-     'space delimited',
-     ('Enables auto-confirming bugtasks for distributions (and their '
-      'series and packages).  Use the default domain.  Specify a single '
-      'asterisk ("*") to enable for all distributions.'),
-     'None are enabled',
-     '',
-     ''),
-    ('bugs.autoconfirm.enabled_product_names',
-     'space delimited',
-     ('Enables auto-confirming bugtasks for products (and their '
-      'series).  Use the default domain.  Specify a single '
-      'asterisk ("*") to enable for all products.'),
-     'None are enabled',
-     '',
-     ''),
-    ('ajax.batch_navigator.enabled',
-     'boolean',
-     ('If true, batch navigators which have been wired to do so use ajax '
-     'calls to load the next batch of data.'),
-     '',
-     '',
-     ''),
-    ('registry.upcoming_work_view.enabled',
-     'boolean',
-     ('If true, the new upcoming work view of teams is available.'),
-     '',
-     '',
-     ''),
-    ('soyuz.gina.skip_source_versions',
-     'space delimited',
-     ('List of source versions for gina to skip when importing into a '
-      'distribution, formatted as distro/package/version.'),
-     '',
-     '',
-     ''),
-    ('app.root_blog.enabled',
-     'boolean',
-     'If true, load posts from the Launchpad blog to show on the root page.',
-     '',
-     '',
-     ''),
-    ('twisted.flags.refresh',
-     'float',
-     'Number of seconds between feature flag refreshes.',
-     '30',
-     '',
-     ''),
-    ('librarian.swift.enabled',
-     'boolean',
-     'If true, attempt to serve files from Swift.',
-     'disabled',
-     '',
-     ''),
-    ('soyuz.ppa.separate_long_descriptions',
-     'boolean',
-     'If true, PPAs will create an i18n/Translations-en file',
-     'disabled',
-     'PPA Separate Long Descriptions',
-     ''),
-    ('soyuz.named_auth_token.allow_new',
-     'boolean',
-     'If true, allow creation of named authorization tokens for archives.',
-     'disabled',
-     'Named authorization tokens for archives',
-     ''),
-    ('sitesearch.engine.name',
-     'space delimited',
-     'Name of the site search engine backend (only "bing" is available).',
-     'bing',
-     'Site search engine',
-     ''),
-    ('archivepublisher.signing_service.enabled',
-     'boolean',
-     'If true, sign packages using signing service instead of local files.',
-     '',
-     '',
-     ''),
-    ])
+flag_info = sorted(
+    [
+        (
+            "baselayout.careers_link.disabled",
+            "boolean",
+            "Hide the link to the Canonical Careers site.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "bugs.affected_count_includes_dupes.disabled",
+            "boolean",
+            ("Disable adding up affected users across all duplicate bugs."),
+            "",
+            "",
+            "https://bugs.launchpad.net/launchpad/+bug/678090";,
+        ),
+        (
+            "bugs.bugtracker_components.enabled",
+            "boolean",
+            ("Enables the display of bugtracker components."),
+            "",
+            "",
+            "",
+        ),
+        (
+            "bugs.dynamic_bug_listings.pre_fetch",
+            "boolean",
+            ("Enables pre-fetching bug listing results."),
+            "",
+            "Listing pre-fetching",
+            "https://bugs.launchpad.net/launchpad/+bug/888756";,
+        ),
+        (
+            "bugs.heat_updates.cutoff",
+            "timestamp",
+            (
+                "Set the oldest that a bug's heat can be before it is "
+                "considered outdated."
+            ),
+            "",
+            "",
+            "",
+        ),
+        (
+            "code.ajax_revision_diffs.enabled",
+            "boolean",
+            ("Offer expandable inline diffs for branch revisions."),
+            "",
+            "",
+            "",
+        ),
+        (
+            "code.incremental_diffs.enabled",
+            "boolean",
+            "Shows incremental diffs on merge proposals.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "hard_timeout",
+            "float",
+            "Sets the hard request timeout in milliseconds.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "jobs.celery.enabled_classes",
+            "space delimited",
+            "Names of Job classes that should be run via celery",
+            "No jobs run via celery",
+            "Celery-enabled job classes",
+            "https://dev.launchpad.net/CeleryJobRunner";,
+        ),
+        (
+            "js.yui_version",
+            "space delimited",
+            "Allows us to change the YUI version we run against, e.g. "
+            "yui-3.4.",
+            "As speficied in versions.cfg",
+            "",
+            "",
+        ),
+        (
+            "mail.dkim_authentication.disabled",
+            "boolean",
+            "Disable DKIM authentication checks on incoming mail.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "markdown.enabled",
+            "boolean",
+            "Interpret selected user content as Markdown.",
+            "disabled",
+            "Markdown",
+            "https://launchpad.net/bugs/391780";,
+        ),
+        (
+            "memcache",
+            "boolean",
+            "Enables use of memcached where it is supported.",
+            "enabled",
+            "",
+            "",
+        ),
+        (
+            "profiling.enabled",
+            "boolean",
+            "Overrides config.profiling.profiling_allowed to permit "
+            "profiling.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "soyuz.derived_series_upgrade.enabled",
+            "boolean",
+            "Enables mass-upgrade of packages on derivative distributions "
+            "pages.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "visible_render_time",
+            "boolean",
+            "Shows the server-side page render time in the login widget.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "disclosure.dsp_picker.enabled",
+            "boolean",
+            "Enables the use of the new DistributionSourcePackage vocabulary "
+            "for the source and binary package name pickers.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "bugs.autoconfirm.enabled_distribution_names",
+            "space delimited",
+            (
+                "Enables auto-confirming bugtasks for distributions (and "
+                "their series and packages).  Use the default domain.  "
+                'Specify a single asterisk ("*") to enable for all '
+                "distributions."
+            ),
+            "None are enabled",
+            "",
+            "",
+        ),
+        (
+            "bugs.autoconfirm.enabled_product_names",
+            "space delimited",
+            (
+                "Enables auto-confirming bugtasks for products (and their "
+                "series).  Use the default domain.  Specify a single "
+                'asterisk ("*") to enable for all products.'
+            ),
+            "None are enabled",
+            "",
+            "",
+        ),
+        (
+            "ajax.batch_navigator.enabled",
+            "boolean",
+            (
+                "If true, batch navigators which have been wired to do so use "
+                "ajax calls to load the next batch of data."
+            ),
+            "",
+            "",
+            "",
+        ),
+        (
+            "registry.upcoming_work_view.enabled",
+            "boolean",
+            ("If true, the new upcoming work view of teams is available."),
+            "",
+            "",
+            "",
+        ),
+        (
+            "soyuz.gina.skip_source_versions",
+            "space delimited",
+            (
+                "List of source versions for gina to skip when importing into "
+                "a distribution, formatted as distro/package/version."
+            ),
+            "",
+            "",
+            "",
+        ),
+        (
+            "app.root_blog.enabled",
+            "boolean",
+            "If true, load posts from the Launchpad blog to show on the root "
+            "page.",
+            "",
+            "",
+            "",
+        ),
+        (
+            "twisted.flags.refresh",
+            "float",
+            "Number of seconds between feature flag refreshes.",
+            "30",
+            "",
+            "",
+        ),
+        (
+            "librarian.swift.enabled",
+            "boolean",
+            "If true, attempt to serve files from Swift.",
+            "disabled",
+            "",
+            "",
+        ),
+        (
+            "soyuz.ppa.separate_long_descriptions",
+            "boolean",
+            "If true, PPAs will create an i18n/Translations-en file",
+            "disabled",
+            "PPA Separate Long Descriptions",
+            "",
+        ),
+        (
+            "soyuz.named_auth_token.allow_new",
+            "boolean",
+            "If true, allow creation of named authorization tokens for "
+            "archives.",
+            "disabled",
+            "Named authorization tokens for archives",
+            "",
+        ),
+        (
+            "sitesearch.engine.name",
+            "space delimited",
+            'Name of the site search engine backend (only "bing" is '
+            "available).",
+            "bing",
+            "Site search engine",
+            "",
+        ),
+        (
+            "archivepublisher.signing_service.enabled",
+            "boolean",
+            "If true, sign packages using signing service instead of local "
+            "files.",
+            "",
+            "",
+            "",
+        ),
+    ]
+)
 
 # The set of all flag names that are documented.
 documented_flags = {info[0] for info in flag_info}
@@ -239,8 +313,7 @@ documented_flags = {info[0] for info in flag_info}
 undocumented_flags = set()
 
 
-class Memoize():
-
+class Memoize:
     def __init__(self, calc):
         self._known = {}
         self._calc = calc
@@ -253,7 +326,7 @@ class Memoize():
         return v
 
 
-class ScopeDict():
+class ScopeDict:
     """Allow scopes to be looked up by getitem"""
 
     def __init__(self, features):
@@ -263,7 +336,7 @@ class ScopeDict():
         return self.features.isInScope(scope_name)
 
 
-class FeatureController():
+class FeatureController:
     """A FeatureController tells application code what features are active.
 
     It does this by meshing together two sources of data:
@@ -338,13 +411,14 @@ class FeatureController():
             for scope, priority, value in self._rules[flag]:
                 if self._known_scopes.lookup(scope):
                     self._debugMessage(
-                        'feature match flag=%r value=%r scope=%r' %
-                        (flag, value, scope))
+                        "feature match flag=%r value=%r scope=%r"
+                        % (flag, value, scope)
+                    )
                     return (value, scope)
             else:
-                self._debugMessage('no rules matched for %r' % flag)
+                self._debugMessage("no rules matched for %r" % flag)
         else:
-            self._debugMessage('no rules relevant to %r' % flag)
+            self._debugMessage("no rules relevant to %r" % flag)
         return (None, None)
 
     def _debugMessage(self, message):
@@ -358,7 +432,9 @@ class FeatureController():
         return self._current_scopes.lookup(flag)
 
     def _findCurrentScope(self, flag):
-        """Lookup method for self._current_scopes. See also `currentScope()`.
+        """Lookup method for self._current_scopes.
+
+        See also `currentScope()`.
         """
         return self._currentValueAndScope(flag)[1]
 
@@ -405,7 +481,7 @@ class FeatureController():
         self._needRules()
         if flag in self._rules:
             for scope, priority, value in self._rules[flag]:
-                if scope == 'default':
+                if scope == "default":
                     return value
         return None
 
@@ -414,5 +490,6 @@ class NullFeatureController(FeatureController):
     """For use in testing: everything is turned off"""
 
     def __init__(self):
-        FeatureController.__init__(self, lambda scope: None,
-            NullFeatureRuleSource())
+        FeatureController.__init__(
+            self, lambda scope: None, NullFeatureRuleSource()
+        )
diff --git a/lib/lp/services/features/model.py b/lib/lp/services/features/model.py
index 1dd3875..fea3505 100644
--- a/lib/lp/services/features/model.py
+++ b/lib/lp/services/features/model.py
@@ -2,22 +2,16 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'FeatureFlag',
-    'FeatureFlagChangelogEntry',
-    'getFeatureStore',
-    ]
+    "FeatureFlag",
+    "FeatureFlagChangelogEntry",
+    "getFeatureStore",
+]
 
 from datetime import datetime
 
 import pytz
 import six
-from storm.locals import (
-    DateTime,
-    Int,
-    Reference,
-    Storm,
-    Unicode,
-    )
+from storm.locals import DateTime, Int, Reference, Storm, Unicode
 
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.interfaces import IStore
@@ -26,7 +20,7 @@ from lp.services.database.interfaces import IStore
 class FeatureFlag(Storm):
     """Database setting of a particular flag in a scope"""
 
-    __storm_table__ = 'FeatureFlag'
+    __storm_table__ = "FeatureFlag"
     __storm_primary__ = "scope", "flag"
 
     scope = Unicode(allow_none=False)
@@ -46,19 +40,19 @@ class FeatureFlag(Storm):
 class FeatureFlagChangelogEntry(Storm):
     """A record of a change to the whole set of feature flags."""
 
-    __storm_table__ = 'FeatureFlagChangelogEntry'
+    __storm_table__ = "FeatureFlagChangelogEntry"
 
     id = Int(primary=True)
     date_changed = UtcDateTimeCol(notNull=True)
     diff = Unicode(allow_none=False)
     comment = Unicode(allow_none=False)
-    person_id = Int(name='person', allow_none=False)
-    person = Reference(person_id, 'Person.id')
+    person_id = Int(name="person", allow_none=False)
+    person = Reference(person_id, "Person.id")
 
     def __init__(self, diff, comment, person):
         super().__init__()
         self.diff = six.ensure_text(diff)
-        self.date_changed = datetime.now(pytz.timezone('UTC'))
+        self.date_changed = datetime.now(pytz.timezone("UTC"))
         self.comment = six.ensure_text(comment)
         self.person = person
 
diff --git a/lib/lp/services/features/rulesource.py b/lib/lp/services/features/rulesource.py
index 44db1c1..df1b804 100644
--- a/lib/lp/services/features/rulesource.py
+++ b/lib/lp/services/features/rulesource.py
@@ -4,42 +4,36 @@
 """Returns rules defining which features are active"""
 
 __all__ = [
-    'DuplicatePriorityError',
-    'FeatureRuleSource',
-    'MemoryFeatureRuleSource',
-    'NullFeatureRuleSource',
-    'StormFeatureRuleSource',
-    ]
-
-from collections import (
-    defaultdict,
-    namedtuple,
-    )
+    "DuplicatePriorityError",
+    "FeatureRuleSource",
+    "MemoryFeatureRuleSource",
+    "NullFeatureRuleSource",
+    "StormFeatureRuleSource",
+]
+
 import re
+from collections import defaultdict, namedtuple
 
 import six
 from storm.locals import Desc
 
-from lp.services.features.model import (
-    FeatureFlag,
-    getFeatureStore,
-    )
+from lp.services.features.model import FeatureFlag, getFeatureStore
 from lp.services.webapp import adapter
 
-
 # A convenient mapping for a feature flag rule in the database.
 Rule = namedtuple("Rule", "flag scope priority value")
 
 
 class DuplicatePriorityError(Exception):
-
     def __init__(self, flag, priority):
         self.flag = flag
         self.priority = priority
 
     def __str__(self):
         return 'duplicate priority for flag "%s": %d' % (
-            self.flag, self.priority)
+            self.flag,
+            self.priority,
+        )
 
 
 class FeatureRuleSource:
@@ -70,9 +64,9 @@ class FeatureRuleSource:
         """
         tr = []
         for (flag, scope, priority, value) in self.getAllRulesAsTuples():
-            tr.append('\t'.join((flag, scope, str(priority), value)))
-        tr.append('')
-        return '\n'.join(tr)
+            tr.append("\t".join((flag, scope, str(priority), value)))
+        tr.append("")
+        return "\n".join(tr)
 
     def setAllRulesFromText(self, text_form):
         """Update all rules from text input.
@@ -96,9 +90,9 @@ class FeatureRuleSource:
         r = []
         seen_priorities = defaultdict(set)
         for line in text_form.splitlines():
-            if line.strip() == '':
+            if line.strip() == "":
                 continue
-            flag, scope, priority_str, value = re.split('[ \t]+', line, 3)
+            flag, scope, priority_str, value = re.split("[ \t]+", line, 3)
             priority = int(priority_str)
             r.append((flag, scope, priority, six.ensure_text(value)))
             if priority in seen_priorities[flag]:
@@ -109,8 +103,7 @@ class FeatureRuleSource:
 
 
 class StormFeatureRuleSource(FeatureRuleSource):
-    """Access feature rules stored in the database via Storm.
-    """
+    """Access feature rules stored in the database via Storm."""
 
     def getAllRulesAsTuples(self):
         try:
@@ -127,11 +120,9 @@ class StormFeatureRuleSource(FeatureRuleSource):
         except adapter.RequestExpired:
             return
         store = getFeatureStore()
-        rs = (store
-                .find(FeatureFlag)
-                .order_by(
-                    FeatureFlag.flag,
-                    Desc(FeatureFlag.priority)))
+        rs = store.find(FeatureFlag).order_by(
+            FeatureFlag.flag, Desc(FeatureFlag.priority)
+        )
         for r in rs:
             yield Rule(str(r.flag), str(r.scope), r.priority, r.value)
 
@@ -143,19 +134,21 @@ class StormFeatureRuleSource(FeatureRuleSource):
         # XXX: would be slightly better to only update rules as necessary so
         # we keep timestamps, and to avoid the direct sql etc -- mbp 20100924
         store = getFeatureStore()
-        store.execute('DELETE FROM FeatureFlag')
+        store.execute("DELETE FROM FeatureFlag")
         for (flag, scope, priority, value) in new_rules:
-            store.add(FeatureFlag(
-                scope=six.ensure_text(scope),
-                flag=six.ensure_text(flag),
-                value=value,
-                priority=priority))
+            store.add(
+                FeatureFlag(
+                    scope=six.ensure_text(scope),
+                    flag=six.ensure_text(flag),
+                    value=value,
+                    priority=priority,
+                )
+            )
         store.flush()
 
 
 class MemoryFeatureRuleSource(FeatureRuleSource):
-    """Access feature rules stored in non-persistent memory.
-    """
+    """Access feature rules stored in non-persistent memory."""
 
     def __init__(self):
         self.rules = []
diff --git a/lib/lp/services/features/scopes.py b/lib/lp/services/features/scopes.py
index 688284c..4e114f5 100644
--- a/lib/lp/services/features/scopes.py
+++ b/lib/lp/services/features/scopes.py
@@ -9,30 +9,29 @@ run from cron scripts and potentially also other places.
 """
 
 __all__ = [
-    'DefaultScope',
-    'default_scopes',
-    'FixedScope',
-    'HANDLERS',
-    'MultiScopeHandler',
-    'ScopesForScript',
-    'ScopesFromRequest',
-    'TeamScope',
-    'UserSliceScope',
-    'undocumented_scopes',
-    ]
+    "DefaultScope",
+    "default_scopes",
+    "FixedScope",
+    "HANDLERS",
+    "MultiScopeHandler",
+    "ScopesForScript",
+    "ScopesFromRequest",
+    "TeamScope",
+    "UserSliceScope",
+    "undocumented_scopes",
+]
 
-from itertools import zip_longest
 import re
+from itertools import zip_longest
 
-from lp.registry.interfaces.person import IPerson
 import lp.services.config
+from lp.registry.interfaces.person import IPerson
 from lp.services.propertycache import cachedproperty
 
-
 undocumented_scopes = set()
 
 
-class BaseScope():
+class BaseScope:
     """A base class for scope handlers.
 
     The docstring of subclasses is used on the +feature-info page as
@@ -50,14 +49,15 @@ class BaseScope():
 
     def lookup(self, scope_name):
         """Returns true if the given scope name is "active"."""
-        raise NotImplementedError('Subclasses of BaseScope must implement '
-            'lookup.')
+        raise NotImplementedError(
+            "Subclasses of BaseScope must implement " "lookup."
+        )
 
 
 class DefaultScope(BaseScope):
     """The default scope.  Always active."""
 
-    pattern = r'default$'
+    pattern = r"default$"
 
     def lookup(self, scope_name):
         return True
@@ -75,18 +75,19 @@ class PageScope(BaseScope):
         Foo#quux
     """
 
-    pattern = r'pageid:'
+    pattern = r"pageid:"
 
     def __init__(self, request):
         self._request = request
 
     def lookup(self, scope_name):
         """Is the given scope match the current pageid?"""
-        pageid_scope = scope_name[len('pageid:'):]
+        pageid_scope = scope_name[len("pageid:") :]
         scope_segments = self._pageid_to_namespace(pageid_scope)
         request_segments = self._request_pageid_namespace
         for scope_segment, request_segment in zip_longest(
-                scope_segments, request_segments):
+            scope_segments, request_segments
+        ):
             if scope_segment is None:
                 break
             if scope_segment != request_segment:
@@ -97,14 +98,17 @@ class PageScope(BaseScope):
     def _pageid_to_namespace(pageid):
         """Return a list of namespace elements for pageid."""
         # Normalise delimiters.
-        pageid = pageid.replace('#', ':')
+        pageid = pageid.replace("#", ":")
         # Create a list to walk, empty namespaces are elided.
-        return [name for name in pageid.split(':') if name]
+        return [name for name in pageid.split(":") if name]
 
     @cachedproperty
     def _request_pageid_namespace(self):
-        return tuple(self._pageid_to_namespace(
-            self._request._orig_env.get('launchpad.pageid', '')))
+        return tuple(
+            self._pageid_to_namespace(
+                self._request._orig_env.get("launchpad.pageid", "")
+            )
+        )
 
 
 class ScopeWithPerson(BaseScope):
@@ -134,7 +138,7 @@ class TeamScope(ScopeWithPerson):
     process -- in particular, before authentication has happened.
     """
 
-    pattern = r'team:'
+    pattern = r"team:"
 
     def lookup(self, scope_name):
         """Is the given scope a team membership?
@@ -144,7 +148,7 @@ class TeamScope(ScopeWithPerson):
         fixed to reduce this to one query).
         """
         if self.person is not None:
-            team_name = scope_name[len('team:'):]
+            team_name = scope_name[len("team:") :]
             return self.person.inTeam(team_name)
 
 
@@ -163,7 +167,7 @@ class UserSliceScope(ScopeWithPerson):
     some users don't have all the fun by being in eg 0,100.
     """
 
-    pattern = r'userslice:(\d+),(\d+)'
+    pattern = r"userslice:(\d+),(\d+)"
 
     def lookup(self, scope_name):
         match = self.compiled_pattern.match(scope_name)
@@ -185,13 +189,13 @@ class ServerScope(BaseScope):
     in the Launchpad configuration.
     """
 
-    pattern = r'server\.'
+    pattern = r"server\."
 
     def lookup(self, scope_name):
         """Match the current server as a scope."""
-        server_name = scope_name.split('.', 1)[1]
+        server_name = scope_name.split(".", 1)[1]
         try:
-            return lp.services.config.config['launchpad']['is_' + server_name]
+            return lp.services.config.config["launchpad"]["is_" + server_name]
         except KeyError:
             pass
         return False
@@ -204,7 +208,7 @@ class ScriptScope(BaseScope):
     "embroider."
     """
 
-    pattern = r'script:'
+    pattern = r"script:"
 
     def __init__(self, script_name):
         self.script_scope = self.pattern + script_name
@@ -222,7 +226,7 @@ class FixedScope(BaseScope):
     """
 
     def __init__(self, scope):
-        self.pattern = re.escape(scope) + '$'
+        self.pattern = re.escape(scope) + "$"
 
     def lookup(self, scope_name):
         return True
@@ -235,7 +239,7 @@ class FixedScope(BaseScope):
 HANDLERS = {DefaultScope, PageScope, TeamScope, ServerScope, ScriptScope}
 
 
-class MultiScopeHandler():
+class MultiScopeHandler:
     """A scope handler that combines multiple `BaseScope`s.
 
     The ordering in which they're added is arbitrary, because precedence is
@@ -250,7 +254,8 @@ class MultiScopeHandler():
         return [
             handler
             for handler in self.handlers
-                if handler.compiled_pattern.match(scope_name)]
+            if handler.compiled_pattern.match(scope_name)
+        ]
 
     def lookup(self, scope_name):
         """Determine if scope_name applies.
@@ -286,13 +291,16 @@ class ScopesFromRequest(MultiScopeHandler):
     def __init__(self, request):
         def person_from_request():
             return IPerson(request.principal, None)
+
         scopes = list(default_scopes)
-        scopes.extend([
-            PageScope(request),
-            ServerScope(),
-            TeamScope(person_from_request),
-            UserSliceScope(person_from_request),
-            ])
+        scopes.extend(
+            [
+                PageScope(request),
+                ServerScope(),
+                TeamScope(person_from_request),
+                UserSliceScope(person_from_request),
+            ]
+        )
         super().__init__(scopes)
 
 
diff --git a/lib/lp/services/features/testing.py b/lib/lp/services/features/testing.py
index f715cc9..f081323 100644
--- a/lib/lp/services/features/testing.py
+++ b/lib/lp/services/features/testing.py
@@ -4,25 +4,25 @@
 """Helpers for writing tests that use feature flags."""
 
 __all__ = [
-    'FeatureFixture',
-    'MemoryFeatureFixture',
-    ]
+    "FeatureFixture",
+    "MemoryFeatureFixture",
+]
 
 
+import psycopg2
 from fixtures import Fixture
 from lazr.restful.utils import get_current_browser_request
-import psycopg2
 
 from lp.services.features import (
     get_relevant_feature_controller,
     install_feature_controller,
-    )
+)
 from lp.services.features.flags import FeatureController
 from lp.services.features.rulesource import (
     MemoryFeatureRuleSource,
     Rule,
     StormFeatureRuleSource,
-    )
+)
 from lp.services.features.scopes import ScopesFromRequest
 from lp.testing.dbuser import dbuser
 
@@ -34,19 +34,24 @@ def dbadmin(func):
     implicitly commits the transaction, and we want to avoid unnecessary
     commits to avoid breaking database setup optimizations.
     """
+
     def dbadmin_retry(*args, **kw):
         try:
             return func(*args, **kw)
         except psycopg2.ProgrammingError:
-            with dbuser('testadmin'):
+            with dbuser("testadmin"):
                 return func(*args, **kw)
+
     return dbadmin_retry
 
 
 class FeatureFixtureMixin:
-
-    def __init__(self, features_dict, full_feature_rules=None,
-            override_scope_lookup=None):
+    def __init__(
+        self,
+        features_dict,
+        full_feature_rules=None,
+        override_scope_lookup=None,
+    ):
         """Constructor.
 
         :param features_dict: A dictionary-like object with keys and values
@@ -68,12 +73,14 @@ class FeatureFixtureMixin:
         if self.override_scope_lookup:
             scope_lookup = self.override_scope_lookup
         else:
+
             def scope_lookup(scope_name):
                 request = get_current_browser_request()
                 return ScopesFromRequest(request).lookup(scope_name)
 
         install_feature_controller(
-            FeatureController(scope_lookup, rule_source))
+            FeatureController(scope_lookup, rule_source)
+        )
         self.addCleanup(install_feature_controller, original_controller)
 
     def makeNewRules(self):
@@ -87,17 +94,16 @@ class FeatureFixtureMixin:
         # by setAllRules().
         new_rules = [
             Rule(
-                flag=flag_name,
-                scope='default',
-                priority=999,
-                value=str(value))
+                flag=flag_name, scope="default", priority=999, value=str(value)
+            )
             for flag_name, value in self.desired_features.items()
-                if value is not None]
+            if value is not None
+        ]
 
         if self.full_feature_rules is not None:
             new_rules.extend(
-                Rule(**rule_spec)
-                for rule_spec in self.full_feature_rules)
+                Rule(**rule_spec) for rule_spec in self.full_feature_rules
+            )
 
         return new_rules
 
@@ -123,7 +129,8 @@ class FeatureFixture(FeatureFixtureMixin, Fixture):
         rule_source = StormFeatureRuleSource()
         self.addCleanup(
             dbadmin(rule_source.setAllRules),
-            dbadmin(rule_source.getAllRulesAsTuples)())
+            dbadmin(rule_source.getAllRulesAsTuples)(),
+        )
         dbadmin(rule_source.setAllRules)(rules)
         return rule_source
 
diff --git a/lib/lp/services/features/tests/test_changelog.py b/lib/lp/services/features/tests/test_changelog.py
index 7a4f348..ef57e50 100644
--- a/lib/lp/services/features/tests/test_changelog.py
+++ b/lib/lp/services/features/tests/test_changelog.py
@@ -12,10 +12,10 @@ from lp.services.features.model import FeatureFlagChangelogEntry
 from lp.testing import TestCaseWithFactory
 from lp.testing.layers import DatabaseFunctionalLayer
 
-
 diff = (
     "-bugs.new_feature team:testers 10 on\n"
-    "+bugs.new_feature team:testers 10 off")
+    "+bugs.new_feature team:testers 10 off"
+)
 
 
 class TestFeatureFlagChangelogEntry(TestCaseWithFactory):
@@ -26,18 +26,15 @@ class TestFeatureFlagChangelogEntry(TestCaseWithFactory):
     def test_FeatureFlagChangelogEntry_creation(self):
         # A FeatureFlagChangelogEntry has a diff and a date of change.
         person = self.factory.makePerson()
-        before = datetime.now(pytz.timezone('UTC'))
+        before = datetime.now(pytz.timezone("UTC"))
         feature_flag_change = FeatureFlagChangelogEntry(
-            diff, 'comment', person)
-        after = datetime.now(pytz.timezone('UTC'))
-        self.assertEqual(
-            diff, feature_flag_change.diff)
-        self.assertEqual(
-            'comment', feature_flag_change.comment)
-        self.assertEqual(
-            person, feature_flag_change.person)
-        self.assertBetween(
-            before, feature_flag_change.date_changed, after)
+            diff, "comment", person
+        )
+        after = datetime.now(pytz.timezone("UTC"))
+        self.assertEqual(diff, feature_flag_change.diff)
+        self.assertEqual("comment", feature_flag_change.comment)
+        self.assertEqual(person, feature_flag_change.person)
+        self.assertBetween(before, feature_flag_change.date_changed, after)
 
 
 class TestChangeLog(TestCaseWithFactory):
@@ -51,19 +48,17 @@ class TestChangeLog(TestCaseWithFactory):
 
     def test_ChangeLog_append(self):
         # The append() method creates a FeatureFlagChangelogEntry.
-        feature_flag_change = ChangeLog.append(diff, 'comment', self.person)
-        self.assertEqual(
-            diff, feature_flag_change.diff)
-        self.assertEqual(
-            'comment', feature_flag_change.comment)
-        self.assertEqual(
-            self.person, feature_flag_change.person)
+        feature_flag_change = ChangeLog.append(diff, "comment", self.person)
+        self.assertEqual(diff, feature_flag_change.diff)
+        self.assertEqual("comment", feature_flag_change.comment)
+        self.assertEqual(self.person, feature_flag_change.person)
 
     def test_ChangeLog_get(self):
         # The get() method returns an iterator of FeatureFlagChanges from
         # newest to oldest.
-        feature_flag_change_1 = ChangeLog.append(diff, 'comment', self.person)
-        feature_flag_change_2 = ChangeLog.append(diff, 'comment', self.person)
+        feature_flag_change_1 = ChangeLog.append(diff, "comment", self.person)
+        feature_flag_change_2 = ChangeLog.append(diff, "comment", self.person)
         results = ChangeLog.get()
         self.assertEqual(
-            [feature_flag_change_2, feature_flag_change_1], list(results))
+            [feature_flag_change_2, feature_flag_change_1], list(results)
+        )
diff --git a/lib/lp/services/features/tests/test_db_settings.py b/lib/lp/services/features/tests/test_db_settings.py
index bec0701..859cd8a 100644
--- a/lib/lp/services/features/tests/test_db_settings.py
+++ b/lib/lp/services/features/tests/test_db_settings.py
@@ -3,14 +3,8 @@
 
 """Tests for feature settings coming from the database"""
 
-from lp.services.features.model import (
-    FeatureFlag,
-    getFeatureStore,
-    )
-from lp.testing import (
-    layers,
-    TestCase,
-    )
+from lp.services.features.model import FeatureFlag, getFeatureStore
+from lp.testing import TestCase, layers
 
 
 class TestFeatureModel(TestCase):
diff --git a/lib/lp/services/features/tests/test_flags.py b/lib/lp/services/features/tests/test_flags.py
index d22790d..548c0d7 100644
--- a/lib/lp/services/features/tests/test_flags.py
+++ b/lib/lp/services/features/tests/test_flags.py
@@ -5,30 +5,23 @@
 
 import os
 
-from lp.services.features import (
-    getFeatureFlag,
-    install_feature_controller,
-    )
+from lp.services.features import getFeatureFlag, install_feature_controller
 from lp.services.features.flags import FeatureController
 from lp.services.features.rulesource import (
     MemoryFeatureRuleSource,
     StormFeatureRuleSource,
-    )
-from lp.testing import (
-    layers,
-    TestCase,
-    )
+)
+from lp.testing import TestCase, layers
 
-
-notification_name = 'notification.global.text'
-notification_value = '\N{SNOWMAN} stormy Launchpad weather ahead'
+notification_name = "notification.global.text"
+notification_value = "\N{SNOWMAN} stormy Launchpad weather ahead"
 
 
 testdata = [
-    (notification_name, 'beta_user', 100, notification_value),
-    ('ui.icing', 'default', 100, '3.0'),
-    ('ui.icing', 'beta_user', 300, '4.0'),
-    ]
+    (notification_name, "beta_user", 100, notification_value),
+    ("ui.icing", "default", 100, "3.0"),
+    ("ui.icing", "beta_user", 300, "4.0"),
+]
 
 
 class TestFeatureFlags(TestCase):
@@ -39,6 +32,7 @@ class TestFeatureFlags(TestCase):
         super().setUp()
         if os.environ.get("STORM_TRACE", None):
             from storm.tracer import debug
+
             debug(True)
 
     def makeControllerInScopes(self, scopes):
@@ -48,6 +42,7 @@ class TestFeatureFlags(TestCase):
         def scope_cb(scope):
             call_log.append(scope)
             return scope in scopes
+
         controller = FeatureController(scope_cb, StormFeatureRuleSource())
         return controller, call_log
 
@@ -56,57 +51,59 @@ class TestFeatureFlags(TestCase):
 
     def test_getFlag(self):
         self.populateStore()
-        control, call_log = self.makeControllerInScopes(['default'])
-        self.assertEqual('3.0',
-            control.getFlag('ui.icing'))
-        self.assertEqual(['beta_user', 'default'], call_log)
+        control, call_log = self.makeControllerInScopes(["default"])
+        self.assertEqual("3.0", control.getFlag("ui.icing"))
+        self.assertEqual(["beta_user", "default"], call_log)
 
     def test_getItem(self):
         # for use in page templates, the flags can be treated as a dict
         self.populateStore()
-        control, call_log = self.makeControllerInScopes(['default'])
-        self.assertEqual('3.0',
-            control['ui.icing'])
-        self.assertEqual(['beta_user', 'default'], call_log)
+        control, call_log = self.makeControllerInScopes(["default"])
+        self.assertEqual("3.0", control["ui.icing"])
+        self.assertEqual(["beta_user", "default"], call_log)
         # after looking this up the value is known and the scopes are
         # positively and negatively cached
-        self.assertEqual({'ui.icing': '3.0'}, control.usedFlags())
-        self.assertEqual(dict(beta_user=False, default=True),
-            control.usedScopes())
+        self.assertEqual({"ui.icing": "3.0"}, control.usedFlags())
+        self.assertEqual(
+            dict(beta_user=False, default=True), control.usedScopes()
+        )
 
     def test_currentScope(self):
         # currentScope() returns the scope of the matching rule with
         # the highest priority rule
         self.populateStore()
         # If only one scope matches, its name is returned.
-        control, call_log = self.makeControllerInScopes(['default'])
-        self.assertEqual('default', control.currentScope('ui.icing'))
+        control, call_log = self.makeControllerInScopes(["default"])
+        self.assertEqual("default", control.currentScope("ui.icing"))
         # If two scopes match, the one with the higer priority is returned.
         control, call_log = self.makeControllerInScopes(
-            ['default', 'beta_user'])
-        self.assertEqual('beta_user', control.currentScope('ui.icing'))
+            ["default", "beta_user"]
+        )
+        self.assertEqual("beta_user", control.currentScope("ui.icing"))
 
     def test_currentScope__undefined_feature(self):
         # currentScope() returns None for a non-existent flaeture flag.
         self.populateStore()
-        control, call_log = self.makeControllerInScopes(['default'])
-        self.assertIs(None, control.currentScope('undefined_feature'))
+        control, call_log = self.makeControllerInScopes(["default"])
+        self.assertIs(None, control.currentScope("undefined_feature"))
 
     def test_defaultFlagValue(self):
         # defaultFlagValue() returns the default value of a flag even if
         # another scopewith a higher priority matches.
         self.populateStore()
         control, call_log = self.makeControllerInScopes(
-            ['default', 'beta_user'])
-        self.assertEqual('3.0', control.defaultFlagValue('ui.icing'))
+            ["default", "beta_user"]
+        )
+        self.assertEqual("3.0", control.defaultFlagValue("ui.icing"))
 
     def test_defaultFlagValue__undefined_feature(self):
         # defaultFlagValue() returns None if no default scope is defined
         # for a feature.
         self.populateStore()
         control, call_log = self.makeControllerInScopes(
-            ['default', 'beta_user'])
-        self.assertIs(None, control.defaultFlagValue('undefined_feature'))
+            ["default", "beta_user"]
+        )
+        self.assertIs(None, control.defaultFlagValue("undefined_feature"))
 
     def test_getAllFlags(self):
         # can fetch all the active flags, and it gives back only the
@@ -114,50 +111,48 @@ class TestFeatureFlags(TestCase):
         # normally be used.
         self.populateStore()
         control, call_log = self.makeControllerInScopes(
-            ['beta_user', 'default'])
+            ["beta_user", "default"]
+        )
         self.assertEqual(
-            {'ui.icing': '4.0',
-             notification_name: notification_value},
-            control.getAllFlags())
+            {"ui.icing": "4.0", notification_name: notification_value},
+            control.getAllFlags(),
+        )
         # evaluates all necessary flags; in this test data beta_user shadows
         # default settings
-        self.assertEqual(['beta_user'], call_log)
+        self.assertEqual(["beta_user"], call_log)
 
     def test_overrideFlag(self):
         # if there are multiple settings for a flag, and they match multiple
         # scopes, the priorities determine which is matched
         self.populateStore()
-        default_control, call_log = self.makeControllerInScopes(['default'])
-        self.assertEqual(
-            '3.0',
-            default_control.getFlag('ui.icing'))
+        default_control, call_log = self.makeControllerInScopes(["default"])
+        self.assertEqual("3.0", default_control.getFlag("ui.icing"))
         beta_control, call_log = self.makeControllerInScopes(
-            ['beta_user', 'default'])
-        self.assertEqual(
-            '4.0',
-            beta_control.getFlag('ui.icing'))
+            ["beta_user", "default"]
+        )
+        self.assertEqual("4.0", beta_control.getFlag("ui.icing"))
 
     def test_undefinedFlag(self):
         # if the flag is not defined, we get None
         self.populateStore()
         control, call_log = self.makeControllerInScopes(
-            ['beta_user', 'default'])
-        self.assertIs(None,
-            control.getFlag('unknown_flag'))
+            ["beta_user", "default"]
+        )
+        self.assertIs(None, control.getFlag("unknown_flag"))
         no_scope_flags, call_log = self.makeControllerInScopes([])
-        self.assertIs(None,
-            no_scope_flags.getFlag('ui.icing'))
+        self.assertIs(None, no_scope_flags.getFlag("ui.icing"))
 
     def test_threadGetFlag(self):
         self.populateStore()
         # the start-of-request handler will do something like this:
         controller, call_log = self.makeControllerInScopes(
-            ['default', 'beta_user'])
+            ["default", "beta_user"]
+        )
         install_feature_controller(controller)
         try:
             # then application code can simply ask without needing a context
             # object
-            self.assertEqual('4.0', getFeatureFlag('ui.icing'))
+            self.assertEqual("4.0", getFeatureFlag("ui.icing"))
         finally:
             install_feature_controller(None)
 
@@ -165,14 +160,14 @@ class TestFeatureFlags(TestCase):
         # If there is no context, please don't crash. workaround for the root
         # cause in bug 631884.
         install_feature_controller(None)
-        self.assertEqual(None, getFeatureFlag('ui.icing'))
+        self.assertEqual(None, getFeatureFlag("ui.icing"))
 
     def testLazyScopeLookup(self):
         # feature scopes may be a bit expensive to look up, so we do it only
         # when it will make a difference to the result.
         self.populateStore()
-        f, call_log = self.makeControllerInScopes(['beta_user'])
-        self.assertEqual('4.0', f.getFlag('ui.icing'))
+        f, call_log = self.makeControllerInScopes(["beta_user"])
+        self.assertEqual("4.0", f.getFlag("ui.icing"))
         # to calculate this it should only have had to check we're in the
         # beta_users scope; nothing else makes a difference
         self.assertEqual(dict(beta_user=True), f._known_scopes._known)
@@ -181,7 +176,7 @@ class TestFeatureFlags(TestCase):
         # looking up an unknown feature gives you None
         self.populateStore()
         f, call_log = self.makeControllerInScopes([])
-        self.assertEqual(None, f.getFlag('unknown'))
+        self.assertEqual(None, f.getFlag("unknown"))
         # no scopes need to be checked because it's just not in the database
         # and there's no point checking
         self.assertEqual({}, f._known_scopes._known)
@@ -192,29 +187,26 @@ class TestFeatureFlags(TestCase):
 
     def testScopeDict(self):
         # can get scopes as a dict, for use by "feature_scopes/server.demo"
-        f, call_log = self.makeControllerInScopes(['beta_user'])
-        self.assertEqual(True, f.scopes['beta_user'])
-        self.assertEqual(False, f.scopes['alpha_user'])
-        self.assertEqual(True, f.scopes['beta_user'])
-        self.assertEqual(['beta_user', 'alpha_user'], call_log)
+        f, call_log = self.makeControllerInScopes(["beta_user"])
+        self.assertEqual(True, f.scopes["beta_user"])
+        self.assertEqual(False, f.scopes["alpha_user"])
+        self.assertEqual(True, f.scopes["beta_user"])
+        self.assertEqual(["beta_user", "alpha_user"], call_log)
 
 
 test_rules_list = [
-    (notification_name, 'beta_user', 100, notification_value),
-    ('ui.icing', 'normal_user', 500, '5.0'),
-    ('ui.icing', 'beta_user', 300, '4.0'),
-    ('ui.icing', 'default', 100, '3.0'),
-    ]
+    (notification_name, "beta_user", 100, notification_value),
+    ("ui.icing", "normal_user", 500, "5.0"),
+    ("ui.icing", "beta_user", 300, "4.0"),
+    ("ui.icing", "default", 100, "3.0"),
+]
 
 
 class FeatureRuleSourceTestsMixin:
-
     def test_getAllRulesAsTuples(self):
         source = self.makeSource()
         source.setAllRules(test_rules_list)
-        self.assertEqual(
-            test_rules_list,
-            list(source.getAllRulesAsTuples()))
+        self.assertEqual(test_rules_list, list(source.getAllRulesAsTuples()))
 
     def test_getAllRulesAsText(self):
         source = self.makeSource()
@@ -225,29 +217,35 @@ class FeatureRuleSourceTestsMixin:
 ui.icing\tnormal_user\t500\t5.0
 ui.icing\tbeta_user\t300\t4.0
 ui.icing\tdefault\t100\t3.0
-""" % (notification_name, notification_value),
-            source.getAllRulesAsText())
+"""
+            % (notification_name, notification_value),
+            source.getAllRulesAsText(),
+        )
 
     def test_setAllRulesFromText(self):
         # We will overwrite existing data.
         source = self.makeSource()
         source.setAllRules(test_rules_list)
-        source.setAllRulesFromText("""
+        source.setAllRulesFromText(
+            """
 
 flag1   beta_user   200   alpha
 flag1   default     100   gamma with spaces
 flag2   default     0\ton
-""")
-        self.assertEqual({
-            'flag1': [
-                ('beta_user', 200, 'alpha'),
-                ('default', 100, 'gamma with spaces'),
+"""
+        )
+        self.assertEqual(
+            {
+                "flag1": [
+                    ("beta_user", 200, "alpha"),
+                    ("default", 100, "gamma with spaces"),
                 ],
-            'flag2': [
-                ('default', 0, 'on'),
+                "flag2": [
+                    ("default", 0, "on"),
                 ],
             },
-            source.getAllRulesAsDict())
+            source.getAllRulesAsDict(),
+        )
 
 
 class TestStormFeatureRuleSource(FeatureRuleSourceTestsMixin, TestCase):
diff --git a/lib/lp/services/features/tests/test_helpers.py b/lib/lp/services/features/tests/test_helpers.py
index 89b0f20..6fda923 100644
--- a/lib/lp/services/features/tests/test_helpers.py
+++ b/lib/lp/services/features/tests/test_helpers.py
@@ -8,64 +8,58 @@ __all__ = []
 from lp.services.features import (
     get_relevant_feature_controller,
     getFeatureFlag,
-    )
+)
 from lp.services.features.rulesource import (
     MemoryFeatureRuleSource,
     StormFeatureRuleSource,
-    )
-from lp.services.features.testing import (
-    FeatureFixture,
-    MemoryFeatureFixture,
-    )
-from lp.testing import (
-    layers,
-    TestCase,
-    )
+)
+from lp.services.features.testing import FeatureFixture, MemoryFeatureFixture
+from lp.testing import TestCase, layers
 
 
 class FeatureFixturesTestsMixin:
-
     def test_fixture_sets_one_flag_and_cleans_up_again(self):
         flag = self.getUniqueString()
         value_before_fixture_setup = getFeatureFlag(flag)
         value_after_fixture_setup = None
 
-        fixture = self.fixture_cls({flag: 'on'})
+        fixture = self.fixture_cls({flag: "on"})
         fixture.setUp()
         value_after_fixture_setup = getFeatureFlag(flag)
         fixture.cleanUp()
 
-        self.assertEqual(value_after_fixture_setup, 'on')
+        self.assertEqual(value_after_fixture_setup, "on")
         self.assertEqual(value_before_fixture_setup, getFeatureFlag(flag))
         self.assertNotEqual(
-            value_before_fixture_setup, value_after_fixture_setup)
+            value_before_fixture_setup, value_after_fixture_setup
+        )
 
     def test_fixture_deletes_existing_values(self):
-        self.useFixture(self.fixture_cls({'one': '1'}))
-        self.useFixture(self.fixture_cls({'two': '2'}))
+        self.useFixture(self.fixture_cls({"one": "1"}))
+        self.useFixture(self.fixture_cls({"two": "2"}))
 
-        self.assertEqual(getFeatureFlag('one'), None)
-        self.assertEqual(getFeatureFlag('two'), '2')
+        self.assertEqual(getFeatureFlag("one"), None)
+        self.assertEqual(getFeatureFlag("two"), "2")
 
     def test_fixture_overrides_previously_set_flags(self):
-        self.useFixture(self.fixture_cls({'one': '1'}))
-        self.useFixture(self.fixture_cls({'one': '5'}))
+        self.useFixture(self.fixture_cls({"one": "1"}))
+        self.useFixture(self.fixture_cls({"one": "5"}))
 
-        self.assertEqual(getFeatureFlag('one'), '5')
+        self.assertEqual(getFeatureFlag("one"), "5")
 
     def test_fixture_does_not_set_value_for_flags_that_are_None(self):
-        self.useFixture(self.fixture_cls({'nothing': None}))
-        self.assertEqual(getFeatureFlag('nothing'), None)
+        self.useFixture(self.fixture_cls({"nothing": None}))
+        self.assertEqual(getFeatureFlag("nothing"), None)
 
     def test_setting_one_flag_with_context_manager(self):
         flag = self.getUniqueString()
         value_outside_manager = getFeatureFlag(flag)
         value_in_manager = None
 
-        with self.fixture_cls({flag: 'on'}):
+        with self.fixture_cls({flag: "on"}):
             value_in_manager = getFeatureFlag(flag)
 
-        self.assertEqual(value_in_manager, 'on')
+        self.assertEqual(value_in_manager, "on")
         self.assertEqual(value_outside_manager, getFeatureFlag(flag))
         self.assertNotEqual(value_outside_manager, value_in_manager)
 
@@ -78,10 +72,11 @@ class TestFeatureFixture(FeatureFixturesTestsMixin, TestCase):
     fixture_cls = FeatureFixture
 
     def test_fixture_uses_storm(self):
-        self.useFixture(self.fixture_cls({'one': '1'}))
+        self.useFixture(self.fixture_cls({"one": "1"}))
         self.assertIsInstance(
             get_relevant_feature_controller().rule_source,
-            StormFeatureRuleSource)
+            StormFeatureRuleSource,
+        )
 
 
 class TestMemoryFeatureFixture(FeatureFixturesTestsMixin, TestCase):
@@ -92,7 +87,8 @@ class TestMemoryFeatureFixture(FeatureFixturesTestsMixin, TestCase):
     fixture_cls = MemoryFeatureFixture
 
     def test_fixture_uses_memory(self):
-        self.useFixture(self.fixture_cls({'one': '1'}))
+        self.useFixture(self.fixture_cls({"one": "1"}))
         self.assertIsInstance(
             get_relevant_feature_controller().rule_source,
-            MemoryFeatureRuleSource)
+            MemoryFeatureRuleSource,
+        )
diff --git a/lib/lp/services/features/tests/test_scopes.py b/lib/lp/services/features/tests/test_scopes.py
index 843f177..824fbd4 100644
--- a/lib/lp/services/features/tests/test_scopes.py
+++ b/lib/lp/services/features/tests/test_scopes.py
@@ -10,19 +10,15 @@ from lp.services.features.scopes import (
     ScopesForScript,
     ScriptScope,
     UserSliceScope,
-    )
+)
 from lp.services.features.testing import FeatureFixture
-from lp.testing import (
-    person_logged_in,
-    TestCase,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCase, TestCaseWithFactory, person_logged_in
 from lp.testing.layers import DatabaseFunctionalLayer
 
 
 class FakeScope(BaseScope):
 
-    pattern = r'fake:'
+    pattern = r"fake:"
 
     def __init__(self, name):
         self.name = name
@@ -83,19 +79,18 @@ class FakePerson:
 
 
 class TestUserSliceScope(TestCase):
-
     def test_user_slice(self):
         person = FakePerson()
         # NB: scopes take a callable that returns the person, that in
         # production comes from the request.
         scope = UserSliceScope(lambda: person)
         # Effectively selects everyone; should always be true.
-        self.assertTrue(scope.lookup('userslice:0,1'))
+        self.assertTrue(scope.lookup("userslice:0,1"))
         # Exactly one of these should be true.
         checks = 7
         matches = []
         for i in range(checks):
-            name = 'userslice:%d,%d' % (i, checks)
+            name = "userslice:%d,%d" % (i, checks)
             if scope.lookup(name):
                 matches.append(name)
         self.assertEqual(len(matches), 1, matches)
@@ -108,18 +103,23 @@ class TestUserSliceScopeIntegration(TestCaseWithFactory):
     def test_user_slice_from_rules(self):
         """Userslice matches against the real request user"""
         person = self.factory.makePerson()
-        with FeatureFixture({}, full_feature_rules=[
-            dict(
-                flag='test_feature',
-                scope='userslice:0,1',
-                priority=999,
-                value='on'),
-            dict(
-                flag='test_not',
-                scope='userslice:1,1',
-                priority=999,
-                value='not_value'),
-            ]):
+        with FeatureFixture(
+            {},
+            full_feature_rules=[
+                dict(
+                    flag="test_feature",
+                    scope="userslice:0,1",
+                    priority=999,
+                    value="on",
+                ),
+                dict(
+                    flag="test_not",
+                    scope="userslice:1,1",
+                    priority=999,
+                    value="not_value",
+                ),
+            ],
+        ):
             with person_logged_in(person):
-                self.assertEqual(getFeatureFlag('test_feature'), 'on')
-                self.assertEqual(getFeatureFlag('test_not'), None)
+                self.assertEqual(getFeatureFlag("test_feature"), "on")
+                self.assertEqual(getFeatureFlag("test_not"), None)
diff --git a/lib/lp/services/features/tests/test_webapp.py b/lib/lp/services/features/tests/test_webapp.py
index 13b6e27..c61504d 100644
--- a/lib/lp/services/features/tests/test_webapp.py
+++ b/lib/lp/services/features/tests/test_webapp.py
@@ -6,20 +6,17 @@
 from textwrap import dedent
 
 from lp.services.config import config
-from lp.services.features import (
-    getFeatureFlag,
-    webapp,
-    )
+from lp.services.features import getFeatureFlag, webapp
 from lp.services.features.testing import FeatureFixture
 from lp.services.webapp.errorlog import globalErrorUtility
 from lp.services.webapp.servers import LaunchpadTestRequest
 from lp.testing import (
     CaptureOops,
-    layers,
-    login_as,
     TestCase,
     TestCaseWithFactory,
-    )
+    layers,
+    login_as,
+)
 
 
 class TestScopesFromRequest(TestCase):
@@ -29,59 +26,64 @@ class TestScopesFromRequest(TestCase):
     def test_pageid_scope_normal(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        request.setInWSGIEnvironment('launchpad.pageid', 'foo:bar')
-        self.assertTrue(scopes.lookup('pageid:'))
-        self.assertTrue(scopes.lookup('pageid:foo'))
-        self.assertTrue(scopes.lookup('pageid:foo:bar'))
-        self.assertFalse(scopes.lookup('pageid:foo:bar#quux'))
+        request.setInWSGIEnvironment("launchpad.pageid", "foo:bar")
+        self.assertTrue(scopes.lookup("pageid:"))
+        self.assertTrue(scopes.lookup("pageid:foo"))
+        self.assertTrue(scopes.lookup("pageid:foo:bar"))
+        self.assertFalse(scopes.lookup("pageid:foo:bar#quux"))
 
     def test_pageid_scope_collection(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        request.setInWSGIEnvironment('launchpad.pageid', 'scoped:thing:#type')
-        self.assertTrue(scopes.lookup('pageid:'))
-        self.assertTrue(scopes.lookup('pageid:scoped'))
-        self.assertTrue(scopes.lookup('pageid:scoped:thing'))
-        self.assertTrue(scopes.lookup('pageid:scoped:thing:#type'))
-        self.assertFalse(scopes.lookup('pageid:scoped:thing:#type:other'))
+        request.setInWSGIEnvironment("launchpad.pageid", "scoped:thing:#type")
+        self.assertTrue(scopes.lookup("pageid:"))
+        self.assertTrue(scopes.lookup("pageid:scoped"))
+        self.assertTrue(scopes.lookup("pageid:scoped:thing"))
+        self.assertTrue(scopes.lookup("pageid:scoped:thing:#type"))
+        self.assertFalse(scopes.lookup("pageid:scoped:thing:#type:other"))
 
     def test_pageid_scope_empty(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        request.setInWSGIEnvironment('launchpad.pageid', '')
-        self.assertTrue(scopes.lookup('pageid:'))
-        self.assertFalse(scopes.lookup('pageid:foo'))
-        self.assertFalse(scopes.lookup('pageid:foo:bar'))
+        request.setInWSGIEnvironment("launchpad.pageid", "")
+        self.assertTrue(scopes.lookup("pageid:"))
+        self.assertFalse(scopes.lookup("pageid:foo"))
+        self.assertFalse(scopes.lookup("pageid:foo:bar"))
 
     def test_default(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        self.assertTrue(scopes.lookup('default'))
+        self.assertTrue(scopes.lookup("default"))
 
     def test_server(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        self.assertFalse(scopes.lookup('server.lpnet'))
-        config.push('ensure_lpnet', dedent("""\
+        self.assertFalse(scopes.lookup("server.lpnet"))
+        config.push(
+            "ensure_lpnet",
+            dedent(
+                """\
             [launchpad]
             is_lpnet: True
-            """))
+            """
+            ),
+        )
         try:
-            self.assertTrue(scopes.lookup('server.lpnet'))
+            self.assertTrue(scopes.lookup("server.lpnet"))
         finally:
-            config.pop('ensure_lpnet')
+            config.pop("ensure_lpnet")
 
     def test_server_missing_key(self):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
         # There is no such key in the config, so this returns False.
-        self.assertFalse(scopes.lookup('server.pink'))
+        self.assertFalse(scopes.lookup("server.pink"))
 
     def test_unknown_scope(self):
         # Asking about an unknown scope is not an error.
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
-        scopes.lookup('not-a-real-scope')
+        scopes.lookup("not-a-real-scope")
 
 
 class TestDBScopes(TestCaseWithFactory):
@@ -92,7 +94,7 @@ class TestDBScopes(TestCaseWithFactory):
         request = LaunchpadTestRequest()
         scopes = webapp.ScopesFromRequest(request)
         self.factory.loginAsAnyone(request)
-        self.assertFalse(scopes.lookup('team:nonexistent'))
+        self.assertFalse(scopes.lookup("team:nonexistent"))
 
     def test_team_scope_in_team(self):
         request = LaunchpadTestRequest()
@@ -100,7 +102,7 @@ class TestDBScopes(TestCaseWithFactory):
         member = self.factory.makePerson()
         team = self.factory.makeTeam(members=[member])
         login_as(member, request)
-        self.assertTrue(scopes.lookup('team:%s' % team.name))
+        self.assertTrue(scopes.lookup("team:%s" % team.name))
 
 
 class TestFeaturesIntoOops(TestCaseWithFactory):
@@ -108,18 +110,19 @@ class TestFeaturesIntoOops(TestCaseWithFactory):
     layer = layers.DatabaseFunctionalLayer
 
     def test_get_features_into_oops(self):
-        with FeatureFixture({'feature_name': 'value'}):
+        with FeatureFixture({"feature_name": "value"}):
             with CaptureOops() as capture:
                 request = LaunchpadTestRequest()
 
-                self.assertEqual(getFeatureFlag('feature_name'), 'value')
+                self.assertEqual(getFeatureFlag("feature_name"), "value")
 
                 # Simulate an oops here.
                 globalErrorUtility.raising(None, request=request)
 
                 oops = capture.oopses[0]
-                self.assertTrue('features.usedScopes' in oops)
-                self.assertTrue('features.usedFlags' in oops)
+                self.assertTrue("features.usedScopes" in oops)
+                self.assertTrue("features.usedFlags" in oops)
                 self.assertEqual(
-                    oops['features.usedFlags'],
-                    '%r' % {'feature_name': 'value'})
+                    oops["features.usedFlags"],
+                    "%r" % {"feature_name": "value"},
+                )
diff --git a/lib/lp/services/features/tests/test_xmlrpc.py b/lib/lp/services/features/tests/test_xmlrpc.py
index ba1960c..3835e57 100644
--- a/lib/lp/services/features/tests/test_xmlrpc.py
+++ b/lib/lp/services/features/tests/test_xmlrpc.py
@@ -13,13 +13,9 @@ from lp.services.features.scopes import (
     DefaultScope,
     FixedScope,
     MultiScopeHandler,
-    )
+)
 from lp.services.features.xmlrpc import FeatureFlagApplication
-from lp.testing import (
-    feature_flags,
-    set_feature_flag,
-    TestCaseWithFactory,
-    )
+from lp.testing import TestCaseWithFactory, feature_flags, set_feature_flag
 from lp.testing.layers import DatabaseFunctionalLayer
 from lp.testing.xmlrpc import XMLRPCTestTransport
 
@@ -35,63 +31,71 @@ class TestGetFeatureFlag(TestCaseWithFactory):
     def installFeatureController(self, feature_controller):
         old_features = features.get_relevant_feature_controller()
         features.install_feature_controller(feature_controller)
-        self.addCleanup(
-            features.install_feature_controller, old_features)
+        self.addCleanup(features.install_feature_controller, old_features)
 
     def test_getFeatureFlag_returns_None_by_default(self):
-        self.assertIs(None, self.endpoint.getFeatureFlag('unknown'))
+        self.assertIs(None, self.endpoint.getFeatureFlag("unknown"))
 
     def test_getFeatureFlag_returns_true_for_set_flag(self):
-        flag_name = 'flag'
+        flag_name = "flag"
         with feature_flags():
-            set_feature_flag(flag_name, '1')
-            self.assertEqual('1', self.endpoint.getFeatureFlag(flag_name))
+            set_feature_flag(flag_name, "1")
+            self.assertEqual("1", self.endpoint.getFeatureFlag(flag_name))
 
     def test_getFeatureFlag_ignores_relevant_feature_controller(self):
         # getFeatureFlag should only consider the scopes it is asked to
         # consider, not any that happen to be active due to the XML-RPC
         # request itself.
-        flag_name = 'flag'
-        scope_name = 'scope'
+        flag_name = "flag"
+        scope_name = "scope"
         self.installFeatureController(
             FeatureController(
                 MultiScopeHandler(
-                    [DefaultScope(), FixedScope(scope_name)]).lookup,
-                StormFeatureRuleSource()))
-        set_feature_flag(flag_name, '1', scope_name)
+                    [DefaultScope(), FixedScope(scope_name)]
+                ).lookup,
+                StormFeatureRuleSource(),
+            )
+        )
+        set_feature_flag(flag_name, "1", scope_name)
         self.assertEqual(None, self.endpoint.getFeatureFlag(flag_name))
 
     def test_getFeatureFlag_considers_supplied_scope(self):
-        flag_name = 'flag'
-        scope_name = 'scope'
+        flag_name = "flag"
+        scope_name = "scope"
         with feature_flags():
-            set_feature_flag(flag_name, 'value', scope_name)
+            set_feature_flag(flag_name, "value", scope_name)
             self.assertEqual(
-                'value',
-                self.endpoint.getFeatureFlag(flag_name, [scope_name]))
+                "value", self.endpoint.getFeatureFlag(flag_name, [scope_name])
+            )
 
     def test_getFeatureFlag_turns_user_into_team_scope(self):
-        flag_name = 'flag'
+        flag_name = "flag"
         person = self.factory.makePerson()
         team = self.factory.makeTeam(members=[person])
         with feature_flags():
-            set_feature_flag(flag_name, 'value', 'team:' + team.name)
+            set_feature_flag(flag_name, "value", "team:" + team.name)
             self.assertEqual(
-                'value',
+                "value",
                 self.endpoint.getFeatureFlag(
-                    flag_name, ['user:' + person.name]))
+                    flag_name, ["user:" + person.name]
+                ),
+            )
 
     def test_xmlrpc_interface_unset(self):
         sp = xmlrpc.client.ServerProxy(
             config.launchpad.feature_flags_endpoint,
-            transport=XMLRPCTestTransport(), allow_none=True)
-        self.assertEqual(None, sp.getFeatureFlag('flag'))
+            transport=XMLRPCTestTransport(),
+            allow_none=True,
+        )
+        self.assertEqual(None, sp.getFeatureFlag("flag"))
 
     def test_xmlrpc_interface_set(self):
         sp = xmlrpc.client.ServerProxy(
             config.launchpad.feature_flags_endpoint,
-            transport=XMLRPCTestTransport(), allow_none=True)
-        flag_name = 'flag'
+            transport=XMLRPCTestTransport(),
+            allow_none=True,
+        )
+        flag_name = "flag"
         with feature_flags():
-            set_feature_flag(flag_name, '1')
-            self.assertEqual('1', sp.getFeatureFlag(flag_name))
+            set_feature_flag(flag_name, "1")
+            self.assertEqual("1", sp.getFeatureFlag(flag_name))
diff --git a/lib/lp/services/features/webapp.py b/lib/lp/services/features/webapp.py
index 7ffd2da..f4b36c5 100644
--- a/lib/lp/services/features/webapp.py
+++ b/lib/lp/services/features/webapp.py
@@ -14,8 +14,8 @@ from lp.services.features.scopes import ScopesFromRequest
 def start_request(event):
     """Register FeatureController."""
     event.request.features = FeatureController(
-        ScopesFromRequest(event.request).lookup,
-        StormFeatureRuleSource())
+        ScopesFromRequest(event.request).lookup, StormFeatureRuleSource()
+    )
     install_feature_controller(event.request.features)
 
 
diff --git a/lib/lp/services/features/xmlrpc.py b/lib/lp/services/features/xmlrpc.py
index 8e4835a..0446802 100644
--- a/lib/lp/services/features/xmlrpc.py
+++ b/lib/lp/services/features/xmlrpc.py
@@ -4,9 +4,9 @@
 """FeatureFlagApplication allows access to information about feature flags."""
 
 __all__ = [
-    'IFeatureFlagApplication',
-    'FeatureFlagApplication',
-    ]
+    "IFeatureFlagApplication",
+    "FeatureFlagApplication",
+]
 
 import six
 from zope.component import getUtility
@@ -16,11 +16,11 @@ from lp.registry.interfaces.person import IPersonSet
 from lp.services.features.flags import FeatureController
 from lp.services.features.rulesource import StormFeatureRuleSource
 from lp.services.features.scopes import (
-    default_scopes,
     FixedScope,
     MultiScopeHandler,
     TeamScope,
-    )
+    default_scopes,
+)
 from lp.services.webapp.interfaces import ILaunchpadApplication
 
 
@@ -41,18 +41,19 @@ class IFeatureFlagApplication(ILaunchpadApplication):
 
 @implementer(IFeatureFlagApplication)
 class FeatureFlagApplication:
-
     def getFeatureFlag(self, flag_name, active_scopes=()):
         scopes = list(default_scopes)
         for scope_name in active_scopes:
-            if scope_name.startswith('user:'):
+            if scope_name.startswith("user:"):
                 person = getUtility(IPersonSet).getByName(
-                    scope_name[len('user:'):])
+                    scope_name[len("user:") :]
+                )
                 if person is not None:
                     scopes.append(TeamScope(lambda: person))
             else:
                 scopes.append(FixedScope(scope_name))
         flag_name = six.ensure_text(flag_name)
         controller = FeatureController(
-            MultiScopeHandler(scopes).lookup, StormFeatureRuleSource())
+            MultiScopeHandler(scopes).lookup, StormFeatureRuleSource()
+        )
         return controller.getFlag(flag_name)
diff --git a/lib/lp/services/feeds/browser.py b/lib/lp/services/feeds/browser.py
index 19e76fe..0be5396 100644
--- a/lib/lp/services/feeds/browser.py
+++ b/lib/lp/services/feeds/browser.py
@@ -4,22 +4,22 @@
 """View support classes for feeds."""
 
 __all__ = [
-    'AnnouncementsFeedLink',
-    'BranchFeedLink',
-    'BugFeedLink',
-    'BugTargetLatestBugsFeedLink',
-    'FeedLinkBase',
-    'FeedsMixin',
-    'FeedsNavigation',
-    'FeedsRootUrlData',
-    'PersonBranchesFeedLink',
-    'PersonRevisionsFeedLink',
-    'ProductBranchesFeedLink',
-    'ProductRevisionsFeedLink',
-    'ProjectBranchesFeedLink',
-    'ProjectRevisionsFeedLink',
-    'RootAnnouncementsFeedLink',
-    ]
+    "AnnouncementsFeedLink",
+    "BranchFeedLink",
+    "BugFeedLink",
+    "BugTargetLatestBugsFeedLink",
+    "FeedLinkBase",
+    "FeedsMixin",
+    "FeedsNavigation",
+    "FeedsRootUrlData",
+    "PersonBranchesFeedLink",
+    "PersonRevisionsFeedLink",
+    "ProductBranchesFeedLink",
+    "ProductRevisionsFeedLink",
+    "ProjectBranchesFeedLink",
+    "ProjectRevisionsFeedLink",
+    "RootAnnouncementsFeedLink",
+]
 
 from zope.component import getUtility
 from zope.interface import implementer
@@ -29,36 +29,27 @@ from zope.security.interfaces import Unauthorized
 from lp.app.errors import NotFoundError
 from lp.bugs.interfaces.bug import IBugSet
 from lp.bugs.interfaces.bugtarget import IHasBugs
-from lp.bugs.interfaces.bugtask import (
-    IBugTask,
-    IBugTaskSet,
-    )
+from lp.bugs.interfaces.bugtask import IBugTask, IBugTaskSet
 from lp.bugs.interfaces.malone import IMaloneApplication
 from lp.code.interfaces.branch import IBranch
 from lp.layers import FeedsLayer
 from lp.registry.interfaces.announcement import (
     IAnnouncementSet,
     IHasAnnouncements,
-    )
-from lp.registry.interfaces.person import (
-    IPerson,
-    IPersonSet,
-    )
+)
+from lp.registry.interfaces.person import IPerson, IPersonSet
 from lp.registry.interfaces.pillar import IPillarNameSet
 from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.projectgroup import IProjectGroup
 from lp.services.config import config
 from lp.services.feeds.interfaces.application import IFeedsApplication
 from lp.services.webapp import (
+    Navigation,
     canonical_name,
     canonical_url,
-    Navigation,
     stepto,
-    )
-from lp.services.webapp.interfaces import (
-    ICanonicalUrlData,
-    ILaunchpadRoot,
-    )
+)
+from lp.services.webapp.interfaces import ICanonicalUrlData, ILaunchpadRoot
 from lp.services.webapp.publisher import RedirectionView
 from lp.services.webapp.url import urlappend
 from lp.services.webapp.vhosts import allvhosts
@@ -68,9 +59,9 @@ from lp.services.webapp.vhosts import allvhosts
 class FeedsRootUrlData:
     """`ICanonicalUrlData` for Feeds."""
 
-    path = ''
+    path = ""
     inside = None
-    rootsite = 'feeds'
+    rootsite = "feeds"
 
     def __init__(self, context):
         self.context = context
@@ -83,7 +74,7 @@ class FeedsNavigation(Navigation):
 
     newlayer = FeedsLayer
 
-    @stepto('+index')
+    @stepto("+index")
     def redirect_index(self):
         """Redirect /+index to help.launchpad.net/Feeds site.
 
@@ -93,7 +84,8 @@ class FeedsNavigation(Navigation):
         to the default site.
         """
         return self.redirectSubTree(
-            'https://help.launchpad.net/Feeds', status=301)
+            "https://help.launchpad.net/Feeds", status=301
+        )
 
     def traverse(self, name):
         """Traverse the paths of a feed.
@@ -107,16 +99,18 @@ class FeedsNavigation(Navigation):
         # XXX bac 20071019, we would like to normalize with respect to case
         # too but cannot due to a problem with the bug search requiring status
         # values to be of a particular case.  See bug 154562.
-        query_string = self.request.get('QUERY_STRING', '')
-        fields = sorted(query_string.split('&'))
-        normalized_query_string = '&'.join(fields)
+        query_string = self.request.get("QUERY_STRING", "")
+        fields = sorted(query_string.split("&"))
+        normalized_query_string = "&".join(fields)
         if query_string != normalized_query_string:
             # We must empty the traversal stack to prevent an error
             # when calling RedirectionView.publishTraverse().
             self.request.setTraversalStack([])
-            target = "%s%s?%s" % (self.request.getApplicationURL(),
-                                  self.request['PATH_INFO'],
-                                  normalized_query_string)
+            target = "%s%s?%s" % (
+                self.request.getApplicationURL(),
+                self.request["PATH_INFO"],
+                normalized_query_string,
+            )
             redirect = RedirectionView(target, self.request, 301)
             return redirect
 
@@ -124,17 +118,17 @@ class FeedsNavigation(Navigation):
         # http://feeds.launchpad.net/bugs/latest-bugs.atom
         # http://feeds.launchpad.net/bugs/+bugs.atom?...
         # http://feeds.launchpad.net/bugs/1/bug.atom
-        if name == 'bugs':
+        if name == "bugs":
             stack = self.request.getTraversalStack()
             if len(stack) == 0:
-                raise NotFound(self, '', self.request)
+                raise NotFound(self, "", self.request)
             bug_id = stack.pop()
-            if bug_id.startswith('+'):
+            if bug_id.startswith("+"):
                 if config.launchpad.is_bug_search_feed_active:
                     return getUtility(IBugTaskSet)
                 else:
                     raise Unauthorized("Bug search feed deactivated")
-            elif bug_id.startswith('latest-bugs.'):
+            elif bug_id.startswith("latest-bugs."):
                 return getUtility(IMaloneApplication)
             else:
                 self.request.stepstogo.consume()
@@ -143,11 +137,11 @@ class FeedsNavigation(Navigation):
         # Redirect to the canonical name before doing the lookup.
         if canonical_name(name) != name:
             return self.redirectSubTree(
-                canonical_url(self.context) + canonical_name(name),
-                status=301)
+                canonical_url(self.context) + canonical_name(name), status=301
+            )
 
         try:
-            if name.startswith('~'):
+            if name.startswith("~"):
                 # Handle persons and teams.
                 # http://feeds.launchpad.net/~salgado/latest-bugs.html
                 person = getUtility(IPersonSet).getByName(name[1:])
@@ -168,15 +162,16 @@ class FeedLinkBase:
     Subclasses can override:
         title: The name of the feed as it appears in a browser.
     """
-    title = 'Atom Feed'
+
+    title = "Atom Feed"
     href = None
-    rooturl = allvhosts.configs['feeds'].rooturl
+    rooturl = allvhosts.configs["feeds"].rooturl
 
     def __init__(self, context):
         self.context = context
-        assert self.usedfor.providedBy(context), (
-            "Context %r does not provide interface %r"
-            % (context, self.usedfor))
+        assert self.usedfor.providedBy(
+            context
+        ), "Context %r does not provide interface %r" % (context, self.usedfor)
 
     @classmethod
     def allowFeed(cls, context):
@@ -192,12 +187,13 @@ class BugFeedLink(FeedLinkBase):
 
     @property
     def title(self):
-        return 'Bug %s Feed' % self.context.bug.id
+        return "Bug %s Feed" % self.context.bug.id
 
     @property
     def href(self):
-        return urlappend(self.rooturl,
-                         'bugs/' + str(self.context.bug.id) + '/bug.atom')
+        return urlappend(
+            self.rooturl, "bugs/" + str(self.context.bug.id) + "/bug.atom"
+        )
 
     @classmethod
     def allowFeed(cls, context):
@@ -212,14 +208,15 @@ class BugTargetLatestBugsFeedLink(FeedLinkBase):
     @property
     def title(self):
         if IMaloneApplication.providedBy(self.context):
-            return 'Latest Bugs'
+            return "Latest Bugs"
         else:
-            return 'Latest Bugs for %s' % self.context.displayname
+            return "Latest Bugs for %s" % self.context.displayname
 
     @property
     def href(self):
-        return urlappend(canonical_url(self.context, rootsite='feeds'),
-                         'latest-bugs.atom')
+        return urlappend(
+            canonical_url(self.context, rootsite="feeds"), "latest-bugs.atom"
+        )
 
 
 class AnnouncementsFeedLink(FeedLinkBase):
@@ -228,17 +225,19 @@ class AnnouncementsFeedLink(FeedLinkBase):
     @property
     def title(self):
         if IAnnouncementSet.providedBy(self.context):
-            return 'All Announcements'
+            return "All Announcements"
         else:
-            return 'Announcements for %s' % self.context.displayname
+            return "Announcements for %s" % self.context.displayname
 
     @property
     def href(self):
         if IAnnouncementSet.providedBy(self.context):
-            return urlappend(self.rooturl, 'announcements.atom')
+            return urlappend(self.rooturl, "announcements.atom")
         else:
-            return urlappend(canonical_url(self.context, rootsite='feeds'),
-                             'announcements.atom')
+            return urlappend(
+                canonical_url(self.context, rootsite="feeds"),
+                "announcements.atom",
+            )
 
 
 class RootAnnouncementsFeedLink(AnnouncementsFeedLink):
@@ -246,11 +245,11 @@ class RootAnnouncementsFeedLink(AnnouncementsFeedLink):
 
     @property
     def title(self):
-        return 'All Announcements'
+        return "All Announcements"
 
     @property
     def href(self):
-        return urlappend(self.rooturl, 'announcements.atom')
+        return urlappend(self.rooturl, "announcements.atom")
 
 
 class BranchesFeedLinkBase(FeedLinkBase):
@@ -258,26 +257,30 @@ class BranchesFeedLinkBase(FeedLinkBase):
 
     @property
     def title(self):
-        return 'Latest Branches for %s' % self.context.displayname
+        return "Latest Branches for %s" % self.context.displayname
 
     @property
     def href(self):
-        return urlappend(canonical_url(self.context, rootsite='feeds'),
-                         'branches.atom')
+        return urlappend(
+            canonical_url(self.context, rootsite="feeds"), "branches.atom"
+        )
 
 
 class ProjectBranchesFeedLink(BranchesFeedLinkBase):
     """Feed links for branches on a project."""
+
     usedfor = IProjectGroup
 
 
 class ProductBranchesFeedLink(BranchesFeedLinkBase):
     """Feed links for branches on a product."""
+
     usedfor = IProduct
 
 
 class PersonBranchesFeedLink(BranchesFeedLinkBase):
     """Feed links for branches on a person."""
+
     usedfor = IPerson
 
 
@@ -286,7 +289,7 @@ class RevisionsFeedLinkBase(FeedLinkBase):
 
     @property
     def title(self):
-        return 'Latest Revisions for %s' % self.context.displayname
+        return "Latest Revisions for %s" % self.context.displayname
 
     @property
     def href(self):
@@ -294,32 +297,37 @@ class RevisionsFeedLinkBase(FeedLinkBase):
 
         E.g.  http://feeds.launchpad.net/firefox/revisions.atom
         """
-        return urlappend(canonical_url(self.context, rootsite='feeds'),
-                         'revisions.atom')
+        return urlappend(
+            canonical_url(self.context, rootsite="feeds"), "revisions.atom"
+        )
 
 
 class ProjectRevisionsFeedLink(RevisionsFeedLinkBase):
     """Feed links for revisions on a project."""
+
     usedfor = IProjectGroup
 
 
 class ProductRevisionsFeedLink(RevisionsFeedLinkBase):
     """Feed links for revisions on a product."""
+
     usedfor = IProduct
 
 
 class BranchFeedLink(FeedLinkBase):
     """Feed links for revisions on a branch."""
+
     usedfor = IBranch
 
     @property
     def title(self):
-        return 'Latest Revisions for Branch %s' % self.context.displayname
+        return "Latest Revisions for Branch %s" % self.context.displayname
 
     @property
     def href(self):
-        return urlappend(canonical_url(self.context, rootsite="feeds"),
-                         'branch.atom')
+        return urlappend(
+            canonical_url(self.context, rootsite="feeds"), "branch.atom"
+        )
 
     @classmethod
     def allowFeed(cls, context):
@@ -330,20 +338,23 @@ class BranchFeedLink(FeedLinkBase):
 
 class PersonRevisionsFeedLink(FeedLinkBase):
     """Feed links for revisions created by a person."""
+
     usedfor = IPerson
 
     @property
     def title(self):
         if self.context.is_team:
-            return 'Latest Revisions by members of %s' % (
-                self.context.displayname)
+            return "Latest Revisions by members of %s" % (
+                self.context.displayname
+            )
         else:
-            return 'Latest Revisions by %s' % self.context.displayname
+            return "Latest Revisions by %s" % self.context.displayname
 
     @property
     def href(self):
-        return urlappend(canonical_url(self.context, rootsite="feeds"),
-                         'revisions.atom')
+        return urlappend(
+            canonical_url(self.context, rootsite="feeds"), "revisions.atom"
+        )
 
 
 class FeedsMixin:
@@ -354,6 +365,7 @@ class FeedsMixin:
 
     feed_links: Returns a list of objects subclassed from FeedLinkBase.
     """
+
     feed_types = (
         AnnouncementsFeedLink,
         BranchFeedLink,
@@ -366,15 +378,17 @@ class FeedsMixin:
         ProjectBranchesFeedLink,
         ProjectRevisionsFeedLink,
         RootAnnouncementsFeedLink,
-        )
+    )
 
     @property
     def feed_links(self):
-
         def allowFeed(feed_type, context):
-            return (feed_type.usedfor.providedBy(context) and
-                feed_type.allowFeed(context))
+            return feed_type.usedfor.providedBy(
+                context
+            ) and feed_type.allowFeed(context)
 
-        return [feed_type(self.context)
+        return [
+            feed_type(self.context)
             for feed_type in self.feed_types
-            if allowFeed(feed_type, self.context)]
+            if allowFeed(feed_type, self.context)
+        ]
diff --git a/lib/lp/services/feeds/feed.py b/lib/lp/services/feeds/feed.py
index b7e1467..a15cecc 100644
--- a/lib/lp/services/feeds/feed.py
+++ b/lib/lp/services/feeds/feed.py
@@ -8,12 +8,12 @@ Future support may include feeds such as sparklines.
 """
 
 __all__ = [
-    'FeedBase',
-    'FeedEntry',
-    'FeedPerson',
-    'FeedTypedData',
-    'MINUTES',
-    ]
+    "FeedBase",
+    "FeedEntry",
+    "FeedPerson",
+    "FeedTypedData",
+    "MINUTES",
+]
 
 import operator
 import os
@@ -33,21 +33,20 @@ from lp.services.feeds.interfaces.feed import (
     IFeedPerson,
     IFeedTypedData,
     UnsupportedFeedFormat,
-    )
+)
 from lp.services.propertycache import cachedproperty
 from lp.services.utils import utc_now
 from lp.services.webapp import (
-    canonical_url,
     LaunchpadView,
+    canonical_url,
     urlappend,
     urlparse,
-    )
+)
 from lp.services.webapp.escaping import html_escape
 from lp.services.webapp.interfaces import ILaunchpadRoot
 from lp.services.webapp.vhosts import allvhosts
 
-
-SUPPORTED_FEEDS = ('.atom', '.html')
+SUPPORTED_FEEDS = (".atom", ".html")
 MINUTES = 60  # Seconds in a minute.
 
 
@@ -62,15 +61,18 @@ class FeedBase(LaunchpadView):
     max_age = config.launchpad.max_feed_cache_minutes * MINUTES
     quantity = 25
     items = None
-    rootsite = 'mainsite'
-    template_files = {'atom': 'templates/feed-atom.pt',
-                      'html': 'templates/feed-html.pt'}
+    rootsite = "mainsite"
+    template_files = {
+        "atom": "templates/feed-atom.pt",
+        "html": "templates/feed-html.pt",
+    }
 
     def __init__(self, context, request):
         super().__init__(context, request)
         self.format = self.feed_format
-        self.root_url = canonical_url(getUtility(ILaunchpadRoot),
-                                      rootsite=self.rootsite)
+        self.root_url = canonical_url(
+            getUtility(ILaunchpadRoot), rootsite=self.rootsite
+        )
 
     @property
     def title(self):
@@ -84,13 +86,12 @@ class FeedBase(LaunchpadView):
         # The self link is the URL for this particular feed.  For example:
         # http://feeds.launchpad.net/ubuntu/announcments.atom
         path = "%s.%s" % (self.feedname, self.format)
-        return urlappend(canonical_url(self.context, rootsite="feeds"),
-                         path)
+        return urlappend(canonical_url(self.context, rootsite="feeds"), path)
 
     @property
     def site_url(self):
         """See `IFeed`."""
-        return allvhosts.configs['mainsite'].rooturl[:-1]
+        return allvhosts.configs["mainsite"].rooturl[:-1]
 
     @property
     def link_alternate(self):
@@ -106,22 +107,21 @@ class FeedBase(LaunchpadView):
         """
         # Get the creation date, if available.  Otherwise use a fixed date, as
         # allowed by the RFC.
-        if getattr(self.context, 'datecreated', None) is not None:
+        if getattr(self.context, "datecreated", None) is not None:
             datecreated = self.context.datecreated.date().isoformat()
-        elif getattr(self.context, 'date_created', None) is not None:
+        elif getattr(self.context, "date_created", None) is not None:
             datecreated = self.context.date_created.date().isoformat()
         else:
             datecreated = "2008"
         url_path = urlparse(self.link_alternate)[2]
-        if self.rootsite != 'mainsite':
-            id_ = 'tag:launchpad.net,%s:/%s%s' % (
+        if self.rootsite != "mainsite":
+            id_ = "tag:launchpad.net,%s:/%s%s" % (
                 datecreated,
                 self.rootsite,
-                url_path)
+                url_path,
+            )
         else:
-            id_ = 'tag:launchpad.net,%s:%s' % (
-                datecreated,
-                url_path)
+            id_ = "tag:launchpad.net,%s:%s" % (datecreated, url_path)
         return id_
 
     def getItems(self):
@@ -152,7 +152,7 @@ class FeedBase(LaunchpadView):
         if extension in SUPPORTED_FEEDS:
             return extension[1:]
         else:
-            raise UnsupportedFeedFormat('%s is not supported' % path)
+            raise UnsupportedFeedFormat("%s is not supported" % path)
 
     @property
     def logo(self):
@@ -167,9 +167,11 @@ class FeedBase(LaunchpadView):
     @cachedproperty
     def date_updated(self):
         """See `IFeed`."""
-        sorted_items = sorted(self.getItems(),
-                              key=operator.attrgetter('last_modified'),
-                              reverse=True)
+        sorted_items = sorted(
+            self.getItems(),
+            key=operator.attrgetter("last_modified"),
+            reverse=True,
+        )
         if len(sorted_items) == 0:
             # datetime.isoformat() doesn't place the necessary "+00:00"
             # for the feedvalidator's check of the iso8601 date format
@@ -177,7 +179,7 @@ class FeedBase(LaunchpadView):
             return utc_now()
         last_modified = sorted_items[0].last_modified
         if last_modified is None:
-            raise AssertionError('All feed entries require a date updated.')
+            raise AssertionError("All feed entries require a date updated.")
         return last_modified
 
     def render(self):
@@ -185,37 +187,40 @@ class FeedBase(LaunchpadView):
         expires = rfc1123_date(time.time() + self.max_age)
         if self.date_updated is not None:
             last_modified = rfc1123_date(
-                time.mktime(self.date_updated.timetuple()))
+                time.mktime(self.date_updated.timetuple())
+            )
         else:
             last_modified = rfc1123_date(time.time())
         response = self.request.response
-        response.setHeader('Expires', expires)
-        response.setHeader('Cache-Control', 'max-age=%d' % self.max_age)
-        response.setHeader('X-Cache-Control', 'max-age=%d' % self.max_age)
-        response.setHeader('Last-Modified', last_modified)
+        response.setHeader("Expires", expires)
+        response.setHeader("Cache-Control", "max-age=%d" % self.max_age)
+        response.setHeader("X-Cache-Control", "max-age=%d" % self.max_age)
+        response.setHeader("Last-Modified", last_modified)
 
-        if self.format == 'atom':
+        if self.format == "atom":
             return self.renderAtom()
-        elif self.format == 'html':
+        elif self.format == "html":
             return self.renderHTML()
         else:
-            raise UnsupportedFeedFormat("Format %s is not supported" %
-                                        self.format)
+            raise UnsupportedFeedFormat(
+                "Format %s is not supported" % self.format
+            )
 
     def renderAtom(self):
         """See `IFeed`."""
-        self.request.response.setHeader('content-type',
-                                        'application/atom+xml;charset=utf-8')
-        template_file = ViewPageTemplateFile(self.template_files['atom'])
+        self.request.response.setHeader(
+            "content-type", "application/atom+xml;charset=utf-8"
+        )
+        template_file = ViewPageTemplateFile(self.template_files["atom"])
         result = template_file(self)
         # XXX EdwinGrubbs 2008-01-10 bug=181903
         # Zope3 requires the content-type to start with "text/" if
         # the result is a unicode object.
-        return result.encode('utf-8')
+        return result.encode("utf-8")
 
     def renderHTML(self):
         """See `IFeed`."""
-        return ViewPageTemplateFile(self.template_files['html'])(self)
+        return ViewPageTemplateFile(self.template_files["html"])(self)
 
 
 @implementer(IFeedEntry)
@@ -225,19 +230,21 @@ class FeedEntry:
     An individual entry for a feed.
     """
 
-    def __init__(self,
-                 title,
-                 link_alternate,
-                 date_created,
-                 date_updated,
-                 date_published=None,
-                 authors=None,
-                 contributors=None,
-                 content=None,
-                 id_=None,
-                 generator=None,
-                 logo=None,
-                 icon=None):
+    def __init__(
+        self,
+        title,
+        link_alternate,
+        date_created,
+        date_updated,
+        date_published=None,
+        authors=None,
+        contributors=None,
+        content=None,
+        id_=None,
+        generator=None,
+        logo=None,
+        icon=None,
+    ):
         self.title = title
         self.link_alternate = link_alternate
         self.content = content
@@ -245,7 +252,7 @@ class FeedEntry:
         self.date_updated = date_updated
         self.date_published = date_published
         if date_updated is None:
-            raise AssertionError('date_updated is required by RFC 4287')
+            raise AssertionError("date_updated is required by RFC 4287")
         if authors is None:
             authors = []
         self.authors = authors
@@ -263,18 +270,19 @@ class FeedEntry:
 
     def construct_id(self):
         url_path = urlparse(self.link_alternate)[2]
-        return 'tag:launchpad.net,%s:%s' % (
+        return "tag:launchpad.net,%s:%s" % (
             self.date_created.date().isoformat(),
-            url_path)
+            url_path,
+        )
 
 
 @implementer(IFeedTypedData)
 class FeedTypedData:
     """Data for a feed that includes its type."""
 
-    content_types = ['text', 'html', 'xhtml']
+    content_types = ["text", "html", "xhtml"]
 
-    def __init__(self, content, content_type='text', root_url=None):
+    def __init__(self, content, content_type="text", root_url=None):
         self._content = content
         if content_type not in self.content_types:
             raise UnsupportedFeedFormat("%s: is not valid" % content_type)
@@ -283,23 +291,25 @@ class FeedTypedData:
 
     @property
     def content(self):
-        if (self.content_type in ('html', 'xhtml') and
-            self.root_url is not None):
+        if (
+            self.content_type in ("html", "xhtml")
+            and self.root_url is not None
+        ):
             # Unqualified hrefs must be qualified using the original subdomain
             # or they will try be served from http://feeds.launchpad.net,
             # which will not work.
             soup = BeautifulSoup(self._content)
-            a_tags = soup.find_all('a')
+            a_tags = soup.find_all("a")
             for a_tag in a_tags:
-                if a_tag['href'].startswith('/'):
-                    a_tag['href'] = urljoin(self.root_url, a_tag['href'])
+                if a_tag["href"].startswith("/"):
+                    a_tag["href"] = urljoin(self.root_url, a_tag["href"])
             altered_content = str(soup)
         else:
             altered_content = self._content
 
-        if self.content_type in ('text', 'html'):
+        if self.content_type in ("text", "html"):
             altered_content = html_escape(altered_content)
-        elif self.content_type == 'xhtml':
+        elif self.content_type == "xhtml":
             soup = BeautifulSoup(altered_content)
             altered_content = str(soup)
         return altered_content
diff --git a/lib/lp/services/feeds/interfaces/application.py b/lib/lp/services/feeds/interfaces/application.py
index c37cbd4..934b85a 100644
--- a/lib/lp/services/feeds/interfaces/application.py
+++ b/lib/lp/services/feeds/interfaces/application.py
@@ -2,8 +2,8 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'IFeedsApplication',
-    ]
+    "IFeedsApplication",
+]
 
 
 from lp.services.webapp.interfaces import ILaunchpadApplication
diff --git a/lib/lp/services/feeds/interfaces/feed.py b/lib/lp/services/feeds/interfaces/feed.py
index 7a92ff8..b22ad74 100644
--- a/lib/lp/services/feeds/interfaces/feed.py
+++ b/lib/lp/services/feeds/interfaces/feed.py
@@ -4,25 +4,15 @@
 """Interfaces for feeds generation."""
 
 __all__ = [
-    'IFeed',
-    'IFeedEntry',
-    'IFeedPerson',
-    'IFeedTypedData',
-    'UnsupportedFeedFormat',
-    ]
-
-from zope.interface import (
-    Attribute,
-    Interface,
-    )
-from zope.schema import (
-    Datetime,
-    Int,
-    List,
-    Text,
-    TextLine,
-    URI,
-    )
+    "IFeed",
+    "IFeedEntry",
+    "IFeedPerson",
+    "IFeedTypedData",
+    "UnsupportedFeedFormat",
+]
+
+from zope.interface import Attribute, Interface
+from zope.schema import URI, Datetime, Int, List, Text, TextLine
 
 
 class UnsupportedFeedFormat(Exception):
@@ -55,18 +45,19 @@ class IFeed(Interface):
     # stale.
     max_age = Int(
         title="Maximum age",
-        description="Maximum age in seconds for a feed to be cached.")
+        description="Maximum age in seconds for a feed to be cached.",
+    )
 
     # A feed could contain an arbitrary large number of entries, so a quantity
     # may be specified to limit the number of entries returned.
     quantity = Int(
         title="Quantity",
-        description="Number of items to be returned in a feed.")
+        description="Number of items to be returned in a feed.",
+    )
 
     # The title of the feed is prominently displayed in readers and should
     # succinctly identify the feed, e.g. "Latest bugs in Kubuntu".
-    title = TextLine(
-        title="Title of the feed.")
+    title = TextLine(title="Title of the feed.")
 
     # The URL for a feed identifies it uniquely and it should never change.
     # The latest bugs in Kubuntu is:
@@ -74,14 +65,15 @@ class IFeed(Interface):
     link_self = TextLine(
         title="URL for the feed.",
         description="The link_self URL for the feed should be "
-                    "unique and permanent.")
+        "unique and permanent.",
+    )
 
     # The site URL refers to the top-level page for the site serving the
     # feed.  For Launchpad the site_url should be the mainsite URL,
     # i.e. http://launchpad.net.
     site_url = TextLine(
-        title="Site URL",
-        description="The URL for the main site of Launchpad.")
+        title="Site URL", description="The URL for the main site of Launchpad."
+    )
 
     # Feeds are intended to be machine-readable -- XML to be processed by a
     # feed reader and then, possibly, displayed.  The alternate URL is the
@@ -91,10 +83,11 @@ class IFeed(Interface):
     link_alternate = TextLine(
         title="Alternate URL for the feed.",
         description="The URL to a resource that is the human-readable "
-                    "equivalent of the feed.  So for: "
-                    "http://feeds.launchpad.net/ubuntu/announcements.atom "
-                    "the link_alternate would be: "
-                    "http://launchpad.net/ubuntu/+announcements")
+        "equivalent of the feed.  So for: "
+        "http://feeds.launchpad.net/ubuntu/announcements.atom "
+        "the link_alternate would be: "
+        "http://launchpad.net/ubuntu/+announcements",
+    )
 
     # The feed ID is a permanent ID for the feed and it must be unique across
     # all time and domains.  That sounds harder than it really is.  To make
@@ -105,27 +98,25 @@ class IFeed(Interface):
     feed_id = TextLine(
         title="ID for the feed.",
         description="The <id> for a feed is permanent and globally unique. "
-                    "It is constructed following RFC 4151.")
+        "It is constructed following RFC 4151.",
+    )
 
     # The feed format is either 'atom' or 'html'.
     feed_format = TextLine(
         title="Feed format",
         description="Requested feed format.  "
-                    "Raises UnsupportedFeed if not supported.")
+        "Raises UnsupportedFeed if not supported.",
+    )
 
     # The logo URL points to an image identifying the feed and will likely
     # vary from one Launchpad application to another.  For example the logo
     # for bugs is:
     # http://launchpad.net/@@/bug.
-    logo = TextLine(
-        title="Logo URL",
-        description="The URL for the feed logo.")
+    logo = TextLine(title="Logo URL", description="The URL for the feed logo.")
 
     # The icon URL points to an image identifying the feed.  For Launchpad
     # feeds the icon is http://launchpad.net/@@/launchpad.
-    icon = TextLine(
-        title="Icon URL",
-        description="The URL for the feed icon.")
+    icon = TextLine(title="Icon URL", description="The URL for the feed icon.")
 
     # The date updated represents the last date any information in the feed
     # changed.  For instance for feed for Launchpad announcements the date
@@ -133,8 +124,8 @@ class IFeed(Interface):
     # the feed changed.  Feed readers use the date updated one criteria as to
     # whether to fetch the feed information anew.
     date_updated = Datetime(
-        title="Date update",
-        description="Date of last update for the feed.")
+        title="Date update", description="Date of last update for the feed."
+    )
 
     def getItems():
         """Get the individual items for the feed.
@@ -156,16 +147,12 @@ class IFeed(Interface):
 
 
 class IFeedEntry(Interface):
-    """Interface for an entry in a feed.
-
-    """
+    """Interface for an entry in a feed."""
 
     # The title of the entry is prominently displayed in readers and should
     # succinctly identify the entry, e.g. "Microsoft has a majority market
     # share."
-    title = TextLine(
-        title="Title",
-        description="The title of the entry")
+    title = TextLine(title="Title", description="The title of the entry")
 
     # The link alternate is an URL specifying the location of the
     # human-readable equivalent for the entry.  For a Ubuntu announcements, an
@@ -174,8 +161,9 @@ class IFeedEntry(Interface):
     link_alternate = TextLine(
         title="Alternate URL for the entry.",
         description="The URL to a resource that is the human-readable "
-                    "equivalent of the entry, e.g. "
-                    "http://launchpad.net/ubuntu/+announcement/1")
+        "equivalent of the entry, e.g. "
+        "http://launchpad.net/ubuntu/+announcement/1",
+    )
 
     # The actual content for the entry that is to be displayed in the feed
     # reader.  It may be text or marked up HTML.  It should be an
@@ -185,30 +173,34 @@ class IFeedEntry(Interface):
         "For an announcement, for example, the content "
         "is the text of the announcement.  It may be "
         "plain text or formatted html, as is done for "
-        "bugs.")
+        "bugs."
+    )
 
     # Date the entry was created in the system, without respect to the feed.
     date_created = Datetime(
         title="Date Created",
-        description="Date the entry was originally created in Launchpad.")
+        description="Date the entry was originally created in Launchpad.",
+    )
 
     # Date any aspect of the entry was changed.
     date_updated = Datetime(
-        title="Date Updated",
-        description="Date the entry was last updated.")
+        title="Date Updated", description="Date the entry was last updated."
+    )
 
     # Date the entry became published.
     date_published = Datetime(
         title="Date Published",
         description="Date the entry was published.  "
-                    "For some content this date will be the same "
-                    "as the creation date.  For others, like an "
-                    "announcement, it will be the date the announcement "
-                    "became public.")
+        "For some content this date will be the same "
+        "as the creation date.  For others, like an "
+        "announcement, it will be the date the announcement "
+        "became public.",
+    )
 
     # The primary authors for the entry.
     authors = Attribute(
-        "A list of IFeedPerson representing the authors for the entry.")
+        "A list of IFeedPerson representing the authors for the entry."
+    )
 
     # People who contributed to the entry.  The line between authors and
     # contributors is fuzzy.  For a bug, all comment writers could be
@@ -216,21 +208,22 @@ class IFeedEntry(Interface):
     # filer as the author and all commenters as contributors.  Pick an
     # approach and be consistent.
     contributors = Attribute(
-        "A list of IFeedPerson representing the contributors for the entry.")
+        "A list of IFeedPerson representing the contributors for the entry."
+    )
 
     # The logo representing the entry.
     # Not used and ignored.
     logo = TextLine(
         title="Logo URL",
-        description="The URL for the entry logo."
-                    "Currently not used.")
+        description="The URL for the entry logo." "Currently not used.",
+    )
 
     # The icon representing the entry.
     # Not used and ignored.
     icon = TextLine(
         title="Icon URL",
-        description="The URL for the entry icon."
-                    "Currently not used.")
+        description="The URL for the entry icon." "Currently not used.",
+    )
 
     # The description of the program that generated the feed.  May include
     # versioning information.  Useful for debugging purposes only.
@@ -238,8 +231,9 @@ class IFeedEntry(Interface):
     generator = TextLine(
         title="The generator of the feed.",
         description="A description of the program generating the feed.  "
-                    "Analogous to a browser USER-AGENT string.  "
-                    "Currently not used.")
+        "Analogous to a browser USER-AGENT string.  "
+        "Currently not used.",
+    )
 
 
 class IFeedTypedData(Interface):
@@ -248,40 +242,39 @@ class IFeedTypedData(Interface):
     content_types = List(
         title="Content types",
         description="List of supported content types",
-        required=True)
+        required=True,
+    )
 
-    content = Text(
-        title="Content",
-        description="Data contents",
-        required=True)
+    content = Text(title="Content", description="Data contents", required=True)
 
     content_type = Text(
         title="Content type",
         description="The actual content type for this object.  Must be"
-                    "one of those listed in content_types.",
-        required=False)
+        "one of those listed in content_types.",
+        required=False,
+    )
 
     root_url = Text(
         title="Root URL",
         description="URL for the root of the site that produced the content, "
-                    "i.e. 'http://code.launchpad.net'",
-        required=False)
+        "i.e. 'http://code.launchpad.net'",
+        required=False,
+    )
 
 
 class IFeedPerson(Interface):
     """Interface for a person in a feed."""
 
     name = TextLine(
-        title="Name",
-        description="The person's name.",
-        required=True)
+        title="Name", description="The person's name.", required=True
+    )
 
     email = TextLine(
         title="Email",
         description="The person's email address.",
-        required=False)
+        required=False,
+    )
 
     uri = URI(
-        title="URI",
-        description="The URI for the person.",
-        required=True)
+        title="URI", description="The URI for the person.", required=True
+    )
diff --git a/lib/lp/services/feeds/tests/helper.py b/lib/lp/services/feeds/tests/helper.py
index 7f7f8e8..f2235e4 100644
--- a/lib/lp/services/feeds/tests/helper.py
+++ b/lib/lp/services/feeds/tests/helper.py
@@ -4,34 +4,26 @@
 """Helper functions for testing feeds."""
 
 __all__ = [
-    'IThing',
-    'parse_entries',
-    'parse_ids',
-    'parse_links',
-    'Thing',
-    'ThingFeedView',
-    ]
+    "IThing",
+    "parse_entries",
+    "parse_ids",
+    "parse_links",
+    "Thing",
+    "ThingFeedView",
+]
 
-from zope.interface import (
-    Attribute,
-    implementer,
-    Interface,
-    )
+from zope.interface import Attribute, Interface, implementer
 
-from lp.services.beautifulsoup import (
-    BeautifulSoup,
-    SoupStrainer,
-    )
+from lp.services.beautifulsoup import BeautifulSoup, SoupStrainer
 from lp.services.webapp.publisher import LaunchpadView
 
 
 class IThing(Interface):
-    value = Attribute('the value of the thing')
+    value = Attribute("the value of the thing")
 
 
 @implementer(IThing)
 class Thing:
-
     def __init__(self, value):
         self.value = value
 
@@ -49,22 +41,24 @@ class ThingFeedView(LaunchpadView):
 
 def parse_entries(contents):
     """Define a helper function for parsing feed entries."""
-    strainer = SoupStrainer('entry')
+    strainer = SoupStrainer("entry")
     entries = [
-        tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
+        tag for tag in BeautifulSoup(contents, "xml", parse_only=strainer)
+    ]
     return entries
 
 
 def parse_links(contents, rel):
     """Define a helper function for parsing feed links."""
-    strainer = SoupStrainer('link', rel=rel)
+    strainer = SoupStrainer("link", rel=rel)
     entries = [
-        tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
+        tag for tag in BeautifulSoup(contents, "xml", parse_only=strainer)
+    ]
     return entries
 
 
 def parse_ids(contents):
     """Define a helper function for parsing ids."""
-    strainer = SoupStrainer('id')
-    ids = [tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
+    strainer = SoupStrainer("id")
+    ids = [tag for tag in BeautifulSoup(contents, "xml", parse_only=strainer)]
     return ids
diff --git a/lib/lp/services/feeds/tests/test_doc.py b/lib/lp/services/feeds/tests/test_doc.py
index b0414e4..036b9eb 100644
--- a/lib/lp/services/feeds/tests/test_doc.py
+++ b/lib/lp/services/feeds/tests/test_doc.py
@@ -9,7 +9,6 @@ import os
 
 from lp.services.testing import build_test_suite
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 special = {}
diff --git a/lib/lp/services/fields/__init__.py b/lib/lp/services/fields/__init__.py
index 684ec36..1887615 100644
--- a/lib/lp/services/fields/__init__.py
+++ b/lib/lp/services/fields/__init__.py
@@ -2,55 +2,55 @@
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 __all__ = [
-    'AnnouncementDate',
-    'BaseImageUpload',
-    'BlacklistableContentNameField',
-    'BugField',
-    'ContentNameField',
-    'Description',
-    'Datetime',
-    'DuplicateBug',
-    'FieldNotBoundError',
-    'FormattableDate',
-    'IAnnouncementDate',
-    'IBaseImageUpload',
-    'IBugField',
-    'IDescription',
-    'IInlineObject',
-    'INoneableTextLine',
-    'IPersonChoice',
-    'IStrippedTextLine',
-    'ISummary',
-    'ITag',
-    'ITitle',
-    'IURIField',
-    'IWhiteboard',
-    'IconImageUpload',
-    'InlineObject',
-    'KEEP_SAME_IMAGE',
-    'LogoImageUpload',
-    'MugshotImageUpload',
-    'NoneableDescription',
-    'NoneableTextLine',
-    'PersonChoice',
-    'PillarAliases',
-    'PillarNameField',
-    'PrivateTeamNotAllowed',
-    'ProductBugTracker',
-    'ProductNameField',
-    'PublicPersonChoice',
-    'SearchTag',
-    'StrippedTextLine',
-    'Summary',
-    'Tag',
-    'Title',
-    'URIField',
-    'UniqueField',
-    'Whiteboard',
-    'WorkItemsText',
-    'is_public_person_or_closed_team',
-    'is_public_person',
-    ]
+    "AnnouncementDate",
+    "BaseImageUpload",
+    "BlacklistableContentNameField",
+    "BugField",
+    "ContentNameField",
+    "Description",
+    "Datetime",
+    "DuplicateBug",
+    "FieldNotBoundError",
+    "FormattableDate",
+    "IAnnouncementDate",
+    "IBaseImageUpload",
+    "IBugField",
+    "IDescription",
+    "IInlineObject",
+    "INoneableTextLine",
+    "IPersonChoice",
+    "IStrippedTextLine",
+    "ISummary",
+    "ITag",
+    "ITitle",
+    "IURIField",
+    "IWhiteboard",
+    "IconImageUpload",
+    "InlineObject",
+    "KEEP_SAME_IMAGE",
+    "LogoImageUpload",
+    "MugshotImageUpload",
+    "NoneableDescription",
+    "NoneableTextLine",
+    "PersonChoice",
+    "PillarAliases",
+    "PillarNameField",
+    "PrivateTeamNotAllowed",
+    "ProductBugTracker",
+    "ProductNameField",
+    "PublicPersonChoice",
+    "SearchTag",
+    "StrippedTextLine",
+    "Summary",
+    "Tag",
+    "Title",
+    "URIField",
+    "UniqueField",
+    "Whiteboard",
+    "WorkItemsText",
+    "is_public_person_or_closed_team",
+    "is_public_person",
+]
 
 
 import io
@@ -59,15 +59,9 @@ from textwrap import dedent
 
 from lazr.restful.fields import Reference
 from lazr.restful.interfaces import IReferenceChoice
-from lazr.uri import (
-    InvalidURIError,
-    URI,
-    )
+from lazr.uri import URI, InvalidURIError
 from zope.component import getUtility
-from zope.interface import (
-    implementer,
-    Interface,
-    )
+from zope.interface import Interface, implementer
 from zope.schema import (
     Bool,
     Bytes,
@@ -79,7 +73,7 @@ from zope.schema import (
     Text,
     TextLine,
     Tuple,
-    )
+)
 from zope.schema.interfaces import (
     ConstraintNotSatisfied,
     IBytes,
@@ -88,37 +82,31 @@ from zope.schema.interfaces import (
     IObject,
     IText,
     ITextLine,
-    )
+)
 from zope.security.interfaces import ForbiddenAttribute
 
 from lp import _
 from lp.app.validators import LaunchpadValidationError
-from lp.app.validators.name import (
-    name_validator,
-    valid_name,
-    )
+from lp.app.validators.name import name_validator, valid_name
 from lp.blueprints.enums import SpecificationWorkItemStatus
 from lp.bugs.errors import InvalidDuplicateValue
-from lp.registry.enums import (
-    EXCLUSIVE_TEAM_POLICY,
-    PersonVisibility,
-    )
+from lp.registry.enums import EXCLUSIVE_TEAM_POLICY, PersonVisibility
 from lp.registry.interfaces.pillar import IPillarNameSet
 from lp.services.webapp.interfaces import ILaunchBag
 
-
 # Marker object to tell BaseImageUpload to keep the existing image.
 KEEP_SAME_IMAGE = object()
 # Regexp for detecting milestone headers in work items text.
-MILESTONE_RE = re.compile(r'^work items(.*)\s*:\s*$', re.I)
+MILESTONE_RE = re.compile(r"^work items(.*)\s*:\s*$", re.I)
 # Regexp for work items.
 WORKITEM_RE = re.compile(
-    r'^(\[(?P<assignee>.*?)\])?\s*(?P<title>.*)\s*:\s*(?P<status>.*)\s*$',
-    re.I)
+    r"^(\[(?P<assignee>.*?)\])?\s*(?P<title>.*)\s*:\s*(?P<status>.*)\s*$", re.I
+)
 
 
 # Field Interfaces
 
+
 class IStrippedTextLine(ITextLine):
     """A field with leading and trailing whitespaces stripped."""
 
@@ -173,42 +161,52 @@ class IURIField(ITextLine):
 
     A text line that holds a URI.
     """
+
     trailing_slash = Bool(
-        title=_('Whether a trailing slash is required for this field'),
+        title=_("Whether a trailing slash is required for this field"),
         required=False,
-        description=_('If set to True, then the path component of the URI '
-                      'will be automatically normalized to end in a slash. '
-                      'If set to False, any trailing slash will be '
-                      'automatically removed. If set to None, URIs will '
-                      'not be normalized.'))
+        description=_(
+            "If set to True, then the path component of the URI "
+            "will be automatically normalized to end in a slash. "
+            "If set to False, any trailing slash will be "
+            "automatically removed. If set to None, URIs will "
+            "not be normalized."
+        ),
+    )
 
     def normalize(input):
         """Normalize a URI.
 
-         * whitespace is stripped from the input value
-         * if the field requires (or forbids) a trailing slash on the URI,
-           ensures that the widget ends in a slash (or doesn't end in a
-           slash).
-         * the URI is canonicalized.
-         """
+        * whitespace is stripped from the input value
+        * if the field requires (or forbids) a trailing slash on the URI,
+          ensures that the widget ends in a slash (or doesn't end in a
+          slash).
+        * the URI is canonicalized.
+        """
 
 
 class IBaseImageUpload(IBytes):
     """Marker interface for ImageUpload fields."""
 
     dimensions = Tuple(
-        title=_('Maximum dimensions'),
-        description=_('A two-tuple with the maximum width and height (in '
-                      'pixels) of this image.'))
+        title=_("Maximum dimensions"),
+        description=_(
+            "A two-tuple with the maximum width and height (in "
+            "pixels) of this image."
+        ),
+    )
     max_size = Int(
-        title=_('Maximum size'),
-        description=_('The maximum size (in bytes) of this image.'))
+        title=_("Maximum size"),
+        description=_("The maximum size (in bytes) of this image."),
+    )
 
     default_image_resource = TextLine(
-        title=_('The default image'),
+        title=_("The default image"),
         description=_(
-            'The URL of the zope3 resource of the default image that should '
-            'be used. Something of the form /@@/team-mugshot'))
+            "The URL of the zope3 resource of the default image that should "
+            "be used. Something of the form /@@/team-mugshot"
+        ),
+    )
 
     def getCurrentImage():
         """Return the value of the field for the object bound to it.
@@ -219,7 +217,6 @@ class IBaseImageUpload(IBytes):
 
 @implementer(IStrippedTextLine)
 class StrippedTextLine(TextLine):
-
     def set(self, object, value):
         """Strip the value and pass up."""
         if value is not None:
@@ -235,6 +232,7 @@ class NoneableTextLine(StrippedTextLine):
 # Title
 # A field to capture a launchpad object title
 
+
 @implementer(ITitle)
 class Title(StrippedTextLine):
     pass
@@ -271,6 +269,7 @@ class StrippableText(Text):
 # Summary
 # A field capture a Launchpad object summary
 
+
 @implementer(ISummary)
 class Summary(StrippableText):
     pass
@@ -279,6 +278,7 @@ class Summary(StrippableText):
 # Description
 # A field capture a Launchpad object description
 
+
 @implementer(IDescription)
 class Description(StrippableText):
     pass
@@ -292,6 +292,7 @@ class NoneableDescription(Description):
 # Whiteboard
 # A field capture a Launchpad object whiteboard
 
+
 @implementer(IWhiteboard)
 class Whiteboard(StrippableText):
     pass
@@ -306,15 +307,17 @@ class FormattableDate(Date):
     """
 
     def _validate(self, value):
-        error_msg = ("Date could not be formatted. Provide a date formatted "
-            "like YYYY-MM-DD format. The year must be after 1900.")
+        error_msg = (
+            "Date could not be formatted. Provide a date formatted "
+            "like YYYY-MM-DD format. The year must be after 1900."
+        )
 
         super()._validate(value)
         # The only thing of interest here is whether or the input can be
         # formatted properly, not whether it makes sense otherwise.
         # As a minimal sanity check, just raise an error if it fails.
         try:
-            value.strftime('%Y')
+            value.strftime("%Y")
             if value.year < 1900:
                 # Unlike Python 2, Python 3's `date.strftime` works fine on
                 # years earlier than 1900.  However, we carry on refusing it
@@ -322,7 +325,7 @@ class FormattableDate(Date):
                 # at the time of writing this is only used for the targeted
                 # date of milestones and so dates before 1900 aren't
                 # interesting anyway.
-                raise ValueError('year=%d is before 1900' % value.year)
+                raise ValueError("year=%d is before 1900" % value.year)
         except ValueError:
             raise LaunchpadValidationError(error_msg)
 
@@ -334,7 +337,6 @@ class AnnouncementDate(Datetime):
 
 @implementer(IBugField)
 class BugField(Reference):
-
     def __init__(self, *args, **kwargs):
         """The schema will always be `IBug`."""
         super().__init__(Interface, *args, **kwargs)
@@ -342,6 +344,7 @@ class BugField(Reference):
     def _get_schema(self):
         """Get the schema here to avoid circular imports."""
         from lp.bugs.interfaces.bug import IBug
+
         return IBug
 
     def _set_schema(self, schema):
@@ -366,22 +369,36 @@ class DuplicateBug(BugField):
         current_bug = self.context
         dup_target = value
         if current_bug == dup_target:
-            raise InvalidDuplicateValue(_(dedent("""
-                You can't mark a bug as a duplicate of itself.""")))
+            raise InvalidDuplicateValue(
+                _(
+                    dedent(
+                        """
+                You can't mark a bug as a duplicate of itself."""
+                    )
+                )
+            )
         elif dup_target.duplicateof is not None:
-            raise InvalidDuplicateValue(_(dedent("""
+            raise InvalidDuplicateValue(
+                _(
+                    dedent(
+                        """
                 Bug ${dup} is already a duplicate of bug ${orig}. You
                 can only mark a bug report as duplicate of one that
                 isn't a duplicate itself.
-                """), mapping={'dup': dup_target.id,
-                               'orig': dup_target.duplicateof.id}))
+                """
+                    ),
+                    mapping={
+                        "dup": dup_target.id,
+                        "orig": dup_target.duplicateof.id,
+                    },
+                )
+            )
         else:
             return True
 
 
 @implementer(ITag)
 class Tag(TextLine):
-
     def constraint(self, value):
         """Make sure that the value is a valid name."""
         super_constraint = TextLine.constraint(self, value)
@@ -389,7 +406,6 @@ class Tag(TextLine):
 
 
 class SearchTag(Tag):
-
     def constraint(self, value):
         """Make sure the value is a valid search tag.
 
@@ -397,9 +413,9 @@ class SearchTag(Tag):
         with a minus, denoting "not this tag". A simple wildcard - an
         asterisk - is also valid, with or without a leading minus.
         """
-        if value in ('*', '-*'):
+        if value in ("*", "-*"):
             return True
-        elif value.startswith('-'):
+        elif value.startswith("-"):
             return super().constraint(value[1:])
         else:
             return super().constraint(value)
@@ -427,15 +443,15 @@ class UniqueField(TextLine):
         raise NotImplementedError
 
     def _isValueTaken(self, value):
-        """Returns true if and only if the specified value is already taken.
-        """
+        """Returns true if and only if the specified value is already taken."""
         return self._getByAttribute(value) is not None
 
     def unchanged(self, input):
         """Return True if the attribute on the object is unchanged."""
         _marker = object()
-        if (self._content_iface.providedBy(self.context) and
-            input == getattr(self.context, self.attribute, _marker)):
+        if self._content_iface.providedBy(self.context) and input == getattr(
+            self.context, self.attribute, _marker
+        ):
             return True
         return False
 
@@ -464,7 +480,7 @@ class UniqueField(TextLine):
 class ContentNameField(UniqueField):
     """Base class for fields that are used by unique 'name' attributes."""
 
-    attribute = 'name'
+    attribute = "name"
 
     def _getByAttribute(self, input):
         """Return the content object with the given attribute."""
@@ -486,9 +502,11 @@ class ContentNameField(UniqueField):
 class BlacklistableContentNameField(ContentNameField):
     """ContentNameField that also checks that a name is not blacklisted"""
 
-    blacklistmessage = _("The name '%s' has been blocked by the Launchpad "
-                         "administrators. Contact Launchpad Support if you "
-                         "want to use this name.")
+    blacklistmessage = _(
+        "The name '%s' has been blocked by the Launchpad "
+        "administrators. Contact Launchpad Support if you "
+        "want to use this name."
+    )
 
     def _validate(self, input):
         """Check that the given name is valid, unique and not blacklisted."""
@@ -503,6 +521,7 @@ class BlacklistableContentNameField(ContentNameField):
 
         # Need a local import because of circular dependencies.
         from lp.registry.interfaces.person import IPersonSet
+
         user = getUtility(ILaunchBag).user
         if getUtility(IPersonSet).isNameBlacklisted(input, user):
             raise LaunchpadValidationError(self.blacklistmessage % input)
@@ -514,7 +533,7 @@ class PillarAliases(TextLine):
     def _split_input(self, input):
         if input is None:
             return []
-        return re.sub(r'\s+', ' ', input).split()
+        return re.sub(r"\s+", " ", input).split()
 
     def _validate(self, input):
         """Make sure all the aliases are valid for the field's pillar.
@@ -526,19 +545,20 @@ class PillarAliases(TextLine):
         from lp.registry.interfaces.distribution import IDistribution
         from lp.registry.interfaces.product import IProduct
         from lp.registry.interfaces.projectgroup import IProjectGroup
+
         if IProduct.providedBy(context):
-            name_field = IProduct['name']
+            name_field = IProduct["name"]
         elif IProjectGroup.providedBy(context):
-            name_field = IProjectGroup['name']
+            name_field = IProjectGroup["name"]
         elif IDistribution.providedBy(context):
-            name_field = IDistribution['name']
+            name_field = IDistribution["name"]
         else:
             raise AssertionError("Unexpected context type.")
         name_field.bind(context)
         existing_aliases = context.aliases
         for name in self._split_input(input):
             if name == context.name:
-                raise LaunchpadValidationError('This is your name: %s' % name)
+                raise LaunchpadValidationError("This is your name: %s" % name)
             elif name in existing_aliases:
                 # This is already an alias to this pillar, so there's no need
                 # to validate it.
@@ -562,12 +582,14 @@ class ProductBugTracker(Choice):
     This field uses two attributes on the Product to model its state:
     'official_malone' and 'bugtracker'
     """
+
     malone_marker = object()
 
     @property
     def schema(self):
         # The IBugTracker needs to be imported here to avoid an import loop.
         from lp.bugs.interfaces.bugtracker import IBugTracker
+
         return IBugTracker
 
     def get(self, ob):
@@ -589,10 +611,16 @@ class ProductBugTracker(Choice):
 
 @implementer(IURIField)
 class URIField(TextLine):
-
-    def __init__(self, allowed_schemes=(), allow_userinfo=True,
-                 allow_port=True, allow_query=True, allow_fragment=True,
-                 trailing_slash=None, **kwargs):
+    def __init__(
+        self,
+        allowed_schemes=(),
+        allow_userinfo=True,
+        allow_port=True,
+        allow_query=True,
+        allow_fragment=True,
+        trailing_slash=None,
+        **kwargs
+    ):
         super().__init__(**kwargs)
         self.allowed_schemes = set(allowed_schemes)
         self.allow_userinfo = allow_userinfo
@@ -634,24 +662,29 @@ class URIField(TextLine):
         if self.allowed_schemes and uri.scheme not in self.allowed_schemes:
             raise LaunchpadValidationError(
                 'The URI scheme "%s" is not allowed.  Only URIs with '
-                'the following schemes may be used: %s'
-                % (uri.scheme, ', '.join(sorted(self.allowed_schemes))))
+                "the following schemes may be used: %s"
+                % (uri.scheme, ", ".join(sorted(self.allowed_schemes)))
+            )
 
         if not self.allow_userinfo and uri.userinfo is not None:
             raise LaunchpadValidationError(
-                'A username may not be specified in the URI.')
+                "A username may not be specified in the URI."
+            )
 
         if not self.allow_port and uri.port is not None:
             raise LaunchpadValidationError(
-                'Non-default ports are not allowed.')
+                "Non-default ports are not allowed."
+            )
 
         if not self.allow_query and uri.query is not None:
             raise LaunchpadValidationError(
-                'URIs with query strings are not allowed.')
+                "URIs with query strings are not allowed."
+            )
 
         if not self.allow_fragment and uri.fragment is not None:
             raise LaunchpadValidationError(
-                'URIs with fragment identifiers are not allowed.')
+                "URIs with fragment identifiers are not allowed."
+            )
 
         super()._validate(value)
 
@@ -680,8 +713,7 @@ class BaseImageUpload(Bytes):
         # class constructor can be used in the same way as other
         # Interface attribute specifiers.
         if default_image_resource is None:
-            raise AssertionError(
-                "You must specify a default image resource.")
+            raise AssertionError("You must specify a default image resource.")
 
         self.default_image_resource = default_image_resource
         Bytes.__init__(self, **kw)
@@ -703,35 +735,64 @@ class BaseImageUpload(Bytes):
         """Check that the given image is under the given constraints."""
         # No global import to avoid hard dependency on PIL being installed
         import PIL.Image
+
         if len(image) > self.max_size:
-            raise LaunchpadValidationError(_(dedent("""
-                This image exceeds the maximum allowed size in bytes.""")))
+            raise LaunchpadValidationError(
+                _(
+                    dedent(
+                        """
+                This image exceeds the maximum allowed size in bytes."""
+                    )
+                )
+            )
         try:
             pil_image = PIL.Image.open(io.BytesIO(image))
         except (OSError, ValueError):
-            raise LaunchpadValidationError(_(dedent("""
+            raise LaunchpadValidationError(
+                _(
+                    dedent(
+                        """
                 The file uploaded was not recognized as an image; please
-                check it and retry.""")))
+                check it and retry."""
+                    )
+                )
+            )
         width, height = pil_image.size
         required_width, required_height = self.dimensions
         if self.exact_dimensions:
             if width != required_width or height != required_height:
-                raise LaunchpadValidationError(_(dedent("""
+                raise LaunchpadValidationError(
+                    _(
+                        dedent(
+                            """
                     This image is not exactly ${width}x${height}
-                    pixels in size."""),
-                    mapping={'width': required_width,
-                             'height': required_height}))
+                    pixels in size."""
+                        ),
+                        mapping={
+                            "width": required_width,
+                            "height": required_height,
+                        },
+                    )
+                )
         else:
             if width > required_width or height > required_height:
-                raise LaunchpadValidationError(_(dedent("""
+                raise LaunchpadValidationError(
+                    _(
+                        dedent(
+                            """
                     This image is larger than ${width}x${height}
-                    pixels in size."""),
-                    mapping={'width': required_width,
-                             'height': required_height}))
+                    pixels in size."""
+                        ),
+                        mapping={
+                            "width": required_width,
+                            "height": required_height,
+                        },
+                    )
+                )
         return True
 
     def _validate(self, value):
-        if hasattr(value, 'seek'):
+        if hasattr(value, "seek"):
             value.seek(0)
             content = value.read()
         else:
@@ -778,12 +839,14 @@ class ProductNameField(PillarNameField):
     def _content_iface(self):
         # Local import to avoid circular dependencies.
         from lp.registry.interfaces.product import IProduct
+
         return IProduct
 
 
 def is_public_person(person):
     """Return True if the person is public."""
     from lp.registry.interfaces.person import IPerson
+
     if not IPerson.providedBy(person):
         return False
     return person.visibility == PersonVisibility.PUBLIC
@@ -792,6 +855,7 @@ def is_public_person(person):
 def is_public_person_or_closed_team(person):
     """Return True if person is a Person or not an open or delegated team."""
     from lp.registry.interfaces.person import IPerson
+
     if not IPerson.providedBy(person):
         return False
     if not person.is_team:
@@ -814,7 +878,8 @@ class PersonChoice(Choice):
     This is useful as a superclass and provides a clearer error message than
     "Constraint not satisfied".
     """
-    schema = IObject    # Will be set to IPerson once IPerson is defined.
+
+    schema = IObject  # Will be set to IPerson once IPerson is defined.
 
 
 class PublicPersonChoice(PersonChoice):
@@ -829,25 +894,30 @@ class PublicPersonChoice(PersonChoice):
 
 
 class WorkItemsText(Text):
-
     def parseLine(self, line):
         workitem_match = WORKITEM_RE.search(line)
         if workitem_match:
-            assignee = workitem_match.group('assignee')
-            title = workitem_match.group('title')
-            status = workitem_match.group('status')
+            assignee = workitem_match.group("assignee")
+            title = workitem_match.group("title")
+            status = workitem_match.group("status")
         else:
             raise LaunchpadValidationError(
-                'Invalid work item format: "%s"' % line)
-        if title == '':
+                'Invalid work item format: "%s"' % line
+            )
+        if title == "":
             raise LaunchpadValidationError(
-                'No work item title found on "%s"' % line)
-        if title.startswith('['):
+                'No work item title found on "%s"' % line
+            )
+        if title.startswith("["):
             raise LaunchpadValidationError(
-                'Missing closing "]" for assignee on "%s".' % line)
+                'Missing closing "]" for assignee on "%s".' % line
+            )
 
-        return {'title': title, 'status': status.strip().upper(),
-                'assignee': assignee}
+        return {
+            "title": title,
+            "status": status.strip().upper(),
+            "assignee": assignee,
+        }
 
     def parse(self, text):
         sequence = 0
@@ -855,19 +925,19 @@ class WorkItemsText(Text):
         work_items = []
         if text is not None:
             for line in text.splitlines():
-                if line.strip() == '':
+                if line.strip() == "":
                     continue
                 milestone_match = MILESTONE_RE.search(line)
                 if milestone_match:
                     milestone_part = milestone_match.group(1).strip()
-                    if milestone_part == '':
+                    if milestone_part == "":
                         milestone = None
                     else:
                         milestone = milestone_part.split()[-1]
                 else:
                     new_work_item = self.parseLine(line)
-                    new_work_item['milestone'] = milestone
-                    new_work_item['sequence'] = sequence
+                    new_work_item["milestone"] = milestone
+                    new_work_item["sequence"] = sequence
                     sequence += 1
                     work_items.append(new_work_item)
         return work_items
@@ -878,25 +948,27 @@ class WorkItemsText(Text):
     def parseAndValidate(self, text):
         work_items = self.parse(text)
         for work_item in work_items:
-            work_item['status'] = self.getStatus(work_item['status'])
-            work_item['assignee'] = self.getAssignee(work_item['assignee'])
-            work_item['milestone'] = self.getMilestone(work_item['milestone'])
+            work_item["status"] = self.getStatus(work_item["status"])
+            work_item["assignee"] = self.getAssignee(work_item["assignee"])
+            work_item["milestone"] = self.getMilestone(work_item["milestone"])
         return work_items
 
     def getStatus(self, text):
         valid_statuses = SpecificationWorkItemStatus.items
         if text.lower() not in [item.name.lower() for item in valid_statuses]:
-            raise LaunchpadValidationError('Unknown status: %s' % text)
+            raise LaunchpadValidationError("Unknown status: %s" % text)
         return valid_statuses[text.upper()]
 
     def getAssignee(self, assignee_name):
         if assignee_name is None:
             return None
         from lp.registry.interfaces.person import IPersonSet
+
         assignee = getUtility(IPersonSet).getByName(assignee_name)
         if assignee is None:
             raise LaunchpadValidationError(
-                "Unknown person name: %s" % assignee_name)
+                "Unknown person name: %s" % assignee_name
+            )
         return assignee
 
     def getMilestone(self, milestone_name):
@@ -909,19 +981,23 @@ class WorkItemsText(Text):
         from lp.registry.interfaces.distribution import IDistribution
         from lp.registry.interfaces.milestone import IMilestoneSet
         from lp.registry.interfaces.product import IProduct
+
         if IProduct.providedBy(target):
             milestone = getUtility(IMilestoneSet).getByNameAndProduct(
-                milestone_name, target)
+                milestone_name, target
+            )
         elif IDistribution.providedBy(target):
             milestone = getUtility(IMilestoneSet).getByNameAndDistribution(
-                milestone_name, target)
+                milestone_name, target
+            )
         else:
             raise AssertionError("Unexpected target type.")
 
         if milestone is None:
-            raise LaunchpadValidationError("The milestone '%s' is not valid "
-                                           "for the target '%s'." %
-                                           (milestone_name, target.name))
+            raise LaunchpadValidationError(
+                "The milestone '%s' is not valid "
+                "for the target '%s'." % (milestone_name, target.name)
+            )
         return milestone
 
 
diff --git a/lib/lp/services/fields/tests/test_doc.py b/lib/lp/services/fields/tests/test_doc.py
index bb85450..0bee367 100644
--- a/lib/lp/services/fields/tests/test_doc.py
+++ b/lib/lp/services/fields/tests/test_doc.py
@@ -10,7 +10,6 @@ import os
 from lp.services.testing import build_test_suite
 from lp.testing.layers import DatabaseFunctionalLayer
 
-
 here = os.path.dirname(os.path.realpath(__file__))
 
 
diff --git a/lib/lp/services/fields/tests/test_fields.py b/lib/lp/services/fields/tests/test_fields.py
index 6dd3aba..665ce95 100644
--- a/lib/lp/services/fields/tests/test_fields.py
+++ b/lib/lp/services/fields/tests/test_fields.py
@@ -13,25 +13,18 @@ from zope.schema.interfaces import TooShort
 
 from lp.app.validators import LaunchpadValidationError
 from lp.blueprints.enums import SpecificationWorkItemStatus
-from lp.registry.enums import (
-    EXCLUSIVE_TEAM_POLICY,
-    INCLUSIVE_TEAM_POLICY,
-    )
+from lp.registry.enums import EXCLUSIVE_TEAM_POLICY, INCLUSIVE_TEAM_POLICY
 from lp.registry.interfaces.nameblacklist import INameBlacklistSet
 from lp.services.database.interfaces import IStore
 from lp.services.fields import (
     BaseImageUpload,
     BlacklistableContentNameField,
     FormattableDate,
-    is_public_person_or_closed_team,
     StrippableText,
     WorkItemsText,
-    )
-from lp.testing import (
-    login_person,
-    TestCase,
-    TestCaseWithFactory,
-    )
+    is_public_person_or_closed_team,
+)
+from lp.testing import TestCase, TestCaseWithFactory, login_person
 from lp.testing.layers import DatabaseFunctionalLayer
 
 
@@ -45,67 +38,65 @@ def make_target():
 
 
 class TestFormattableDate(TestCase):
-
     def test_validation_fails_on_bad_data(self):
         field = FormattableDate()
         date_value = datetime.date(
-            *(time.strptime('1000-01-01', '%Y-%m-%d'))[:3])
-        self.assertRaises(
-            LaunchpadValidationError, field.validate, date_value)
+            *(time.strptime("1000-01-01", "%Y-%m-%d"))[:3]
+        )
+        self.assertRaises(LaunchpadValidationError, field.validate, date_value)
 
     def test_validation_passes_good_data(self):
         field = FormattableDate()
         date_value = datetime.date(
-            *(time.strptime('2010-01-01', '%Y-%m-%d'))[:3])
+            *(time.strptime("2010-01-01", "%Y-%m-%d"))[:3]
+        )
         self.assertIs(None, field.validate(date_value))
 
 
 class TestStrippableText(TestCase):
-
     def test_strips_text(self):
         # The set method should strip the string before setting the field.
         target = make_target()
-        field = StrippableText(__name__='test', strip_text=True)
+        field = StrippableText(__name__="test", strip_text=True)
         self.assertTrue(field.strip_text)
-        field.set(target, '  testing  ')
-        self.assertEqual('testing', target.test)
+        field.set(target, "  testing  ")
+        self.assertEqual("testing", target.test)
 
     def test_strips_text_trailing_only(self):
         # The set method strips the trailing whitespace.
         target = make_target()
         field = StrippableText(
-            __name__='test', strip_text=True, trailing_only=True)
+            __name__="test", strip_text=True, trailing_only=True
+        )
         self.assertTrue(field.trailing_only)
-        field.set(target, '  testing  ')
-        self.assertEqual('  testing', target.test)
+        field.set(target, "  testing  ")
+        self.assertEqual("  testing", target.test)
 
     def test_default_constructor(self):
         # If strip_text is not set, or set to false, then the text is not
         # stripped when set.
         target = make_target()
-        field = StrippableText(__name__='test')
+        field = StrippableText(__name__="test")
         self.assertFalse(field.strip_text)
-        field.set(target, '  testing  ')
-        self.assertEqual('  testing  ', target.test)
+        field.set(target, "  testing  ")
+        self.assertEqual("  testing  ", target.test)
 
     def test_setting_with_none(self):
         # The set method is given None, the attribute is set to None
         target = make_target()
-        field = StrippableText(__name__='test', strip_text=True)
+        field = StrippableText(__name__="test", strip_text=True)
         field.set(target, None)
         self.assertIs(None, target.test)
 
     def test_validate_min_contraints(self):
         # The minimum length constraint tests the stripped string.
-        field = StrippableText(
-            __name__='test', strip_text=True, min_length=1)
-        self.assertRaises(TooShort, field.validate, '  ')
+        field = StrippableText(__name__="test", strip_text=True, min_length=1)
+        self.assertRaises(TooShort, field.validate, "  ")
 
     def test_validate_max_contraints(self):
         # The minimum length constraint tests the stripped string.
-        field = StrippableText(
-            __name__='test', strip_text=True, max_length=2)
-        self.assertEqual(None, field.validate('  a  '))
+        field = StrippableText(__name__="test", strip_text=True, max_length=2)
+        self.assertEqual(None, field.validate("  a  "))
 
 
 class TestWorkItemsTextValidation(TestCaseWithFactory):
@@ -114,30 +105,40 @@ class TestWorkItemsTextValidation(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.field = WorkItemsText(__name__='test')
+        self.field = WorkItemsText(__name__="test")
 
     def test_parseandvalidate(self):
         status = SpecificationWorkItemStatus.TODO
         assignee = self.factory.makePerson()
         milestone = self.factory.makeMilestone()
-        title = 'A work item'
+        title = "A work item"
         specification = self.factory.makeSpecification(
-            product=milestone.product)
+            product=milestone.product
+        )
         field = self.field.bind(specification)
-        work_items_text = (
-            "Work items for %s:\n"
-           "[%s]%s: %s" % (milestone.name, assignee.name, title, status.name))
+        work_items_text = "Work items for %s:\n" "[%s]%s: %s" % (
+            milestone.name,
+            assignee.name,
+            title,
+            status.name,
+        )
         work_item = field.parseAndValidate(work_items_text)[0]
-        self.assertEqual({'assignee': assignee,
-                          'milestone': milestone,
-                          'sequence': 0,
-                          'status': status,
-                          'title': title}, work_item)
+        self.assertEqual(
+            {
+                "assignee": assignee,
+                "milestone": milestone,
+                "sequence": 0,
+                "status": status,
+                "title": title,
+            },
+            work_item,
+        )
 
     def test_unknown_assignee_is_rejected(self):
-        person_name = 'test-person'
+        person_name = "test-person"
         self.assertRaises(
-            LaunchpadValidationError, self.field.getAssignee, person_name)
+            LaunchpadValidationError, self.field.getAssignee, person_name
+        )
 
     def test_validate_valid_assignee(self):
         assignee = self.factory.makePerson()
@@ -153,12 +154,14 @@ class TestWorkItemsTextValidation(TestCaseWithFactory):
         specification = self.factory.makeSpecification()
         field = self.field.bind(specification)
         self.assertRaises(
-            LaunchpadValidationError, field.getMilestone, 'does-not-exist')
+            LaunchpadValidationError, field.getMilestone, "does-not-exist"
+        )
 
     def test_validate_valid_product_milestone(self):
         milestone = self.factory.makeMilestone()
         specification = self.factory.makeSpecification(
-            product=milestone.product)
+            product=milestone.product
+        )
         field = self.field.bind(specification)
         self.assertEqual(milestone, field.getMilestone(milestone.name))
 
@@ -166,117 +169,141 @@ class TestWorkItemsTextValidation(TestCaseWithFactory):
         distro = self.factory.makeDistribution()
         milestone = self.factory.makeMilestone(distribution=distro)
         specification = self.factory.makeSpecification(
-            distribution=milestone.distribution)
+            distribution=milestone.distribution
+        )
         field = self.field.bind(specification)
         self.assertEqual(milestone, field.getMilestone(milestone.name))
 
     def test_validate_invalid_milestone(self):
-        milestone_name = 'test-milestone'
+        milestone_name = "test-milestone"
         self.factory.makeMilestone(name=milestone_name)
         # Milestone exists but is not a target for this spec.
         specification = self.factory.makeSpecification(product=None)
         field = self.field.bind(specification)
         self.assertRaises(
-            LaunchpadValidationError, field.getMilestone, milestone_name)
+            LaunchpadValidationError, field.getMilestone, milestone_name
+        )
 
     def test_validate_invalid_status(self):
         self.assertRaises(
-            LaunchpadValidationError, self.field.getStatus,
-            'Invalid status: FOO')
+            LaunchpadValidationError,
+            self.field.getStatus,
+            "Invalid status: FOO",
+        )
 
     def test_validate_valid_statuses(self):
-        statuses = [SpecificationWorkItemStatus.TODO,
-                    SpecificationWorkItemStatus.DONE,
-                    SpecificationWorkItemStatus.POSTPONED,
-                    SpecificationWorkItemStatus.INPROGRESS,
-                    SpecificationWorkItemStatus.BLOCKED]
+        statuses = [
+            SpecificationWorkItemStatus.TODO,
+            SpecificationWorkItemStatus.DONE,
+            SpecificationWorkItemStatus.POSTPONED,
+            SpecificationWorkItemStatus.INPROGRESS,
+            SpecificationWorkItemStatus.BLOCKED,
+        ]
         for status in statuses:
             validated_status = self.field.getStatus(status.name)
             self.assertEqual(validated_status, status)
 
 
 class TestWorkItemsText(TestCase):
-
     def setUp(self):
         super().setUp()
-        self.field = WorkItemsText(__name__='test')
+        self.field = WorkItemsText(__name__="test")
 
     def test_validate_raises_LaunchpadValidationError(self):
         self.assertRaises(
-            LaunchpadValidationError, self.field.validate,
-            'This is not a valid work item.')
+            LaunchpadValidationError,
+            self.field.validate,
+            "This is not a valid work item.",
+        )
 
     def test_single_line_parsing(self):
-        work_items_title = 'Test this work item'
-        parsed = self.field.parseLine('%s: TODO' % (work_items_title))
-        self.assertEqual(parsed['title'], work_items_title)
-        self.assertEqual(parsed['status'], 'TODO')
+        work_items_title = "Test this work ite