launchpad-reviewers team mailing list archive
launchpad-reviewers team › Mailing list archive › Message #29570
[Merge] ~cjwatson/launchpad:resource-warnings into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:resource-warnings into launchpad:master.
Commit message:
Fix various ResourceWarnings in tests
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/436127
I found this collection of miscellaneous `ResourceWarning` fixes when looking through my various local cleanup branches. I'm sure there's more of the same to be found elsewhere, but we might as well make the test suite a little quieter.
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:resource-warnings into launchpad:master.
diff --git a/lib/lp/bugs/doc/externalbugtracker-debbugs.rst b/lib/lp/bugs/doc/externalbugtracker-debbugs.rst
index 6fd64aa..b26f84f 100644
--- a/lib/lp/bugs/doc/externalbugtracker-debbugs.rst
+++ b/lib/lp/bugs/doc/externalbugtracker-debbugs.rst
@@ -175,11 +175,10 @@ cause importance to be set to medium, equivalent to the default normal
severity in debbugs.
>>> import email
- >>> summary = email.message_from_file(
- ... open(
- ... os.path.join(test_db_location, "db-h", "01", "237001.summary")
- ... )
- ... )
+ >>> with open(
+ ... os.path.join(test_db_location, "db-h", "01", "237001.summary")
+ ... ) as summary_file:
+ ... summary = email.message_from_file(summary_file)
>>> "Severity" not in summary
True
@@ -327,11 +326,10 @@ The Debbugs ExternalBugTracker can import a Debian bug into Launchpad.
The bug reporter gets taken from the From field in the debbugs bug
report.
- >>> report = email.message_from_file(
- ... open(
- ... os.path.join(test_db_location, "db-h", "35", "322535.report")
- ... )
- ... )
+ >>> with open(
+ ... os.path.join(test_db_location, "db-h", "35", "322535.report")
+ ... ) as report_file:
+ ... report = email.message_from_file(report_file)
>>> print(report["From"])
Moritz Muehlenhoff <jmm@xxxxxxxxxx>
diff --git a/lib/lp/bugs/scripts/debbugs.py b/lib/lp/bugs/scripts/debbugs.py
index a0307fc..4a908cc 100644
--- a/lib/lp/bugs/scripts/debbugs.py
+++ b/lib/lp/bugs/scripts/debbugs.py
@@ -123,6 +123,7 @@ class Database:
def __next__(self):
line = self.index.readline()
if not line:
+ self.index.close()
raise StopIteration
match = self.index_record.match(line)
@@ -184,6 +185,8 @@ class Database:
message = email.message_from_file(fd)
except Exception as e:
raise SummaryParseError("%s: %s" % (summary, str(e)))
+ finally:
+ fd.close()
version = message["format-version"]
if version is None:
@@ -223,8 +226,10 @@ class Database:
except FileNotFoundError:
raise ReportMissing(report)
- bug.report = fd.read()
- fd.close()
+ try:
+ bug.report = fd.read()
+ finally:
+ fd.close()
report_msg = email.message_from_bytes(bug.report)
charset = report_msg.get_content_charset("ascii")
diff --git a/lib/lp/codehosting/tests/test_rewrite.py b/lib/lp/codehosting/tests/test_rewrite.py
index 0424f32..9ba726d 100644
--- a/lib/lp/codehosting/tests/test_rewrite.py
+++ b/lib/lp/codehosting/tests/test_rewrite.py
@@ -350,7 +350,7 @@ class TestBranchRewriterScript(TestCaseWithFactory):
output_lines.append(nonblocking_readline(proc.stdout, 60).rstrip("\n"))
os.kill(proc.pid, signal.SIGINT)
- err = proc.stderr.read()
+ _, err = proc.communicate()
# The script produces logging output, but not to stderr.
self.assertEqual("", err)
self.assertEqual(expected_lines, output_lines)
@@ -373,6 +373,7 @@ class TestBranchRewriterScriptHandlesDisconnects(TestCase):
universal_newlines=True,
)
+ self.addCleanup(self.rewriter_proc.communicate)
self.addCleanup(self.rewriter_proc.terminate)
def request(self, query):
diff --git a/lib/lp/registry/doc/cache-country-mirrors.rst b/lib/lp/registry/doc/cache-country-mirrors.rst
index 7c9b82c..78dfe6c 100644
--- a/lib/lp/registry/doc/cache-country-mirrors.rst
+++ b/lib/lp/registry/doc/cache-country-mirrors.rst
@@ -48,8 +48,9 @@ look good.
>>> fr_txt_path = os.path.join(directory, "FR.txt")
>>> print("%o" % stat.S_IMODE(os.stat(fr_txt_path).st_mode))
644
- >>> for line in sorted(open(fr_txt_path).read().split("\n")):
- ... print(line)
+ >>> with open(fr_txt_path) as fr_txt:
+ ... for line in sorted(fr_txt.read().split("\n")):
+ ... print(line)
...
http://archive.ubuntu.com/ubuntu/
http://localhost:11375/archive-mirror/
diff --git a/lib/lp/services/apachelogparser/tests/test_apachelogparser.py b/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
index b820eb1..dee5327 100644
--- a/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
+++ b/lib/lp/services/apachelogparser/tests/test_apachelogparser.py
@@ -36,12 +36,12 @@ class TestLineParsing(TestCase):
"""Test parsing of lines of an apache log file."""
def test_return_value(self):
- fd = open(
+ with open(
os.path.join(here, "apache-log-files", "librarian-oneline.log")
- )
- host, date, status, request = get_host_date_status_and_request(
- fd.readline()
- )
+ ) as fd:
+ host, date, status, request = get_host_date_status_and_request(
+ fd.readline()
+ )
self.assertEqual(host, "201.158.154.121")
self.assertEqual(date, "[13/Jun/2008:18:38:57 +0100]")
self.assertEqual(status, "200")
@@ -158,8 +158,11 @@ class Test_get_fd_and_file_size(TestCase):
very beginning.
"""
fd, file_size = get_fd_and_file_size(file_path)
- self.assertEqual(fd.tell(), 0)
- self.assertEqual(len(fd.read()), file_size)
+ try:
+ self.assertEqual(fd.tell(), 0)
+ self.assertEqual(len(fd.read()), file_size)
+ finally:
+ fd.close()
def test_regular_file(self):
file_path = os.path.join(
@@ -212,58 +215,61 @@ class TestLogFileParsing(TestCase):
# also been downloaded once (last line of the sample log), but
# parse_file() always skips the last line as it may be truncated, so
# it doesn't show up in the dict returned.
- fd = open(
+ with open(
os.path.join(
here, "apache-log-files", "launchpadlibrarian.net.access-log"
),
"rb",
- )
- downloads, parsed_bytes, parsed_lines = parse_file(
- fd,
- start_position=0,
- logger=self.logger,
- get_download_key=get_path_download_key,
- )
- self.assertEqual(
- self.logger.getLogBuffer().strip(),
- "INFO Parsed 5 lines resulting in 3 download stats.",
- )
- date = datetime(2008, 6, 13)
- self.assertContentEqual(
- downloads.items(),
- [
- ("/12060796/me-tv-icon-64x64.png", {date: {"AU": 1}}),
- ("/8196569/mediumubuntulogo.png", {date: {"AR": 1, "JP": 1}}),
- ("/9096290/me-tv-icon-14x14.png", {date: {"AU": 1}}),
- ],
- )
+ ) as fd:
+ downloads, parsed_bytes, parsed_lines = parse_file(
+ fd,
+ start_position=0,
+ logger=self.logger,
+ get_download_key=get_path_download_key,
+ )
+ self.assertEqual(
+ self.logger.getLogBuffer().strip(),
+ "INFO Parsed 5 lines resulting in 3 download stats.",
+ )
+ date = datetime(2008, 6, 13)
+ self.assertContentEqual(
+ downloads.items(),
+ [
+ ("/12060796/me-tv-icon-64x64.png", {date: {"AU": 1}}),
+ (
+ "/8196569/mediumubuntulogo.png",
+ {date: {"AR": 1, "JP": 1}},
+ ),
+ ("/9096290/me-tv-icon-14x14.png", {date: {"AU": 1}}),
+ ],
+ )
- # The last line is skipped, so we'll record that the file has been
- # parsed until the beginning of the last line.
- self.assertNotEqual(parsed_bytes, fd.tell())
- self.assertEqual(parsed_bytes, self._getLastLineStart(fd))
+ # The last line is skipped, so we'll record that the file has been
+ # parsed until the beginning of the last line.
+ self.assertNotEqual(parsed_bytes, fd.tell())
+ self.assertEqual(parsed_bytes, self._getLastLineStart(fd))
def test_parsing_last_line(self):
# When there's only the last line of a given file for us to parse, we
# assume the file has been rotated and it's safe to parse its last
# line without worrying about whether or not it's been truncated.
- fd = open(
+ with open(
os.path.join(
here, "apache-log-files", "launchpadlibrarian.net.access-log"
),
"rb",
- )
- downloads, parsed_bytes, parsed_lines = parse_file(
- fd,
- start_position=self._getLastLineStart(fd),
- logger=self.logger,
- get_download_key=get_path_download_key,
- )
- self.assertEqual(
- self.logger.getLogBuffer().strip(),
- "INFO Parsed 1 lines resulting in 1 download stats.",
- )
- self.assertEqual(parsed_bytes, fd.tell())
+ ) as fd:
+ downloads, parsed_bytes, parsed_lines = parse_file(
+ fd,
+ start_position=self._getLastLineStart(fd),
+ logger=self.logger,
+ get_download_key=get_path_download_key,
+ )
+ self.assertEqual(
+ self.logger.getLogBuffer().strip(),
+ "INFO Parsed 1 lines resulting in 1 download stats.",
+ )
+ self.assertEqual(parsed_bytes, fd.tell())
self.assertContentEqual(
downloads.items(),
@@ -526,7 +532,8 @@ class TestParsedFilesDetection(TestCase):
# A file that has been parsed already but in which new content was
# added will be parsed again, starting from where parsing stopped last
# time.
- first_line = open(self.file_path).readline()
+ with open(self.file_path) as fd:
+ first_line = fd.readline()
ParsedApacheLog(first_line, len(first_line))
files_to_parse = list(get_files_to_parse([self.file_path]))
@@ -576,7 +583,8 @@ class TestParsedFilesDetection(TestCase):
# stopped last time. (Here we pretend we parsed only the first line)
gz_name = "launchpadlibrarian.net.access-log.1.gz"
gz_path = os.path.join(self.root, gz_name)
- first_line = gzip.open(gz_path).readline()
+ with gzip.open(gz_path) as gz:
+ first_line = gz.readline()
ParsedApacheLog(first_line, len(first_line))
files_to_parse = get_files_to_parse([gz_path])
positions = []
diff --git a/lib/lp/services/twistedsupport/loggingsupport.py b/lib/lp/services/twistedsupport/loggingsupport.py
index 2856992..6936fb6 100644
--- a/lib/lp/services/twistedsupport/loggingsupport.py
+++ b/lib/lp/services/twistedsupport/loggingsupport.py
@@ -80,12 +80,13 @@ class LaunchpadLogFile(DailyLogFile):
maxRotatedFiles=None,
compressLast=None,
):
- DailyLogFile.__init__(self, name, directory, defaultMode)
if maxRotatedFiles is not None:
self.maxRotatedFiles = int(maxRotatedFiles)
if compressLast is not None:
self.compressLast = int(compressLast)
+ # Check parameters before calling the superclass's __init__, since
+ # that opens a file that we'd otherwise need to close.
assert (
self.compressLast <= self.maxRotatedFiles
), "Only %d rotate files are kept, cannot compress %d" % (
@@ -93,6 +94,8 @@ class LaunchpadLogFile(DailyLogFile):
self.compressLast,
)
+ super().__init__(name, directory, defaultMode)
+
def _compressFile(self, path):
"""Compress the file in the given path using bzip2.
diff --git a/lib/lp/services/twistedsupport/tests/test_loggingsupport.py b/lib/lp/services/twistedsupport/tests/test_loggingsupport.py
index a929f49..a6ecac7 100644
--- a/lib/lp/services/twistedsupport/tests/test_loggingsupport.py
+++ b/lib/lp/services/twistedsupport/tests/test_loggingsupport.py
@@ -29,15 +29,21 @@ class TestLaunchpadLogFile(TestCase):
"""
# Default behaviour.
log_file = LaunchpadLogFile("test.log", self.temp_dir)
- self.assertEqual(5, log_file.maxRotatedFiles)
- self.assertEqual(3, log_file.compressLast)
+ try:
+ self.assertEqual(5, log_file.maxRotatedFiles)
+ self.assertEqual(3, log_file.compressLast)
+ finally:
+ log_file.close()
# Keeping only compressed rotated logs.
log_file = LaunchpadLogFile(
"test.log", self.temp_dir, maxRotatedFiles=1, compressLast=1
)
- self.assertEqual(1, log_file.maxRotatedFiles)
- self.assertEqual(1, log_file.compressLast)
+ try:
+ self.assertEqual(1, log_file.maxRotatedFiles)
+ self.assertEqual(1, log_file.compressLast)
+ finally:
+ log_file.close()
# Inconsistent parameters, compression more than kept rotated files.
self.assertRaises(
@@ -52,9 +58,8 @@ class TestLaunchpadLogFile(TestCase):
def createTestFile(self, name, content="nothing"):
"""Create a new file in the test directory."""
file_path = os.path.join(self.temp_dir, name)
- fd = open(file_path, "w")
- fd.write(content)
- fd.close()
+ with open(file_path, "w") as fd:
+ fd.write(content)
return file_path
def listTestFiles(self):
@@ -71,18 +76,24 @@ class TestLaunchpadLogFile(TestCase):
the newest first.
"""
log_file = LaunchpadLogFile("test.log", self.temp_dir)
- self.assertEqual(["test.log"], self.listTestFiles())
- self.assertEqual([], log_file.listLogs())
-
- self.createTestFile("boing")
- self.assertEqual([], log_file.listLogs())
-
- self.createTestFile("test.log.2000-12-31")
- self.createTestFile("test.log.2000-12-30.bz2")
- self.assertEqual(
- ["test.log.2000-12-31", "test.log.2000-12-30.bz2"],
- [os.path.basename(log_path) for log_path in log_file.listLogs()],
- )
+ try:
+ self.assertEqual(["test.log"], self.listTestFiles())
+ self.assertEqual([], log_file.listLogs())
+
+ self.createTestFile("boing")
+ self.assertEqual([], log_file.listLogs())
+
+ self.createTestFile("test.log.2000-12-31")
+ self.createTestFile("test.log.2000-12-30.bz2")
+ self.assertEqual(
+ ["test.log.2000-12-31", "test.log.2000-12-30.bz2"],
+ [
+ os.path.basename(log_path)
+ for log_path in log_file.listLogs()
+ ],
+ )
+ finally:
+ log_file.close()
def testRotate(self):
"""Check `LaunchpadLogFile.rotate`.
@@ -95,24 +106,29 @@ class TestLaunchpadLogFile(TestCase):
"test.log", self.temp_dir, maxRotatedFiles=2, compressLast=1
)
- # Monkey-patch DailyLogFile.suffix to be time independent.
- self.local_index = 0
+ try:
+ # Monkey-patch DailyLogFile.suffix to be time independent.
+ self.local_index = 0
- def testSuffix(tupledate):
- self.local_index += 1
- return str(self.local_index)
+ def testSuffix(tupledate):
+ self.local_index += 1
+ return str(self.local_index)
- log_file.suffix = testSuffix
+ log_file.suffix = testSuffix
- log_file.rotate()
- self.assertEqual(["test.log", "test.log.1"], self.listTestFiles())
+ log_file.rotate()
+ self.assertEqual(["test.log", "test.log.1"], self.listTestFiles())
- log_file.rotate()
- self.assertEqual(
- ["test.log", "test.log.1.bz2", "test.log.2"], self.listTestFiles()
- )
+ log_file.rotate()
+ self.assertEqual(
+ ["test.log", "test.log.1.bz2", "test.log.2"],
+ self.listTestFiles(),
+ )
- log_file.rotate()
- self.assertEqual(
- ["test.log", "test.log.2.bz2", "test.log.3"], self.listTestFiles()
- )
+ log_file.rotate()
+ self.assertEqual(
+ ["test.log", "test.log.2.bz2", "test.log.3"],
+ self.listTestFiles(),
+ )
+ finally:
+ log_file.close()
diff --git a/lib/lp/soyuz/doc/gina-multiple-arch.rst b/lib/lp/soyuz/doc/gina-multiple-arch.rst
index f9c5e1e..70c851a 100644
--- a/lib/lp/soyuz/doc/gina-multiple-arch.rst
+++ b/lib/lp/soyuz/doc/gina-multiple-arch.rst
@@ -98,17 +98,17 @@ Let's set up the filesystem:
>>> os.symlink(path, "/tmp/gina_test_archive")
>>> gina_proc = ["scripts/gina.py", "-q", "dapper", "dapper-updates"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> print(proc.stderr.read())
+ >>> print(proc.stderr)
WARNING ...
WARNING No source package bdftopcf (0.99.0-1) listed for bdftopcf
(0.99.0-1), scrubbing archive...
WARNING The archive for dapper-updates/universe doesn't contain a
directory for powerpc, skipping
<BLANKLINE>
- >>> proc.wait()
+ >>> proc.returncode
0
Make the changes visible elsewhere:
diff --git a/lib/lp/soyuz/doc/gina.rst b/lib/lp/soyuz/doc/gina.rst
index 5d5f0c9..97d53d5 100644
--- a/lib/lp/soyuz/doc/gina.rst
+++ b/lib/lp/soyuz/doc/gina.rst
@@ -136,13 +136,13 @@ Let's set up the filesystem:
And give it a spin:
>>> gina_proc = ["scripts/gina.py", "-q", "hoary", "breezy"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
Check STDERR for the errors we expected:
- >>> print(proc.stderr.read())
+ >>> print(proc.stderr)
ERROR Error processing package files for clearlooks
...
...ExecutionError: Error 2 unpacking source
@@ -200,7 +200,7 @@ Check STDERR for the errors we expected:
The exit status must be 0, for success:
- >>> proc.wait()
+ >>> proc.returncode
0
>>> transaction.commit()
@@ -567,10 +567,10 @@ been updated for packages in breezy which have changed since the last
run.
>>> gina_proc = ["scripts/gina.py", "-q", "hoary", "breezy"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> print(proc.stderr.read())
+ >>> print(proc.stderr)
ERROR Error processing package files for clearlooks
...
...ExecutionError: Error 2 unpacking source
@@ -615,7 +615,7 @@ run.
...
...PoolFileNotFound: .../python-sqlite_1.0.1-2ubuntu1_all.deb not found
<BLANKLINE>
- >>> proc.wait()
+ >>> proc.returncode
0
>>> transaction.commit()
@@ -709,10 +709,10 @@ First get a set of existing publishings for both source and binary:
Now run gina to import packages and convert them to partner:
>>> gina_proc = ["scripts/gina.py", "-q", "partner"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> proc.wait()
+ >>> proc.returncode
0
>>> transaction.commit()
@@ -826,10 +826,10 @@ Commit the changes and run the importer script.
>>> transaction.commit()
>>> gina_proc = ["scripts/gina.py", "-q", "lenny"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> proc.wait()
+ >>> proc.returncode
0
>>> transaction.commit()
@@ -866,11 +866,11 @@ Both, 'lenny' and 'hoary' (as partner) will be processed in the same
batch.
>>> gina_proc = ["scripts/gina.py", "lenny", "partner"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> print(proc.stderr.read())
+ >>> print(proc.stderr)
INFO Creating lockfile: /var/lock/launchpad-gina.lock
...
INFO === Processing debian/lenny/release ===
@@ -878,7 +878,7 @@ batch.
INFO === Processing ubuntu/hoary/release ===
...
- >>> proc.wait()
+ >>> proc.returncode
0
@@ -888,15 +888,15 @@ Other tests
For kicks, finally, run gina on a configured but incomplete archive:
>>> gina_proc = ["scripts/gina.py", "-q", "bogus"]
- >>> proc = subprocess.Popen(
+ >>> proc = subprocess.run(
... gina_proc, stderr=subprocess.PIPE, universal_newlines=True
... )
- >>> print(proc.stderr.read())
+ >>> print(proc.stderr)
ERROR Failed to analyze archive for bogoland
...
...MangledArchiveError: No archive directory for bogoland/main
<BLANKLINE>
- >>> proc.wait()
+ >>> proc.returncode
1
diff --git a/lib/lp/soyuz/scripts/gina/archive.py b/lib/lp/soyuz/scripts/gina/archive.py
index 19ab69d..f283190 100644
--- a/lib/lp/soyuz/scripts/gina/archive.py
+++ b/lib/lp/soyuz/scripts/gina/archive.py
@@ -125,10 +125,13 @@ class ArchiveFilesystemInfo:
def cleanup(self):
os.unlink(self.sources_tagfile)
+ self.srcfile.close()
if self.source_only:
return
os.unlink(self.binaries_tagfile)
+ self.binfile.close()
os.unlink(self.di_tagfile)
+ self.difile.close()
class ArchiveComponentItems:
diff --git a/lib/lp/soyuz/scripts/tests/test_gina.py b/lib/lp/soyuz/scripts/tests/test_gina.py
index ad9061d..3c37297 100644
--- a/lib/lp/soyuz/scripts/tests/test_gina.py
+++ b/lib/lp/soyuz/scripts/tests/test_gina.py
@@ -244,10 +244,13 @@ class TestArchiveFilesystemInfo(TestCase):
archive_info = ArchiveFilesystemInfo(
archive_root, "breezy", "main", "i386"
)
- sources = apt_pkg.TagFile(archive_info.srcfile)
- self.assertEqual("archive-copier", next(sources)["Package"])
- binaries = apt_pkg.TagFile(archive_info.binfile)
- self.assertEqual("python-pam", next(binaries)["Package"])
+ try:
+ with apt_pkg.TagFile(archive_info.srcfile) as sources:
+ self.assertEqual("archive-copier", next(sources)["Package"])
+ with apt_pkg.TagFile(archive_info.binfile) as binaries:
+ self.assertEqual("python-pam", next(binaries)["Package"])
+ finally:
+ archive_info.cleanup()
def test_uncompressed(self):
self.assertCompressionTypeWorks(lambda path: None)
diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py
index eca5535..1e27824 100644
--- a/lib/lp/testing/layers.py
+++ b/lib/lp/testing/layers.py
@@ -614,7 +614,7 @@ class MemcachedLayer(BaseLayer):
except OSError:
pass
# Clean up the resulting zombie.
- MemcachedLayer._memcached_process.wait()
+ MemcachedLayer._memcached_process.communicate()
MemcachedLayer._memcached_process = None
@classmethod
@@ -1689,6 +1689,7 @@ class LayerProcessController:
except ProcessLookupError:
# The child process doesn't exist. Maybe it went away by the
# time we got here.
+ cls.appserver.communicate()
cls.appserver = None
return False
else:
@@ -1722,6 +1723,7 @@ class LayerProcessController:
# The process is already gone.
return
until = datetime.datetime.now() + WAIT_INTERVAL
+ cls.appserver.communicate()
cls.appserver = None
@classmethod
@@ -1810,6 +1812,7 @@ class LayerProcessController:
break
else:
os.kill(cls.appserver.pid, signal.SIGTERM)
+ cls.appserver.communicate()
cls.appserver = None
# Go no further.
raise AssertionError("App server startup timed out.")