launchpad-reviewers team mailing list archive

[Merge] lp:~cjwatson/launchpad/loggerhead-gunicorn into lp:launchpad

 

Colin Watson has proposed merging lp:~cjwatson/launchpad/loggerhead-gunicorn into lp:launchpad.

Commit message:
Port the loggerhead integration to gunicorn.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/loggerhead-gunicorn/+merge/347387

Split out from https://code.launchpad.net/~cjwatson/launchpad/private-loggerhead/+merge/345680 without the private port stuff, as requested by William.  See that MP for rationale and comments.
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/loggerhead-gunicorn into lp:launchpad.
=== modified file 'Makefile'
--- Makefile	2018-05-21 20:30:16 +0000
+++ Makefile	2018-06-04 14:37:09 +0000
@@ -306,10 +306,10 @@
 	memcached,rabbitmq,txlongpoll -i $(LPCONFIG)
 
 run_codebrowse: compile
-	BZR_PLUGIN_PATH=bzrplugins $(PY) scripts/start-loggerhead.py -f
+	BZR_PLUGIN_PATH=bzrplugins $(PY) scripts/start-loggerhead.py
 
 start_codebrowse: compile
-	BZR_PLUGIN_PATH=$(shell pwd)/bzrplugins $(PY) scripts/start-loggerhead.py
+	BZR_PLUGIN_PATH=$(shell pwd)/bzrplugins $(PY) scripts/start-loggerhead.py --daemon
 
 stop_codebrowse:
 	$(PY) scripts/stop-loggerhead.py

=== modified file 'constraints.txt'
--- constraints.txt	2018-05-31 10:23:03 +0000
+++ constraints.txt	2018-06-04 14:37:09 +0000
@@ -257,7 +257,9 @@
 feedvalidator==0.0.0DEV-r1049
 fixtures==3.0.0
 FormEncode==1.2.4
+futures==3.2.0
 grokcore.component==1.6
+gunicorn==19.8.1
 html5browser==0.0.9
 httplib2==0.8
 hyperlink==18.0.0

=== removed file 'lib/launchpad_loggerhead/debug.py'
--- lib/launchpad_loggerhead/debug.py	2010-04-27 01:35:56 +0000
+++ lib/launchpad_loggerhead/debug.py	1970-01-01 00:00:00 +0000
@@ -1,120 +0,0 @@
-# Copyright 2009 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-import thread
-import time
-
-from paste.request import construct_url
-
-
-def tabulate(cells):
-    """Format a list of lists of strings in a table.
-
-    The 'cells' are centered.
-
-    >>> print ''.join(tabulate(
-    ...     [['title 1', 'title 2'],
-    ...      ['short', 'rather longer']]))
-     title 1     title 2
-      short   rather longer
-    """
-    widths = {}
-    for row in cells:
-        for col_index, cell in enumerate(row):
-            widths[col_index] = max(len(cell), widths.get(col_index, 0))
-    result = []
-    for row in cells:
-        result_row = ''
-        for col_index, cell in enumerate(row):
-            result_row += cell.center(widths[col_index] + 2)
-        result.append(result_row.rstrip() + '\n')
-    return result
-
-
-def threadpool_debug(app):
-    """Wrap `app` to provide debugging information about the threadpool state.
-
-    The returned application will serve debugging information about the state
-    of the threadpool at '/thread-debug' -- but only when accessed directly,
-    not when accessed through Apache.
-    """
-    def wrapped(environ, start_response):
-        if ('HTTP_X_FORWARDED_SERVER' in environ
-            or environ['PATH_INFO'] != '/thread-debug'):
-            environ['lp.timestarted'] = time.time()
-            return app(environ, start_response)
-        threadpool = environ['paste.httpserver.thread_pool']
-        start_response("200 Ok", [])
-        output = [("url", "time running", "time since last activity")]
-        now = time.time()
-        # Because we're accessing mutable structures without locks here,
-        # we're a bit cautious about things looking like we expect -- if a
-        # worker doesn't seem fully set up, we just ignore it.
-        for worker in threadpool.workers:
-            if not hasattr(worker, 'thread_id'):
-                continue
-            time_started, info = threadpool.worker_tracker.get(
-                worker.thread_id, (None, None))
-            if time_started is not None and info is not None:
-                real_time_started = info.get(
-                    'lp.timestarted', time_started)
-                output.append(
-                    map(str,
-                        (construct_url(info),
-                         now - real_time_started,
-                         now - time_started,)))
-        return tabulate(output)
-    return wrapped
-
-
-def change_kill_thread_criteria(application):
-    """Interfere with threadpool so that threads are killed for inactivity.
-
-    The usual rules with paste's threadpool is that a thread that takes longer
-    than 'hung_thread_limit' seconds to process a request is considered hung
-    and more than 'kill_thread_limit' seconds is killed.
-
-    Because loggerhead streams its output, how long the entire request takes
-    to process depends on things like how fast the users internet connection
-    is.  What we'd like to do is kill threads that don't _start_ to produce
-    output for 'kill_thread_limit' seconds.
-
-    What this class actually does is arrange things so that threads that
-    produce no output for 'kill_thread_limit' are killed, because that's the
-    rule Apache uses when interpreting ProxyTimeout.
-    """
-    def wrapped_application(environ, start_response):
-        threadpool = environ['paste.httpserver.thread_pool']
-        def reset_timer():
-            """Make this thread safe for another 'kill_thread_limit' seconds.
-
-            We do this by hacking the threadpool's record of when this thread
-            started to pretend that it started right now.  Hacky, but it's
-            enough to fool paste.httpserver.ThreadPool.kill_hung_threads and
-            that's what matters.
-            """
-            threadpool.worker_tracker[thread.get_ident()][0] = time.time()
-        def response_hook(status, response_headers, exc_info=None):
-            # We reset the timer when the HTTP headers are sent...
-            reset_timer()
-            writer = start_response(status, response_headers, exc_info)
-            def wrapped_writer(arg):
-                # ... and whenever more output has been generated.
-                reset_timer()
-                return writer(arg)
-            return wrapped_writer
-        result = application(environ, response_hook)
-        # WSGI allows the application to return an iterable, which could be a
-        # generator that does significant processing between successive items,
-        # so we should reset the timer between each item.
-        #
-        # This isn't really necessary as loggerhead doesn't return any
-        # non-trivial iterables to the WSGI server.  But it's probably better
-        # to cope with this case to avoid nasty suprises if loggerhead
-        # changes.
-        def reset_timer_between_items(iterable):
-            for item in iterable:
-                reset_timer()
-                yield item
-        return reset_timer_between_items(result)
-    return wrapped_application

=== added file 'lib/launchpad_loggerhead/testing.py'
--- lib/launchpad_loggerhead/testing.py	1970-01-01 00:00:00 +0000
+++ lib/launchpad_loggerhead/testing.py	2018-06-04 14:37:09 +0000
@@ -0,0 +1,72 @@
+# Copyright 2018 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+__metaclass__ = type
+__all__ = [
+    'LoggerheadFixture',
+    ]
+
+import os.path
+import time
+import warnings
+
+from fixtures import Fixture
+
+from lp.services.config import config
+from lp.services.osutils import (
+    get_pid_from_file,
+    kill_by_pidfile,
+    remove_if_exists,
+    )
+from lp.services.pidfile import pidfile_path
+from lp.services.scripts.tests import run_script
+from lp.testing.layers import (
+    BaseLayer,
+    LayerProcessController,
+    )
+
+
+class LoggerheadFixtureException(Exception):
+    pass
+
+
+class LoggerheadFixture(Fixture):
+    """Start loggerhead as a fixture."""
+
+    def _setUp(self):
+        pidfile = pidfile_path(
+            "codebrowse", use_config=LayerProcessController.appserver_config)
+        pid = get_pid_from_file(pidfile)
+        if pid is not None:
+            warnings.warn(
+                "Attempt to start LoggerheadFixture with an existing "
+                "instance (%d) running in %s." % (pid, pidfile))
+            kill_by_pidfile(pidfile)
+        self.logfile = os.path.join(config.codebrowse.log_folder, "debug.log")
+        remove_if_exists(self.logfile)
+        self.addCleanup(kill_by_pidfile, pidfile)
+        run_script(
+            os.path.join("scripts", "start-loggerhead.py"), ["--daemon"],
+            # The testrunner-appserver config provides the correct
+            # openid_provider_root URL.
+            extra_env={"LPCONFIG": BaseLayer.appserver_config_name})
+        self._waitForStartup()
+
+    def _hasStarted(self):
+        if os.path.exists(self.logfile):
+            with open(self.logfile) as logfile:
+                return "Listening at:" in logfile.read()
+        else:
+            return False
+
+    def _waitForStartup(self):
+        now = time.time()
+        deadline = now + 20
+        while now < deadline and not self._hasStarted():
+            time.sleep(0.1)
+            now = time.time()
+
+        if now >= deadline:
+            raise LoggerheadFixtureException("Unable to start loggerhead.")

=== modified file 'lib/launchpad_loggerhead/tests.py'
--- lib/launchpad_loggerhead/tests.py	2018-01-19 17:21:44 +0000
+++ lib/launchpad_loggerhead/tests.py	2018-06-04 14:37:09 +0000
@@ -1,29 +1,44 @@
 # Copyright 2010-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-import urllib
-
 import lazr.uri
 from paste.httpexceptions import HTTPExceptionHandler
+import requests
+from six.moves.urllib_parse import (
+    urlencode,
+    urlsplit,
+    )
+import soupmatchers
+from testtools.content import Content
+from testtools.content_type import UTF8_TEXT
 import wsgi_intercept
 from wsgi_intercept.urllib2_intercept import (
     install_opener,
     uninstall_opener,
     )
 import wsgi_intercept.zope_testbrowser
+from zope.security.proxy import removeSecurityProxy
 
 from launchpad_loggerhead.app import RootApp
 from launchpad_loggerhead.session import SessionHandler
+from launchpad_loggerhead.testing import LoggerheadFixture
+from lp.app.enums import InformationType
 from lp.services.config import config
 from lp.services.webapp.vhosts import allvhosts
-from lp.testing import TestCase
-from lp.testing.layers import DatabaseFunctionalLayer
+from lp.testing import (
+    TestCase,
+    TestCaseWithFactory,
+    )
+from lp.testing.layers import (
+    AppServerLayer,
+    DatabaseFunctionalLayer,
+    )
 
 
 SESSION_VAR = 'lh.session'
 
-# See sourcecode/launchpad-loggerhead/start-loggerhead.py for the production
-# mechanism for getting the secret.
+# See lib/launchpad_loggerhead/wsgi.py for the production mechanism for
+# getting the secret.
 SECRET = 'secret'
 
 
@@ -132,8 +147,7 @@
         self.intercept(dummy_root, dummy_destination)
         self.browser.open(
             config.codehosting.secure_codebrowse_root +
-            '+logout?' +
-            urllib.urlencode(dict(next_to=dummy_root + '+logout')))
+            '+logout?' + urlencode(dict(next_to=dummy_root + '+logout')))
 
         # We are logged out, as before.
         self.assertEqual(self.session, {})
@@ -142,3 +156,51 @@
         self.assertEqual(self.browser.url, dummy_root + '+logout')
         self.assertEqual(self.browser.contents,
                          'This is a dummy destination.\n')
+
+
+class TestWSGI(TestCaseWithFactory):
+    """Smoke tests for Launchpad's loggerhead WSGI server."""
+
+    layer = AppServerLayer
+
+    def setUp(self):
+        super(TestWSGI, self).setUp()
+        self.useBzrBranches()
+        loggerhead_fixture = self.useFixture(LoggerheadFixture())
+
+        def get_debug_log_bytes():
+            try:
+                with open(loggerhead_fixture.logfile, "rb") as logfile:
+                    return [logfile.read()]
+            except IOError:
+                return [b""]
+
+        self.addDetail(
+            "loggerhead-debug", Content(UTF8_TEXT, get_debug_log_bytes))
+
+    def test_public_port_public_branch(self):
+        # Requests for public branches on the public port are allowed.
+        db_branch, _ = self.create_branch_and_tree()
+        branch_url = "http://127.0.0.1:%d/%s"; % (
+            config.codebrowse.port, db_branch.unique_name)
+        response = requests.get(branch_url)
+        self.assertEqual(200, response.status_code)
+        title_tag = soupmatchers.Tag(
+            "page title", "title", text="%s : changes" % db_branch.unique_name)
+        self.assertThat(response.text, soupmatchers.HTMLContains(title_tag))
+
+    def test_public_port_private_branch(self):
+        # Requests for private branches on the public port send the user
+        # through the login workflow.
+        db_branch, _ = self.create_branch_and_tree(
+            information_type=InformationType.USERDATA)
+        naked_branch = removeSecurityProxy(db_branch)
+        branch_url = "http://127.0.0.1:%d/%s"; % (
+            config.codebrowse.port, naked_branch.unique_name)
+        response = requests.get(
+            branch_url, headers={"X-Forwarded-Scheme": "https"},
+            allow_redirects=False)
+        self.assertEqual(301, response.status_code)
+        self.assertEqual(
+            "testopenid.dev:8085",
+            urlsplit(response.headers["Location"]).netloc)

=== added file 'lib/launchpad_loggerhead/wsgi.py'
--- lib/launchpad_loggerhead/wsgi.py	1970-01-01 00:00:00 +0000
+++ lib/launchpad_loggerhead/wsgi.py	2018-06-04 14:37:09 +0000
@@ -0,0 +1,159 @@
+# Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+__metaclass__ = type
+__all__ = [
+    'LoggerheadApplication',
+    ]
+
+import logging
+from optparse import OptionParser
+import os.path
+import time
+import traceback
+
+from gunicorn.app.base import Application
+from gunicorn.glogging import Logger
+from openid import oidutil
+from paste.deploy.config import PrefixMiddleware
+from paste.httpexceptions import HTTPExceptionHandler
+from paste.request import construct_url
+from paste.wsgilib import catch_errors
+
+from launchpad_loggerhead.app import (
+    oops_middleware,
+    RootApp,
+    )
+from launchpad_loggerhead.session import SessionHandler
+import lp.codehosting
+from lp.services.config import config
+from lp.services.pidfile import pidfile_path
+from lp.services.scripts import (
+    logger,
+    logger_options,
+    )
+from lp.services.scripts.logger import LaunchpadFormatter
+
+
+log = logging.getLogger("loggerhead")
+
+
+SESSION_VAR = "lh.session"
+
+
+def log_request_start_and_stop(app):
+    def wrapped(environ, start_response):
+        url = construct_url(environ)
+        log.info("Starting to process %s", url)
+        start_time = time.time()
+
+        def request_done_ok():
+            log.info(
+                "Processed ok %s [%0.3f seconds]",
+                url, time.time() - start_time)
+
+        def request_done_err(exc_info):
+            log.info(
+                "Processed err %s [%0.3f seconds]: %s",
+                url, time.time() - start_time,
+                traceback.format_exception_only(*exc_info[:2]))
+
+        return catch_errors(
+            app, environ, start_response, request_done_err, request_done_ok)
+
+    return wrapped
+
+
+class LoggerheadLogger(Logger):
+
+    def setup(self, cfg):
+        super(LoggerheadLogger, self).setup(cfg)
+        formatter = LaunchpadFormatter(datefmt=None)
+        for handler in self.error_log.handlers:
+            handler.setFormatter(formatter)
+
+        # Force Launchpad's logging machinery to set up the root logger the
+        # way we want it.
+        parser = OptionParser()
+        logger_options(parser)
+        log_options, _ = parser.parse_args(
+            ['-q', '--ms', '--log-file=DEBUG:%s' % cfg.errorlog])
+        logger(log_options)
+
+        # Make the OpenID library use proper logging rather than writing to
+        # stderr.
+        oidutil.log = lambda message, level=0: log.debug(message)
+
+
+class LoggerheadApplication(Application):
+
+    def __init__(self, **kwargs):
+        self.options = kwargs
+        super(LoggerheadApplication, self).__init__()
+
+    def init(self, parser, opts, args):
+        top = os.path.abspath(os.path.join(
+            os.path.dirname(__file__), os.pardir, os.pardir))
+        listen_host = config.codebrowse.listen_host
+        log_folder = config.codebrowse.log_folder or os.path.join(top, "logs")
+        if not os.path.exists(log_folder):
+            os.makedirs(log_folder)
+
+        cfg = {
+            "accesslog": os.path.join(log_folder, "access.log"),
+            "bind": [
+                "%s:%s" % (listen_host, config.codebrowse.port),
+                ],
+            "errorlog": os.path.join(log_folder, "debug.log"),
+            # Trust that firewalls only permit sending requests to
+            # loggerhead via a frontend.
+            "forwarded_allow_ips": "*",
+            "logger_class": "launchpad_loggerhead.wsgi.LoggerheadLogger",
+            "loglevel": "debug",
+            "pidfile": pidfile_path("codebrowse"),
+            "preload_app": True,
+            # XXX cjwatson 2018-05-15: These are gunicorn defaults plus
+            # X-Forwarded-Scheme: https, which we use in staging/production.
+            # We should switch the staging/production configuration to
+            # something that gunicorn understands natively and then drop
+            # this.
+            "secure_scheme_headers": {
+                "X-FORWARDED-PROTOCOL": "ssl",
+                "X-FORWARDED-PROTO": "https",
+                "X-FORWARDED-SCHEME": "https",
+                "X-FORWARDED-SSL": "on",
+                },
+            # Kill threads after 300 seconds of inactivity.  This is
+            # insanely high, but loggerhead is often pretty slow.
+            "timeout": 300,
+            "threads": 10,
+            "worker_class": "gthread",
+            }
+        cfg.update(self.options)
+        return cfg
+
+    def _load_bzr_plugins(self):
+        from bzrlib.plugin import load_plugins
+        load_plugins()
+
+        import bzrlib.plugins
+        if getattr(bzrlib.plugins, "loom", None) is None:
+            log.error("Loom plugin loading failed.")
+
+    def load(self):
+        self._load_bzr_plugins()
+
+        with open(os.path.join(
+                config.root, config.codebrowse.secret_path)) as secret_file:
+            secret = secret_file.read()
+
+        app = RootApp(SESSION_VAR)
+        app = HTTPExceptionHandler(app)
+        app = SessionHandler(app, SESSION_VAR, secret)
+        app = log_request_start_and_stop(app)
+        app = PrefixMiddleware(app)
+        app = oops_middleware(app)
+
+        return app

=== modified file 'lib/lp/services/osutils.py'
--- lib/lp/services/osutils.py	2018-03-27 17:43:27 +0000
+++ lib/lp/services/osutils.py	2018-06-04 14:37:09 +0000
@@ -9,7 +9,6 @@
     'find_on_path',
     'get_pid_from_file',
     'kill_by_pidfile',
-    'get_pid_from_file',
     'open_for_writing',
     'override_environ',
     'process_exists',

=== modified file 'lib/lp/services/scripts/tests/__init__.py'
--- lib/lp/services/scripts/tests/__init__.py	2018-05-06 08:52:34 +0000
+++ lib/lp/services/scripts/tests/__init__.py	2018-06-04 14:37:09 +0000
@@ -44,7 +44,7 @@
     return sorted(scripts)
 
 
-def run_script(script_relpath, args, expect_returncode=0):
+def run_script(script_relpath, args, expect_returncode=0, extra_env=None):
     """Run a script for testing purposes.
 
     :param script_relpath: The relative path to the script, from the tree
@@ -52,11 +52,16 @@
     :param args: Arguments to provide to the script.
     :param expect_returncode: The return code expected.  If a different value
         is returned, an exception will be raised.
+    :param extra_env: A dictionary of extra environment variables to provide
+        to the script, or None.
     """
     script = os.path.join(config.root, script_relpath)
     args = [script] + args
+    env = dict(os.environ)
+    if extra_env is not None:
+        env.update(extra_env)
     process = subprocess.Popen(
-        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
     stdout, stderr = process.communicate()
     if process.returncode != expect_returncode:
         raise AssertionError('Failed:\n%s\n%s' % (stdout, stderr))

=== modified file 'lib/lp/testing/__init__.py'
--- lib/lp/testing/__init__.py	2018-05-09 16:55:39 +0000
+++ lib/lp/testing/__init__.py	2018-06-04 14:37:09 +0000
@@ -893,7 +893,8 @@
                 db_branch = self.factory.makeAnyBranch(**kwargs)
             else:
                 db_branch = self.factory.makeProductBranch(product, **kwargs)
-        branch_url = 'lp-internal:///' + db_branch.unique_name
+        branch_url = (
+            'lp-internal:///' + removeSecurityProxy(db_branch).unique_name)
         if not self.direct_database_server:
             transaction.commit()
         bzr_branch = self.createBranchAtURL(branch_url, format=format)

=== modified file 'scripts/start-loggerhead.py'
--- scripts/start-loggerhead.py	2012-06-27 13:57:04 +0000
+++ scripts/start-loggerhead.py	2018-06-04 14:37:09 +0000
@@ -1,192 +1,14 @@
 #!/usr/bin/python -S
 #
-# Copyright 2009-2012 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+from __future__ import absolute_import, print_function, unicode_literals
+
 import _pythonpath
 
-import logging
-from optparse import OptionParser
-import os
-import sys
-import time
-import traceback
-
-from paste import httpserver
-from paste.deploy.config import PrefixMiddleware
-from paste.httpexceptions import HTTPExceptionHandler
-from paste.request import construct_url
-from paste.translogger import TransLogger
-from paste.wsgilib import catch_errors
-
-import lp.codehosting
-from lp.services.config import config
-
-
-LISTEN_HOST = config.codebrowse.listen_host
-LISTEN_PORT = config.codebrowse.port
-THREADPOOL_WORKERS = 10
-
-
-class NoLockingFileHandler(logging.FileHandler):
-    """A version of logging.FileHandler that doesn't do it's own locking.
-
-    We experienced occasional hangs in production where gdb-ery on the server
-    revealed that we sometimes end up with many threads blocking on the RLock
-    held by the logging file handler, and log reading finds that an exception
-    managed to kill a thread in an unsafe window for RLock's.
-
-    Luckily, there's no real reason for us to take a lock during logging as
-    each log message translates to one call to .write on a file object, which
-    translates to one fwrite call, and it seems that this does enough locking
-    itself for our purposes.
-
-    So this handler just doesn't lock in log message handling.
-    """
-
-    def acquire(self):
-        pass
-
-    def release(self):
-        pass
-
-
-def setup_logging(home, foreground):
-    # i hate that stupid logging config format, so just set up logging here.
-
-    log_folder = config.codebrowse.log_folder
-    if not log_folder:
-        log_folder = os.path.join(home, 'logs')
-    if not os.path.exists(log_folder):
-        os.makedirs(log_folder)
-
-    f = logging.Formatter(
-        '%(levelname)-.3s [%(asctime)s.%(msecs)03d] [%(thread)d] %(name)s: %(message)s',
-        '%Y%m%d-%H:%M:%S')
-    debug_log = NoLockingFileHandler(os.path.join(log_folder, 'debug.log'))
-    debug_log.setLevel(logging.DEBUG)
-    debug_log.setFormatter(f)
-    if foreground:
-        stdout_log = logging.StreamHandler(sys.stdout)
-        stdout_log.setLevel(logging.DEBUG)
-        stdout_log.setFormatter(f)
-    f = logging.Formatter('[%(asctime)s.%(msecs)03d] %(message)s',
-                          '%Y%m%d-%H:%M:%S')
-    access_log = NoLockingFileHandler(os.path.join(log_folder, 'access.log'))
-    access_log.setLevel(logging.INFO)
-    access_log.setFormatter(f)
-
-    logging.getLogger('').setLevel(logging.DEBUG)
-    logging.getLogger('').addHandler(debug_log)
-    logging.getLogger('wsgi').addHandler(access_log)
-
-    if foreground:
-        logging.getLogger('').addHandler(stdout_log)
-    else:
-        class S(object):
-            def write(self, str):
-                logging.getLogger().error(str.rstrip('\n'))
-            def flush(self):
-                pass
-        sys.stderr = S()
-
-
-parser = OptionParser(description="Start loggerhead.")
-parser.add_option(
-    "-f", "--foreground", default=False, action="store_true",
-    help="Run loggerhead in the foreground.")
-options, _ = parser.parse_args()
-
-home = os.path.realpath(os.path.dirname(__file__))
-pidfile = os.path.join(home, 'loggerhead.pid')
-
-if not options.foreground:
-    sys.stderr.write('\n')
-    sys.stderr.write('Launching loggerhead into the background.\n')
-    sys.stderr.write('PID file: %s\n' % (pidfile,))
-    sys.stderr.write('\n')
-
-    from loggerhead.daemon import daemonize
-    daemonize(pidfile, home)
-
-setup_logging(home, foreground=options.foreground)
-
-log = logging.getLogger('loggerhead')
-log.info('Starting up...')
-
-log.info('Loading the bzr plugins...')
-from bzrlib.plugin import load_plugins
-load_plugins()
-
-import bzrlib.plugins
-if getattr(bzrlib.plugins, 'loom', None) is None:
-    log.error('Loom plugin loading failed.')
-
-from launchpad_loggerhead.debug import (
-    change_kill_thread_criteria, threadpool_debug)
-from launchpad_loggerhead.app import RootApp, oops_middleware
-from launchpad_loggerhead.session import SessionHandler
-
-SESSION_VAR = 'lh.session'
-
-secret = open(os.path.join(config.root, config.codebrowse.secret_path)).read()
-
-app = RootApp(SESSION_VAR)
-app = HTTPExceptionHandler(app)
-app = SessionHandler(app, SESSION_VAR, secret)
-def log_request_start_and_stop(app):
-    def wrapped(environ, start_response):
-        log = logging.getLogger('loggerhead')
-        url = construct_url(environ)
-        log.info("Starting to process %s", url)
-        start_time = time.time()
-        def request_done_ok():
-            log.info("Processed ok %s [%0.3f seconds]", url, time.time() -
-                    start_time)
-        def request_done_err(exc_info):
-            log.info("Processed err %s [%0.3f seconds]: %s", url, time.time() -
-                    start_time, traceback.format_exception_only(*exc_info[:2]))
-        return catch_errors(app, environ, start_response, request_done_err,
-                request_done_ok)
-    return wrapped
-app = log_request_start_and_stop(app)
-app = PrefixMiddleware(app)
-app = TransLogger(app)
-app = threadpool_debug(app)
-
-def set_scheme(app):
-    """Set wsgi.url_scheme in the environment correctly.
-
-    We serve requests that originated from both http and https, and
-    distinguish between them by adding a header in the https Apache config.
-    """
-    def wrapped(environ, start_response):
-        environ['wsgi.url_scheme'] = environ.pop(
-            'HTTP_X_FORWARDED_SCHEME', 'http')
-        return app(environ, start_response)
-    return wrapped
-app = set_scheme(app)
-app = change_kill_thread_criteria(app)
-app = oops_middleware(app)
-
-try:
-    httpserver.serve(
-        app, host=LISTEN_HOST, port=LISTEN_PORT,
-        threadpool_workers=THREADPOOL_WORKERS,
-        threadpool_options={
-            # Kill threads after 300 seconds.  This is insanely high, but
-            # lower enough than the default (1800 seconds!) that evidence
-            # suggests it will be hit occasionally, and there's very little
-            # chance of it having negative consequences.
-            'kill_thread_limit': 300,
-            # Check for threads that should be killed every 10 requests.  The
-            # default is every 100, which is easily long enough for things to
-            # gum up completely in between checks.
-            'hung_check_period': 10,
-            })
-finally:
-    log.info('Shutdown.')
-    try:
-        os.remove(pidfile)
-    except OSError:
-        pass
+from launchpad_loggerhead.wsgi import LoggerheadApplication
+
+
+if __name__ == "__main__":
+    LoggerheadApplication().run()

=== modified file 'scripts/stop-loggerhead.py'
--- scripts/stop-loggerhead.py	2012-06-29 08:40:05 +0000
+++ scripts/stop-loggerhead.py	2018-06-04 14:37:09 +0000
@@ -1,38 +1,33 @@
 #!/usr/bin/python -S
 #
-# Copyright 2009-2012 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+from __future__ import absolute_import, print_function, unicode_literals
+
 import _pythonpath
 
 from optparse import OptionParser
 import os
+import signal
 import sys
 
+from lp.services.pidfile import get_pid
+
 
 parser = OptionParser(description="Stop loggerhead.")
 parser.parse_args()
 
-home = os.path.realpath(os.path.dirname(__file__))
-pidfile = os.path.join(home, 'loggerhead.pid')
-
-try:
-    f = open(pidfile, 'r')
-except IOError as e:
-    print 'No pid file found.'
-    sys.exit(1)
-
-pid = int(f.readline())
+pid = get_pid("codebrowse")
 
 try:
     os.kill(pid, 0)
 except OSError as e:
-    print 'Stale pid file; server is not running.'
+    print('Stale pid file; server is not running.')
     sys.exit(1)
 
-print
-print 'Shutting down previous server @ pid %d.' % (pid,)
-print
+print()
+print('Shutting down previous server @ pid %d.' % (pid,))
+print()
 
-import signal
 os.kill(pid, signal.SIGTERM)

=== modified file 'setup.py'
--- setup.py	2018-05-31 10:23:03 +0000
+++ setup.py	2018-06-04 14:37:09 +0000
@@ -159,6 +159,7 @@
         'FeedParser',
         'feedvalidator',
         'fixtures',
+        'gunicorn[gthread]',
         'html5browser',
         'importlib-resources',
         'ipython',

