← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~stub/launchpad/garbo-logging into lp:launchpad

 

Stuart Bishop has proposed merging lp:~stub/launchpad/garbo-logging into lp:launchpad with lp:~stub/launchpad/parallel-garbo as a prerequisite.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~stub/launchpad/garbo-logging/+merge/56948

Add prefixes to garbo text log output to make interleaved output more readable.
-- 
https://code.launchpad.net/~stub/launchpad/garbo-logging/+merge/56948
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stub/launchpad/garbo-logging into lp:launchpad.
=== modified file 'lib/canonical/launchpad/utilities/looptuner.py'
--- lib/canonical/launchpad/utilities/looptuner.py	2011-03-30 16:32:24 +0000
+++ lib/canonical/launchpad/utilities/looptuner.py	2011-04-08 14:16:37 +0000
@@ -332,4 +332,5 @@
             minimum_chunk_size = self.minimum_chunk_size,
             maximum_chunk_size = self.maximum_chunk_size,
             cooldown_time = self.cooldown_time,
-            abort_time = self.abort_time).run()
+            abort_time = self.abort_time,
+            log = self.log).run()

=== modified file 'lib/lp/scripts/garbo.py'
--- lib/lp/scripts/garbo.py	2011-04-08 14:16:35 +0000
+++ lib/lp/scripts/garbo.py	2011-04-08 14:16:37 +0000
@@ -14,6 +14,7 @@
     timedelta,
     )
 from fixtures import TempDir
+import logging
 import multiprocessing
 import os
 import signal
@@ -80,6 +81,7 @@
 from lp.hardwaredb.model.hwdb import HWSubmission
 from lp.registry.model.person import Person
 from lp.services.job.model.job import Job
+from lp.services.log.logger import PrefixFilter
 from lp.services.memcache.interfaces import IMemcacheClient
 from lp.services.scripts.base import (
     LaunchpadCronScript,
@@ -1028,7 +1030,16 @@
                     break
                 tunable_loop_class = tunable_loops.pop(0)
 
-                self.logger.info("Running %s", tunable_loop_class.__name__)
+                loop_name = tunable_loop_class.__name__
+
+                # Configure logging for this loop to use a prefix. Log
+                # output from multiple threads will be interleaved, and
+                # this lets us tell log output from different tasks
+                # apart.
+                loop_logger = logging.getLogger('garbo.' + loop_name)
+                loop_logger.addFilter(PrefixFilter(loop_name))
+
+                loop_logger.info("Running %s", loop_name)
 
                 # How long until the script should abort.
                 remaining_script_time = (
@@ -1057,18 +1068,16 @@
                     "Task will be terminated in %0.3f seconds", abort_time)
 
                 tunable_loop = tunable_loop_class(
-                    abort_time=abort_time, log=self.logger)
+                    abort_time=abort_time, log=loop_logger)
 
                 if self._maximum_chunk_size is not None:
                     tunable_loop.maximum_chunk_size = self._maximum_chunk_size
 
                 try:
                     tunable_loop.run()
-                    self.logger.debug(
-                        "%s completed sucessfully.",
-                        tunable_loop_class.__name__)
+                    loop_logger.debug("%s completed successfully.", loop_name)
                 except Exception:
-                    self.logger.exception("Unhandled exception")
+                    loop_logger.exception("Unhandled exception")
                     self.failure_count += 1
                 finally:
                     transaction.abort()

=== modified file 'lib/lp/scripts/tests/test_garbo.py'
--- lib/lp/scripts/tests/test_garbo.py	2011-03-31 20:55:25 +0000
+++ lib/lp/scripts/tests/test_garbo.py	2011-04-08 14:16:37 +0000
@@ -11,7 +11,9 @@
     timedelta,
     )
 from fixtures import TempDir
+import logging
 import os
+from StringIO import StringIO
 import subprocess
 import time
 
@@ -85,8 +87,8 @@
     OpenIDConsumerAssociationPruner,
     UnusedSessionPruner,
     )
+from lp.services.log.logger import NullHandler
 from lp.services.job.model.job import Job
-from lp.services.log.logger import BufferLogger
 from lp.services.session.model import (
     SessionData,
     SessionPkgData,
@@ -148,9 +150,10 @@
         for i in range(10):
             self.store.add(BulkFoo())
 
+        self.log = logging.getLogger('garbo')
+
     def test_bulkpruner(self):
-        log = BufferLogger()
-        pruner = BulkFooPruner(log)
+        pruner = BulkFooPruner(self.log)
 
         # The loop thinks there is stuff to do. Confirm the initial
         # state is sane.
@@ -200,7 +203,7 @@
         pruner.cleanUp()
 
         # We can run it again - temporary objects cleaned up.
-        pruner = BulkFooPruner(log)
+        pruner = BulkFooPruner(self.log)
         while not pruner.isDone():
             pruner(chunk_size)
 
@@ -228,6 +231,8 @@
         self.make_session(u'ancient_auth', ancient, 'auth3')
         self.make_session(u'ancient_unauth', ancient, False)
 
+        self.log = logging.getLogger('garbo')
+
     def make_session(self, client_id, accessed, authenticated=None):
         session_data = SessionData()
         session_data.client_id = client_id
@@ -260,8 +265,7 @@
 
     def test_antique_session_pruner(self):
         chunk_size = 2
-        log = BufferLogger()
-        pruner = AntiqueSessionPruner(log)
+        pruner = AntiqueSessionPruner(self.log)
         try:
             while not pruner.isDone():
                 pruner(chunk_size)
@@ -284,8 +288,7 @@
 
     def test_unused_session_pruner(self):
         chunk_size = 2
-        log = BufferLogger()
-        pruner = UnusedSessionPruner(log)
+        pruner = UnusedSessionPruner(self.log)
         try:
             while not pruner.isDone():
                 pruner(chunk_size)
@@ -338,8 +341,7 @@
             expected_sessions.add(u'new dupe %d' % count)
 
         chunk_size = 2
-        log = BufferLogger()
-        pruner = DuplicateSessionPruner(log)
+        pruner = DuplicateSessionPruner(self.log)
         try:
             while not pruner.isDone():
                 pruner(chunk_size)
@@ -357,17 +359,29 @@
 
     def setUp(self):
         super(TestGarbo, self).setUp()
+
+        # Silence the root Logger by instructing the garbo logger to not
+        # propagate messages.
+        self.log = logging.getLogger('garbo')
+        self.log.addHandler(NullHandler())
+        self.log.propagate = 0
+
         # Run the garbage collectors to remove any existing garbage,
         # starting us in a known state.
         self.runDaily()
         self.runHourly()
 
+        # Capture garbo log output so tests can examine it.
+        self.log_buffer = StringIO()
+        handler = logging.StreamHandler(self.log_buffer)
+        self.log.addHandler(handler)
+
     def runDaily(self, maximum_chunk_size=2, test_args=()):
         transaction.commit()
         LaunchpadZopelessLayer.switchDbUser('garbo_daily')
         collector = DailyDatabaseGarbageCollector(test_args=list(test_args))
         collector._maximum_chunk_size = maximum_chunk_size
-        collector.logger = BufferLogger()
+        collector.logger = self.log
         collector.main()
         return collector
 
@@ -375,7 +389,7 @@
         LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
         collector = HourlyDatabaseGarbageCollector(test_args=list(test_args))
         collector._maximum_chunk_size = maximum_chunk_size
-        collector.logger = BufferLogger()
+        collector.logger = self.log
         collector.main()
         return collector
 
@@ -915,7 +929,7 @@
         LaunchpadZopelessLayer.switchDbUser('testadmin')
         spr, changelog = self.upload_to_debian()
         collector = self.runHourly()
-        log = collector.logger.getLogBuffer()
+        log = self.log_buffer.getvalue()
         self.assertTrue(
             'SPR %d (9wm 1.2-7) changelog imported.' % spr.id in log)
         self.assertFalse(spr.changelog == None)
@@ -926,7 +940,7 @@
         LaunchpadZopelessLayer.switchDbUser('testadmin')
         spr, changelog = self.upload_to_debian(restricted=True)
         collector = self.runHourly()
-        log = collector.logger.getLogBuffer()
+        log = self.log_buffer.getvalue()
         self.assertTrue(
             'SPR %d (9wm 1.2-7) changelog imported.' % spr.id in log)
         self.assertFalse(spr.changelog == None)

=== modified file 'lib/lp/services/log/logger.py'
--- lib/lp/services/log/logger.py	2011-03-24 12:00:37 +0000
+++ lib/lp/services/log/logger.py	2011-04-08 14:16:37 +0000
@@ -1,15 +1,19 @@
 # Copyright 2010-2011 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-"""Simple loggers."""
+"""Loggers."""
 
 __metaclass__ = type
 __all__ = [
     'BufferLogger',
     'DevNullLogger',
     'FakeLogger',
+    'LaunchpadLogger',
+    'NullHandler',
+    'PrefixFilter',
     ]
 
+import logging
 from StringIO import StringIO
 import sys
 import traceback
@@ -30,6 +34,69 @@
 })
 
 
+class LaunchpadLogger(logging.Logger):
+    """Logger that supports our custom levels."""
+
+    def debug1(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG1):
+            self._log(loglevels.DEBUG1, msg, args, **kwargs)
+
+    def debug2(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG2):
+            self._log(loglevels.DEBUG2, msg, args, **kwargs)
+
+    def debug3(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG3):
+            self._log(loglevels.DEBUG3, msg, args, **kwargs)
+
+    def debug4(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG4):
+            self._log(loglevels.DEBUG4, msg, args, **kwargs)
+
+    def debug5(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG5):
+            self._log(loglevels.DEBUG5, msg, args, **kwargs)
+
+    def debug6(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG6):
+            self._log(loglevels.DEBUG6, msg, args, **kwargs)
+
+    def debug7(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG7):
+            self._log(loglevels.DEBUG7, msg, args, **kwargs)
+
+    def debug8(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG8):
+            self._log(loglevels.DEBUG8, msg, args, **kwargs)
+
+    def debug9(self, msg, *args, **kwargs):
+        if self.isEnabledFor(loglevels.DEBUG9):
+            self._log(loglevels.DEBUG9, msg, args, **kwargs)
+
+
+class PrefixFilter:
+    """A logging Filter that inserts a prefix into messages.
+
+    If no static prefix is provided, the Logger's name is used.
+    """
+
+    def __init__(self, prefix=None):
+        self.prefix = prefix
+
+    def filter(self, record):
+        prefix = self.prefix or record.name
+        record.msg = '[%s] %s' % (prefix, record.msg)
+        return True
+
+
+class NullHandler(logging.Handler):
+    """A do-nothing Handler used to silence 'No handlers for logger' warnings.
+    """
+
+    def emit(self, record):
+        pass
+
+
 class FakeLogger:
     """Emulates a proper logger, just printing everything out the given file.
     """

=== modified file 'lib/lp/services/log/loglevels.py'
--- lib/lp/services/log/loglevels.py	2010-09-27 03:41:36 +0000
+++ lib/lp/services/log/loglevels.py	2011-04-08 14:16:37 +0000
@@ -22,7 +22,6 @@
     'ERROR',
     'FATAL',
     'INFO',
-    'LaunchpadLogger',
     'WARNING',
     ]
 
@@ -49,43 +48,3 @@
 DEBUG7 = DEBUG - 6
 DEBUG8 = DEBUG - 7
 DEBUG9 = DEBUG - 8
-
-
-class LaunchpadLogger(logging.Logger):
-    """Logger that support our custom levels."""
-
-    def debug1(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG1):
-            self._log(DEBUG1, msg, args, **kwargs)
-
-    def debug2(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG2):
-            self._log(DEBUG2, msg, args, **kwargs)
-
-    def debug3(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG3):
-            self._log(DEBUG3, msg, args, **kwargs)
-
-    def debug4(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG4):
-            self._log(DEBUG4, msg, args, **kwargs)
-
-    def debug5(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG5):
-            self._log(DEBUG5, msg, args, **kwargs)
-
-    def debug6(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG6):
-            self._log(DEBUG6, msg, args, **kwargs)
-
-    def debug7(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG7):
-            self._log(DEBUG7, msg, args, **kwargs)
-
-    def debug8(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG8):
-            self._log(DEBUG8, msg, args, **kwargs)
-
-    def debug9(self, msg, *args, **kwargs):
-        if self.isEnabledFor(DEBUG9):
-            self._log(DEBUG9, msg, args, **kwargs)

=== modified file 'lib/lp_sitecustomize.py'
--- lib/lp_sitecustomize.py	2011-03-28 07:00:46 +0000
+++ lib/lp_sitecustomize.py	2011-04-08 14:16:37 +0000
@@ -17,6 +17,7 @@
 
 from bzrlib.branch import Branch
 from lp.services.log import loglevels
+from lp.services.log.logger import LaunchpadLogger
 from lp.services.log.mappingfilter import MappingFilter
 from lp.services.log.nullhandler import NullHandler
 from lp.services.mime import customizeMimetypes
@@ -47,14 +48,14 @@
 
     # Install our customized Logger that provides easy access to our
     # custom loglevels.
-    logging.setLoggerClass(loglevels.LaunchpadLogger)
+    logging.setLoggerClass(LaunchpadLogger)
 
     # Fix the root logger, replacing it with an instance of our
     # customized Logger. The original one is instantiated on import of
     # the logging module, so our override does not take effect without
     # this manual effort.
     old_root = logging.root
-    new_root = loglevels.LaunchpadLogger('root', loglevels.WARNING)
+    new_root = LaunchpadLogger('root', loglevels.WARNING)
 
     # Fix globals.
     logging.root = new_root