launchpad-reviewers team mailing list archive
Message #01244
[Merge] lp:~elachuni/launchpad/use-latest-shipit into lp:launchpad
Anthony Lenton has proposed merging lp:~elachuni/launchpad/use-latest-shipit into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
This branch updates sourcedeps.conf to pull in the latest shipit code.
I'm not sure if this belongs here, but here is a brief summary of the changes introduced in shipit (each MP was approved separately):
* Auto-approve a second request if it is for a different arch than the one previously shipped
* Added a link to ubuntu.com to the footer logo
* Removed an incorrect FAQ link from shipit.kubuntu
* Added tests for shipit's OpenID consumer against replay attacks
* Lowered the number of previously shipped requests allowed
--
https://code.launchpad.net/~elachuni/launchpad/use-latest-shipit/+merge/36718
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~elachuni/launchpad/use-latest-shipit into lp:launchpad.
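Beyond the sourcedeps bump, the attached diff also reworks the script logging infrastructure: the custom DEBUG2-DEBUG9 levels move to lp.services.log.loglevels, cron scripts gain an OopsHandler that files WARNING-and-above messages as OOPS reports, and --log-file now accepts a LVL:FILE argument (a bare path still works and defaults to INFO). As a rough sketch of how a script might drive the modified logger module, with the script name and log path invented for illustration:

    # Illustrative only; 'myscript' and /tmp/myscript.log are made up.
    from optparse import OptionParser

    from canonical.launchpad.scripts.logger import logger, logger_options

    parser = OptionParser()
    logger_options(parser)  # adds -v/-q and the new --log-file=LVL:FILE option
    options, args = parser.parse_args(
        ['-q', '--log-file=DEBUG:/tmp/myscript.log'])
    log = logger(options, name='myscript')
    log.warning("goes to stderr and to /tmp/myscript.log")
    log.debug("suppressed on stderr by -q, but written to the DEBUG-level file")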
=== modified file 'lib/canonical/launchpad/scripts/logger.py'
--- lib/canonical/launchpad/scripts/logger.py 2010-09-24 08:29:24 +0000
+++ lib/canonical/launchpad/scripts/logger.py 2010-09-27 14:20:25 +0000
@@ -21,8 +21,10 @@
'BufferLogger',
'FakeLogger',
'QuietFakeLogger',
+ 'OopsHandler',
+ 'LaunchpadFormatter',
'DEBUG2', 'DEBUG3', 'DEBUG4', 'DEBUG5',
- 'DEBUG6', 'DEBUG7', 'DEBUG8', 'DEBUG9'
+ 'DEBUG6', 'DEBUG7', 'DEBUG8', 'DEBUG9',
]
@@ -33,7 +35,9 @@
)
import hashlib
import logging
+from logging.handlers import WatchedFileHandler
from optparse import OptionParser
+import os.path
import re
import sys
import time
@@ -44,34 +48,33 @@
from canonical.base import base
from canonical.config import config
+from canonical.launchpad.webapp.errorlog import (
+ globalErrorUtility,
+ ScriptRequest,
+ )
from canonical.librarian.interfaces import (
ILibrarianClient,
UploadFailed,
)
-
-# Custom log levels. logging.debug is 10.
-DEBUG2 = 9
-DEBUG3 = 8
-DEBUG4 = 7
-DEBUG5 = 6
-DEBUG6 = 5
-DEBUG7 = 4
-DEBUG8 = 3
-DEBUG9 = 2
-
-logging.addLevelName(DEBUG2, 'DEBUG2')
-logging.addLevelName(DEBUG3, 'DEBUG3')
-logging.addLevelName(DEBUG4, 'DEBUG4')
-logging.addLevelName(DEBUG5, 'DEBUG5')
-logging.addLevelName(DEBUG6, 'DEBUG6')
-logging.addLevelName(DEBUG7, 'DEBUG7')
-logging.addLevelName(DEBUG8, 'DEBUG8')
-logging.addLevelName(DEBUG9, 'DEBUG9')
+from lp.services.log import loglevels
+
+
+# Reexport our custom loglevels for old callsites. These callsites
+# should be importing the symbols from lp.services.log.loglevels
+DEBUG2 = loglevels.DEBUG2
+DEBUG3 = loglevels.DEBUG3
+DEBUG4 = loglevels.DEBUG4
+DEBUG5 = loglevels.DEBUG5
+DEBUG6 = loglevels.DEBUG6
+DEBUG7 = loglevels.DEBUG7
+DEBUG8 = loglevels.DEBUG8
+DEBUG9 = loglevels.DEBUG9
class FakeLogger:
"""Emulates a proper logger, just printing everything out the given file.
"""
+
def __init__(self, output_file=None):
"""The default output_file is sys.stdout."""
self.output_file = output_file
@@ -120,12 +123,14 @@
Does not print any message. Messages can be retrieved from
logger.output_file, which is a StringIO instance.
"""
+
def __init__(self):
self.output_file = StringIO()
class BufferLogger(FakeLogger):
"""A logger that logs to a StringIO object."""
+
def __init__(self):
self.buffer = StringIO()
@@ -139,13 +144,56 @@
self.log(line)
-class LibrarianFormatter(logging.Formatter):
+class OopsHandler(logging.Handler):
+ """Handler to log to the OOPS system."""
+
+ def __init__(self, script_name, level=logging.WARN):
+ logging.Handler.__init__(self, level)
+ # Context for OOPS reports.
+ self.request = ScriptRequest(
+ [('script_name', script_name), ('path', sys.argv[0])])
+ self.setFormatter(LaunchpadFormatter())
+
+ def emit(self, record):
+ """Emit a record as an OOPS."""
+ try:
+ msg = record.getMessage()
+ # Warnings and less are informational OOPS reports.
+ informational = (record.levelno <= logging.WARN)
+ with globalErrorUtility.oopsMessage(msg):
+ globalErrorUtility._raising(
+ sys.exc_info(), self.request, informational=informational)
+ except Exception, error:
+ self.handleError(record)
+
+
+class LaunchpadFormatter(logging.Formatter):
+ """logging.Formatter encoding our preferred output format."""
+
+ def __init__(self, fmt=None, datefmt=None):
+ if fmt is None:
+ if config.instance_name == 'testrunner':
+ # Don't output timestamps in the test environment
+ fmt = '%(levelname)-7s %(message)s'
+ else:
+ fmt = '%(asctime)s %(levelname)-7s %(message)s'
+ if datefmt is None:
+ datefmt = "%Y-%m-%d %H:%M:%S"
+ logging.Formatter.__init__(self, fmt, datefmt)
+ self.converter = time.gmtime # Output should be UTC
+
+
+class LibrarianFormatter(LaunchpadFormatter):
"""A logging.Formatter that stores tracebacks in the Librarian and emits
a URL rather than emitting the traceback directly.
The traceback will be emitted as a fallback if the Librarian cannot be
contacted.
+
+ XXX bug=641103 StuartBishop -- This class should die. Remove it and
+ replace with LaunchpadFormatter, fixing the test fallout.
"""
+
def formatException(self, ei):
"""Format the exception and store it in the Librian.
@@ -171,25 +219,22 @@
expiry = datetime.now().replace(tzinfo=utc) + timedelta(days=90)
try:
- filename = base(
- long(hashlib.sha1(traceback).hexdigest(),16), 62
- ) + '.txt'
+ filename = base(long(
+ hashlib.sha1(traceback).hexdigest(), 16), 62) + '.txt'
url = librarian.remoteAddFile(
filename, len(traceback), StringIO(traceback),
'text/plain;charset=%s' % sys.getdefaultencoding(),
- expires=expiry
- )
+ expires=expiry)
return ' -> %s (%s)' % (url, exception_string)
except UploadFailed:
return traceback
- except:
+ except Exception:
# Exceptions raised by the Formatter get swallowed, but we want
# to know about them. Since we are already spitting out exception
# information, we can stuff our own problems in there too.
return '%s\n\nException raised in formatter:\n%s\n' % (
traceback,
- logging.Formatter.formatException(self, sys.exc_info())
- )
+ logging.Formatter.formatException(self, sys.exc_info()))
def logger_options(parser, default=logging.INFO):
@@ -266,18 +311,52 @@
log._log = _logger(parser.values.loglevel, out_stream=sys.stderr)
parser.add_option(
- "-v", "--verbose", dest="loglevel", default=default,
- action="callback", callback=dec_loglevel,
- help="Increase verbosity. May be specified multiple times."
- )
- parser.add_option(
- "-q", "--quiet", action="callback", callback=inc_loglevel,
- help="Decrease verbosity. May be specified multiple times."
- )
- parser.add_option(
- "--log-file", action="store", type="string",
- help="Send log to the given file, rather than stderr."
- )
+ "-v", "--verbose", dest="loglevel", default=default,
+ action="callback", callback=dec_loglevel,
+ help="Increase stderr verbosity. May be specified multiple times.")
+ parser.add_option(
+ "-q", "--quiet", action="callback", callback=inc_loglevel,
+ help="Decrease stderr verbosity. May be specified multiple times.")
+
+ debug_levels = ', '.join([
+ v for k, v in sorted(logging._levelNames.items(), reverse=True)
+ if isinstance(k, int)])
+
+ def log_file(option, opt_str, value, parser):
+ try:
+ level, path = value.split(':', 1)
+ except ValueError:
+ level, path = logging.INFO, value
+
+ if isinstance(level, int):
+ pass
+ elif level.upper() not in logging._levelNames:
+ parser.error(
+ "'%s' is not a valid logging level. Must be one of %s" % (
+ level, debug_levels))
+ else:
+ level = logging._levelNames[level.upper()]
+
+ if not path:
+ parser.error("Path to log file not specified")
+
+ path = os.path.abspath(path)
+ try:
+ open(path, 'a')
+ except Exception:
+ parser.error("Unable to open log file %s" % path)
+
+ parser.values.log_file = path
+ parser.values.log_file_level = level
+
+ # Reset the global log.
+ log._log = _logger(parser.values.loglevel, out_stream=sys.stderr)
+
+ parser.add_option(
+ "--log-file", type="string", action="callback", callback=log_file,
+ metavar="LVL:FILE", default=None,
+ help="Send log messages to FILE. LVL is one of %s" % debug_levels)
+ parser.set_default('log_file_level', None)
# Set the global log
log._log = _logger(default, out_stream=sys.stderr)
@@ -306,12 +385,12 @@
logger_options(parser)
options, args = parser.parse_args()
- if options.log_file:
- out_stream = open(options.log_file, 'a')
- else:
- out_stream = sys.stderr
+ log_file = getattr(options, 'log_file', None)
+ log_file_level = getattr(options, 'log_file_level', None)
- return _logger(options.loglevel, out_stream=out_stream, name=name)
+ return _logger(
+ options.loglevel, out_stream=sys.stderr, name=name,
+ log_file=log_file, log_file_level=log_file_level)
def reset_root_logger():
@@ -325,7 +404,9 @@
root_logger.removeHandler(hdlr)
-def _logger(level, out_stream, name=None):
+def _logger(
+ level, out_stream, name=None,
+ log_file=None, log_file_level=logging.DEBUG):
"""Create the actual logger instance, logging at the given level
if name is None, it will get args[0] without the extension (e.g. gina).
@@ -348,27 +429,34 @@
# both command line tools and cron jobs (command line tools often end
# up being run from inside cron, so this is a good thing).
hdlr = logging.StreamHandler(strm=out_stream)
- if config.instance_name == 'testrunner':
- # Don't output timestamps in the test environment
- fmt = '%(levelname)-7s %(message)s'
- else:
- fmt = '%(asctime)s %(levelname)-7s %(message)s'
- formatter = LibrarianFormatter(
- fmt=fmt,
- datefmt="%Y-%m-%d %H:%M:%S",
- )
- formatter.converter = time.gmtime # Output should be UTC
+ # We set the level on the handler rather than the logger, so other
+ # handlers with different levels can be added for things like debug
+ # logs.
+ root_logger.setLevel(0)
+ hdlr.setLevel(level)
+ formatter = LibrarianFormatter()
hdlr.setFormatter(formatter)
root_logger.addHandler(hdlr)
+ # Add an optional additional log file.
+ if log_file is not None:
+ handler = WatchedFileHandler(log_file, encoding="UTF8")
+ handler.setFormatter(formatter)
+ handler.setLevel(log_file_level)
+ root_logger.addHandler(handler)
+
# Create our logger
logger = logging.getLogger(name)
- logger.setLevel(level)
-
# Set the global log
log._log = logger
+ # Inform the user the extra log file is in operation.
+ if log_file is not None:
+ log.info(
+ "Logging %s and higher messages to %s" % (
+ logging.getLevelName(log_file_level), log_file))
+
return logger
@@ -403,5 +491,3 @@
log = _LogWrapper(logging.getLogger())
-
-
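For cron scripts the OopsHandler wiring happens automatically in LaunchpadCronScript (see lib/lp/services/scripts/base.py further down in this diff). A hand-rolled sketch of the same setup, assuming the default OOPS configuration and an invented script name:

    import logging

    from canonical.launchpad.scripts.logger import OopsHandler
    from canonical.launchpad.webapp.errorlog import globalErrorUtility

    globalErrorUtility.configure()  # pick up the default OOPS configuration
    logging.getLogger().addHandler(OopsHandler('my-script'))

    # Any WARNING (or higher) record now also files an OOPS report.
    logging.getLogger('my-script').warning("disk is nearly full")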
=== modified file 'lib/canonical/launchpad/scripts/tests/loglevels.py'
--- lib/canonical/launchpad/scripts/tests/loglevels.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/scripts/tests/loglevels.py 2010-09-27 14:20:25 +0000
@@ -20,7 +20,7 @@
from optparse import OptionParser
-from canonical.launchpad.scripts.logger import *
+from canonical.launchpad.scripts.logger import logger, logger_options
parser = OptionParser()
logger_options(parser)
@@ -36,12 +36,11 @@
log.warn("This is a warning")
log.info("This is info")
log.debug("This is debug")
-log.log(DEBUG2, "This is debug2")
-log.log(DEBUG3, "This is debug3")
-log.log(DEBUG4, "This is debug4")
-log.log(DEBUG5, "This is debug5")
-log.log(DEBUG6, "This is debug6")
-log.log(DEBUG7, "This is debug7")
-log.log(DEBUG8, "This is debug8")
-log.log(DEBUG9, "This is debug9")
-
+log.debug2("This is debug2")
+log.debug3("This is debug3")
+log.debug4("This is debug4")
+log.debug5("This is debug5")
+log.debug6("This is debug6")
+log.debug7("This is debug7")
+log.debug8("This is debug8")
+log.debug9("This is debug9")
=== modified file 'lib/canonical/launchpad/scripts/tests/test_logger.txt'
--- lib/canonical/launchpad/scripts/tests/test_logger.txt 2010-04-08 22:04:26 +0000
+++ lib/canonical/launchpad/scripts/tests/test_logger.txt 2010-09-27 14:20:25 +0000
@@ -89,12 +89,35 @@
We have 10 debug levels. DEBUG2 to DEBUG9 are custom, defined in
-canonical.launchpad.scripts.logger. DEBUG is part of the standard
-Python library.
+lp.services.log.loglevels. All available loglevels are exported from
+this module, including the standard Python ones. You can see that
+FATAL is an alias for CRITICAL, and DEBUG1 is an alias for DEBUG.
- >>> from canonical.launchpad.scripts.logger import (
- ... DEBUG2, DEBUG3, DEBUG4, DEBUG5,
- ... DEBUG6, DEBUG7, DEBUG8, DEBUG9)
+ >>> from lp.services.log import loglevels
+ >>> levels = [
+ ... loglevels.FATAL, loglevels.CRITICAL, loglevels.ERROR,
+ ... loglevels.WARNING, loglevels.INFO, loglevels.DEBUG,
+ ... loglevels.DEBUG1, loglevels.DEBUG2, loglevels.DEBUG3,
+ ... loglevels.DEBUG4, loglevels.DEBUG5, loglevels.DEBUG6,
+ ... loglevels.DEBUG7, loglevels.DEBUG8, loglevels.DEBUG9]
+ >>> import logging
+ >>> for level in levels:
+ ... print "%2d %s" % (level, logging.getLevelName(level))
+ 50 CRITICAL
+ 50 CRITICAL
+ 40 ERROR
+ 30 WARNING
+ 20 INFO
+ 10 DEBUG
+ 10 DEBUG
+ 9 DEBUG2
+ 8 DEBUG3
+ 7 DEBUG4
+ 6 DEBUG5
+ 5 DEBUG6
+ 4 DEBUG7
+ 3 DEBUG8
+ 2 DEBUG9
>>> test(*["-v"] * 20)
ERROR This is an error
=== modified file 'lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py'
--- lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py 2010-09-27 14:20:25 +0000
@@ -25,7 +25,7 @@
# away.
class FakeOptions:
log_file = '/dev/null'
- loglevel = logging.INFO
+ loglevel = 1000
verbose = False
self.con = connect(lp.dbuser)
self.log = logger(FakeOptions())
=== modified file 'lib/canonical/launchpad/webapp/errorlog.py'
--- lib/canonical/launchpad/webapp/errorlog.py 2010-09-15 22:09:12 +0000
+++ lib/canonical/launchpad/webapp/errorlog.py 2010-09-27 14:20:25 +0000
@@ -31,7 +31,6 @@
from canonical.launchpad.layers import WebServiceLayer
from canonical.launchpad.webapp.adapter import (
get_request_duration,
- get_request_start_time,
soft_timeout_expired,
)
from canonical.launchpad.webapp.interfaces import (
@@ -457,7 +456,7 @@
duration = get_request_duration()
# In principle the timeline is per-request, but see bug=623199 -
# at this point the request is optional, but get_request_timeline
- # does not care; when it starts caring, we will always have a
+ # does not care; when it starts caring, we will always have a
# request object (or some annotations containing object).
# RBC 20100901
timeline = get_request_timeline(request)
@@ -517,8 +516,10 @@
"""Add an oops message to be included in oopses from this context."""
key = self._oops_message_key_iter.next()
self._oops_messages[key] = message
- yield
- del self._oops_messages[key]
+ try:
+ yield
+ finally:
+ del self._oops_messages[key]
globalErrorUtility = ErrorReportingUtility()
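The try/finally added above matters when code inside an oopsMessage context raises; a minimal sketch of the pattern it protects, with an invented message and exception:

    from canonical.launchpad.webapp.errorlog import globalErrorUtility

    try:
        with globalErrorUtility.oopsMessage('processing item 42'):
            raise ValueError('boom')
    except ValueError:
        pass
    # Previously the message was only removed on a clean exit, so it would
    # have leaked into every later OOPS from this utility; with try/finally
    # it is always cleaned up.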
=== modified file 'lib/lp/codehosting/codeimport/tests/test_dispatcher.py'
--- lib/lp/codehosting/codeimport/tests/test_dispatcher.py 2010-08-20 20:31:18 +0000
+++ lib/lp/codehosting/codeimport/tests/test_dispatcher.py 2010-09-27 14:20:25 +0000
@@ -79,6 +79,17 @@
def filterOutLoggingOptions(self, arglist):
"""Remove the standard logging options from a list of arguments."""
+
+ # Calling parser.parse_args as we do below is dangerous,
+ # as if a callback invokes parser.error the test suite
+ # terminates. This hack removes the dangerous argument manually.
+ arglist = [
+ arg for arg in arglist if not arg.startswith('--log-file=')]
+ while '--log-file' in arglist:
+ index = arglist.index('--log-file')
+ del arglist[index] # Delete the argument
+ del arglist[index] # And its parameter
+
parser = OptionParser()
scripts.logger_options(parser)
options, args = parser.parse_args(arglist)
=== added file 'lib/lp/services/log/loglevels.py'
--- lib/lp/services/log/loglevels.py 1970-01-01 00:00:00 +0000
+++ lib/lp/services/log/loglevels.py 2010-09-27 14:20:25 +0000
@@ -0,0 +1,91 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Standard and custom log levels from the standard logging package.
+
+Custom log levels are registered in lp_sitecustomize.py.
+"""
+
+__metaclass__ = type
+__all__ = [
+ 'CRITICAL',
+ 'DEBUG',
+ 'DEBUG1',
+ 'DEBUG2',
+ 'DEBUG3',
+ 'DEBUG4',
+ 'DEBUG5',
+ 'DEBUG6',
+ 'DEBUG7',
+ 'DEBUG8',
+ 'DEBUG9',
+ 'ERROR',
+ 'FATAL',
+ 'INFO',
+ 'LaunchpadLogger',
+ 'WARNING',
+ ]
+
+
+import logging
+
+
+# Reexport standard log levels.
+DEBUG = logging.DEBUG
+INFO = logging.INFO
+WARNING = logging.WARNING
+ERROR = logging.ERROR
+CRITICAL = logging.CRITICAL
+FATAL = logging.FATAL
+
+
+# Custom log levels.
+DEBUG1 = DEBUG
+DEBUG2 = DEBUG - 1
+DEBUG3 = DEBUG - 2
+DEBUG4 = DEBUG - 3
+DEBUG5 = DEBUG - 4
+DEBUG6 = DEBUG - 5
+DEBUG7 = DEBUG - 6
+DEBUG8 = DEBUG - 7
+DEBUG9 = DEBUG - 8
+
+
+class LaunchpadLogger(logging.Logger):
+ """Logger that support our custom levels."""
+
+ def debug1(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG1):
+ self._log(DEBUG1, msg, args, **kwargs)
+
+ def debug2(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG2):
+ self._log(DEBUG2, msg, args, **kwargs)
+
+ def debug3(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG3):
+ self._log(DEBUG3, msg, args, **kwargs)
+
+ def debug4(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG4):
+ self._log(DEBUG4, msg, args, **kwargs)
+
+ def debug5(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG5):
+ self._log(DEBUG5, msg, args, **kwargs)
+
+ def debug6(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG6):
+ self._log(DEBUG6, msg, args, **kwargs)
+
+ def debug7(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG7):
+ self._log(DEBUG7, msg, args, **kwargs)
+
+ def debug8(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG8):
+ self._log(DEBUG8, msg, args, **kwargs)
+
+ def debug9(self, msg, *args, **kwargs):
+ if self.isEnabledFor(DEBUG9):
+ self._log(DEBUG9, msg, args, **kwargs)
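A quick sketch of the new levels from a call site, assuming add_custom_loglevels() in lp_sitecustomize.py (later in this diff) has already installed LaunchpadLogger as the logger class and registered the level names:

    import logging

    from lp.services.log import loglevels

    logging.basicConfig(format='%(levelname)s %(message)s')
    log = logging.getLogger('example')  # a LaunchpadLogger instance
    log.setLevel(loglevels.DEBUG4)      # DEBUG4 == 7
    log.debug2("emitted: DEBUG2 (9) is at or above the DEBUG4 threshold")
    log.debug6("suppressed: DEBUG6 (5) is below the DEBUG4 threshold")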
=== added file 'lib/lp/services/log/mappingfilter.py'
--- lib/lp/services/log/mappingfilter.py 1970-01-01 00:00:00 +0000
+++ lib/lp/services/log/mappingfilter.py 2010-09-27 14:20:25 +0000
@@ -0,0 +1,27 @@
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""A logging.Filter to alter log levels of log records."""
+
+__metaclass__ = type
+__all__ = ['MappingFilter']
+
+
+import logging
+
+
+class MappingFilter(logging.Filter):
+ """logging.Filter that alters the level of records using a mapping."""
+
+ def __init__(self, mapping, name=''):
+ """mapping is in the form {10: (8, 'DEBUG3')}"""
+ logging.Filter.__init__(self, name)
+ self.mapping = mapping
+ self._dotted_name = name + '.'
+
+ def filter(self, record):
+ if (record.levelno in self.mapping and (
+ not record.name or self.name == record.name
+ or record.name.startswith(self._dotted_name))):
+ record.levelno, record.levelname = self.mapping[record.levelno]
+ return True
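For illustration, the same shape of mapping that silence_zcml_logger in lp_sitecustomize.py (below) installs, applied to a standalone 'config' logger:

    import logging

    from lp.services.log.mappingfilter import MappingFilter

    logging.basicConfig(level=1, format='%(levelname)s %(message)s')
    config_log = logging.getLogger('config')
    config_log.setLevel(1)
    # Demote DEBUG (10) records on the 'config' logger to level 7 / DEBUG4.
    config_log.addFilter(MappingFilter({logging.DEBUG: (7, 'DEBUG4')}, 'config'))
    config_log.debug("printed with levelname DEBUG4 rather than DEBUG")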
=== modified file 'lib/lp/services/scripts/base.py'
--- lib/lp/services/scripts/base.py 2010-09-17 04:45:30 +0000
+++ lib/lp/services/scripts/base.py 2010-09-27 14:20:25 +0000
@@ -29,15 +29,20 @@
from canonical.database.sqlbase import ISOLATION_LEVEL_DEFAULT
from canonical.launchpad import scripts
from canonical.launchpad.interfaces import IScriptActivitySet
+from canonical.launchpad.scripts.logger import OopsHandler
+from canonical.launchpad.webapp.errorlog import globalErrorUtility
from canonical.launchpad.webapp.interaction import (
ANONYMOUS,
setupInteractionByEmail,
)
from canonical.lp import initZopeless
+from lp.services.log.mappingfilter import MappingFilter
+from lp.services.log.nullhandler import NullHandler
+
LOCK_PATH = "/var/lock/"
-UTC = pytz.timezone('UTC')
+UTC = pytz.UTC
class LaunchpadScriptFailure(Exception):
@@ -67,6 +72,35 @@
exit_status = 1
+def log_unhandled_exception_and_exit(func):
+ """Decorator that logs unhandled exceptions via the logging module.
+
+ Exceptions are reraised except at the top level, i.e. they propagate
+ no further than the outermost decorated method. At the top level,
+ an exception is logged and causes the script to terminate.
+
+ Only for methods of `LaunchpadScript` and subclasses. Not thread safe,
+ which is fine as the decorated LaunchpadScript methods are only
+ invoked from the main thread.
+ """
+
+ def log_unhandled_exceptions_func(self, *args, **kw):
+ try:
+ self._log_unhandled_exceptions_level += 1
+ return func(self, *args, **kw)
+ except Exception:
+ if self._log_unhandled_exceptions_level == 1:
+ # self.logger is setup in LaunchpadScript.__init__() so
+ # we can use it.
+ self.logger.exception("Unhandled exception")
+ sys.exit(1)
+ else:
+ raise
+ finally:
+ self._log_unhandled_exceptions_level -= 1
+ return log_unhandled_exceptions_func
+
+
class LaunchpadScript:
"""A base class for runnable scripts and cronscripts.
@@ -102,6 +136,9 @@
lockfilepath = None
loglevel = logging.INFO
+ # State for the log_unhandled_exceptions decorator.
+ _log_unhandled_exceptions_level = 0
+
def __init__(self, name=None, dbuser=None, test_args=None):
"""Construct new LaunchpadScript.
@@ -170,6 +207,7 @@
#
# Convenience or death
#
+ @log_unhandled_exception_and_exit
def login(self, user):
"""Super-convenience method that avoids the import."""
setupInteractionByEmail(user)
@@ -196,6 +234,7 @@
"""
self.lock = GlobalLock(self.lockfilepath, logger=self.logger)
+ @log_unhandled_exception_and_exit
def lock_or_die(self, blocking=False):
"""Attempt to lock, and sys.exit(1) if the lock's already taken.
@@ -209,6 +248,7 @@
self.logger.debug('Lockfile %s in use' % self.lockfilepath)
sys.exit(1)
+ @log_unhandled_exception_and_exit
def lock_or_quit(self, blocking=False):
"""Attempt to lock, and sys.exit(0) if the lock's already taken.
@@ -223,6 +263,7 @@
self.logger.info('Lockfile %s in use' % self.lockfilepath)
sys.exit(0)
+ @log_unhandled_exception_and_exit
def unlock(self, skip_delete=False):
"""Release the lock. Do this before going home.
@@ -232,6 +273,7 @@
"""
self.lock.release(skip_delete=skip_delete)
+ @log_unhandled_exception_and_exit
def run(self, use_web_security=False, implicit_begin=True,
isolation=ISOLATION_LEVEL_DEFAULT):
"""Actually run the script, executing zcml and initZopeless."""
@@ -277,6 +319,7 @@
#
# Make things happen
#
+ @log_unhandled_exception_and_exit
def lock_and_run(self, blocking=False, skip_delete=False,
use_web_security=False, implicit_begin=True,
isolation=ISOLATION_LEVEL_DEFAULT):
@@ -306,11 +349,24 @@
"""
super(LaunchpadCronScript, self).__init__(name, dbuser, test_args)
+ name = self.name
+
+ # Configure the IErrorReportingUtility we use with defaults.
+ # Scripts can override this if they want.
+ globalErrorUtility.configure()
+
+ # Scripts do not have a zlog.
+ globalErrorUtility.copy_to_zlog = False
+
+ # WARN and higher log messages should generate OOPS reports.
+ logging.getLogger().addHandler(OopsHandler(name))
+
enabled = cronscript_enabled(
- config.canonical.cron_control_url, self.name, self.logger)
+ config.canonical.cron_control_url, name, self.logger)
if not enabled:
sys.exit(0)
+ @log_unhandled_exception_and_exit
def record_activity(self, date_started, date_completed):
"""Record the successful completion of the script."""
self.txn.begin()
@@ -332,6 +388,7 @@
control_fp = urlopen(control_url, timeout=5)
else:
control_fp = urlopen(control_url)
+ # Yuck. API makes it hard to catch 'does not exist'.
except HTTPError, error:
if error.code == 404:
log.debug("Cronscript control file not found at %s", control_url)
@@ -344,7 +401,7 @@
return True
log.exception("Error loading %s" % control_url)
return True
- except:
+ except Exception:
log.exception("Error loading %s" % control_url)
return True
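The decorator's behaviour of logging only at the outermost decorated frame can be sketched with a toy stand-in for LaunchpadScript; ToyScript and its methods are invented for illustration:

    import logging

    from lp.services.scripts.base import log_unhandled_exception_and_exit


    class ToyScript:
        # The decorator tracks nesting depth through this attribute.
        _log_unhandled_exceptions_level = 0
        logger = logging.getLogger('toy')

        @log_unhandled_exception_and_exit
        def inner(self):
            raise RuntimeError("boom")  # not outermost: reraised unchanged

        @log_unhandled_exception_and_exit
        def outer(self):
            self.inner()  # exception logged once here, then sys.exit(1)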
=== added file 'lib/lp/services/scripts/doc/launchpad-scripts.txt'
--- lib/lp/services/scripts/doc/launchpad-scripts.txt 1970-01-01 00:00:00 +0000
+++ lib/lp/services/scripts/doc/launchpad-scripts.txt 2010-09-27 14:20:25 +0000
@@ -0,0 +1,39 @@
+Launchpad Scripts
+=================
+
+Launchpad scripts are built using the LaunchpadScript class in
+lp.services.scripts.base. This, along with the LaunchpadCronScript
+specialization, implement common behavior for all Launchpad command
+line scripts.
+
+ >>> from lp.services.scripts.base import (
+ ... LaunchpadScript, LaunchpadCronScript)
+
+
+Scripts report their execution using the standard Python logging
+module, with command line arguments and logging setup provided by
+LaunchpadScript. Unhandled exceptions from scripts are automatically
+sent to the Python logging system. Cronscripts (scripts using
+LaunchpadCronScript) also log warnings and errors as OOPS reports.
+
+ >>> import os.path, subprocess, sys
+ >>> from canonical.config import config
+ >>> cronscript_crash_path = os.path.join(
+ ... config.root, 'lib', 'lp', 'services', 'scripts', 'tests',
+ ... 'cronscript-crash.py')
+ >>> p = subprocess.Popen(
+ ... [sys.executable, cronscript_crash_path, '-vq'],
+ ... stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ ... stdin=subprocess.PIPE)
+ >>> print p.communicate()[0]
+ INFO Creating lockfile: ...
+ WARNING This is a warning
+ INFO New OOPS detected
+ CRITICAL This is critical
+ INFO New OOPS detected
+ ERROR Unhandled exception
+ ...
+ NotImplementedError: Whoops
+ >>> p.returncode
+ 1
+
=== added symlink 'lib/lp/services/scripts/tests/_pythonpath.py'
=== target is u'../../../../../_pythonpath.py'
=== added file 'lib/lp/services/scripts/tests/cronscript-crash.py'
--- lib/lp/services/scripts/tests/cronscript-crash.py 1970-01-01 00:00:00 +0000
+++ lib/lp/services/scripts/tests/cronscript-crash.py 2010-09-27 14:20:25 +0000
@@ -0,0 +1,44 @@
+#!/usr/bin/python -S
+# Copyright 2010 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Cronscript that raises an unhandled exception."""
+
+__metaclass__ = type
+__all__ = []
+
+import _pythonpath
+
+from lp.services.scripts.base import LaunchpadCronScript
+
+from canonical.launchpad.webapp.errorlog import globalErrorUtility
+
+
+class CrashScript(LaunchpadCronScript):
+
+ def last_oops_id(self):
+ return getattr(globalErrorUtility.getLastOopsReport(), 'id', None)
+
+ def main(self):
+ initial_oops = self.last_oops_id()
+
+ self.logger.debug("This is debug level")
+ # Debug messages do not generate an OOPS.
+ assert self.last_oops_id() == initial_oops
+
+ self.logger.warn("This is a warning")
+ first_oops = self.last_oops_id()
+ if first_oops != initial_oops:
+ self.logger.info("New OOPS detected")
+
+ self.logger.critical("This is critical")
+ second_oops = self.last_oops_id()
+ if second_oops != first_oops:
+ self.logger.info("New OOPS detected")
+
+ raise NotImplementedError("Whoops")
+
+
+if __name__ == "__main__":
+ script = CrashScript("crash")
+ script.lock_and_run()
=== modified file 'lib/lp/services/scripts/tests/test_doc.py'
--- lib/lp/services/scripts/tests/test_doc.py 2010-08-20 20:31:18 +0000
+++ lib/lp/services/scripts/tests/test_doc.py 2010-09-27 14:20:25 +0000
@@ -12,7 +12,7 @@
setUp,
tearDown,
)
-from canonical.testing import LaunchpadZopelessLayer
+from canonical.testing import DatabaseLayer, LaunchpadZopelessLayer
from lp.services.testing import build_test_suite
@@ -22,9 +22,14 @@
'script-monitoring.txt': LayeredDocFileSuite(
'../doc/script-monitoring.txt',
setUp=setUp, tearDown=tearDown,
- layer=LaunchpadZopelessLayer
+ layer=LaunchpadZopelessLayer,
+ ),
+ 'launchpad-scripts.txt': LayeredDocFileSuite(
+ '../doc/launchpad-scripts.txt',
+ layer=DatabaseLayer,
),
}
+
def test_suite():
return build_test_suite(here, special)
=== modified file 'lib/lp/translations/doc/remove-translations-by.txt'
--- lib/lp/translations/doc/remove-translations-by.txt 2009-07-02 17:16:50 +0000
+++ lib/lp/translations/doc/remove-translations-by.txt 2010-09-27 14:20:25 +0000
@@ -80,7 +80,7 @@
Und jetzt auf deutsch.
>>> options = [
- ... '-vvv',
+ ... '-v',
... '--submitter=%s' % nl_message.submitter.id,
... '--reviewer=%s' % nl_message.reviewer.id,
... '--id=%s' % str(1),
@@ -140,7 +140,7 @@
database.
>>> (returncode, out, err) = run_script(script, [
- ... '-vvv',
+ ... '-v',
... '--potemplate=%s' % de_pofile.potemplate.id,
... '--force',
... '--dry-run',
=== modified file 'lib/lp/translations/scripts/tests/test_reupload_translations.py'
--- lib/lp/translations/scripts/tests/test_reupload_translations.py 2010-08-20 20:31:18 +0000
+++ lib/lp/translations/scripts/tests/test_reupload_translations.py 2010-09-27 14:20:25 +0000
@@ -190,7 +190,7 @@
'-s', self.distroseries.name,
'-p', self.sourcepackagename1.name,
'-p', self.sourcepackagename2.name,
- '-vvv',
+ '-vv',
'--dry-run',
])
@@ -204,7 +204,9 @@
"INFO\s*Processing [^\s]+ in .*\n"
"WARNING\s*Found no translations upload for .*\n"
"INFO\s*Done.\n")
- self.assertTrue(re.match(expected_output, stderr))
+ self.assertTrue(
+ re.match(expected_output, stderr),
+ 'expected %s, got %s' % (expected_output, stderr))
def test_suite():
=== modified file 'lib/lp_sitecustomize.py'
--- lib/lp_sitecustomize.py 2010-09-23 13:17:12 +0000
+++ lib/lp_sitecustomize.py 2010-09-27 14:20:25 +0000
@@ -10,20 +10,72 @@
import logging
from bzrlib.branch import Branch
+from lp.services.log import loglevels
+from lp.services.log.mappingfilter import MappingFilter
from lp.services.log.nullhandler import NullHandler
from lp.services.mime import customizeMimetypes
from zope.security import checker
+def add_custom_loglevels():
+ """Add out custom log levels to the Python logging package."""
+
+ # This import installs custom ZODB loglevels, which we can then
+ # override. BLATHER is between INFO and DEBUG, so we can leave it.
+ # TRACE conflicts with DEBUG6, and since we are not using ZEO, we
+ # just overwrite the level string by calling addLevelName.
+ from ZODB.loglevels import BLATHER, TRACE
+
+ # Confirm our above assumptions, and silence lint at the same time.
+ assert BLATHER == 15
+ assert TRACE == loglevels.DEBUG6
+
+ logging.addLevelName(loglevels.DEBUG2, 'DEBUG2')
+ logging.addLevelName(loglevels.DEBUG3, 'DEBUG3')
+ logging.addLevelName(loglevels.DEBUG4, 'DEBUG4')
+ logging.addLevelName(loglevels.DEBUG5, 'DEBUG5')
+ logging.addLevelName(loglevels.DEBUG6, 'DEBUG6')
+ logging.addLevelName(loglevels.DEBUG7, 'DEBUG7')
+ logging.addLevelName(loglevels.DEBUG8, 'DEBUG8')
+ logging.addLevelName(loglevels.DEBUG9, 'DEBUG9')
+
+ # Install our customized Logger that provides easy access to our
+ # custom loglevels.
+ logging.setLoggerClass(loglevels.LaunchpadLogger)
+
+
def silence_bzr_logger():
"""Install the NullHandler on the bzr logger to silence logs."""
logging.getLogger('bzr').addHandler(NullHandler())
+
+def silence_zcml_logger():
+ """Lower level of ZCML parsing DEBUG messages."""
+ config_filter = MappingFilter(
+ {logging.DEBUG: (7, 'DEBUG4')}, 'config')
+ logging.getLogger('config').addFilter(config_filter)
+
+
+def silence_transaction_logger():
+ """Lower level of DEBUG messages from the transaction module."""
+ # Transaction logging is too noisy. Lower its DEBUG messages
+ # to DEBUG3. Transactions log to loggers named txn.<thread_id>,
+ # so we need to register a null handler with a filter to ensure
+ # the logging records get mutated before being propagated up
+ # to higher level loggers.
+ txn_handler = NullHandler()
+ txn_filter = MappingFilter(
+ {logging.DEBUG: (8, 'DEBUG3')}, 'txn')
+ txn_handler.addFilter(txn_filter)
+ logging.getLogger('txn').addHandler(txn_handler)
+
+
def dont_wrap_class_and_subclasses(cls):
checker.BasicTypes.update({cls: checker.NoProxy})
for subcls in cls.__subclasses__():
dont_wrap_class_and_subclasses(subcls)
+
def silence_warnings():
"""Silence warnings across the entire Launchpad project."""
# pycrypto-2.0.1 on Python2.6:
@@ -34,6 +86,7 @@
category=DeprecationWarning,
module="Crypto")
+
def main():
# Note that we configure the LPCONFIG environmental variable in the
# custom buildout-generated sitecustomize.py in
@@ -44,6 +97,7 @@
# goes into this custom sitecustomize.py. We do as much other
# initialization as possible here, in a more visible place.
os.environ['STORM_CEXTENSIONS'] = '1'
+ add_custom_loglevels()
customizeMimetypes()
dont_wrap_class_and_subclasses(Branch)
checker.BasicTypes[itertools.groupby] = checker._iteratorChecker
@@ -53,5 +107,7 @@
checker.BasicTypes[grouper] = checker._iteratorChecker
silence_warnings()
silence_bzr_logger()
+ silence_zcml_logger()
+ silence_transaction_logger()
main()
=== modified file 'scripts/branch-rewrite.py'
--- scripts/branch-rewrite.py 2010-04-27 19:48:39 +0000
+++ scripts/branch-rewrite.py 2010-09-27 14:20:25 +0000
@@ -24,11 +24,16 @@
from canonical.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT
from canonical.config import config
from lp.codehosting.rewrite import BranchRewriter
+from lp.services.log.loglevels import INFO, WARNING
from lp.services.scripts.base import LaunchpadScript
class BranchRewriteScript(LaunchpadScript):
+ # By default, only emit WARNING and above messages to stderr, which
+ # will end up in the Apache error log.
+ loglevel = WARNING
+
def add_my_options(self):
"""Make the logging go to a file by default.
@@ -43,6 +48,7 @@
if not os.path.isdir(log_file_directory):
os.makedirs(log_file_directory)
self.parser.defaults['log_file'] = log_file_location
+ self.parser.defaults['log_file_level'] = INFO
def main(self):
rewriter = BranchRewriter(self.logger)
=== modified file 'utilities/sourcedeps.conf'
--- utilities/sourcedeps.conf 2010-09-08 15:04:25 +0000
+++ utilities/sourcedeps.conf 2010-09-27 14:20:25 +0000
@@ -14,4 +14,4 @@
subvertpy lp:~launchpad-pqm/subvertpy/trunk;revno=2042
python-debian lp:~launchpad-pqm/python-debian/devel;revno=185
testresources lp:~launchpad-pqm/testresources/dev;revno=16
-shipit lp:~launchpad-pqm/shipit/trunk;revno=8911 optional
+shipit lp:~launchpad-pqm/shipit/trunk;revno=8916 optional