launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #04888
[Merge] lp:~wgrant/launchpad/more-zopeless-destruction into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/more-zopeless-destruction into lp:launchpad with lp:~wgrant/launchpad/destroy-lots-of-db-cruft as a prerequisite.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~wgrant/launchpad/more-zopeless-destruction/+merge/74367
This branch continues to strip down the Zopeless APIs, preparing for their eventual destruction.
initZopeless no longer takes dbname/host arguments, bringing it down to just overriding user and isolation. Its internals are now simplified, as it doesn't have to mutate rw_main_master. And since dbuser is mandatory these days, the config overlay is now constructed in one hit.
Only one callsite, utilities/check-sampledata.py, really needed dbname override functionality... but it was long-broken and unused and pointless and sampledata must die, so I deleted it instead of unbitrotting it.
All LaunchpadZopelessLayer.alterConnection callsites either changed the user or set the transaction isolation to what is now the default, so they're all fixed to use switchDbUser or lp.testing.dbuser.dbuser instead, and alterConnection is privatised.
--
https://code.launchpad.net/~wgrant/launchpad/more-zopeless-destruction/+merge/74367
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/more-zopeless-destruction into lp:launchpad.
=== modified file 'lib/canonical/database/sqlbase.py'
--- lib/canonical/database/sqlbase.py 2011-09-07 09:08:14 +0000
+++ lib/canonical/database/sqlbase.py 2011-09-07 09:08:15 +0000
@@ -291,39 +291,24 @@
"directly instantiated.")
@classmethod
- def _get_zopeless_connection_config(self, dbname, dbhost):
- # This method exists for testability.
-
- # This is only used by scripts, so we must connect to the read-write
- # DB here -- that's why we use rw_main_master directly.
+ def initZopeless(cls, dbuser=None, isolation=ISOLATION_LEVEL_DEFAULT):
from canonical.database.postgresql import ConnectionString
+
+ # The configured connection string should contain a user only if
+ # the db_options() -U option has been used. It looks like none
+ # of those scripts use initZopeless, so this should be safe.
+ # This is a temporary measure until we sort out dbconfig.dbuser
+ # and LaunchpadDatabase.raw_connection's user= settings.
main_connection_string = ConnectionString(dbconfig.rw_main_master)
-
- # Override dbname and dbhost in the connection string if they
- # have been passed in.
- if dbname is None:
- dbname = main_connection_string.dbname
- else:
- main_connection_string.dbname = dbname
-
- if dbhost is None:
- dbhost = main_connection_string.host
- else:
- main_connection_string.host = dbhost
-
- return str(main_connection_string), dbname, dbhost
-
- @classmethod
- def initZopeless(cls, dbname=None, dbhost=None, dbuser=None,
- isolation=ISOLATION_LEVEL_DEFAULT):
-
- main_connection_string, dbname, dbhost = (
- cls._get_zopeless_connection_config(dbname, dbhost))
-
- assert dbuser is not None, '''
- dbuser is now required. All scripts must connect as unique
- database users.
- '''
+ if main_connection_string.user is not None:
+ raise AssertionError(
+ "Can't initZopeless with user in connection string. "
+ "Did you use db_options() and initZopeless() in one script?")
+
+ if dbuser is None:
+ raise AssertionError(
+ "dbuser is now required. All scripts must connect as unique "
+ "database users.")
isolation_level = {
ISOLATION_LEVEL_AUTOCOMMIT: 'autocommit',
@@ -333,18 +318,13 @@
# Construct a config fragment:
overlay = dedent("""\
[database]
- rw_main_master: %(main_connection_string)s
isolation_level: %(isolation_level)s
- """ % {
- 'isolation_level': isolation_level,
- 'main_connection_string': main_connection_string,
- })
- if dbuser:
- overlay += dedent("""\
- [launchpad]
- dbuser: %(dbuser)s
- """ % {'dbuser': dbuser})
+ [launchpad]
+ dbuser: %(dbuser)s
+ """ % dict(
+ isolation_level=isolation_level,
+ dbuser=dbuser))
if cls._installed is not None:
if cls._config_overlay != overlay:
@@ -357,8 +337,6 @@
else:
config.push(cls._CONFIG_OVERLAY_NAME, overlay)
cls._config_overlay = overlay
- cls._dbname = dbname
- cls._dbhost = dbhost
cls._dbuser = dbuser
cls._isolation = isolation
cls._reset_stores()
@@ -419,7 +397,7 @@
assert cls._installed is not None, (
"ZopelessTransactionManager not installed")
cls.uninstall()
- cls.initZopeless(cls._dbname, cls._dbhost, cls._dbuser, isolation)
+ cls.initZopeless(cls._dbuser, isolation)
@staticmethod
def conn():
=== modified file 'lib/canonical/lp/__init__.py'
--- lib/canonical/lp/__init__.py 2011-09-07 09:08:14 +0000
+++ lib/canonical/lp/__init__.py 2011-09-07 09:08:15 +0000
@@ -28,12 +28,11 @@
return ZopelessTransactionManager._installed is not None
-def initZopeless(dbname=None, dbhost=None, dbuser=None,
- isolation=ISOLATION_LEVEL_DEFAULT):
+def initZopeless(dbuser=None, isolation=ISOLATION_LEVEL_DEFAULT):
"""Initialize the Zopeless environment."""
if dbuser is None:
dbuser = (
ConnectionString(dbconfig.main_master).user or dbconfig.dbuser)
return ZopelessTransactionManager.initZopeless(
- dbname=dbname, dbhost=dbhost, dbuser=dbuser, isolation=isolation)
+ dbuser=dbuser, isolation=isolation)
=== modified file 'lib/canonical/lp/ftests/test_zopeless.py'
--- lib/canonical/lp/ftests/test_zopeless.py 2010-10-17 05:02:20 +0000
+++ lib/canonical/lp/ftests/test_zopeless.py 2011-09-07 09:08:15 +0000
@@ -13,12 +13,14 @@
import psycopg2
from sqlobject import StringCol, IntCol
-from canonical.database.sqlbase import SQLBase, alreadyInstalledMsg, cursor
+from canonical.database.sqlbase import (
+ alreadyInstalledMsg,
+ connect,
+ cursor,
+ SQLBase,
+ )
from canonical.lp import initZopeless
-from canonical.testing.layers import (
- DatabaseLayer,
- LaunchpadScriptLayer,
- )
+from canonical.testing.layers import LaunchpadScriptLayer
class MoreBeer(SQLBase):
@@ -50,11 +52,8 @@
# Calling initZopeless with the same arguments twice should return
# the exact same object twice, but also emit a warning.
try:
- dbname = DatabaseLayer._db_fixture.dbname
- tm1 = initZopeless(
- dbname=dbname, dbhost='', dbuser='launchpad')
- tm2 = initZopeless(
- dbname=dbname, dbhost='', dbuser='launchpad')
+ tm1 = initZopeless(dbuser='launchpad')
+ tm2 = initZopeless(dbuser='launchpad')
self.failUnless(tm1 is tm2)
self.failUnless(self.warned)
finally:
@@ -73,8 +72,7 @@
layer = LaunchpadScriptLayer
def setUp(self):
- self.tm = initZopeless(dbname=DatabaseLayer._db_fixture.dbname,
- dbuser='launchpad')
+ self.tm = initZopeless(dbuser='launchpad')
c = cursor()
c.execute("CREATE TABLE morebeer ("
@@ -187,7 +185,7 @@
self.tm.commit()
# Make another change from a non-SQLObject connection, and commit that
- conn = psycopg2.connect('dbname=' + DatabaseLayer._db_fixture.dbname)
+ conn = connect()
cur = conn.cursor()
cur.execute("BEGIN TRANSACTION;")
cur.execute("UPDATE MoreBeer SET rating=4 "
@@ -207,8 +205,7 @@
>>> isZopeless()
False
- >>> tm = initZopeless(dbname=DatabaseLayer._db_fixture.dbname,
- ... dbhost='', dbuser='launchpad')
+ >>> tm = initZopeless(dbuser='launchpad')
>>> isZopeless()
True
@@ -218,6 +215,7 @@
"""
+
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestZopeless))
=== modified file 'lib/canonical/testing/layers.py'
--- lib/canonical/testing/layers.py 2011-08-19 13:58:57 +0000
+++ lib/canonical/testing/layers.py 2011-09-07 09:08:15 +0000
@@ -1555,11 +1555,11 @@
@classmethod
@profiled
def switchDbUser(cls, dbuser):
- LaunchpadZopelessLayer.alterConnection(dbuser=dbuser)
+ LaunchpadZopelessLayer._alterConnection(dbuser=dbuser)
@classmethod
@profiled
- def alterConnection(cls, **kw):
+ def _alterConnection(cls, **kw):
"""Reset the connection, and reopen the connection by calling
initZopeless with the given keyword arguments.
"""
=== modified file 'lib/lp/soyuz/scripts/tests/test_queue.py'
--- lib/lp/soyuz/scripts/tests/test_queue.py 2011-08-01 05:25:59 +0000
+++ lib/lp/soyuz/scripts/tests/test_queue.py 2011-09-07 09:08:15 +0000
@@ -18,7 +18,6 @@
from zope.security.proxy import removeSecurityProxy
from canonical.config import config
-from canonical.database.sqlbase import ISOLATION_LEVEL_READ_COMMITTED
from canonical.launchpad.database.librarian import LibraryFileAlias
from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
from canonical.launchpad.interfaces.lpstorm import IStore
@@ -78,9 +77,7 @@
def setUp(self):
# Switch database user and set isolation level to READ COMMITTED
# to avoid SERIALIZATION exceptions with the Librarian.
- LaunchpadZopelessLayer.alterConnection(
- dbuser=self.dbuser,
- isolation=ISOLATION_LEVEL_READ_COMMITTED)
+ LaunchpadZopelessLayer.switchDbUser(self.dbuser)
def _test_display(self, text):
"""Store output from queue tool for inspection."""
=== modified file 'lib/lp/soyuz/tests/soyuz.py'
--- lib/lp/soyuz/tests/soyuz.py 2011-06-28 15:04:29 +0000
+++ lib/lp/soyuz/tests/soyuz.py 2011-09-07 09:08:15 +0000
@@ -30,6 +30,7 @@
BinaryPackagePublishingHistory,
SourcePackagePublishingHistory,
)
+from lp.testing.dbuser import dbuser
from lp.testing.sampledata import (
BUILDD_ADMIN_USERNAME,
CHROOT_LIBRARYFILEALIAS,
@@ -153,17 +154,13 @@
Store the `FakePackager` object used in the test uploads as `packager`
so the tests can reuse it if necessary.
"""
- self.layer.alterConnection(dbuser=LAUNCHPAD_DBUSER_NAME)
-
- fake_chroot = LibraryFileAlias.get(CHROOT_LIBRARYFILEALIAS)
- ubuntu = getUtility(IDistributionSet).getByName(
- UBUNTU_DISTRIBUTION_NAME)
- warty = ubuntu.getSeries(WARTY_DISTROSERIES_NAME)
- warty[I386_ARCHITECTURE_NAME].addOrUpdateChroot(fake_chroot)
-
- self.layer.txn.commit()
-
- self.layer.alterConnection(dbuser=self.dbuser)
+ with dbuser(LAUNCHPAD_DBUSER_NAME):
+ fake_chroot = LibraryFileAlias.get(CHROOT_LIBRARYFILEALIAS)
+ ubuntu = getUtility(IDistributionSet).getByName(
+ UBUNTU_DISTRIBUTION_NAME)
+ warty = ubuntu.getSeries(WARTY_DISTROSERIES_NAME)
+ warty[I386_ARCHITECTURE_NAME].addOrUpdateChroot(fake_chroot)
+
self.packager = self.uploadTestPackages()
self.layer.txn.commit()
=== modified file 'lib/lp/soyuz/tests/test_doc.py'
--- lib/lp/soyuz/tests/test_doc.py 2011-08-12 19:15:43 +0000
+++ lib/lp/soyuz/tests/test_doc.py 2011-09-07 09:08:15 +0000
@@ -10,10 +10,7 @@
import unittest
from canonical.config import config
-from canonical.database.sqlbase import (
- commit,
- ISOLATION_LEVEL_READ_COMMITTED,
- )
+from canonical.database.sqlbase import commit
from canonical.launchpad.ftests import logout
from canonical.launchpad.testing.pages import PageTestSuite
from canonical.launchpad.testing.systemdocs import (
@@ -63,8 +60,7 @@
"""Setup the connection for the build master tests."""
test_dbuser = config.builddmaster.dbuser
test.globs['test_dbuser'] = test_dbuser
- LaunchpadZopelessLayer.alterConnection(
- dbuser=test_dbuser, isolation=ISOLATION_LEVEL_READ_COMMITTED)
+ LaunchpadZopelessLayer.switchDbUser(test_dbuser)
setGlobs(test)
=== modified file 'lib/lp/soyuz/tests/test_packagediff.py'
--- lib/lp/soyuz/tests/test_packagediff.py 2011-01-14 11:02:44 +0000
+++ lib/lp/soyuz/tests/test_packagediff.py 2011-09-07 09:08:15 +0000
@@ -20,6 +20,7 @@
from canonical.testing.layers import LaunchpadZopelessLayer
from lp.soyuz.enums import PackageDiffStatus
from lp.soyuz.tests.soyuz import TestPackageDiffsBase
+from lp.testing.dbuser import dbuser
class TestPackageDiffs(TestPackageDiffsBase):
@@ -58,10 +59,8 @@
AND sprf.SourcePackageRelease = spr.id
AND sprf.libraryfile = lfa.id
""" % sqlvalues(source.id)
- self.layer.alterConnection(dbuser='launchpad')
- result = store.execute(query)
- self.layer.txn.commit()
- self.layer.alterConnection(dbuser=self.dbuser)
+ with dbuser('launchpad'):
+ store.execute(query)
def test_packagediff_with_expired_and_deleted_lfas(self):
# Test the case where files required for the diff are expired *and*
=== removed file 'utilities/check-sampledata.py'
--- utilities/check-sampledata.py 2010-04-27 19:48:39 +0000
+++ utilities/check-sampledata.py 1970-01-01 00:00:00 +0000
@@ -1,364 +0,0 @@
-#! /usr/bin/python -S
-#
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""
-check-sampledata.py - Perform various checks on Sample Data
-
-= Launchpad Sample Data Consistency Checks =
-
-XXX flacoste 2007/03/08 Once all problems exposed by this script are solved,
-it should be integrated to our automated test suite.
-
-This script verify that all objects in sample data provides the interfaces
-they are supposed to. It also makes sure that the object pass its schema
-validation.
-
-Finally, it can also be used to report about sample data lacking in breadth.
-
-"""
-
-__metatype__ = type
-
-import _pythonpath
-
-import inspect
-from optparse import OptionParser
-import re
-from textwrap import dedent
-
-from psycopg2 import ProgrammingError
-
-from zope.interface import providedBy
-from zope.interface.exceptions import (
- BrokenImplementation, BrokenMethodImplementation)
-from zope.interface.verify import verifyObject
-from zope.schema.interfaces import IField, ValidationError
-
-from canonical.database.sqlbase import SQLBase
-import canonical.launchpad.database
-from canonical.lp import initZopeless
-from canonical.launchpad.scripts import execute_zcml_for_scripts
-
-
-def get_class_name(cls):
- """Return the class name without its package prefix."""
- return cls.__name__.split('.')[-1]
-
-
-def error_msg(error):
- """Convert an exception to a proper error.
-
- It make sure that the exception type is in the message and takes care
- of possible unicode conversion error.
- """
- try:
- return "%s: %s" % (get_class_name(error.__class__), str(error))
- except UnicodeEncodeError:
- return "UnicodeEncodeError in str(%s)" % error.__class__.__name__
-
-
-class SampleDataVerification:
- """Runs various checks on sample data and report about them."""
-
- def __init__(self, dbname="launchpad_ftest_template", dbuser="launchpad",
- table_filter=None, min_rows=10, only_summary=False):
- """Initialize the verification object.
-
- :param dbname: The database which contains the sample data to check.
- :param dbuser: The user to connect as.
- """
- self.txn = initZopeless(dbname=dbname, dbuser=dbuser)
- execute_zcml_for_scripts()
- self.classes_with_error = {}
- self.class_rows = {}
- self.table_filter = table_filter
- self.min_rows = min_rows
- self.only_summary = only_summary
-
- def findSQLBaseClasses(self):
- """Return an iterator over the classes in canonical.launchpad.database
- that extends SQLBase.
- """
- if self.table_filter:
- include_only_re = re.compile(self.table_filter)
- for class_name in dir(canonical.launchpad.database):
- if self.table_filter and not include_only_re.search(class_name):
- continue
- cls = getattr(canonical.launchpad.database, class_name)
- if inspect.isclass(cls) and issubclass(cls, SQLBase):
- yield cls
-
- def fetchTableRowsCount(self):
- """Fetch the number of rows of each tables.
-
- The count are stored in the table_rows_count attribute.
- """
- self.table_rows_count = {}
- for cls in self.findSQLBaseClasses():
- class_name = get_class_name(cls)
- try:
- self.table_rows_count[class_name] = cls.select().count()
- except ProgrammingError, error:
- self.classes_with_error[class_name] = str(error)
- # Transaction is borked, start another one.
- self.txn.begin()
-
- def checkSampleDataInterfaces(self):
- """Check that all sample data objects complies with the interfaces it
- declares.
- """
- self.validation_errors = {}
- self.broken_instances= {}
- for cls in self.findSQLBaseClasses():
- class_name = get_class_name(cls)
- if class_name in self.classes_with_error:
- continue
- try:
- for object in cls.select():
- self.checkObjectInterfaces(object)
- self.validateObjectSchemas(object)
- except ProgrammingError, error:
- self.classes_with_error[get_class_name(cls)] = str(error)
- # Transaction is borked, start another one.
- self.txn.begin()
-
- def checkObjectInterfaces(self, object):
- """Check that object provides every attributes in its declared interfaces.
-
- Collect errors in broken_instances dictionary attribute.
- """
- for interface in providedBy(object):
- interface_name = get_class_name(interface)
- try:
- result = verifyObject(interface, object)
- except BrokenImplementation, error:
- self.setInterfaceError(
- interface, object, "missing attribute %s" % error.name)
- except BrokenMethodImplementation, error:
- self.setInterfaceError(
- interface, object,
- "invalid method %s: %s" % (error.method, error.mess))
-
- def setInterfaceError(self, interface, object, error_msg):
- """Store an error about an interface in the broken_instances dictionary
-
- The errors data structure looks like:
-
- {interface: {
- error_msg: {
- class_name: [instance_id...]}}}
- """
- interface_errors = self.broken_instances.setdefault(
- get_class_name(interface), {})
- classes_with_error = interface_errors.setdefault(error_msg, {})
- object_ids_with_error = classes_with_error.setdefault(
- get_class_name(object.__class__), [])
- object_ids_with_error.append(object.id)
-
- def validateObjectSchemas(self, object):
- """Check that object validates with the schemas it says it provides.
-
- Collect errors in validation_errors. Data structure format is
- {schema:
- [[class_name, object_id,
- [(field, error), ...]],
- ...]}
- """
- for schema in providedBy(object):
- field_errors = []
- for name in schema.names(all=True):
- description = schema[name]
- if not IField.providedBy(description):
- continue
- try:
- value = getattr(object, name)
- except AttributeError:
- # This is an already reported verifyObject failures.
- continue
- try:
- description.validate(value)
- except ValidationError, error:
- field_errors.append((name, error_msg(error)))
- except (KeyboardInterrupt, SystemExit):
- # We should never catch KeyboardInterrupt or SystemExit.
- raise
- except ProgrammingError, error:
- field_errors.append((name, error_msg(error)))
- # We need to restart the transaction after these errors.
- self.txn.begin()
- except Exception, error:
- # Exception usually indicates a deeper problem with
- # the interface declaration or the validation code, than
- # the expected ValidationError.
- field_errors.append((name, error_msg(error)))
- if field_errors:
- schema_errors= self.validation_errors.setdefault(
- get_class_name(schema), [])
- schema_errors.append([
- get_class_name(object.__class__), object.id,
- field_errors])
-
- def getShortTables(self):
- """Return a list of tables which have less rows than self.min_rows.
-
- :return: [(table, rows_count)...]
- """
- return [
- (table, rows_count)
- for table, rows_count in self.table_rows_count.items()
- if rows_count < self.min_rows]
-
- def reportShortTables(self):
- """Report about tables with less than self.min_rows."""
- short_tables = self.getShortTables()
- if not short_tables:
- print """All tables have more than %d rows!!!""" % self.min_rows
- return
-
- print dedent("""\
- %d Tables with less than %d rows
- --------------------------------""" % (
- len(short_tables), self.min_rows))
- for table, rows_count in sorted(short_tables):
- print "%-20s: %2d" % (table, rows_count)
-
- def reportErrors(self):
- """Report about classes with database error.
-
- This will usually be classes without a database table.
- """
- if not self.classes_with_error:
- return
- print dedent("""\
- Classes with database errors
- ----------------------------""")
- for class_name, error_msg in sorted(self.classes_with_error.items()):
- print "%-20s %s" % (class_name, error_msg)
-
- def reportInterfaceErrors(self):
- """Report objects failing the verifyObject and schema validation."""
- if not self.broken_instances:
- print "All sample data comply with its provided interfaces!!!"
- return
- print dedent("""\
- %d Interfaces with broken instances
- -----------------------------------""" % len(
- self.broken_instances))
- for interface, errors in sorted(
- self.broken_instances.items()):
- print "%-20s:" % interface
- for error_msg, classes_with_error in sorted(errors.items()):
- print " %s:" % error_msg
- for class_name, object_ids in sorted(
- classes_with_error.items()):
- print " %s: %s" % (
- class_name, ", ".join([
- str(id) for id in sorted(object_ids)]))
-
- def reportValidationErrors(self):
- """Report object that fails their validation."""
- if not self.validation_errors:
- print "All sample data pass validation!!!"
- return
-
- print dedent("""\
- %d Schemas with instances failing validation
- --------------------------------------------""" % len(
- self.validation_errors))
- for schema, instances in sorted(self.validation_errors.items()):
- print "%-20s (%d objects with errors):" % (schema, len(instances))
- for class_name, object_id, errors in sorted(instances):
- print " <%s %s> (%d errors):" % (
- class_name, object_id, len(errors))
- for field, error in sorted(errors):
- print " %s: %s" % (field, error)
-
- def reportSummary(self):
- """Only report the name of the classes with errors."""
-
- short_tables = dict(self.getShortTables())
-
- # Compute number of implementation error by classes.
- verify_errors_count = {}
- for interface_errors in self.broken_instances.values():
- for broken_classes in interface_errors.values():
- for class_name in broken_classes.keys():
- verify_errors_count.setdefault(class_name, 0)
- verify_errors_count[class_name] += 1
-
- # Compute number of instances with validation error.
- validation_errors_count = {}
- for instances in self.validation_errors.values():
- for class_name, object_id, errors in instances:
- validation_errors_count.setdefault(class_name, 0)
- validation_errors_count[class_name] += 1
-
- classes_with_errors = set(short_tables.keys())
- classes_with_errors.update(verify_errors_count.keys())
- classes_with_errors.update(validation_errors_count.keys())
-
- print dedent("""\
- %d Classes with errors:
- -----------------------""" % len(classes_with_errors))
- for class_name in sorted(classes_with_errors):
- errors = []
- if class_name in short_tables:
- errors.append('%d rows' % short_tables[class_name])
- if class_name in verify_errors_count:
- errors.append(
- '%d verify errors' % verify_errors_count[class_name])
- if class_name in validation_errors_count:
- errors.append(
- '%d validation errors' %
- validation_errors_count[class_name])
- print "%s: %s" % (class_name, ", ".join(errors))
-
- def run(self):
- """Check and report on sample data."""
- self.fetchTableRowsCount()
- self.checkSampleDataInterfaces()
- print dedent("""\
- Verified %d content classes.
- ============================
- """ % len(self.table_rows_count))
- if self.only_summary:
- self.reportSummary()
- else:
- self.reportShortTables()
- print
- self.reportInterfaceErrors()
- print
- self.reportValidationErrors()
- print
- self.reportErrors()
- self.txn.abort()
-
-
-if __name__ == '__main__':
- parser = OptionParser()
- parser.add_option('-d', '--database', action="store", type="string",
- default="launchpad_ftest_template",
- help="Database to connect to for testing.")
- parser.add_option('-u', '--user', action="store", type="string",
- default="launchpad",
- help="Username to connect with.")
- parser.add_option('-i', '--table-filter', dest="table_filter",
- action="store", type="string", default=None,
- help="Limit classes to test using a regular expression.")
- parser.add_option('-m', '--min-rows', dest="min_rows",
- action="store", type="int", default=10,
- help="Minimum number of rows a table is expected to have.")
- parser.add_option('-s', '--summary',
- action='store_true', dest="summary", default=False,
- help=(
- "Only report the name of the classes with "
- "validation errors."))
- options, arguments = parser.parse_args()
- SampleDataVerification(
- dbname=options.database,
- dbuser=options.user,
- table_filter=options.table_filter,
- min_rows=options.min_rows,
- only_summary=options.summary).run()