launchpad-reviewers team mailing list archive: Message #04881
[Merge] lp:~wgrant/launchpad/deprecated-functions-are-bad into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/deprecated-functions-are-bad into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #842304 in Launchpad itself: "Scripts still using connect() shouldn't pass in options.dbuser"
https://bugs.launchpad.net/launchpad/+bug/842304
For more details, see:
https://code.launchpad.net/~wgrant/launchpad/deprecated-functions-are-bad/+merge/74225
The user argument to canonical.database.sqlbase.connect() is now largely redundant, due to changes described at length in <https://code.launchpad.net/~wgrant/launchpad/fix-dbuser-override/+merge/74154>. This is a cleanup branch mostly to remove those obsolete parameters, but also includes a couple of other trivial cleanups, and the abolition of canonical.lp.sql.
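In short, the calling convention changes like this (a condensed sketch, not taken verbatim from any one script; it is based on the sqlbase.py hunk and the call sites in the diff below):

    from optparse import OptionParser

    from canonical.database.sqlbase import (
        ISOLATION_LEVEL_AUTOCOMMIT,
        connect,
        )
    from canonical.launchpad.scripts import db_options

    parser = OptionParser()
    db_options(parser)  # adds the standard database command-line options
    options, args = parser.parse_args()

    # Before this branch, scripts passed their user positionally:
    #   con = connect(options.dbuser)
    #   con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
    # Now the default user comes from the configured connection details
    # (per the fix-dbuser-override changes linked above), so most call
    # sites simply drop the argument:
    con = connect()
    con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)

    # The few scripts that genuinely need a different role pass it by
    # keyword, e.g. the replication helpers:
    con = connect(user='slony')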
Most of the altered scripts are covered by the test suite, but some in database/ are not. I've manually tested those that will not be verified by buildbot or staging.
lint returns 147 lines, but the branch's changes are far-reaching and fixing everything would blow the diff out to more than 1500 lines. Can I have a pass for this one? :)
canonical.lp.sql contained two ancient PostgreSQL configuration checks, which were accidentally disabled by ZCML removal some time ago. I don't think they're useful any more, and stub agrees:
16:08:49 < wgrant> Which was one of the other questions: the checks in canonical.lp.sql look reasonably pointless now, and aren't running any more.
16:09:02 < wgrant> I presume the ZCML inclusion was accidentally dropped somewhere along the line.
16:09:30 < wgrant> The automatic FROM thing has been off by default since postgres 8.1, so probably hardly worth checking for.
16:10:19 < stub> Those checks used to be useful. Probably not so much now, yeah.
16:11:24 < stub> Maybe the encoding one, but that would be more useful as locale one in any case to confirm the expected collation order. I think we have duplicated these checks elsewhere?
16:11:43 < stub> But yeah, kill 'em.
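For reference, the two checks being deleted amounted to roughly the following (a condensed, illustrative rewrite of the removed lib/canonical/lp/sql.py shown in the diff; the function name here is mine and the error messages are trimmed):

    from canonical.config import config
    from canonical.database.sqlbase import connect

    def confirm_database_sanity():
        # Both checks ran at process startup (when still wired up via ZCML)
        # against the main database.
        con = connect(user=config.launchpad.dbuser)
        try:
            cur = con.cursor()
            # confirmEncoding: the database must use the UNICODE encoding
            # (pg_database.encoding == 6).
            cur.execute(
                'SELECT encoding FROM pg_catalog.pg_database'
                ' WHERE datname = %s', (config.database.dbname,))
            row = cur.fetchone()
            if row is None or row[0] != 6:
                raise RuntimeError('Database is missing or not UNICODE encoded')
            # confirmNoAddMissingFrom: add_missing_from must be off.  It has
            # defaulted to off since PostgreSQL 8.1, which is why the check
            # is no longer worth keeping.
            cur.execute('SHOW add_missing_from')
            if cur.fetchone()[0] != 'off':
                raise RuntimeError(
                    'add_missing_from must be disabled in postgresql.conf')
        finally:
            con.close()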
--
https://code.launchpad.net/~wgrant/launchpad/deprecated-functions-are-bad/+merge/74225
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/deprecated-functions-are-bad into lp:launchpad.
=== modified file 'database/replication/helpers.py'
--- database/replication/helpers.py 2011-07-25 13:39:10 +0000
+++ database/replication/helpers.py 2011-09-06 14:23:27 +0000
@@ -314,7 +314,7 @@
"""Return the preable needed at the start of all slonik scripts."""
if con is None:
- con = connect('slony')
+ con = connect(user='slony')
master_node = get_master_node(con)
nodes = get_all_cluster_nodes(con)
=== modified file 'database/replication/initialize.py'
--- database/replication/initialize.py 2010-05-19 18:07:56 +0000
+++ database/replication/initialize.py 2011-09-06 14:23:27 +0000
@@ -42,9 +42,7 @@
log.info('Duplicating database schema')
master_cs = ConnectionString(config.database.rw_main_master)
- master_cs.user = options.dbuser
slave1_cs = ConnectionString(config.database.rw_main_slave)
- slave1_cs.user = options.dbuser
# We can't use pg_dump to replicate security as not all of the roles
# may exist in the slave databases' clusters yet.
@@ -154,7 +152,7 @@
# Generate lists of sequences and tables for our replication sets.
log.debug("Connecting as %s" % options.dbuser)
- con = connect(options.dbuser)
+ con = connect()
con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
global cur
cur = con.cursor()
=== modified file 'database/replication/new-slave.py'
--- database/replication/new-slave.py 2010-10-01 13:02:57 +0000
+++ database/replication/new-slave.py 2011-09-06 14:23:27 +0000
@@ -44,7 +44,7 @@
# Confirm we can connect to the source database.
# Keep the connection as we need it later.
- source_connection_string = ConnectionString(connect_string('slony'))
+ source_connection_string = ConnectionString(connect_string(user='slony'))
try:
log.debug(
"Opening source connection to '%s'" % source_connection_string)
=== modified file 'database/replication/preamble.py'
--- database/replication/preamble.py 2010-05-19 18:07:56 +0000
+++ database/replication/preamble.py 2011-09-06 14:23:27 +0000
@@ -28,7 +28,7 @@
parser.error("Too many arguments")
scripts.execute_zcml_for_scripts(use_web_security=False)
- con = connect(options.dbuser)
+ con = connect()
print '# slonik(1) preamble generated %s' % time.ctime()
print '# LPCONFIG=%s' % config.instance_name
print
=== modified file 'database/replication/repair-restored-db.py'
--- database/replication/repair-restored-db.py 2010-05-19 18:07:56 +0000
+++ database/replication/repair-restored-db.py 2011-09-06 14:23:27 +0000
@@ -45,7 +45,7 @@
log = logger(options)
- con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+ con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
if not replication.helpers.slony_installed(con):
log.info("Slony-I not installed. Nothing to do.")
=== modified file 'database/replication/report.py'
--- database/replication/report.py 2010-05-19 18:07:56 +0000
+++ database/replication/report.py 2011-09-06 14:23:27 +0000
@@ -253,7 +253,7 @@
else:
assert False, "Unknown mode %s" % options.mode
- con = connect(options.dbuser)
+ con = connect()
cur = con.cursor()
cur.execute(
=== modified file 'database/replication/slon_ctl.py'
--- database/replication/slon_ctl.py 2010-10-11 10:32:29 +0000
+++ database/replication/slon_ctl.py 2011-09-06 14:23:27 +0000
@@ -73,7 +73,8 @@
if explicit is not None:
nodes = [explicit]
else:
- nodes = replication.helpers.get_all_cluster_nodes(connect('slony'))
+ nodes = replication.helpers.get_all_cluster_nodes(
+ connect(user='slony'))
if command == 'start':
return start(log, nodes, options.lag)
=== modified file 'database/schema/emptytables.py'
--- database/schema/emptytables.py 2010-05-19 18:07:56 +0000
+++ database/schema/emptytables.py 2011-09-06 14:23:27 +0000
@@ -14,8 +14,9 @@
from canonical.launchpad.scripts import db_options
from fti import quote_identifier
+
def main(options):
- con = connect(options.dbuser)
+ con = connect()
cur = con.cursor()
cur.execute("""
SELECT relname FROM pg_class,pg_namespace
=== modified file 'database/schema/fti.py'
--- database/schema/fti.py 2010-11-26 22:44:00 +0000
+++ database/schema/fti.py 2011-09-06 14:23:27 +0000
@@ -24,7 +24,8 @@
import psycopg2.extensions
-from canonical import lp
+from canonical.config import config
+from canonical.database.postgresql import ConnectionString
from canonical.database.sqlbase import (
connect, ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED,
quote, quote_identifier)
@@ -280,7 +281,7 @@
except psycopg2.Error:
# No commit - we are in autocommit mode
log.exception('psycopg error')
- con = connect(lp.dbuser)
+ con = connect()
con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
@@ -314,11 +315,8 @@
"""
log.debug('Installing tsearch2')
- cmd = 'psql -f - -d %s' % lp.get_dbname()
- if lp.dbhost:
- cmd += ' -h %s' % lp.dbhost
- if options.dbuser:
- cmd += ' -U %s' % options.dbuser
+ cmd = 'psql -f - %s' % ConnectionString(
+ config.database.rw_main_master).asPGCommandLineArgs()
p = subprocess.Popen(
cmd.split(' '), stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
@@ -661,7 +659,7 @@
global log
log = logger(options)
- con = connect(lp.dbuser)
+ con = connect()
is_replicated_db = replication.helpers.slony_installed(con)
=== modified file 'database/schema/pending/new-person-columns.py'
--- database/schema/pending/new-person-columns.py 2010-05-19 18:07:56 +0000
+++ database/schema/pending/new-person-columns.py 2011-09-06 14:23:27 +0000
@@ -16,6 +16,7 @@
from canonical.launchpad.scripts import db_options
from canonical.launchpad.scripts.logger import log, logger_options
+
def update_until_done(con, table, query, vacuum_every=100):
log.info("Running %s" % query)
loops = 0
@@ -39,7 +40,7 @@
db_options(parser)
options, args = parser.parse_args()
-con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
update_until_done(con, 'person', """
UPDATE Person
@@ -53,4 +54,3 @@
LIMIT 200
)
""")
-
=== modified file 'database/schema/pending/prune-nonce.py'
--- database/schema/pending/prune-nonce.py 2010-05-19 18:07:56 +0000
+++ database/schema/pending/prune-nonce.py 2011-09-06 14:23:27 +0000
@@ -39,7 +39,7 @@
db_options(parser)
options, args = parser.parse_args()
-con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
update_until_done(con, 'OAuthNonce', """
DELETE FROM OAuthNonce
=== modified file 'database/schema/pending/update-translation-credits.py'
--- database/schema/pending/update-translation-credits.py 2010-05-19 18:07:56 +0000
+++ database/schema/pending/update-translation-credits.py 2011-09-06 14:23:27 +0000
@@ -40,7 +40,7 @@
db_options(parser)
options, args = parser.parse_args()
-con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
# People have so far updated translation credits, often mis-crediting people,
# or removing credits to upstream translators: we want to disable all of these
=== modified file 'database/schema/preflight.py'
--- database/schema/preflight.py 2011-07-25 13:59:01 +0000
+++ database/schema/preflight.py 2011-09-06 14:23:27 +0000
@@ -29,7 +29,6 @@
logger,
logger_options,
)
-from canonical import lp
import replication.helpers
@@ -49,8 +48,7 @@
class DatabasePreflight:
def __init__(self, log):
- master_con = connect(lp.dbuser)
- master_con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
+ master_con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
self.log = log
self.is_replicated = replication.helpers.slony_installed(master_con)
=== modified file 'database/schema/reset_sequences.py'
--- database/schema/reset_sequences.py 2010-05-19 18:07:56 +0000
+++ database/schema/reset_sequences.py 2011-09-06 14:23:27 +0000
@@ -20,18 +20,16 @@
from optparse import OptionParser
from canonical.database.postgresql import resetSequences
from canonical.database.sqlbase import connect
+from canonical.launchpad.scripts import db_options
if __name__ == '__main__':
parser = OptionParser()
- parser.add_option(
- "-d", "--dbname", dest="dbname", help="database name",
- )
+ db_options(parser)
(options, args) = parser.parse_args()
if args:
parser.error("Too many options given")
if not options.dbname:
parser.error("Required option --dbname not given")
- con = connect(None, options.dbname)
+ con = connect()
resetSequences(con.cursor())
con.commit()
-
=== modified file 'database/schema/security.py'
--- database/schema/security.py 2011-07-25 14:10:46 +0000
+++ database/schema/security.py 2011-09-06 14:23:27 +0000
@@ -158,7 +158,7 @@
configfile_name = os.path.join(os.path.dirname(__file__), 'security.cfg')
config.read([configfile_name])
- con = connect(options.dbuser)
+ con = connect()
if options.cluster:
nodes = replication.helpers.get_nodes(con, 1)
=== modified file 'database/schema/unautovacuumable.py'
--- database/schema/unautovacuumable.py 2010-05-19 18:07:56 +0000
+++ database/schema/unautovacuumable.py 2011-09-06 14:23:27 +0000
@@ -41,7 +41,7 @@
log = logger(options)
log.debug("Connecting")
- con = connect(options.dbuser)
+ con = connect()
con.set_isolation_level(0) # Autocommit
cur = con.cursor()
=== modified file 'database/schema/upgrade.py'
--- database/schema/upgrade.py 2011-09-02 23:17:37 +0000
+++ database/schema/upgrade.py 2011-09-06 14:23:27 +0000
@@ -30,7 +30,7 @@
def main():
- con = connect(options.dbuser)
+ con = connect()
patches = get_patchlist(con)
if replication.helpers.slony_installed(con):
@@ -39,7 +39,7 @@
parser.error("--dry-run does not make sense with replicated db")
log.info("Applying patches to Slony-I environment.")
apply_patches_replicated()
- con = connect(options.dbuser)
+ con = connect()
else:
log.info("Applying patches to unreplicated environment.")
apply_patches_normal(con)
@@ -172,7 +172,7 @@
# Get an autocommit connection. We use autocommit so we don't have to
# worry about blocking locks needed by Slony-I.
- con = connect(options.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+ con = connect(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
# We use three slonik scripts to apply our DB patches.
# The first script applies the DB patches to all nodes.
=== modified file 'lib/canonical/database/ftests/test_isolation.py'
--- lib/canonical/database/ftests/test_isolation.py 2011-08-12 11:19:40 +0000
+++ lib/canonical/database/ftests/test_isolation.py 2011-09-06 14:23:27 +0000
@@ -12,13 +12,13 @@
from textwrap import dedent
import unittest
-from canonical.config import config
from canonical.database.sqlbase import (
cursor, ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_DEFAULT,
ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE,
connect)
from canonical.testing.layers import LaunchpadZopelessLayer
+
class TestIsolation(unittest.TestCase):
layer = LaunchpadZopelessLayer
@@ -116,20 +116,19 @@
def test_connect(self):
# Ensure connect() method returns a connection with the correct
# default isolation
- con = connect(config.launchpad.dbuser)
+ con = connect()
self.failUnlessEqual(self.getCurrentIsolation(con), 'read committed')
con.rollback()
self.failUnlessEqual(self.getCurrentIsolation(con), 'read committed')
# Ensure that changing the isolation sticks.
- con = connect(
- config.launchpad.dbuser, isolation=ISOLATION_LEVEL_SERIALIZABLE)
+ con = connect(isolation=ISOLATION_LEVEL_SERIALIZABLE)
self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
con.rollback()
self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
# But on a fresh connection, it works just fine.
- con = connect(config.launchpad.dbuser)
+ con = connect()
con.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
con.rollback()
=== modified file 'lib/canonical/database/ftests/test_sqlbaseconnect.txt'
--- lib/canonical/database/ftests/test_sqlbaseconnect.txt 2010-10-17 02:38:59 +0000
+++ lib/canonical/database/ftests/test_sqlbaseconnect.txt 2011-09-06 14:23:27 +0000
@@ -6,7 +6,7 @@
... ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE)
>>> def do_connect(user, dbname=None, isolation=ISOLATION_LEVEL_DEFAULT):
- ... con = connect(user, dbname, isolation)
+ ... con = connect(user=user, dbname=dbname, isolation=isolation)
... cur = con.cursor()
... cur.execute('SHOW session_authorization')
... who = cur.fetchone()[0]
=== modified file 'lib/canonical/database/sqlbase.py'
--- lib/canonical/database/sqlbase.py 2011-09-06 02:27:23 +0000
+++ lib/canonical/database/sqlbase.py 2011-09-06 14:23:27 +0000
@@ -781,22 +781,20 @@
transaction.commit()
-def connect(user, dbname=None, isolation=ISOLATION_LEVEL_DEFAULT):
+def connect(user=None, dbname=None, isolation=ISOLATION_LEVEL_DEFAULT):
"""Return a fresh DB-API connection to the MAIN MASTER database.
- DEPRECATED - if needed, this should become a method on the Store.
-
- Use None for the user to connect as the default PostgreSQL user.
- This is not the default because the option should be rarely used.
+ Can be used without first setting up the Component Architecture,
+ unlike the usual stores.
Default database name is the one specified in the main configuration file.
"""
- con = psycopg2.connect(connect_string(user, dbname))
+ con = psycopg2.connect(connect_string(user=user, dbname=dbname))
con.set_isolation_level(isolation)
return con
-def connect_string(user, dbname=None):
+def connect_string(user=None, dbname=None):
"""Return a PostgreSQL connection string.
Allows you to pass the generated connection details to external
=== modified file 'lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py'
--- lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py 2011-08-12 11:37:08 +0000
+++ lib/canonical/launchpad/scripts/tests/test_scriptmonitor.py 2011-09-06 14:23:27 +0000
@@ -8,7 +8,6 @@
import logging
from unittest import TestCase
-from canonical import lp
from canonical.database.sqlbase import connect
from canonical.launchpad.scripts import logger
from canonical.launchpad.scripts.scriptmonitor import check_script
@@ -27,7 +26,7 @@
log_file = '/dev/null'
loglevel = 1000
verbose = False
- self.con = connect(lp.dbuser)
+ self.con = connect()
self.log = logger(FakeOptions())
def tearDown(self):
=== modified file 'lib/canonical/librarian/ftests/test_gc.py'
--- lib/canonical/librarian/ftests/test_gc.py 2011-08-12 11:37:08 +0000
+++ lib/canonical/librarian/ftests/test_gc.py 2011-09-06 14:23:27 +0000
@@ -80,8 +80,9 @@
open(path, 'w').write('whatever')
self.ztm.abort()
- self.con = connect(config.librarian_gc.dbuser)
- self.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
+ self.con = connect(
+ user=config.librarian_gc.dbuser,
+ isolation=ISOLATION_LEVEL_AUTOCOMMIT)
def tearDown(self):
self.con.rollback()
@@ -748,8 +749,9 @@
self.layer.switchDbUser(config.librarian_gc.dbuser)
# Open a connection for our test
- self.con = connect(config.librarian_gc.dbuser)
- self.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
+ self.con = connect(
+ user=config.librarian_gc.dbuser,
+ isolation=ISOLATION_LEVEL_AUTOCOMMIT)
self.patch(librariangc, 'log', BufferLogger())
=== removed file 'lib/canonical/lp/sql.py'
--- lib/canonical/lp/sql.py 2009-06-25 05:30:52 +0000
+++ lib/canonical/lp/sql.py 1970-01-01 00:00:00 +0000
@@ -1,61 +0,0 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Sanity checks for the PostgreSQL database"""
-
-__metaclass__ = type
-
-
-from canonical.config import config
-from canonical.database.sqlbase import connect
-
-
-def confirmEncoding(*args, **kw):
- '''Raise an exception, explaining what went wrong, if the PostgreSQL
- database encoding is not UNICODE
-
- subsribed to zope.app.appsetup.IProcessStartingEvent
-
- '''
- con = connect(config.launchpad.dbuser)
- try:
- cur = con.cursor()
- dbname = config.database.dbname
- cur.execute(
- 'select encoding from pg_catalog.pg_database where datname=%s',
- (dbname,)
- )
- res = cur.fetchall()
- if len(res) != 1:
- raise RuntimeError('Database %r does not exist or is not unique'
- % (dbname,)
- )
- if res[0][0] != 6:
- raise RuntimeError(
- "Database %r is using the wrong encidong (%r). You need "
- "to recreate your database using 'createdb -E UNICODE %s'" % (
- dbname, res[0][0], dbname
- )
- )
- finally:
- con.close()
-
-def confirmNoAddMissingFrom(*args, **kw):
- '''Raise a warning if add_missing_from is turned on (dangerous default).
-
- This will become an error in the future. Subscribed to
- zope.app.appsetup.IProcessStartingEvent
-
- '''
- con = connect(config.launchpad.dbuser)
- try:
- cur = con.cursor()
- cur.execute('show add_missing_from')
- res = cur.fetchall()
- if res[0][0] != 'off':
- raise RuntimeError(
- "Need to set add_missing_from=false in "
- "/etc/postgresql/postgresql.conf"
- )
- finally:
- con.close()
=== removed file 'lib/canonical/lp/sql.zcml'
--- lib/canonical/lp/sql.zcml 2009-07-13 18:15:02 +0000
+++ lib/canonical/lp/sql.zcml 1970-01-01 00:00:00 +0000
@@ -1,17 +0,0 @@
-<!-- Copyright 2009 Canonical Ltd. This software is licensed under the
- GNU Affero General Public License version 3 (see the file LICENSE).
--->
-
-<configure xmlns="http://namespaces.zope.org/zope">
- <!-- Raise an exception, explaining what went wrong, if the PostgreSQL
- database encoding is not UNICODE
- -->
- <subscriber
- handler=".sql.confirmEncoding"
- for="zope.app.appsetup.IProcessStartingEvent"
- />
- <subscriber
- handler=".sql.confirmNoAddMissingFrom"
- for="zope.app.appsetup.IProcessStartingEvent"
- />
-</configure>
=== modified file 'lib/lp/soyuz/scripts/gina/katie.py'
--- lib/lp/soyuz/scripts/gina/katie.py 2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/scripts/gina/katie.py 2011-09-06 14:23:27 +0000
@@ -20,7 +20,7 @@
self.dbname = dbname
self.dry_run = dry_run
log.info("Connecting to %s as %s" % (dbname, config.gina.dbuser))
- self.db = connect(config.gina.dbuser, dbname=dbname)
+ self.db = connect(user=config.gina.dbuser, dbname=dbname)
#
# Database convenience methods
=== modified file 'lib/lp/translations/tests/pofiletranslator.txt'
--- lib/lp/translations/tests/pofiletranslator.txt 2010-10-05 07:03:37 +0000
+++ lib/lp/translations/tests/pofiletranslator.txt 2011-09-06 14:23:27 +0000
@@ -6,7 +6,7 @@
as a superuser so we can poke at it directly.
>>> from canonical.database.sqlbase import connect
- >>> connection = connect('testadmin')
+ >>> connection = connect(user='testadmin')
>>> cur = connection.cursor()
>>> def pofiletranslator(person_id, pofile_id):
... cur.execute("""
=== modified file 'scripts/close-account.py'
--- scripts/close-account.py 2011-04-26 20:59:43 +0000
+++ scripts/close-account.py 2011-09-06 14:23:27 +0000
@@ -188,7 +188,7 @@
con = None
try:
log.debug("Connecting to database")
- con = connect(options.dbuser)
+ con = connect()
for username in args:
if not close_account(con, log, username):
log.debug("Rolling back")
=== modified file 'scripts/gina.py'
--- scripts/gina.py 2011-08-23 08:35:13 +0000
+++ scripts/gina.py 2011-09-06 14:23:27 +0000
@@ -27,7 +27,6 @@
import psycopg2
from zope.component import getUtility
-from canonical import lp
from canonical.config import config
from canonical.launchpad.scripts import log
from lp.services.scripts.base import LaunchpadCronScript
@@ -89,9 +88,6 @@
dry_run = options.dry_run
- LPDB = lp.get_dbname()
- LPDB_HOST = lp.dbhost
- LPDB_USER = config.gina.dbuser
KTDB = target_section.katie_dbname
LIBRHOST = config.librarian.upload_host
@@ -105,9 +101,7 @@
if component_override is not None:
log.info("Override components to: %s", component_override)
log.info("Architectures to import: %s", ", ".join(archs))
- log.debug("Launchpad database: %s", LPDB)
- log.debug("Launchpad database host: %s", LPDB_HOST)
- log.debug("Launchpad database user: %s", LPDB_USER)
+ log.debug("Launchpad database: %s", config.database.rw_main_master)
log.info("Katie database: %s", KTDB)
log.info("SourcePackage Only: %s", source_only)
log.info("SourcePackageName Only: %s", spnames_only)
=== modified file 'scripts/librarian-report.py'
--- scripts/librarian-report.py 2010-04-27 19:48:39 +0000
+++ scripts/librarian-report.py 2011-09-06 14:23:27 +0000
@@ -48,7 +48,7 @@
else:
until_date = ''
- con = connect(options.dbuser)
+ con = connect()
cur = con.cursor()
# Collect direct references to the LibraryFileAlias table.
=== modified file 'scripts/script-monitor-nagios.py'
--- scripts/script-monitor-nagios.py 2010-04-27 19:48:39 +0000
+++ scripts/script-monitor-nagios.py 2011-09-06 14:23:27 +0000
@@ -77,7 +77,7 @@
try:
log.debug("Connecting to database")
- con = connect(options.dbuser)
+ con = connect()
error_found = False
msg = []
for hostname, scriptname in hosts_scripts:
=== modified file 'scripts/script-monitor.py'
--- scripts/script-monitor.py 2010-04-27 19:48:39 +0000
+++ scripts/script-monitor.py 2011-09-06 14:23:27 +0000
@@ -64,7 +64,7 @@
try:
log.debug("Connecting to database")
- con = connect(options.dbuser)
+ con = connect()
error_found = False
msg, subj = [], []
for hostname, scriptname in hosts_scripts:
=== modified file 'utilities/report-database-stats.py'
--- utilities/report-database-stats.py 2011-04-12 11:04:24 +0000
+++ utilities/report-database-stats.py 2011-09-06 14:23:27 +0000
@@ -252,7 +252,7 @@
parser.error(
"Only two of --from, --until and --interval may be specified.")
- con = connect(options.dbuser)
+ con = connect()
cur = con.cursor()
tables = list(get_table_stats(cur, options))