launchpad-reviewers team mailing list archive

[Merge] lp:~wgrant/launchpad/use-script-isolation-arg into lp:launchpad


William Grant has proposed merging lp:~wgrant/launchpad/use-script-isolation-arg into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~wgrant/launchpad/use-script-isolation-arg/+merge/50543

This branch minimises most scripts' dependencies on Zopeless, in preparation for my Zopelessless branches. In particular:

 - initZopeless' debug and implicitBegin arguments are gone, as they've done nothing since Storm.
 - LaunchpadScript.run's implicit_begin argument is gone for the same reason.
 - Scripts that need to set transaction isolation now do it through the existing isolation argument to LaunchpadScript.lock_and_run (see the sketch below), rather than calling ZTM.set_isolation_level manually later, since ZTM must die.
 - set_request_started and clear_request_started now call (un)registerSynch directly on the global transaction manager, which ZTM's methods delegated to anyway.
 - importdebianbugs.py retrieved the in-use ZTM manually and passed it around. It now passes the global manager around instead.
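
For illustration only (this sketch is not part of the diff; ExampleUpdater, its script name and its dbuser are made-up names), a cron script under the new convention looks roughly like this:

    from canonical.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT
    from lp.services.scripts.base import LaunchpadCronScript


    class ExampleUpdater(LaunchpadCronScript):

        def main(self):
            # lock_and_run() has already initialised self.txn with the
            # requested isolation level, so there is no
            # set_isolation_level() call in here.
            self.logger.info("Doing the periodic work.")


    if __name__ == '__main__':
        script = ExampleUpdater('example-updater', dbuser='example')
        # The isolation level is chosen once, up front, instead of being
        # set manually inside main().
        script.lock_and_run(isolation=ISOLATION_LEVEL_AUTOCOMMIT)
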
-- 
https://code.launchpad.net/~wgrant/launchpad/use-script-isolation-arg/+merge/50543
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/use-script-isolation-arg into lp:launchpad.
=== modified file 'cronscripts/allocate-revision-karma.py'
--- cronscripts/allocate-revision-karma.py	2010-04-27 19:48:39 +0000
+++ cronscripts/allocate-revision-karma.py	2011-02-21 01:40:01 +0000
@@ -15,4 +15,4 @@
 if __name__ == '__main__':
     script = RevisionKarmaAllocator('allocate-revision-karma',
         dbuser=config.revisionkarma.dbuser)
-    script.lock_and_run(implicit_begin=True)
+    script.lock_and_run()

=== modified file 'cronscripts/check-teamparticipation.py'
--- cronscripts/check-teamparticipation.py	2010-04-27 19:48:39 +0000
+++ cronscripts/check-teamparticipation.py	2011-02-21 01:40:01 +0000
@@ -37,7 +37,7 @@
     log = logger(options, 'check-teamparticipation')
 
     execute_zcml_for_scripts()
-    ztm = initZopeless(implicitBegin=False)
+    ztm = initZopeless()
 
     # Check self-participation.
     query = """

=== modified file 'cronscripts/code-import-dispatcher.py'
--- cronscripts/code-import-dispatcher.py	2010-04-27 19:48:39 +0000
+++ cronscripts/code-import-dispatcher.py	2011-02-21 01:40:01 +0000
@@ -24,8 +24,7 @@
             default=config.codeimportdispatcher.max_jobs_per_machine,
             help="The maximum number of jobs to run on this machine.")
 
-    def run(self, use_web_security=False, implicit_begin=True,
-            isolation=None):
+    def run(self, use_web_security=False, isolation=None):
         """See `LaunchpadScript.run`.
 
         We override to avoid all of the setting up all of the component

=== modified file 'cronscripts/distributionmirror-prober.py'
--- cronscripts/distributionmirror-prober.py	2010-12-24 17:08:10 +0000
+++ cronscripts/distributionmirror-prober.py	2011-02-21 01:40:01 +0000
@@ -12,6 +12,7 @@
 import os
 
 from canonical.config import config
+from canonical.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT
 from lp.services.scripts.base import (
     LaunchpadCronScript,
     LaunchpadScriptFailure,
@@ -66,4 +67,4 @@
     script = DistroMirrorProberScript(
         'distributionmirror-prober',
         dbuser=config.distributionmirrorprober.dbuser)
-    script.lock_and_run()
+    script.lock_and_run(isolation=ISOLATION_LEVEL_AUTOCOMMIT)

=== modified file 'cronscripts/foaf-update-karma-cache.py'
--- cronscripts/foaf-update-karma-cache.py	2010-11-08 12:52:43 +0000
+++ cronscripts/foaf-update-karma-cache.py	2011-02-21 01:40:01 +0000
@@ -11,7 +11,10 @@
 
 from canonical.config import config
 from canonical.database.sqlbase import (
-    ISOLATION_LEVEL_AUTOCOMMIT, flush_database_updates)
+    cursor,
+    ISOLATION_LEVEL_AUTOCOMMIT,
+    flush_database_updates,
+    )
 from lp.app.errors import NotFoundError
 from lp.registry.interfaces.karma import IKarmaCacheManager
 from lp.services.scripts.base import LaunchpadCronScript
@@ -33,14 +36,7 @@
         """
         self.logger.info("Updating Launchpad karma caches")
 
-        # We use the autocommit transaction isolation level to minimize
-        # contention. It also allows us to not bother explicitly calling
-        # COMMIT all the time. However, if we interrupt this script mid-run
-        # it will need to be re-run as the data will be inconsistent (only
-        # part of the caches will have been recalculated).
-        self.txn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
-
-        self.cur = self.txn.conn().cursor()
+        self.cur = cursor()
         self.karmacachemanager = getUtility(IKarmaCacheManager)
 
         # This method ordering needs to be preserved. In particular,
@@ -291,7 +287,12 @@
 
 
 if __name__ == '__main__':
-    script = KarmaCacheUpdater('karma-update',
+    script = KarmaCacheUpdater(
+        'karma-update',
         dbuser=config.karmacacheupdater.dbuser)
-    script.lock_and_run(implicit_begin=True)
-
+    # We use the autocommit transaction isolation level to minimize
+    # contention. It also allows us to not bother explicitly calling
+    # COMMIT all the time. However, if we interrupt this script mid-run
+    # it will need to be re-run as the data will be inconsistent (only
+    # part of the caches will have been recalculated).
+    script.lock_and_run(isolation=ISOLATION_LEVEL_AUTOCOMMIT)

=== modified file 'cronscripts/oops-prune.py'
--- cronscripts/oops-prune.py	2010-04-27 19:48:39 +0000
+++ cronscripts/oops-prune.py	2011-02-21 01:40:01 +0000
@@ -42,7 +42,6 @@
 
             oops_directories.append(oops_dir)
 
-        self.txn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
         for oops_directory in oops_directories:
             for oops_path in unwanted_oops_files(oops_directory,
                                                  40, self.logger):
@@ -55,5 +54,4 @@
 
 if __name__ == '__main__':
     script = OOPSPruner('oops-prune', dbuser='oopsprune')
-    script.lock_and_run()
-
+    script.lock_and_run(isolation=ISOLATION_LEVEL_AUTOCOMMIT)

=== modified file 'cronscripts/rosetta-export-queue.py'
--- cronscripts/rosetta-export-queue.py	2011-01-13 18:09:48 +0000
+++ cronscripts/rosetta-export-queue.py	2011-02-21 01:40:01 +0000
@@ -17,11 +17,10 @@
     """Translation exports."""
 
     def main(self):
-        self.txn.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)
         with SlaveDatabasePolicy():
             process_queue(self.txn, self.logger)
 
 
 if __name__ == '__main__':
     script = RosettaExportQueue('rosetta-export-queue', dbuser='poexport')
-    script.lock_and_run()
+    script.lock_and_run(isolation=ISOLATION_LEVEL_READ_COMMITTED)

=== modified file 'cronscripts/update-bugtask-targetnamecaches.py'
--- cronscripts/update-bugtask-targetnamecaches.py	2010-04-27 19:48:39 +0000
+++ cronscripts/update-bugtask-targetnamecaches.py	2011-02-21 01:40:01 +0000
@@ -29,5 +29,5 @@
     script = UpdateBugTaskTargetNameCaches(
         'launchpad-targetnamecacheupdater',
         dbuser=config.targetnamecacheupdater.dbuser)
-    script.lock_and_run(implicit_begin=False)
+    script.lock_and_run()
 

=== modified file 'cronscripts/update-stats.py'
--- cronscripts/update-stats.py	2010-11-08 12:52:43 +0000
+++ cronscripts/update-stats.py	2011-02-21 01:40:01 +0000
@@ -21,9 +21,8 @@
 
 
 class StatUpdater(LaunchpadCronScript):
+
     def main(self):
-        self.txn.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)
-
         self.logger.debug('Starting the stats update')
 
         # Note that we do not issue commits here in the script; content
@@ -42,7 +41,5 @@
 
 
 if __name__ == '__main__':
-    script = StatUpdater('launchpad-stats',
-                         dbuser=config.statistician.dbuser)
-    script.lock_and_run()
-
+    script = StatUpdater('launchpad-stats', dbuser=config.statistician.dbuser)
+    script.lock_and_run(isolation=ISOLATION_LEVEL_READ_COMMITTED)

=== modified file 'lib/canonical/launchpad/webapp/adapter.py'
--- lib/canonical/launchpad/webapp/adapter.py	2011-02-14 23:34:02 +0000
+++ lib/canonical/launchpad/webapp/adapter.py	2011-02-21 01:40:01 +0000
@@ -183,9 +183,8 @@
         set_request_timeline(request, Timeline())
     _local.current_statement_timeout = None
     _local.enable_timeout = enable_timeout
-    if txn is not None:
-        _local.commit_logger = CommitLogger(txn)
-        txn.registerSynch(_local.commit_logger)
+    _local.commit_logger = CommitLogger(transaction)
+    transaction.manager.registerSynch(_local.commit_logger)
     set_permit_timeout_from_features(False)
 
 
@@ -199,10 +198,8 @@
     _local.request_start_time = None
     request = get_current_browser_request()
     set_request_timeline(request, Timeline())
-    commit_logger = getattr(_local, 'commit_logger', None)
-    if commit_logger is not None:
-        _local.commit_logger.txn.unregisterSynch(_local.commit_logger)
-        del _local.commit_logger
+    transaction.manager.unregisterSynch(_local.commit_logger)
+    del _local.commit_logger
 
 
 def summarize_requests():

=== modified file 'lib/canonical/lp/__init__.py'
--- lib/canonical/lp/__init__.py	2010-11-26 23:21:25 +0000
+++ lib/canonical/lp/__init__.py	2011-02-21 01:40:01 +0000
@@ -69,8 +69,8 @@
 _IGNORED = object()
 
 
-def initZopeless(debug=_IGNORED, dbname=None, dbhost=None, dbuser=None,
-                 implicitBegin=_IGNORED, isolation=ISOLATION_LEVEL_DEFAULT):
+def initZopeless(dbname=None, dbhost=None, dbuser=None,
+                 isolation=ISOLATION_LEVEL_DEFAULT):
     """Initialize the Zopeless environment."""
     if dbuser is None:
         # Nothing calling initZopeless should be connecting as the

=== modified file 'lib/lp/bugs/scripts/importdebianbugs.py'
--- lib/lp/bugs/scripts/importdebianbugs.py	2010-04-21 10:30:24 +0000
+++ lib/lp/bugs/scripts/importdebianbugs.py	2011-02-21 01:40:01 +0000
@@ -5,9 +5,9 @@
 
 __metaclass__ = type
 
+import transaction
 from zope.component import getUtility
 
-from canonical.database.sqlbase import ZopelessTransactionManager
 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
 from canonical.launchpad.scripts.logger import log
 from lp.bugs.externalbugtracker import get_external_bugtracker
@@ -18,9 +18,8 @@
 def import_debian_bugs(bugs_to_import):
     """Import the specified Debian bugs into Launchpad."""
     debbugs = getUtility(ILaunchpadCelebrities).debbugs
-    txn = ZopelessTransactionManager._installed
     external_debbugs = get_external_bugtracker(debbugs)
-    bug_watch_updater = CheckwatchesMaster(txn, log)
+    bug_watch_updater = CheckwatchesMaster(transaction, log)
     debian = getUtility(ILaunchpadCelebrities).debian
     for debian_bug in bugs_to_import:
         existing_bug_ids = [

=== modified file 'lib/lp/registry/scripts/distributionmirror_prober.py'
--- lib/lp/registry/scripts/distributionmirror_prober.py	2011-02-18 04:49:37 +0000
+++ lib/lp/registry/scripts/distributionmirror_prober.py	2011-02-21 01:40:01 +0000
@@ -26,7 +26,6 @@
 from zope.component import getUtility
 
 from canonical.config import config
-from canonical.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT
 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
 from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
 from canonical.launchpad.webapp import canonical_url
@@ -816,7 +815,6 @@
             raise ValueError(
                 "Unrecognized content_type: %s" % (content_type,))
 
-        self.txn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
         self.txn.begin()
 
         # To me this seems better than passing the no_remote_hosts value

=== modified file 'lib/lp/scripts/utilities/sanitizedb.py'
--- lib/lp/scripts/utilities/sanitizedb.py	2010-11-03 10:58:10 +0000
+++ lib/lp/scripts/utilities/sanitizedb.py	2011-02-21 01:40:01 +0000
@@ -39,7 +39,7 @@
             "-n", "--dry-run", action="store_true", default=False,
             help="Don't commit changes.")
 
-    def _init_db(self, implicit_begin, isolation):
+    def _init_db(self, isolation):
         if len(self.args) == 0:
             self.parser.error("PostgreSQL connection string required.")
         elif len(self.args) > 1:
@@ -59,7 +59,6 @@
             dbname=self.pg_connection_string.dbname,
             dbhost=self.pg_connection_string.host,
             dbuser=self.pg_connection_string.user,
-            implicitBegin=implicit_begin,
             isolation=isolation)
 
         self.store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)

=== modified file 'lib/lp/services/scripts/base.py'
--- lib/lp/services/scripts/base.py	2010-12-02 16:13:51 +0000
+++ lib/lp/services/scripts/base.py	2011-02-21 01:40:01 +0000
@@ -292,13 +292,12 @@
         self.lock.release(skip_delete=skip_delete)
 
     @log_unhandled_exception_and_exit
-    def run(self, use_web_security=False, implicit_begin=True,
-            isolation=None):
+    def run(self, use_web_security=False, isolation=None):
         """Actually run the script, executing zcml and initZopeless."""
         if isolation is None:
             isolation = ISOLATION_LEVEL_DEFAULT
         self._init_zca(use_web_security=use_web_security)
-        self._init_db(implicit_begin=implicit_begin, isolation=isolation)
+        self._init_db(isolation=isolation)
 
         date_started = datetime.datetime.now(UTC)
         profiler = None
@@ -324,14 +323,12 @@
         """Initialize the ZCA, this can be overriden for testing purpose."""
         scripts.execute_zcml_for_scripts(use_web_security=use_web_security)
 
-    def _init_db(self, implicit_begin, isolation):
+    def _init_db(self, isolation):
         """Initialize the database transaction.
 
         Can be overriden for testing purpose.
         """
-        self.txn = initZopeless(
-            dbuser=self.dbuser, implicitBegin=implicit_begin,
-            isolation=isolation)
+        self.txn = initZopeless(dbuser=self.dbuser, isolation=isolation)
 
     def record_activity(self, date_started, date_completed):
         """Hook to record script activity."""
@@ -341,7 +338,7 @@
     #
     @log_unhandled_exception_and_exit
     def lock_and_run(self, blocking=False, skip_delete=False,
-                     use_web_security=False, implicit_begin=True,
+                     use_web_security=False,
                      isolation=ISOLATION_LEVEL_DEFAULT):
         """Call lock_or_die(), and then run() the script.
 
@@ -349,8 +346,8 @@
         """
         self.lock_or_die(blocking=blocking)
         try:
-            self.run(use_web_security=use_web_security,
-                     implicit_begin=implicit_begin, isolation=isolation)
+            self.run(
+                use_web_security=use_web_security, isolation=isolation)
         finally:
             self.unlock(skip_delete=skip_delete)
 

=== modified file 'lib/lp/services/scripts/doc/profile.txt'
--- lib/lp/services/scripts/doc/profile.txt	2009-10-27 09:40:50 +0000
+++ lib/lp/services/scripts/doc/profile.txt	2011-02-21 01:40:01 +0000
@@ -14,7 +14,7 @@
     ...         pass
     ...     def _init_zca(self, use_web_security):
     ...         pass
-    ...     def _init_db(self, implicit_begin, isolation):
+    ...     def _init_db(self, isolation):
     ...         pass
 
     >>> profile_file = os.path.join(profile_dir, 'myscript.prof')

=== modified file 'scripts/code-import-worker-monitor.py'
--- scripts/code-import-worker-monitor.py	2010-06-07 09:11:06 +0000
+++ scripts/code-import-worker-monitor.py	2011-02-21 01:40:01 +0000
@@ -38,7 +38,7 @@
         LaunchpadScript.__init__(self, name, dbuser, test_args)
         set_up_oops_reporting('codeimportworker', name, mangle_stdout=True)
 
-    def _init_db(self, implicit_begin, isolation):
+    def _init_db(self, isolation):
         # This script doesn't access the database.
         pass
 

=== modified file 'scripts/linkreport.py'
--- scripts/linkreport.py	2010-04-27 19:48:39 +0000
+++ scripts/linkreport.py	2011-02-21 01:40:01 +0000
@@ -211,7 +211,7 @@
     else:
         csvfile = open(args[0], 'rb')
 
-    ztm = initZopeless(implicitBegin=True)
+    ztm = initZopeless()
 
     if options.create:
         # Create the table if it doesn't exist. Unfortunately, this is broken