launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #04088
[Merge] lp:~jml/launchpad/more-cleanups into lp:launchpad
Jonathan Lange has proposed merging lp:~jml/launchpad/more-cleanups into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~jml/launchpad/more-cleanups/+merge/66288
This branch aims to remove the _sqlobject module, which is used only for testing.
* sync() is killed, most call sites just didn't need to call it. Those that did have been replaced with a storm flush.
* print_date_attribute has been deleted, and the two doctests that relied on it have had chunks converted to unit tests (where equivalent unit tests did not already exist)
* set_so_attr() wasn't used
There's also various flakes cleanup.
Tests are running now.
--
https://code.launchpad.net/~jml/launchpad/more-cleanups/+merge/66288
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jml/launchpad/more-cleanups into lp:launchpad.
=== modified file 'lib/canonical/database/sqlbase.py'
--- lib/canonical/database/sqlbase.py 2011-06-09 10:50:25 +0000
+++ lib/canonical/database/sqlbase.py 2011-06-29 12:09:34 +0000
@@ -24,7 +24,6 @@
'quote_like',
'quoteIdentifier',
'quote_identifier',
- 'RandomiseOrderDescriptor',
'reset_store',
'rollback',
'session_store',
@@ -343,7 +342,10 @@
[database]
rw_main_master: %(main_connection_string)s
isolation_level: %(isolation_level)s
- """ % vars())
+ """ % {
+ 'isolation_level': isolation_level,
+ 'main_connection_string': main_connection_string,
+ })
if dbuser:
# XXX 2009-05-07 stub bug=373252: Scripts should not be connecting
@@ -352,7 +354,7 @@
[launchpad]
dbuser: %(dbuser)s
auth_dbuser: launchpad_auth
- """ % vars())
+ """ % {'dbuser': dbuser})
if cls._installed is not None:
if cls._config_overlay != overlay:
=== modified file 'lib/canonical/launchpad/ftests/__init__.py'
--- lib/canonical/launchpad/ftests/__init__.py 2011-06-28 15:05:56 +0000
+++ lib/canonical/launchpad/ftests/__init__.py 2011-06-29 12:09:34 +0000
@@ -14,19 +14,11 @@
'login',
'login_person',
'logout',
- 'print_date_attribute',
- 'set_so_attr',
- 'sync',
]
from canonical.launchpad.ftests._launchpadformharness import (
LaunchpadFormHarness,
)
-from canonical.launchpad.ftests._sqlobject import (
- print_date_attribute,
- set_so_attr,
- sync,
- )
from canonical.launchpad.ftests.keys_for_tests import (
decrypt_content,
import_public_key,
=== removed file 'lib/canonical/launchpad/ftests/_sqlobject.py'
--- lib/canonical/launchpad/ftests/_sqlobject.py 2011-06-28 15:05:56 +0000
+++ lib/canonical/launchpad/ftests/_sqlobject.py 1970-01-01 00:00:00 +0000
@@ -1,66 +0,0 @@
-# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Helper functions for testing SQLObjects."""
-
-__all__ = [
- 'print_date_attribute',
- 'set_so_attr',
- 'sync',
- ]
-
-from storm.sqlobject import SQLObjectBase as SQLObject
-from zope.security.proxy import (
- isinstance as zope_isinstance,
- removeSecurityProxy,
- )
-
-from canonical.database.constants import UTC_NOW
-from canonical.database.sqlbase import sqlvalues
-
-
-def sync(object):
- """Sync the object's from the database.
-
- This is useful if the object's connection was commited in Zopeless mode,
- or if the database was updated outside the ORM.
- """
- if zope_isinstance(object, SQLObject):
- removeSecurityProxy(object).sync()
- else:
- raise TypeError('%r is not an SQLObject' % object)
-
-
-def set_so_attr(object, colname, value):
- """Set the underlying SQLObject's column value.
-
- Use this function to setup test data when the SQLObject decendant guards
- its data. Data is guarded by transitional conditions for workflows, or
- because the decendant is conjoined to another object that controls it.
- """
- if zope_isinstance(object, SQLObject):
- attr_setter = getattr(
- removeSecurityProxy(object), '_SO_set_%s' % colname)
- attr_setter(value)
- else:
- raise TypeError('%r is not an SQLObject' % object)
-
-
-def print_date_attribute(object, colname):
- """Print out a date attribute of an SQLObject, possibly as 'UTC_NOW'.
-
- If the value of the attribute is equal to the 'UTC_NOW' time of the
- current transaction, it prints the string 'UTC_NOW' instead of the actual
- time value. This helps write more precise doctests.
- """
- if zope_isinstance(object, SQLObject):
- cls = removeSecurityProxy(object).__class__
- query_template = 'id=%%s AND %s=%%s' % colname
- found_object = cls.selectOne(
- query_template % sqlvalues(object.id, UTC_NOW))
- if found_object is None:
- print getattr(object, colname)
- else:
- print 'UTC_NOW'
- else:
- raise TypeError('%r is not an SQLObject' % object)
=== modified file 'lib/canonical/launchpad/testing/codeimporthelpers.py'
--- lib/canonical/launchpad/testing/codeimporthelpers.py 2010-08-20 20:31:18 +0000
+++ lib/canonical/launchpad/testing/codeimporthelpers.py 2011-06-29 12:09:34 +0000
@@ -20,7 +20,6 @@
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
-from canonical.launchpad.ftests import sync
from lp.code.enums import (
CodeImportJobState,
CodeImportResultStatus,
@@ -111,7 +110,6 @@
if code_import.import_job.state != CodeImportJobState.RUNNING:
machine = factory.makeCodeImportMachine(set_online=True)
CodeImportJobWorkflow().startJob(code_import.import_job, machine)
- sync(code_import.import_job)
CodeImportJobWorkflow().finishJob(code_import.import_job, status, None)
=== modified file 'lib/lp/bugs/doc/bug.txt'
--- lib/lp/bugs/doc/bug.txt 2011-05-13 16:50:50 +0000
+++ lib/lp/bugs/doc/bug.txt 2011-06-29 12:09:34 +0000
@@ -1057,11 +1057,9 @@
>>> # expiration, so long as the pillar object has enabled bug expiration.
>>> # Every change to a bug or bugtask must be synced back to the
>>> # database to test can_expire.
- >>> from lp.bugs.tests.bug import (
- ... create_old_bug, sync_bugtasks)
+ >>> from lp.bugs.tests.bug import create_old_bug
>>> upstream_bugtask = create_old_bug('bug a', 1, thunderbird)
- >>> sync_bugtasks(upstream_bugtask)
>>> upstream_bugtask.status.name
'INCOMPLETE'
>>> upstream_bugtask.pillar.enable_bug_expiration
@@ -1076,7 +1074,6 @@
>>> expirable_bugtask = create_old_bug(
... 'bug c', 61, ubuntu, with_message=False)
- >>> sync_bugtasks(expirable_bugtask)
>>> expirable_bugtask.status.name
'INCOMPLETE'
@@ -1092,7 +1089,6 @@
expiration.
>>> expirable_bugtask.transitionToAssignee(sample_person)
- >>> sync_bugtasks(expirable_bugtask)
>>> expirable_bugtask.bug.permits_expiration
True
>>> expirable_bugtask.bug.can_expire
@@ -1103,7 +1099,6 @@
>>> expirable_bugtask.transitionToStatus(
... BugTaskStatus.CONFIRMED, sample_person)
- >>> sync_bugtasks(expirable_bugtask)
>>> expirable_bugtask.bug.permits_expiration
False
>>> expirable_bugtask.bug.can_expire
=== modified file 'lib/lp/bugs/doc/bugtask-expiration.txt'
--- lib/lp/bugs/doc/bugtask-expiration.txt 2011-06-28 15:04:29 +0000
+++ lib/lp/bugs/doc/bugtask-expiration.txt 2011-06-29 12:09:34 +0000
@@ -85,7 +85,6 @@
# An expirable bugtask, a distroseries. The ubuntu bugtask is its
# conjoined slave.
- >>> from lp.bugs.tests.bug import sync_bugtasks
>>> hoary_bugtask = bugtaskset.createTask(
... bug=ubuntu_bugtask.bug, owner=sample_person,
... distroseries=ubuntu.currentseries)
@@ -93,7 +92,6 @@
True
>>> ubuntu_bugtask.bug.permits_expiration
True
- >>> sync_bugtasks([ubuntu_bugtask, hoary_bugtask])
>>> ubuntu_bugtask.bug.can_expire
True
@@ -291,7 +289,6 @@
will be expirable.
>>> new_bugtask.transitionToStatus(BugTaskStatus.WONTFIX, sample_person)
- >>> sync_bugtasks(new_bugtask)
>>> hoary_bugtask.bug.can_expire
True
>>> summarize_bugtasks(hoary_bugtask.bug.bugtasks)
@@ -558,8 +555,6 @@
expired.
>>> no_expiration_bugtask.pillar.enable_bug_expiration = True
- >>> from canonical.launchpad.ftests import sync
- >>> sync(no_expiration_bugtask.pillar)
>>> no_expiration_bugtask.bug.permits_expiration
True
=== modified file 'lib/lp/bugs/doc/distribution-upstream-bug-report.txt'
--- lib/lp/bugs/doc/distribution-upstream-bug-report.txt 2011-02-14 19:14:15 +0000
+++ lib/lp/bugs/doc/distribution-upstream-bug-report.txt 2011-06-29 12:09:34 +0000
@@ -6,10 +6,10 @@
project and monitor them as they change. Launchpad offers functionality
to allow a distribution to focus on and improve this process.
+ >>> from storm.store import Store
>>> from canonical.launchpad.ftests import login
>>> from lp.bugs.tests.bug import (
... create_bug_from_strings)
- >>> from canonical.launchpad.ftests import sync
>>> from lp.registry.interfaces.sourcepackagename import (
... ISourcePackageNameSet)
>>> from lp.registry.interfaces.distribution import IDistributionSet
@@ -83,10 +83,10 @@
>>> mark = getUtility(IPersonSet).getByName('mark')
>>> ls_bug = getUtility(IBugTaskSet).get(23)
>>> ls_bug.transitionToStatus(BugTaskStatus.TRIAGED, mark)
- >>> sync(ls_bug)
+ >>> Store.of(ls_bug).flush()
>>> mf_bug = getUtility(IBugTaskSet).get(17)
>>> mf_bug.transitionToStatus(BugTaskStatus.TRIAGED, mark)
- >>> sync(mf_bug)
+ >>> Store.of(mf_bug).flush()
>>> print_report(ubuntu.getPackagesAndPublicUpstreamBugCounts())
linux-source-2.6.15 None 1 0 0 0 0
mozilla-firefox firefox 1 1 1 1 0
@@ -127,7 +127,6 @@
We then add an upstream task to the second pmount bug:
- >>> from storm.store import Store
>>> task = getUtility(IBugTaskSet).createTask(bug, name12, product=pmount)
>>> Store.of(task).flush()
>>> print_report(ubuntu.getPackagesAndPublicUpstreamBugCounts(limit=3))
@@ -176,7 +175,7 @@
>>> url = "http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=666"
>>> [watch] = getUtility(IBugWatchSet).fromText(url, bug, name12)
>>> task.bugwatch = watch
- >>> sync(task)
+ >>> Store.of(task).flush()
>>> print_report(ubuntu.getPackagesAndPublicUpstreamBugCounts(limit=3))
pmount pmount 2 2 1 1 0
linux-source-2.6.15 None 1 0 0 0 0
=== modified file 'lib/lp/bugs/doc/externalbugtracker-bugzilla.txt'
--- lib/lp/bugs/doc/externalbugtracker-bugzilla.txt 2011-02-23 23:07:06 +0000
+++ lib/lp/bugs/doc/externalbugtracker-bugzilla.txt 2011-06-29 12:09:34 +0000
@@ -477,9 +477,6 @@
... 'RESOLVED', 'FIXED', 'HIGH', 'ENHANCEMENT')
... expected_remote_statuses[remote_bug_id] = 'RESOLVED FIXED'
... expected_remote_importances[remote_bug_id] = 'HIGH ENHANCEMENT'
- >>> from canonical.launchpad.ftests import sync
- >>> for bug_watch in gnome_bugzilla.watches:
- ... sync(bug_watch)
Set the batch threshold higher than the number of bug watches.
@@ -531,8 +528,6 @@
... 'ASSIGNED', '', 'MEDIUM', 'URGENT')
... expected_remote_statuses[remote_bug_id] = 'ASSIGNED'
... expected_remote_importances[remote_bug_id] = 'MEDIUM URGENT'
- >>> for bug_watch in gnome_bugzilla.watches:
- ... sync(bug_watch)
Set the batch threshold very low and remove the batch size limit:
=== modified file 'lib/lp/bugs/doc/externalbugtracker-sourceforge.txt'
--- lib/lp/bugs/doc/externalbugtracker-sourceforge.txt 2011-05-27 19:53:20 +0000
+++ lib/lp/bugs/doc/externalbugtracker-sourceforge.txt 2011-06-29 12:09:34 +0000
@@ -216,9 +216,6 @@
For the sake of this test we can set the bug tracker's batch_size to
None so that it will process all the updates at once:
- >>> from canonical.launchpad.ftests import sync
- >>> for bug_watch in example_bug_tracker.watches:
- ... sync(bug_watch)
>>> sourceforge.batch_size = None
>>> bug_watch_updater.updateBugWatches(
... sourceforge, example_bug_tracker.watches)
=== modified file 'lib/lp/bugs/model/tests/test_bugtask.py'
--- lib/lp/bugs/model/tests/test_bugtask.py 2011-05-28 04:09:11 +0000
+++ lib/lp/bugs/model/tests/test_bugtask.py 2011-06-29 12:09:34 +0000
@@ -1,10 +1,9 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
from datetime import timedelta
-from doctest import DocTestSuite
import unittest
from lazr.lifecycle.snapshot import Snapshot
@@ -39,7 +38,6 @@
from lp.bugs.model.bugtask import build_tag_search_clause
from lp.bugs.tests.bug import (
create_old_bug,
- sync_bugtasks,
)
from lp.hardwaredb.interfaces.hwdb import (
HWBus,
@@ -1255,7 +1253,6 @@
self.bugtaskset.createTask(
bug=bugtasks[-1].bug, owner=self.user,
productseries=self.productseries))
- sync_bugtasks(bugtasks)
def tearDown(self):
logout()
=== modified file 'lib/lp/bugs/tests/bug.py'
--- lib/lp/bugs/tests/bug.py 2011-06-16 13:50:58 +0000
+++ lib/lp/bugs/tests/bug.py 2011-06-29 12:09:34 +0000
@@ -17,7 +17,6 @@
from zope.security.proxy import removeSecurityProxy
from canonical.config import config
-from canonical.launchpad.ftests import sync
from canonical.launchpad.testing.pages import (
extract_text,
find_main_content,
@@ -227,7 +226,6 @@
bugtracker=external_bugtracker, remotebug='1234')
date = datetime.now(UTC) - timedelta(days=days_old)
removeSecurityProxy(bug).date_last_updated = date
- sync_bugtasks([bugtask])
return bugtask
@@ -253,15 +251,6 @@
bugtask.bug.messages.count() == 1)
-def sync_bugtasks(bugtasks):
- """Sync the bugtask and its bug to the database."""
- if not isinstance(bugtasks, list):
- bugtasks = [bugtasks]
- for bugtask in bugtasks:
- sync(bugtask)
- sync(bugtask.bug)
-
-
def print_upstream_linking_form(browser):
"""Print the upstream linking form found via +choose-affected-product.
=== modified file 'lib/lp/code/doc/codeimport-job.txt'
--- lib/lp/code/doc/codeimport-job.txt 2010-10-18 22:24:59 +0000
+++ lib/lp/code/doc/codeimport-job.txt 2011-06-29 12:09:34 +0000
@@ -81,13 +81,6 @@
== Test helpers ==
-The print_date_attribute function displays a date attribute of an
-object. If the value of the attribute is equal to the "UTC_NOW" time
-of the current transaction, it prints the string "UTC_NOW" instead of
-the actual time value.
-
- >>> from canonical.launchpad.ftests import print_date_attribute
-
The NewEvents class helps testing the creation of CodeImportEvent
objects.
@@ -160,12 +153,6 @@
>>> print new_job.state.name
PENDING
-If the associated code import has never been run, its date due is set to
-UTC_NOW, so it will be run as soon as possible.
-
- >>> print_date_attribute(new_job, 'date_due')
- UTC_NOW
-
When the code import is associated to existing CodeImportResult objects,
the date due may be UTC_NOW or a timestamp in the future. This is
covered in detail in the test_codeimportjob.py file.
@@ -207,8 +194,6 @@
>>> print pending_job.requesting_user.name
no-priv
- >>> print_date_attribute(pending_job, 'date_due')
- UTC_NOW
The job request is also recorded in the CodeImportEvent audit trail.
@@ -239,37 +224,9 @@
>>> machine = machine_set.getByHostname('bazaar-importer')
>>> new_events = NewEvents()
-This method updates the date_started, heartbeat and state fields of
-the job. Before the call, date_started, heartbeat, logtail and
-machine are NULL and the state is PENDING.
-
- >>> print_date_attribute(pending_job, 'date_started')
- None
- >>> print_date_attribute(pending_job, 'heartbeat')
- None
- >>> print pending_job.logtail
- None
- >>> print pending_job.machine
- None
- >>> print pending_job.state.name
- PENDING
-
-After the call, the date_started and heartbeat fields are both updated
-to the current time, the logtail is the empty string, machine is set
-to the supplied import machine and the state is RUNNING.
+Run the job:
>>> workflow.startJob(pending_job, machine)
-
- >>> print_date_attribute(pending_job, 'date_started')
- UTC_NOW
- >>> print_date_attribute(pending_job, 'heartbeat')
- UTC_NOW
- >>> pending_job.logtail
- u''
- >>> print pending_job.machine.hostname
- bazaar-importer
- >>> print pending_job.state.name
- RUNNING
>>> running_job = pending_job
The event is also recorded in the CodeImportEvent audit trail.
@@ -292,163 +249,12 @@
>>> removeSecurityProxy(running_job).heartbeat = \
... datetime(2007, 1, 1, 0, 0, 0, tzinfo=UTC)
- >>> from canonical.launchpad.ftests import sync
- >>> sync(running_job)
>>> new_events = NewEvents()
-As stated above updateHeartbeat updates the 'heartbeat' field to the
-current transaction time and also takes a 'logtail' parameter which is
-intended to be displayed in the web UI to give the operators some idea
-what the import worker is currently doing for this job.
-
- >>> print_date_attribute(running_job, 'heartbeat')
- 2007-01-01 00:00:00+00:00
- >>> running_job.logtail
- u''
-
>>> workflow.updateHeartbeat(running_job, u'some interesting log output')
- >>> print_date_attribute(running_job, 'heartbeat')
- UTC_NOW
- >>> running_job.logtail
- u'some interesting log output'
-
No code import events are generated by this method.
>>> new_events.summary()
''
-
-== Finishing a job ==
-
-When a job finishes, the code import worker records this fact by
-calling the finishJob workflow method, which is responsible for all of
-the housekeeping associated with the end of an attempt to update a
-code import, successful or not:
-
- - creating a CodeImportResult record for the job run,
- - deleting the row in the database for the now finished run and
- creating a new one for the next run, and
- - logging a FINISH CodeImportEvent.
-
-The method takes a running job, a status code indicating whether the
-job completed successfully or not and an optional link to the log of
-the import run in the librarian.
-
-Also, in the successful case, finishJob calls requestMirror() on the
-import branch so that the newly imported revisions can be pulled into
-the code hosting area.
-
-In this example, the import branch has never been marked as needing
-mirroring, so the 'next_mirror_time' field is empty:
-
- >>> print_date_attribute(code_import.branch, 'next_mirror_time')
- None
-
-We just document the successful case here, when a log is not recorded.
-The details are tested in unit tests in
-../database/tests/test_codeimportjob.py.
-
- >>> new_events = NewEvents()
- >>> finished_job_id = running_job.id
- >>> finished_date_due = running_job.date_due
- >>> from lp.code.enums import CodeImportResultStatus
- >>> workflow.finishJob(
- ... running_job, CodeImportResultStatus.SUCCESS, None)
-
-The passed in job is now deleted.
-
- >>> print job_set.getById(finished_job_id)
- None
-
-And a new one has been created, scheduled appropriately far in the
-future.
-
- >>> code_import.import_job.id != finished_job_id
- True
- >>> code_import.effective_update_interval
- datetime.timedelta(0, 21600)
- >>> code_import.import_job.date_due - finished_date_due
- datetime.timedelta(0, 21600)
-
-A CodeImportResult object has now been created to record the result of
-this run, containing details such as the import worked on, the dates
-the import started and finished and the final status of the run.
-
- >>> results = list(code_import.results)
- >>> len(results)
- 1
- >>> [result] = results
- >>> result.code_import.id
- 1
- >>> print_date_attribute(result, 'date_job_started')
- UTC_NOW
- >>> # The python-level 'date_job_finished' field is punned with the
- >>> # date_created database column.
- >>> print_date_attribute(result, 'date_created')
- UTC_NOW
- >>> print result.status.name
- SUCCESS
-
-And because we're pretending that this was as successful run, the
-branch is now due to be mirrored by branch puller:
-
- >>> print_date_attribute(code_import.branch, 'next_mirror_time')
- UTC_NOW
-
-Other details of the result object are checked in the unit tests.
-
-Finally, the finishJob() method created a FINISH CodeImportEvent.
-
- >>> print new_events.summary()
- FINISH ~vcs-imports/gnome-terminal/import bazaar-importer
-
-
-== Reclaiming a job that appears to be stuck ==
-
-The code import worker is meant to update the heartbeat field of the
-row of CodeImportJob frequently. The code import watchdog
-periodically checks the heartbeats of the running jobs and if it finds
-that a heartbeat was not updated recently enough, it assumes it has
-become stuck somehow and 'reclaims' the job -- removes the job from
-the database and creates a pending job for the same import that is due
-immediately. This reclaiming is done by the 'reclaimJob' code import
-job workflow method.
-
-It just takes a running code import job as a parameter.
-
- >>> from canonical.launchpad.testing.codeimporthelpers import (
- ... make_running_import)
- >>> running_import = make_running_import(factory=factory)
- >>> running_import_job = running_import.import_job
-
-'reclaimJob' does four separate things:
-
- >>> running_import_job_id = running_import_job.id
- >>> new_events = NewEvents()
- >>> workflow.reclaimJob(running_import_job)
-
-1) deletes the passed in job,
-
- >>> print job_set.getById(running_import_job_id)
- None
-
-2) creates a CodeImportResult with a status of 'RECLAIMED',
-
- >>> results = list(running_import.results)
- >>> len(results)
- 1
- >>> [result] = results
- >>> result.status.name
- 'RECLAIMED'
-
-3) creates a new, already due, job for the code import, and
-
- >>> print_date_attribute(running_import.import_job, 'date_due')
- UTC_NOW
-
-4) logs a 'RECLAIM' CodeImportEvent.
-
- >>> print new_events.summary()
- RECLAIM ...
-
=== modified file 'lib/lp/code/model/tests/test_codeimportjob.py'
--- lib/lp/code/model/tests/test_codeimportjob.py 2010-10-04 19:50:45 +0000
+++ lib/lp/code/model/tests/test_codeimportjob.py 2011-06-29 12:09:34 +0000
@@ -1,4 +1,4 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Unit tests for CodeImportJob and CodeImportJobWorkflow."""
@@ -646,6 +646,21 @@
getUtility(ICodeImportJobWorkflow).requestJob,
job, machine)
+ def test_startJob(self):
+ # After startJob, the date_started and heartbeat fields are both
+ # updated to the current time, the logtail is the empty string,
+ # machine is set to the supplied import machine and the state is
+ # RUNNING.
+ code_import = self.factory.makeCodeImport()
+ machine = self.factory.makeCodeImportMachine(set_online=True)
+ job = self.factory.makeCodeImportJob(code_import)
+ getUtility(ICodeImportJobWorkflow).startJob(job, machine)
+ self.assertSqlAttributeEqualsDate(job, 'date_started', UTC_NOW)
+ self.assertSqlAttributeEqualsDate(job, 'heartbeat', UTC_NOW)
+ self.assertEqual('', job.logtail)
+ self.assertEqual(machine, job.machine)
+ self.assertEqual(CodeImportJobState.RUNNING, job.state)
+
def test_offlineMachine(self):
# Calling startJob with a machine which is not ONLINE is an error.
machine = self.factory.makeCodeImportMachine()
@@ -679,6 +694,19 @@
getUtility(ICodeImportJobWorkflow).updateHeartbeat,
job, u'')
+ def test_updateHeartboat(self):
+ code_import = self.factory.makeCodeImport()
+ machine = self.factory.makeCodeImportMachine(set_online=True)
+ job = self.factory.makeCodeImportJob(code_import)
+ workflow = getUtility(ICodeImportJobWorkflow)
+ workflow.startJob(job, machine)
+ # Set heartbeat to something wrong so that we can prove that it was
+ # changed.
+ removeSecurityProxy(job).heartbeat = None
+ workflow.updateHeartbeat(job, u'some interesting log output')
+ self.assertSqlAttributeEqualsDate(job, 'heartbeat', UTC_NOW)
+ self.assertEqual(u'some interesting log output', job.logtail)
+
class TestCodeImportJobWorkflowFinishJob(TestCaseWithFactory,
AssertFailureMixin, AssertEventMixin):
@@ -970,6 +998,71 @@
CodeImportReviewStatus.FAILING, code_import.review_status)
+class TestCodeImportJobWorkflowReclaimJob(TestCaseWithFactory,
+ AssertFailureMixin, AssertEventMixin):
+ """Tests for reclaimJob.
+
+ The code import worker is meant to update the heartbeat field of the row
+ of CodeImportJob frequently. The code import watchdog periodically checks
+ the heartbeats of the running jobs and if it finds that a heartbeat was
+ not updated recently enough, it assumes it has become stuck somehow and
+ 'reclaims' the job -- removes the job from the database and creates a
+ pending job for the same import that is due immediately. This reclaiming
+ is done by the 'reclaimJob' code import job workflow method.
+ """
+
+ layer = DatabaseFunctionalLayer
+
+ def setUp(self):
+ super(TestCodeImportJobWorkflowReclaimJob, self).setUp()
+ login_for_code_imports()
+ self.machine = self.factory.makeCodeImportMachine(set_online=True)
+
+ def makeRunningJob(self, code_import=None):
+ """Make and return a CodeImportJob object with state==RUNNING.
+
+ This is suitable for passing into finishJob().
+ """
+ if code_import is None:
+ code_import = self.factory.makeCodeImport()
+ job = code_import.import_job
+ if job is None:
+ job = self.factory.makeCodeImportJob(code_import)
+ getUtility(ICodeImportJobWorkflow).startJob(job, self.machine)
+ return job
+
+ def test_deletes_job(self):
+ running_job = self.makeRunningJob()
+ job_id = running_job.id
+ getUtility(ICodeImportJobWorkflow).reclaimJob(running_job)
+ matching_job = getUtility(ICodeImportJobSet).getById(job_id)
+ self.assertIs(None, matching_job)
+
+ def test_makes_reclaim_result(self):
+ running_job = self.makeRunningJob()
+ getUtility(ICodeImportJobWorkflow).reclaimJob(running_job)
+ [result] = list(running_job.code_import.results)
+ self.assertEqual(CodeImportResultStatus.RECLAIMED, result.status)
+
+ def test_creates_new_job(self):
+ running_job = self.makeRunningJob()
+ code_import = running_job.code_import
+ getUtility(ICodeImportJobWorkflow).reclaimJob(running_job)
+ self.assertSqlAttributeEqualsDate(
+ code_import.import_job, 'date_due', UTC_NOW)
+
+ def test_logs_reclaim_event(self):
+ running_job = self.makeRunningJob()
+ code_import = running_job.code_import
+ machine = running_job.machine
+ new_events = NewEvents()
+ getUtility(ICodeImportJobWorkflow).reclaimJob(running_job)
+ [reclaim_event] = list(new_events)
+ self.assertEventLike(
+ reclaim_event, CodeImportEventType.RECLAIM,
+ code_import, machine)
+
+
logged_in_for_code_imports = with_celebrity_logged_in('vcs_imports')
=== modified file 'lib/lp/registry/doc/standing.txt'
--- lib/lp/registry/doc/standing.txt 2011-06-28 15:04:29 +0000
+++ lib/lp/registry/doc/standing.txt 2011-06-29 12:09:34 +0000
@@ -15,7 +15,6 @@
... IPersonSet,
... PersonalStanding,
... )
- >>> from canonical.launchpad.ftests import sync
>>> person_set = getUtility(IPersonSet)
>>> lifeless = person_set.getByName('lifeless')
@@ -83,7 +82,6 @@
>>> message = lifeless_post('test-one')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
@@ -93,14 +91,12 @@
>>> message = lifeless_post('test-one')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
>>> message = lifeless_post('test-one')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
@@ -114,7 +110,6 @@
>>> message = lifeless_post('test-two')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
@@ -123,14 +118,12 @@
>>> message = lifeless_post('test-three')
>>> message.reject(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
>>> message = lifeless_post('test-three')
>>> message.discard(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
@@ -143,7 +136,6 @@
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
@@ -157,7 +149,6 @@
>>> message = lifeless_post('test-three')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.GOOD...>
@@ -177,14 +168,12 @@
>>> message = post_message('mark@xxxxxxxxxxx', 'test-two')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(mark)
>>> mark.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
>>> message = post_message('mark@xxxxxxxxxxx', 'test-three')
>>> message.approve(foobar)
>>> script.main()
- >>> sync(mark)
>>> mark.personal_standing
<DBItem PersonalStanding.GOOD...>
@@ -200,19 +189,16 @@
>>> lifeless.personal_standing = PersonalStanding.POOR
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.POOR...>
>>> lifeless.personal_standing = PersonalStanding.GOOD
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.GOOD...>
>>> lifeless.personal_standing = PersonalStanding.EXCELLENT
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.EXCELLENT...>
@@ -221,7 +207,6 @@
>>> lifeless.personal_standing = PersonalStanding.UNKNOWN
>>> script.main()
- >>> sync(lifeless)
>>> lifeless.personal_standing
<DBItem PersonalStanding.GOOD...>
@@ -240,9 +225,6 @@
>>> lifeless.personal_standing = PersonalStanding.UNKNOWN
>>> mark.personal_standing = PersonalStanding.UNKNOWN
>>> LaunchpadZopelessLayer.txn.commit()
- >>> sync(lifeless)
- >>> sync(carlos)
- >>> sync(mark)
>>> lifeless.personal_standing
<DBItem PersonalStanding.UNKNOWN...>
>>> carlos.personal_standing
@@ -265,9 +247,6 @@
<BLANKLINE>
>>> flush_database_caches()
- >>> sync(lifeless)
- >>> sync(carlos)
- >>> sync(mark)
>>> lifeless.personal_standing
<DBItem PersonalStanding.GOOD...>
@@ -299,6 +278,5 @@
<BLANKLINE>
>>> flush_database_caches()
- >>> sync(carlos)
>>> carlos.personal_standing
<DBItem PersonalStanding.GOOD...>
=== modified file 'lib/lp/translations/utilities/tests/test_xpi_po_exporter.py'
--- lib/lp/translations/utilities/tests/test_xpi_po_exporter.py 2011-05-27 21:12:25 +0000
+++ lib/lp/translations/utilities/tests/test_xpi_po_exporter.py 2011-06-29 12:09:34 +0000
@@ -13,7 +13,6 @@
from zope.interface.verify import verifyObject
from canonical.database.sqlbase import commit
-from canonical.launchpad.ftests import sync
from canonical.testing.layers import LaunchpadZopelessLayer
from lp.app.interfaces.launchpad import ILaunchpadCelebrities
from lp.registry.interfaces.person import IPersonSet
@@ -98,7 +97,6 @@
(subject, body) = self.firefox_template.importFromQueue(entry)
# The status is now IMPORTED:
- sync(entry)
self.assertEquals(entry.status, RosettaImportStatus.IMPORTED)
def test_Interface(self):
Follow ups