launchpad-reviewers team mailing list archive
Message #22400
[Merge] lp:~cjwatson/launchpad/get-transaction-timestamp-per-store into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/get-transaction-timestamp-per-store into lp:launchpad.
Commit message:
Make get_transaction_timestamp take a store argument, and use it in a few more places.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/get-transaction-timestamp-per-store/+merge/343591
My main motivation for this was the last commit in the series: I wanted to make the ArchiveFile tests more accurate about timestamps before beginning work on extending ArchiveFile into a history table.
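
As a quick illustration of the new calling convention, here is a minimal sketch using only names that appear in the hunks below; it is not part of the proposed diff itself:

    from storm.store import Store

    from lp.services.database.interfaces import IStore
    from lp.services.database.sqlbase import get_transaction_timestamp
    from lp.services.xref.model import XRef


    def transaction_now_for_class(cls=XRef):
        # Timestamp of the current transaction on the store holding `cls`,
        # rather than always asking the main default store.
        return get_transaction_timestamp(IStore(cls))


    def transaction_now_for_object(obj):
        # The same idea when a model object is already in hand, as the
        # doctest changes below do with Store.of(...).
        return get_transaction_timestamp(Store.of(obj))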
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/get-transaction-timestamp-per-store into lp:launchpad.
=== modified file 'cronscripts/librarian-gc.py'
--- cronscripts/librarian-gc.py 2013-09-10 10:48:31 +0000
+++ cronscripts/librarian-gc.py 2018-04-19 10:21:15 +0000
@@ -66,11 +66,12 @@
# XXX wgrant 2011-09-18 bug=853066: Using Storm's raw connection
# here is wrong. We should either create our own or use
# Store.execute or cursor() and the transaction module.
- conn = IStore(LibraryFileAlias)._connection._raw_connection
+ store = IStore(LibraryFileAlias)
+ conn = store._connection._raw_connection
# Refuse to run if we have significant clock skew between the
# librarian and the database.
- librariangc.confirm_no_clock_skew(conn)
+ librariangc.confirm_no_clock_skew(store)
# Note that each of these next steps will issue commit commands
# as appropriate to make this script transaction friendly
=== modified file 'lib/lp/blueprints/doc/sprint-agenda.txt'
--- lib/lp/blueprints/doc/sprint-agenda.txt 2011-12-30 06:14:56 +0000
+++ lib/lp/blueprints/doc/sprint-agenda.txt 2018-04-19 10:21:15 +0000
@@ -42,8 +42,9 @@
It is possible to revise your choice, declining the spec. This should update
the date_decided.
+ >>> from storm.store import Store
>>> from lp.services.database.sqlbase import get_transaction_timestamp
- >>> transaction_timestamp = get_transaction_timestamp()
+ >>> transaction_timestamp = get_transaction_timestamp(Store.of(sl))
# Nobody is allowed to write directly to SprintSpecification.date_decided,
# so we need to remove its security proxy here.
=== modified file 'lib/lp/code/model/tests/test_codeimportjob.py'
--- lib/lp/code/model/tests/test_codeimportjob.py 2018-03-15 20:44:04 +0000
+++ lib/lp/code/model/tests/test_codeimportjob.py 2018-04-19 10:21:15 +0000
@@ -55,6 +55,8 @@
from lp.code.tests.helpers import GitHostingFixture
from lp.services.config import config
from lp.services.database.constants import UTC_NOW
+from lp.services.database.interfaces import IStore
+from lp.services.database.sqlbase import get_transaction_timestamp
from lp.services.librarian.interfaces import ILibraryFileAliasSet
from lp.services.librarian.interfaces.client import ILibrarianClient
from lp.services.macaroons.interfaces import IMacaroonIssuer
@@ -561,10 +563,10 @@
# This causes problems for the "UTC_NOW - interval / 2"
# expression below.
interval = code_import.effective_update_interval
- from lp.services.database.sqlbase import get_transaction_timestamp
+ store = IStore(CodeImportResult)
recent_result = CodeImportResult(
code_import=code_import, machine=machine, status=FAILURE,
- date_job_started=get_transaction_timestamp() - interval / 2)
+ date_job_started=get_transaction_timestamp(store) - interval / 2)
# When we create the job, its date_due should be set to the date_due
# of the job that was deleted when the CodeImport review status
# changed from REVIEWED. That is the date_job_started of the most
=== modified file 'lib/lp/registry/doc/announcement.txt'
--- lib/lp/registry/doc/announcement.txt 2015-01-29 12:33:01 +0000
+++ lib/lp/registry/doc/announcement.txt 2018-04-19 10:21:15 +0000
@@ -212,8 +212,10 @@
Announcements can be retracted at any time. Retracting an announcement
updates the date_last_modified and sets the announcement.active flag to False
+ >>> from storm.store import Store
>>> from lp.services.database.sqlbase import get_transaction_timestamp
- >>> transaction_timestamp = get_transaction_timestamp()
+ >>> transaction_timestamp = get_transaction_timestamp(
+ ... Store.of(apache_asia))
>>> print apache_asia.date_last_modified
None
=== modified file 'lib/lp/services/database/sqlbase.py'
--- lib/lp/services/database/sqlbase.py 2015-07-08 16:05:11 +0000
+++ lib/lp/services/database/sqlbase.py 2018-04-19 10:21:15 +0000
@@ -263,11 +263,9 @@
_get_sqlobject_store().invalidate()
-def get_transaction_timestamp():
- """Get the timestamp for the current transaction on the MAIN DEFAULT
- store. DEPRECATED - if needed it should become a method on the store.
- """
- timestamp = _get_sqlobject_store().execute(
+def get_transaction_timestamp(store):
+ """Get the timestamp for the current transaction on `store`."""
+ timestamp = store.execute(
"SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'").get_one()[0]
return timestamp.replace(tzinfo=pytz.timezone('UTC'))
=== modified file 'lib/lp/services/database/tests/test_bulk.py'
--- lib/lp/services/database/tests/test_bulk.py 2018-01-02 10:54:31 +0000
+++ lib/lp/services/database/tests/test_bulk.py 2018-04-19 10:21:15 +0000
@@ -343,4 +343,6 @@
[(bug, person, person,
SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"),
BugNotificationLevel.LIFECYCLE)], get_objects=True)
- self.assertEqual(get_transaction_timestamp(), sub.date_created)
+ self.assertEqual(
+ get_transaction_timestamp(IStore(BugSubscription)),
+ sub.date_created)
=== modified file 'lib/lp/services/librarianserver/librariangc.py'
--- lib/lp/services/librarianserver/librariangc.py 2018-01-03 17:17:12 +0000
+++ lib/lp/services/librarianserver/librariangc.py 2018-04-19 10:21:15 +0000
@@ -17,6 +17,7 @@
from time import time
import iso8601
+import pytz
from swiftclient import client as swiftclient
from zope.interface import implementer
@@ -26,6 +27,7 @@
listReferences,
quoteIdentifier,
)
+from lp.services.database.sqlbase import get_transaction_timestamp
from lp.services.features import getFeatureFlag
from lp.services.librarianserver import swift
from lp.services.librarianserver.storage import (
@@ -67,7 +69,7 @@
def _utcnow():
# Wrapper that is replaced in the test suite.
- return datetime.utcnow()
+ return datetime.now(pytz.UTC)
def open_stream(content_id):
@@ -105,16 +107,14 @@
return hasher.hexdigest(), length
-def confirm_no_clock_skew(con):
+def confirm_no_clock_skew(store):
"""Raise an exception if there is significant clock skew between the
database and this machine.
It is theoretically possible to lose data if there is more than several
hours of skew.
"""
- cur = con.cursor()
- cur.execute("SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'")
- db_now = cur.fetchone()[0]
+ db_now = get_transaction_timestamp(store)
local_now = _utcnow()
five_minutes = timedelta(minutes=5)
@@ -813,8 +813,7 @@
and next_wanted_content_id == content_id)
if not file_wanted:
- mod_time = iso8601.parse_date(
- obj['last_modified']).replace(tzinfo=None)
+ mod_time = iso8601.parse_date(obj['last_modified'])
if mod_time > _utcnow() - timedelta(days=1):
log.debug3(
"File %d not removed - created too recently", content_id)
=== modified file 'lib/lp/services/librarianserver/tests/test_gc.py'
--- lib/lp/services/librarianserver/tests/test_gc.py 2018-02-13 17:46:48 +0000
+++ lib/lp/services/librarianserver/tests/test_gc.py 2018-04-19 10:21:15 +0000
@@ -23,6 +23,7 @@
import sys
import tempfile
+import pytz
from sqlobject import SQLObjectNotFound
from storm.store import Store
from swiftclient import client as swiftclient
@@ -88,8 +89,8 @@
# Make sure that every file the database knows about exists on disk.
# We manually remove them for tests that need to cope with missing
# library items.
- store = IMasterStore(LibraryFileContent)
- for content in store.find(LibraryFileContent):
+ self.store = IMasterStore(LibraryFileContent)
+ for content in self.store.find(LibraryFileContent):
path = librariangc.get_file_path(content.id)
if not os.path.exists(path):
if not os.path.exists(os.path.dirname(path)):
@@ -451,7 +452,7 @@
return org_time() + 24 * 60 * 60 + 1
def tomorrow_utcnow():
- return datetime.utcnow() + timedelta(days=1, seconds=1)
+ return datetime.now(pytz.UTC) + timedelta(days=1, seconds=1)
try:
librariangc.time = tomorrow_time
@@ -584,13 +585,13 @@
def test_confirm_no_clock_skew(self):
# There should not be any clock skew when running the test suite.
- librariangc.confirm_no_clock_skew(self.con)
+ librariangc.confirm_no_clock_skew(self.store)
# To test this function raises an excption when it should,
# fool the garbage collector into thinking it is tomorrow.
with self.librariangc_thinking_it_is_tomorrow():
self.assertRaises(
- Exception, librariangc.confirm_no_clock_skew, (self.con,)
+ Exception, librariangc.confirm_no_clock_skew, (self.store,)
)
=== modified file 'lib/lp/services/xref/tests/test_model.py'
--- lib/lp/services/xref/tests/test_model.py 2015-11-26 17:11:09 +0000
+++ lib/lp/services/xref/tests/test_model.py 2018-04-19 10:21:15 +0000
@@ -16,7 +16,10 @@
from zope.component import getUtility
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import flush_database_caches
+from lp.services.database.sqlbase import (
+ flush_database_caches,
+ get_transaction_timestamp,
+ )
from lp.services.xref.interfaces import IXRefSet
from lp.services.xref.model import XRef
from lp.testing import (
@@ -35,9 +38,7 @@
# date_created defaults to now, but can be overridden.
old = datetime.datetime.strptime('2005-01-01', '%Y-%m-%d').replace(
tzinfo=pytz.UTC)
- now = IStore(XRef).execute(
- "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
- ).get_one()[0].replace(tzinfo=pytz.UTC)
+ now = get_transaction_timestamp(IStore(XRef))
getUtility(IXRefSet).create({
('a', '1'): {('b', 'foo'): {}},
('a', '2'): {('b', 'bar'): {'date_created': old}}})
@@ -68,9 +69,7 @@
def test_findFrom(self):
creator = self.factory.makePerson()
- now = IStore(XRef).execute(
- "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
- ).get_one()[0].replace(tzinfo=pytz.UTC)
+ now = get_transaction_timestamp(IStore(XRef))
getUtility(IXRefSet).create({
('a', 'bar'): {
('b', 'foo'): {'creator': creator, 'metadata': {'test': 1}}},
=== modified file 'lib/lp/soyuz/doc/publishing.txt'
--- lib/lp/soyuz/doc/publishing.txt 2017-06-02 21:46:50 +0000
+++ lib/lp/soyuz/doc/publishing.txt 2018-04-19 10:21:15 +0000
@@ -347,8 +347,9 @@
Inspecting the modified record shows it's ready for domination:
+ >>> from storm.store import Store
>>> from lp.services.database.sqlbase import get_transaction_timestamp
- >>> transaction_timestamp = get_transaction_timestamp()
+ >>> transaction_timestamp = get_transaction_timestamp(Store.of(spph))
>>> modified_spph = spph
>>> modified_spph.status
=== modified file 'lib/lp/soyuz/tests/test_archivefile.py'
--- lib/lp/soyuz/tests/test_archivefile.py 2016-04-04 10:06:33 +0000
+++ lib/lp/soyuz/tests/test_archivefile.py 2018-04-19 10:21:15 +0000
@@ -7,19 +7,18 @@
__metaclass__ = type
-from datetime import (
- datetime,
- timedelta,
- )
+from datetime import timedelta
import os
-import pytz
-from testtools.matchers import LessThan
+from storm.store import Store
import transaction
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
-from lp.services.database.sqlbase import flush_database_caches
+from lp.services.database.sqlbase import (
+ flush_database_caches,
+ get_transaction_timestamp,
+ )
from lp.services.osutils import open_for_writing
from lp.soyuz.interfaces.archivefile import IArchiveFileSet
from lp.testing import TestCaseWithFactory
@@ -63,7 +62,7 @@
def test_getByArchive(self):
archives = [self.factory.makeArchive(), self.factory.makeArchive()]
archive_files = []
- now = datetime.now(pytz.UTC)
+ now = get_transaction_timestamp(Store.of(archives[0]))
for archive in archives:
archive_files.append(self.factory.makeArchiveFile(
archive=archive, scheduled_deletion_date=now))
@@ -113,19 +112,16 @@
archive_files[:2], timedelta(days=1))
self.assertContentEqual(expected_rows, rows)
flush_database_caches()
- tomorrow = datetime.now(pytz.UTC) + timedelta(days=1)
- # Allow a bit of timing slack for slow tests.
- self.assertThat(
- tomorrow - archive_files[0].scheduled_deletion_date,
- LessThan(timedelta(minutes=5)))
- self.assertThat(
- tomorrow - archive_files[1].scheduled_deletion_date,
- LessThan(timedelta(minutes=5)))
+ tomorrow = (
+ get_transaction_timestamp(Store.of(archive_files[0])) +
+ timedelta(days=1))
+ self.assertEqual(tomorrow, archive_files[0].scheduled_deletion_date)
+ self.assertEqual(tomorrow, archive_files[1].scheduled_deletion_date)
self.assertIsNone(archive_files[2].scheduled_deletion_date)
def test_unscheduleDeletion(self):
archive_files = [self.factory.makeArchiveFile() for _ in range(3)]
- now = datetime.now(pytz.UTC)
+ now = get_transaction_timestamp(Store.of(archive_files[0]))
for archive_file in archive_files:
removeSecurityProxy(archive_file).scheduled_deletion_date = now
expected_rows = [
@@ -138,7 +134,7 @@
flush_database_caches()
self.assertIsNone(archive_files[0].scheduled_deletion_date)
self.assertIsNone(archive_files[1].scheduled_deletion_date)
- self.assertIsNotNone(archive_files[2].scheduled_deletion_date)
+ self.assertEqual(now, archive_files[2].scheduled_deletion_date)
def test_getContainersToReap(self):
archive = self.factory.makeArchive()
@@ -150,7 +146,7 @@
other_archive = self.factory.makeArchive()
archive_files.append(self.factory.makeArchiveFile(
archive=other_archive, container="baz"))
- now = datetime.now(pytz.UTC)
+ now = get_transaction_timestamp(Store.of(archive_files[0]))
removeSecurityProxy(archive_files[0]).scheduled_deletion_date = (
now - timedelta(days=1))
removeSecurityProxy(archive_files[1]).scheduled_deletion_date = (
@@ -183,7 +179,7 @@
other_archive = self.factory.makeArchive()
archive_files.append(
self.factory.makeArchiveFile(archive=other_archive))
- now = datetime.now(pytz.UTC)
+ now = get_transaction_timestamp(Store.of(archive_files[0]))
removeSecurityProxy(archive_files[0]).scheduled_deletion_date = (
now - timedelta(days=1))
removeSecurityProxy(archive_files[1]).scheduled_deletion_date = (