launchpad-reviewers team mailing list archive
Message #06486
[Merge] lp:~wgrant/launchpad/bulk-insert-errywhere into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/bulk-insert-errywhere into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~wgrant/launchpad/bulk-insert-errywhere/+merge/94490
Port most non-branch INSERT INTO ... VALUES bulk inserts to my new bulk.create() helper. This required fixing it to cope with a couple of extra cases (most notably raw SQL() values such as UTC_NOW), and I changed it to default to not returning the created objects; callers that need them now pass load_created=True.
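For review context, a minimal sketch of the reworked calling convention (not part of the diff; it borrows the BugSubscription columns exercised in the tests below, with rows standing in for a list of value tuples matching those columns):

    from lp.services.database import bulk
    from lp.bugs.model.bugsubscription import BugSubscription

    # New default: a single INSERT ... VALUES statement is executed,
    # and None is returned.
    bulk.create(
        (BugSubscription.bug, BugSubscription.person,
         BugSubscription.subscribed_by, BugSubscription.date_created,
         BugSubscription.bug_notification_level),
        rows)

    # Callers that need the created objects opt back in; per the tests,
    # this costs a second query to load the new rows by primary key.
    subs = bulk.create(
        (BugSubscription.bug, BugSubscription.person,
         BugSubscription.subscribed_by, BugSubscription.date_created,
         BugSubscription.bug_notification_level),
        rows, load_created=True)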
--
https://code.launchpad.net/~wgrant/launchpad/bulk-insert-errywhere/+merge/94490
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/bulk-insert-errywhere into lp:launchpad.
=== modified file 'lib/lp/bugs/model/bugnotification.py'
--- lib/lp/bugs/model/bugnotification.py 2011-12-30 06:14:56 +0000
+++ lib/lp/bugs/model/bugnotification.py 2012-02-24 02:00:24 +0000
@@ -52,13 +52,11 @@
from lp.bugs.model.structuralsubscription import StructuralSubscription
from lp.registry.interfaces.person import IPersonSet
from lp.services.config import config
+from lp.services.database import bulk
from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.enumcol import EnumCol
from lp.services.database.lpstorm import IStore
-from lp.services.database.sqlbase import (
- SQLBase,
- sqlvalues,
- )
+from lp.services.database.sqlbase import SQLBase
from lp.services.database.stormbase import StormBase
from lp.services.messages.model.message import Message
@@ -182,28 +180,19 @@
# XXX jamesh 2008-05-21: these flushes are to fix ordering
# problems in the bugnotification-sending.txt tests.
store.flush()
- sql_values = []
- for recipient in recipients:
- reason_body, reason_header = recipients.getReason(recipient)
- sql_values.append('(%s, %s, %s, %s)' % sqlvalues(
- bug_notification, recipient, reason_header, reason_body))
-
- # We add all the recipients in a single SQL statement to make
- # this a bit more efficient for bugs with many subscribers.
- if len(sql_values) > 0:
- store.execute("""
- INSERT INTO BugNotificationRecipient
- (bug_notification, person, reason_header, reason_body)
- VALUES %s;""" % ', '.join(sql_values))
-
- if len(recipients.subscription_filters) > 0:
- filter_link_sql = [
- "(%s, %s)" % sqlvalues(bug_notification, filter.id)
- for filter in recipients.subscription_filters]
- store.execute("""
- INSERT INTO BugNotificationFilter
- (bug_notification, bug_subscription_filter)
- VALUES %s;""" % ", ".join(filter_link_sql))
+
+ bulk.create(
+ (BugNotificationRecipient.bug_notification,
+ BugNotificationRecipient.person,
+ BugNotificationRecipient.reason_body,
+ BugNotificationRecipient.reason_header),
+ [(bug_notification, recipient) + recipients.getReason(recipient)
+ for recipient in recipients])
+ bulk.create(
+ (BugNotificationFilter.bug_notification,
+ BugNotificationFilter.bug_subscription_filter),
+ [(bug_notification, filter)
+ for filter in recipients.subscription_filters])
return bug_notification
=== modified file 'lib/lp/bugs/model/bugwatch.py'
--- lib/lp/bugs/model/bugwatch.py 2012-02-21 22:46:28 +0000
+++ lib/lp/bugs/model/bugwatch.py 2012-02-24 02:00:24 +0000
@@ -64,16 +64,17 @@
from lp.bugs.model.bugset import BugSetBase
from lp.bugs.model.bugtask import BugTask
from lp.registry.interfaces.person import validate_public_person
+from lp.services.database import bulk
from lp.services.database.constants import UTC_NOW
from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.enumcol import EnumCol
from lp.services.database.lpstorm import IStore
-from lp.services.database.sqlbase import (
- SQLBase,
- sqlvalues,
- )
+from lp.services.database.sqlbase import SQLBase
from lp.services.database.stormbase import StormBase
-from lp.services.helpers import shortlist
+from lp.services.helpers import (
+ ensure_unicode,
+ shortlist,
+ )
from lp.services.messages.model.message import Message
from lp.services.webapp import (
urlappend,
@@ -739,18 +740,13 @@
def bulkAddActivity(self, references,
result=BugWatchActivityStatus.SYNC_SUCCEEDED,
- message=None, oops_id=None):
+ oops_id=None):
"""See `IBugWatchSet`."""
- bug_watch_ids = set(get_bug_watch_ids(references))
- if len(bug_watch_ids) > 0:
- insert_activity_statement = (
- "INSERT INTO BugWatchActivity"
- " (bug_watch, result, message, oops_id) "
- "SELECT BugWatch.id, %s, %s, %s FROM BugWatch"
- " WHERE BugWatch.id IN %s")
- IStore(BugWatch).execute(
- insert_activity_statement % sqlvalues(
- result, message, oops_id, bug_watch_ids))
+ bulk.create(
+ (BugWatchActivity.bug_watch_id, BugWatchActivity.result,
+ BugWatchActivity.oops_id),
+ [(bug_watch_id, result, ensure_unicode(oops_id))
+ for bug_watch_id in set(get_bug_watch_ids(references))])
class BugWatchActivity(StormBase):
=== modified file 'lib/lp/bugs/scripts/bzremotecomponentfinder.py'
--- lib/lp/bugs/scripts/bzremotecomponentfinder.py 2012-01-01 02:58:52 +0000
+++ lib/lp/bugs/scripts/bzremotecomponentfinder.py 2012-02-24 02:00:24 +0000
@@ -16,6 +16,7 @@
)
from BeautifulSoup import BeautifulSoup
+import transaction
from zope.component import getUtility
from lp.bugs.interfaces.bugtracker import (
@@ -23,6 +24,7 @@
IBugTrackerSet,
)
from lp.bugs.model.bugtracker import BugTrackerComponent
+from lp.services.database import bulk
from lp.services.database.lpstorm import IStore
from lp.services.scripts.logger import log as default_log
@@ -199,18 +201,15 @@
# added to launchpad. Record them for now.
for component in product['components'].values():
components_to_add.append(
- "('%s', %d, 'True', 'False')" % (
- component['name'], lp_component_group.id))
+ (component['name'], lp_component_group, True, False))
if len(components_to_add) > 0:
- sqltext = """
- INSERT INTO BugTrackerComponent
- (name, component_group, is_visible, is_custom)
- VALUES %s""" % ",\n ".join(components_to_add)
-
self.logger.debug("...Inserting components into database")
- store = IStore(BugTrackerComponent)
- store.execute(sqltext)
- store.commit()
- store.flush()
+ bulk.create(
+ (BugTrackerComponent.name,
+ BugTrackerComponent.component_group,
+ BugTrackerComponent.is_visible,
+ BugTrackerComponent.is_custom),
+ components_to_add)
+ transaction.commit()
self.logger.debug("...Done")
=== modified file 'lib/lp/bugs/tests/test_bugnotification.py'
--- lib/lp/bugs/tests/test_bugnotification.py 2012-01-20 15:42:44 +0000
+++ lib/lp/bugs/tests/test_bugnotification.py 2012-02-24 02:00:24 +0000
@@ -28,11 +28,11 @@
from lp.bugs.model.bugnotification import (
BugNotification,
BugNotificationFilter,
+ BugNotificationRecipient,
BugNotificationSet,
)
from lp.bugs.model.bugsubscriptionfilter import BugSubscriptionFilterMute
from lp.services.config import config
-from lp.services.database.sqlbase import sqlvalues
from lp.services.messages.interfaces.message import IMessageSet
from lp.services.messages.model.message import MessageSet
from lp.testing import (
@@ -165,14 +165,9 @@
def addNotificationRecipient(self, notification, person):
# Manually insert BugNotificationRecipient for
# construct_email_notifications to work.
- # Not sure why using SQLObject constructor doesn't work (it
- # tries to insert a row with only the ID which fails).
- Store.of(notification).execute("""
- INSERT INTO BugNotificationRecipient
- (bug_notification, person, reason_header, reason_body)
- VALUES (%s, %s, %s, %s)""" % sqlvalues(
- notification, person,
- u'reason header', u'reason body'))
+ BugNotificationRecipient(
+ bug_notification=notification, person=person,
+ reason_header=u'reason header', reason_body=u'reason body')
def addNotification(self, person, bug=None):
# Add a notification along with recipient data.
=== modified file 'lib/lp/bugs/tests/test_bugwatch.py'
--- lib/lp/bugs/tests/test_bugwatch.py 2012-01-20 15:42:44 +0000
+++ lib/lp/bugs/tests/test_bugwatch.py 2012-02-24 02:00:24 +0000
@@ -582,8 +582,8 @@
# the given bug watches.
error = BugWatchActivityStatus.PRIVATE_REMOTE_BUG
getUtility(IBugWatchSet).bulkAddActivity(
- self.bug_watches, error, "Forbidden", "OOPS-1234")
- self._checkActivityForBugWatches(error, "Forbidden", "OOPS-1234")
+ self.bug_watches, error, "OOPS-1234")
+ self._checkActivityForBugWatches(error, None, "OOPS-1234")
def test_bulkAddActivity_with_id_list(self):
# The ids of bug watches can be passed in.
=== modified file 'lib/lp/services/database/bulk.py'
--- lib/lp/services/database/bulk.py 2012-02-22 05:07:08 +0000
+++ lib/lp/services/database/bulk.py 2012-02-24 02:00:24 +0000
@@ -26,6 +26,7 @@
And,
Insert,
Or,
+ SQL,
)
from storm.info import (
get_cls_info,
@@ -168,7 +169,9 @@
def _dbify_value(col, val):
"""Convert a value into a form that Storm can compile directly."""
- if isinstance(col, Reference):
+ if isinstance(val, SQL):
+ return (val,)
+ elif isinstance(col, Reference):
# References are mainly meant to be used as descriptors, so we
# have to perform a bit of evil here to turn the (potentially
# None) value into a sequence of primary key values.
@@ -192,13 +195,13 @@
return (col,)
-def create(columns, values, return_created=True):
+def create(columns, values, load_created=False):
"""Create a large number of objects efficiently.
- :param cols: The Storm columns to insert values into. Must be from a
-     single class.
+ :param columns: The Storm columns to insert values into. Must be from a
+     single class.
:param values: A list of lists of values for the columns.
- :param return_created: Retrieve the created objects.
+ :param load_created: Return the created objects.
- :return: A list of the created objects if return_created, otherwise None.
+ :return: A list of the created objects if load_created, otherwise None.
"""
# Flatten Reference faux-columns into their primary keys.
@@ -210,7 +213,7 @@
"class.")
if len(values) == 0:
- return [] if return_created else None
+ return [] if load_created else None
[cls] = clses
primary_key = get_cls_info(cls).primary_key
@@ -223,7 +226,7 @@
_dbify_value(col, val) for col, val in zip(columns, value)))
for value in values]
- if not return_created:
+ if not load_created:
IStore(cls).execute(Insert(db_cols, expr=db_values))
return None
else:
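
Aside (not part of the diff): the SQL() passthrough above is what lets callers embed database-evaluated expressions directly in value tuples; the publishing.py hunk below relies on it to pass UTC_NOW as datecreated. A minimal sketch, reusing the BugSubscription columns from the tests and assuming bug, person, and level objects already exist:

    from storm.expr import SQL
    from lp.services.database import bulk
    from lp.bugs.model.bugsubscription import BugSubscription

    # A SQL() value is compiled verbatim into the INSERT rather than
    # being adapted through the column's variable factory, so the
    # timestamp is evaluated by the database at insert time.
    bulk.create(
        (BugSubscription.bug, BugSubscription.person,
         BugSubscription.subscribed_by, BugSubscription.date_created,
         BugSubscription.bug_notification_level),
        [(bug, person, person,
          SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"), level)])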
=== modified file 'lib/lp/services/database/tests/test_bulk.py'
--- lib/lp/services/database/tests/test_bulk.py 2012-02-22 05:07:08 +0000
+++ lib/lp/services/database/tests/test_bulk.py 2012-02-24 02:00:24 +0000
@@ -9,6 +9,7 @@
from pytz import UTC
from storm.exceptions import ClassInfoError
+from storm.expr import SQL
from storm.info import get_obj_info
from storm.store import Store
from testtools.matchers import Equals
@@ -34,6 +35,7 @@
ISlaveStore,
IStore,
)
+from lp.services.database.sqlbase import get_transaction_timestamp
from lp.services.features.model import (
FeatureFlag,
getFeatureStore,
@@ -260,7 +262,7 @@
(BugSubscription.bug, BugSubscription.person,
BugSubscription.subscribed_by, BugSubscription.date_created,
BugSubscription.bug_notification_level),
- wanted)
+ wanted, load_created=True)
self.assertThat(recorder, HasQueryCount(Equals(2)))
self.assertContentEqual(
@@ -274,7 +276,7 @@
wanted = [(None, job, BranchJobType.RECLAIM_BRANCH_SPACE)]
[branchjob] = bulk.create(
(BranchJob.branch, BranchJob.job, BranchJob.job_type),
- wanted)
+ wanted, load_created=True)
self.assertEqual(
wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
@@ -294,7 +296,9 @@
def test_zero_values_is_noop(self):
# create()ing 0 rows is a no-op.
with StormStatementRecorder() as recorder:
- self.assertEqual([], bulk.create((BugSubscription.bug,), []))
+ self.assertEqual(
+ [],
+ bulk.create((BugSubscription.bug,), [], load_created=True))
self.assertThat(recorder, HasQueryCount(Equals(0)))
def test_load_can_be_skipped(self):
@@ -307,9 +311,23 @@
None,
bulk.create(
(BranchJob.branch, BranchJob.job, BranchJob.job_type),
- wanted, return_created=False))
+ wanted, load_created=False))
self.assertThat(recorder, HasQueryCount(Equals(1)))
[reclaimjob] = ReclaimBranchSpaceJob.iterReady()
branchjob = reclaimjob.context
self.assertEqual(
wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
+
+ def test_sql_passed_through(self):
+ # create() passes SQL() expressions through untouched.
+ bug = self.factory.makeBug()
+ person = self.factory.makePerson()
+
+ [sub] = bulk.create(
+ (BugSubscription.bug, BugSubscription.person,
+ BugSubscription.subscribed_by, BugSubscription.date_created,
+ BugSubscription.bug_notification_level),
+ [(bug, person, person,
+ SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"),
+ BugNotificationLevel.LIFECYCLE)], load_created=True)
+ self.assertEqual(get_transaction_timestamp(), sub.date_created)
=== modified file 'lib/lp/services/job/model/job.py'
--- lib/lp/services/job/model/job.py 2011-12-30 06:14:56 +0000
+++ lib/lp/services/job/model/job.py 2012-02-24 02:00:24 +0000
@@ -27,13 +27,11 @@
)
from zope.interface import implements
+from lp.services.database import bulk
from lp.services.database.constants import UTC_NOW
from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.enumcol import EnumCol
-from lp.services.database.sqlbase import (
- quote,
- SQLBase,
- )
+from lp.services.database.sqlbase import SQLBase
from lp.services.job.interfaces.job import (
IJob,
JobStatus,
@@ -123,15 +121,11 @@
- :param request: The `IPerson` requesting the jobs.
+ :param requester: The `IPerson` requesting the jobs.
:return: An iterable of `Job.id` values for the new jobs.
"""
- job_contents = [
- "(%s, %s)" % (
- quote(JobStatus.WAITING), quote(requester))] * num_jobs
- result = store.execute("""
- INSERT INTO Job (status, requester)
- VALUES %s
- RETURNING id
- """ % ", ".join(job_contents))
- return [job_id for job_id, in result]
+ return [
+ job.id for job in bulk.create(
+ (Job._status, Job.requester),
+ [(JobStatus.WAITING, requester) for i in range(num_jobs)],
+ load_created=True)]
def acquireLease(self, duration=300):
"""See `IJob`."""
=== modified file 'lib/lp/soyuz/model/distroseriesdifferencejob.py'
--- lib/lp/soyuz/model/distroseriesdifferencejob.py 2011-12-30 06:14:56 +0000
+++ lib/lp/soyuz/model/distroseriesdifferencejob.py 2012-02-24 02:00:24 +0000
@@ -8,7 +8,6 @@
'DistroSeriesDifferenceJob',
]
-import simplejson
from zope.component import getUtility
from zope.interface import (
classProvides,
@@ -22,11 +21,11 @@
from lp.registry.model.distroseries import DistroSeries
from lp.registry.model.distroseriesdifference import DistroSeriesDifference
from lp.registry.model.sourcepackagename import SourcePackageName
+from lp.services.database import bulk
from lp.services.database.lpstorm import (
IMasterStore,
IStore,
)
-from lp.services.database.sqlbase import quote
from lp.services.features import getFeatureFlag
from lp.services.job.model.job import Job
from lp.soyuz.interfaces.distributionjob import (
@@ -74,28 +73,6 @@
return DistroSeriesDifferenceJob(job)
-def compose_job_insertion_tuple(derived_series, parent_series,
- sourcepackagename_id, job_id):
- """Compose tuple for insertion into `DistributionJob`.
-
- :param derived_series: Derived `DistroSeries`.
- :param parent_series: Parent `DistroSeries`.
- :param sourcepackagename_id: ID of `SourcePackageName`.
- :param job_id: associated `Job` id.
- :return: A tuple of: derived distribution id, derived distroseries id,
- job type, job id, JSON data map.
- """
- json = simplejson.dumps(
- make_metadata(sourcepackagename_id, parent_series.id))
- return (
- derived_series.distribution.id,
- derived_series.id,
- DistributionJobType.DISTROSERIESDIFFERENCE,
- job_id,
- json,
- )
-
-
def create_multiple_jobs(derived_series, parent_series):
"""Create `DistroSeriesDifferenceJob`s between parent and derived series.
@@ -120,20 +97,16 @@
sourcepackagenames = source_package_releases.values(
SourcePackageRelease.sourcepackagenameID)
job_ids = Job.createMultiple(store, nb_jobs)
-
- job_tuples = [
- quote(compose_job_insertion_tuple(
- derived_series, parent_series, sourcepackagename, job_id))
- for job_id, sourcepackagename in zip(job_ids, sourcepackagenames)]
-
- store = IStore(DistributionJob)
- result = store.execute("""
- INSERT INTO DistributionJob (
- distribution, distroseries, job_type, job, json_data)
- VALUES %s
- RETURNING id
- """ % ", ".join(job_tuples))
- return [job_id for job_id, in result]
+ return [
+ job.id for job in bulk.create(
+ (DistributionJob.distribution, DistributionJob.distroseries,
+ DistributionJob.job_type, DistributionJob.job_id,
+ DistributionJob.metadata),
+ [(derived_series.distribution, derived_series,
+ DistributionJobType.DISTROSERIESDIFFERENCE, job_id,
+ make_metadata(spn_id, parent_series.id))
+ for job_id, spn_id in zip(job_ids, sourcepackagenames)],
+ load_created=True)]
def find_waiting_jobs(derived_series, sourcepackagename, parent_series):
=== modified file 'lib/lp/soyuz/model/packagecopyjob.py'
--- lib/lp/soyuz/model/packagecopyjob.py 2012-01-17 21:45:24 +0000
+++ lib/lp/soyuz/model/packagecopyjob.py 2012-02-24 02:00:24 +0000
@@ -11,7 +11,6 @@
import logging
from lazr.delegates import delegates
-import simplejson
from storm.locals import (
And,
Int,
@@ -40,13 +39,13 @@
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.registry.interfaces.sourcepackagename import ISourcePackageNameSet
from lp.registry.model.distroseries import DistroSeries
+from lp.services.database import bulk
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import EnumCol
from lp.services.database.lpstorm import (
IMasterStore,
IStore,
)
-from lp.services.database.sqlbase import sqlvalues
from lp.services.database.stormbase import StormBase
from lp.services.job.interfaces.job import (
JobStatus,
@@ -297,9 +296,8 @@
data = (
cls.class_job_type, target_distroseries, copy_policy,
source_archive, target_archive, package_name, job_id,
- simplejson.dumps(metadata, ensure_ascii=False))
- format_string = "(%s)" % ", ".join(["%s"] * len(data))
- return format_string % sqlvalues(*data)
+ metadata)
+ return data
@classmethod
def createMultiple(cls, target_distroseries, copy_tasks, requester,
@@ -313,20 +311,14 @@
target_distroseries, copy_policy, include_binaries, job_id,
task, sponsored)
for job_id, task in zip(job_ids, copy_tasks)]
- result = store.execute("""
- INSERT INTO PackageCopyJob (
- job_type,
- target_distroseries,
- copy_policy,
- source_archive,
- target_archive,
- package_name,
- job,
- json_data)
- VALUES %s
- RETURNING id
- """ % ", ".join(job_contents))
- return [job_id for job_id, in result]
+ return [
+ job.id for job in
+ bulk.create(
+ (PackageCopyJob.job_type, PackageCopyJob.target_distroseries,
+ PackageCopyJob.copy_policy, PackageCopyJob.source_archive,
+ PackageCopyJob.target_archive, PackageCopyJob.package_name,
+ PackageCopyJob.job_id, PackageCopyJob.metadata),
+ job_contents, load_created=True)]
@classmethod
def getActiveJobs(cls, target_archive):
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2012-01-20 13:23:01 +0000
+++ lib/lp/soyuz/model/publishing.py 2012-02-24 02:00:24 +0000
@@ -48,15 +48,13 @@
from lp.buildmaster.model.packagebuild import PackageBuild
from lp.registry.interfaces.person import validate_public_person
from lp.registry.interfaces.pocket import PackagePublishingPocket
+from lp.services.database import bulk
from lp.services.database.constants import UTC_NOW
from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import EnumCol
from lp.services.database.lpstorm import IMasterStore
-from lp.services.database.sqlbase import (
- SQLBase,
- sqlvalues,
- )
+from lp.services.database.sqlbase import SQLBase
from lp.services.librarian.browser import ProxiedLibraryFileAlias
from lp.services.librarian.model import (
LibraryFileAlias,
@@ -1462,29 +1460,18 @@
if not needed:
return []
- insert_head = """
- INSERT INTO BinaryPackagePublishingHistory
- (archive, distroarchseries, pocket, binarypackagerelease,
- binarypackagename, component, section, priority, status,
- datecreated)
- VALUES
- """
- insert_pubs = ", ".join(
- "(%s)" % ", ".join(sqlvalues(
- get_archive(archive, bpr).id, das.id, pocket, bpr.id,
- bpr.binarypackagename,
- get_component(archive, das.distroseries, component).id,
- section.id, priority, PackagePublishingStatus.PENDING,
- UTC_NOW))
- for (das, bpr, (component, section, priority)) in needed)
- insert_tail = " RETURNING BinaryPackagePublishingHistory.id"
- new_ids = IMasterStore(BinaryPackagePublishingHistory).execute(
- insert_head + insert_pubs + insert_tail)
-
- publications = IMasterStore(BinaryPackagePublishingHistory).find(
- BinaryPackagePublishingHistory,
- BinaryPackagePublishingHistory.id.is_in(id[0] for id in new_ids))
- return list(publications)
+ BPPH = BinaryPackagePublishingHistory
+ return bulk.create(
+ (BPPH.archive, BPPH.distroarchseries, BPPH.pocket,
+ BPPH.binarypackagerelease, BPPH.binarypackagename,
+ BPPH.component, BPPH.section, BPPH.priority, BPPH.status,
+ BPPH.datecreated),
+ [(get_archive(archive, bpr), das, pocket, bpr,
+ bpr.binarypackagename,
+ get_component(archive, das.distroseries, component),
+ section, priority, PackagePublishingStatus.PENDING, UTC_NOW)
+ for (das, bpr, (component, section, priority)) in needed],
+ load_created=True)
def publishBinary(self, archive, binarypackagerelease, distroseries,
component, section, priority, pocket):