[Merge] lp:~lifeless/launchpad/bug-618367 into lp:launchpad/devel
Robert Collins has proposed merging lp:~lifeless/launchpad/bug-618367 into lp:launchpad/devel.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
#618367 Distribution:+assignments is taking a long time maybe 20% of the time
https://bugs.launchpad.net/bugs/618367
Preload is_valid_person for +assignments on Product and Distribution. This should fix bug 618367, according to the observed OOPSes. It will probably still be slow, but 6 seconds faster.
--
https://code.launchpad.net/~lifeless/launchpad/bug-618367/+merge/34162
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~lifeless/launchpad/bug-618367 into lp:launchpad/devel.
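For readers skimming the diff: the heart of the change is to stop relying on the old prejoin(['assignee', 'approver', 'drafter']) and instead issue a single Storm query that left-joins the three Person columns (plus the EmailAddress and Account tables needed for validity) and returns a DecoratedResultSet that caches what was loaded onto each row. A rough sketch of that pattern, condensed from _preload_specifications_people below; the helper name and the query argument are illustrative, and the validity sub-joins and caching decorators are omitted for brevity:

from storm.expr import LeftJoin
from storm.locals import ClassAlias, SQL
from storm.store import Store

from canonical.launchpad.components.decoratedresultset import (
    DecoratedResultSet,
    )
from lp.blueprints.model.specification import Specification
from lp.registry.model.person import Person


def preload_assignment_people(context, query):
    # Alias Person three times so each role gets its own left join.
    assignee = ClassAlias(Person, "assignee")
    approver = ClassAlias(Person, "approver")
    drafter = ClassAlias(Person, "drafter")
    origin = [
        Specification,
        LeftJoin(assignee, Specification.assignee == assignee.id),
        LeftJoin(approver, Specification.approver == approver.id),
        LeftJoin(drafter, Specification.drafter == drafter.id),
        ]
    results = Store.of(context).using(*origin).find(
        (Specification, assignee, approver, drafter), SQL(query))

    def keep_specification(row):
        # The joined Person rows are now in Storm's cache; the real patch
        # additionally caches _preferredemail_cached and
        # _is_valid_person_cached here via Person._validity_queries.
        return row[0]

    return DecoratedResultSet(results, keep_specification)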
=== modified file 'lib/lp/blueprints/browser/tests/test_views.py'
--- lib/lp/blueprints/browser/tests/test_views.py 2010-08-20 20:31:18 +0000
+++ lib/lp/blueprints/browser/tests/test_views.py 2010-08-31 06:11:00 +0000
@@ -9,19 +9,92 @@
import os
import unittest
+from storm.store import Store
+from testtools.matchers import LessThan
+
from canonical.launchpad.testing.systemdocs import (
LayeredDocFileSuite,
setUp,
tearDown,
)
+from canonical.launchpad.webapp import canonical_url
from canonical.testing import DatabaseFunctionalLayer
-
-
-here = os.path.dirname(os.path.realpath(__file__))
-
+from lp.testing import (
+ login,
+ logout,
+ TestCaseWithFactory,
+ )
+from lp.testing.matchers import HasQueryCount
+from lp.testing.sampledata import ADMIN_EMAIL
+from lp.testing._webservice import QueryCollector
+
+
+class TestAssignments(TestCaseWithFactory):
+
+ layer = DatabaseFunctionalLayer
+
+ def invalidate_and_render(self, browser, dbobj, url):
+ # Ensure caches have been flushed.
+ store = Store.of(dbobj)
+ store.flush()
+ store.invalidate()
+ browser.open(url)
+
+ def check_query_counts_scaling_with_unique_people(self,
+ target, targettype):
+ """Check that a particular hasSpecifications target scales well.
+
+ :param target: A spec target like a product.
+ :param targettype: The parameter to pass to makeSpecification to
+ associate the target. e.g. 'product'.
+ """
+ query_baseline = 40
+ people = []
+ for _ in range(10):
+ people.append(self.factory.makePerson())
+ specs = []
+ for _ in range(10):
+ specs.append(self.factory.makeSpecification(
+ **{targettype:target}))
+ collector = QueryCollector()
+ collector.register()
+ self.addCleanup(collector.unregister)
+ viewer = self.factory.makePerson(password="test")
+ browser = self.getUserBrowser(user=viewer)
+ url = canonical_url(target) + "/+assignments"
+ # Seed the cookie cache and any other cross-request state we may gain
+ # in future. See canonical.launchpad.webapp.session: _get_secret.
+ browser.open(url)
+ self.invalidate_and_render(browser, target, url)
+ # Set a baseline
+ self.assertThat(collector, HasQueryCount(LessThan(query_baseline)))
+ no_assignees_count = collector.count
+ # Assign many unique people, which shouldn't change the page queries.
+ # Due to storm bug 619017 additional queries can be triggered when
+ # revalidating people, so we allow -some- fuzz.
+ login(ADMIN_EMAIL)
+ for person, spec in zip(people, specs):
+ spec.assignee = person
+ logout()
+ self.invalidate_and_render(browser, target, url)
+ self.assertThat(
+ collector,
+ HasQueryCount(LessThan(no_assignees_count + 5)))
+
+ def test_product_query_counts_scale_below_unique_people(self):
+ self.check_query_counts_scaling_with_unique_people(
+ self.factory.makeProduct(),
+ 'product')
+
+ def test_distro_query_counts_scale_below_unique_people(self):
+ self.check_query_counts_scaling_with_unique_people(
+ self.factory.makeDistribution(),
+ 'distribution')
+
def test_suite():
- suite = unittest.TestSuite()
+ suite = unittest.TestLoader().loadTestsFromName(__name__)
+ here = os.path.dirname(os.path.realpath(__file__))
testsdir = os.path.abspath(here)
# Add tests using default setup/teardown
=== modified file 'lib/lp/blueprints/model/specification.py'
--- lib/lp/blueprints/model/specification.py 2010-08-20 20:31:18 +0000
+++ lib/lp/blueprints/model/specification.py 2010-08-31 06:11:00 +0000
@@ -24,6 +24,14 @@
SQLRelatedJoin,
StringCol,
)
+from storm.expr import (
+ LeftJoin,
+ )
+from storm.locals import (
+ ClassAlias,
+ Desc,
+ SQL,
+ )
from storm.store import Store
from zope.event import notify
from zope.interface import implements
@@ -40,6 +48,9 @@
SQLBase,
sqlvalues,
)
+from canonical.launchpad.components.decoratedresultset import (
+ DecoratedResultSet,
+ )
from canonical.launchpad.helpers import (
get_contact_email_addresses,
shortlist,
@@ -680,6 +691,74 @@
# this should be implemented by the actual context class
raise NotImplementedError
+ def _specification_sort(self, sort):
+ """Return the storm sort order for 'specifications'.
+
+ :param sort: As per HasSpecificationsMixin.specifications.
+ """
+ # sort by priority descending, by default
+ if sort is None or sort == SpecificationSort.PRIORITY:
+ return (
+ Desc(Specification.priority), Specification.definition_status,
+ Specification.name)
+ elif sort == SpecificationSort.DATE:
+ return (Desc(Specification.datecreated), Specification.id)
+
+ def _preload_specifications_people(self, query):
+ """Perform eager loading of people and their validity for query.
+
+ :param query: a string query generated in the 'specifications'
+ method.
+ :return: A DecoratedResultSet with Person precaching setup.
+ """
+ # Circular import.
+ from lp.registry.model.person import Person
+ constraints = []
+ assignee = ClassAlias(Person, "assignee")
+ approver = ClassAlias(Person, "approver")
+ drafter = ClassAlias(Person, "drafter")
+ origin = [Specification,
+ LeftJoin(assignee, Specification.assignee==assignee.id),
+ LeftJoin(approver, Specification.approver==approver.id),
+ LeftJoin(drafter, Specification.drafter==drafter.id),
+ ]
+ columns = [Specification, assignee, approver, drafter]
+ for alias in (assignee, approver, drafter):
+ validity_info = Person._validity_queries(person_table=alias)
+ constraints.extend(validity_info[2])
+ origin.extend(validity_info[0])
+ columns.extend(validity_info[1])
+ # We only need one decorators list.
+ decorators = validity_info[3]
+ columns = tuple(columns)
+ results = Store.of(self).using(*origin).find(
+ columns,
+ SQL(query),
+ *constraints
+ )
+ def cache_person(person, row, start_index):
+ """apply caching decorators to person."""
+ index = start_index
+ for decorator in decorators:
+ column = row[index]
+ index += 1
+ decorator(person, column)
+ return index
+ def cache_validity(row):
+ # Assignee
+ person = row[1]
+ # After drafter
+ index = 4
+ index = cache_person(person, row, index)
+ # approver
+ person = row[2]
+ index = cache_person(person, row, index)
+ # drafter
+ person = row[3]
+ index = cache_person(person, row, index)
+ return row[0]
+ return DecoratedResultSet(results, cache_validity)
+
@property
def valid_specifications(self):
"""See IHasSpecifications."""
=== modified file 'lib/lp/registry/model/distribution.py'
--- lib/lp/registry/model/distribution.py 2010-08-24 15:29:01 +0000
+++ lib/lp/registry/model/distribution.py 2010-08-31 06:11:00 +0000
@@ -810,13 +810,7 @@
# defaults for informationalness: we don't have to do anything
# because the default if nothing is said is ANY
- # sort by priority descending, by default
- if sort is None or sort == SpecificationSort.PRIORITY:
- order = (
- ['-priority', 'Specification.definition_status',
- 'Specification.name'])
- elif sort == SpecificationSort.DATE:
- order = ['-Specification.datecreated', 'Specification.id']
+ order = self._specification_sort(sort)
# figure out what set of specifications we are interested in. for
# distributions, we need to be able to filter on the basis of:
@@ -859,9 +853,15 @@
query += ' AND Specification.fti @@ ftq(%s) ' % quote(
constraint)
- results = Specification.select(query, orderBy=order, limit=quantity)
if prejoin_people:
- results = results.prejoin(['assignee', 'approver', 'drafter'])
+ results = self._preload_specifications_people(query)
+ else:
+ results = Store.of(self).find(
+ Specification,
+ SQL(query))
+ results.order_by(order)
+ if quantity is not None:
+ results = results[:quantity]
return results
def getSpecification(self, name):
=== modified file 'lib/lp/registry/model/person.py'
--- lib/lp/registry/model/person.py 2010-08-23 16:51:11 +0000
+++ lib/lp/registry/model/person.py 2010-08-31 06:11:00 +0000
@@ -1172,6 +1172,7 @@
@cachedproperty('_is_valid_person_cached')
def is_valid_person(self):
"""See `IPerson`."""
+ # This is prepopulated by various queries in and out of person.py.
if self.is_team:
return False
try:
@@ -1577,6 +1578,69 @@
need_location=True, need_archive=True, need_preferred_email=True,
need_validity=True)
+ @staticmethod
+ def _validity_queries(person_table=None):
+ """Return storm expressions and a decorator function for validity.
+
+ Preloading validity implies preloading preferred email addresses.
+
+ :param person_table: The person table to join to. Only supply if
+ ClassAliases are in use.
+ :return: A four tuple (joins, tables, conditions, decorators)
+
+ joins are additional joins to use.
+ tables are tables to use.
+ conditions are restrictions for the joins; AND them together.
+ decorators are callbacks to call for each row. Each decorator takes
+ (Person, column) where column is the column in the result set for that
+ decorator's type.
+ """
+ if person_table is None:
+ person_table = Person
+ email_table = EmailAddress
+ account_table = Account
+ else:
+ email_table = ClassAlias(EmailAddress)
+ account_table = ClassAlias(Account)
+ origins = []
+ columns = []
+ conditions = []
+ decorators = []
+ # Teams don't have email, so a left join
+ origins.append(
+ LeftJoin(email_table, email_table.personID == person_table.id))
+ columns.append(email_table)
+ conditions.append(Or(email_table.status == None,
+ email_table.status == EmailAddressStatus.PREFERRED))
+ # May be used in a query that finds teams (teams are not valid people)
+ origins.append(
+ LeftJoin(account_table, person_table.accountID == account_table.id))
+ columns.append(account_table)
+ conditions.append(
+ Or(
+ account_table.status == None,
+ account_table.status == AccountStatus.ACTIVE))
+ def handleemail(person, column):
+ #-- preferred email caching
+ if not person:
+ return
+ email = column
+ cache_property(person, '_preferredemail_cached', email)
+ decorators.append(handleemail)
+ def handleaccount(person, column):
+ #-- validity caching
+ if not person:
+ return
+ # valid if:
+ valid = (
+ # -- valid account found
+ column is not None
+ # -- preferred email found
+ and person.preferredemail is not None)
+ cache_property(person, '_is_valid_person_cached', valid)
+ decorators.append(handleaccount)
+ return origins, columns, conditions, decorators
+
def _all_members(self, need_karma=False, need_ubuntu_coc=False,
need_location=False, need_archive=False, need_preferred_email=False,
need_validity=False):
@@ -1606,6 +1670,7 @@
# But not the team itself.
TeamParticipation.person != self.id)
columns = [Person]
+ decorators = []
if need_karma:
# New people have no karmatotalcache rows.
origin.append(
@@ -1636,7 +1701,7 @@
Archive.owner == Person.id, Archive),
Archive.purpose == ArchivePurpose.PPA)))
# checking validity requires having a preferred email.
- if need_preferred_email or need_validity:
+ if need_preferred_email and not need_validity:
# Teams don't have email, so a left join
origin.append(
LeftJoin(EmailAddress, EmailAddress.person == Person.id))
@@ -1645,14 +1710,11 @@
Or(EmailAddress.status == None,
EmailAddress.status == EmailAddressStatus.PREFERRED))
if need_validity:
- # May find teams (teams are not valid people)
- origin.append(
- LeftJoin(Account, Person.account == Account.id))
- columns.append(Account)
- conditions = And(conditions,
- Or(
- Account.status == None,
- Account.status == AccountStatus.ACTIVE))
+ valid_stuff = Person._validity_queries()
+ origin.extend(valid_stuff[0])
+ columns.extend(valid_stuff[1])
+ conditions = And(conditions, *valid_stuff[2])
+ decorators.extend(valid_stuff[3])
if len(columns) == 1:
columns = columns[0]
# Return a simple ResultSet
@@ -1689,20 +1751,14 @@
index += 1
cache_property(result, '_archive_cached', archive)
#-- preferred email caching
- if need_preferred_email:
+ if need_preferred_email and not need_validity:
email = row[index]
index += 1
cache_property(result, '_preferredemail_cached', email)
- #-- validity caching
- if need_validity:
- # valid if:
- valid = (
- # -- valid account found
- row[index] is not None
- # -- preferred email found
- and result.preferredemail is not None)
+ for decorator in decorators:
+ column = row[index]
index += 1
- cache_property(result, '_is_valid_person_cached', valid)
+ decorator(result, column)
return result
return DecoratedResultSet(raw_result,
result_decorator=prepopulate_person)
=== modified file 'lib/lp/registry/model/product.py'
--- lib/lp/registry/model/product.py 2010-08-23 18:03:26 +0000
+++ lib/lp/registry/model/product.py 2010-08-31 06:11:00 +0000
@@ -1033,13 +1033,7 @@
# defaults for informationalness: we don't have to do anything
# because the default if nothing is said is ANY
- # sort by priority descending, by default
- if sort is None or sort == SpecificationSort.PRIORITY:
- order = (
- ['-priority', 'Specification.definition_status',
- 'Specification.name'])
- elif sort == SpecificationSort.DATE:
- order = ['-Specification.datecreated', 'Specification.id']
+ order = self._specification_sort(sort)
# figure out what set of specifications we are interested in. for
# products, we need to be able to filter on the basis of:
@@ -1082,9 +1076,15 @@
query += ' AND Specification.fti @@ ftq(%s) ' % quote(
constraint)
- results = Specification.select(query, orderBy=order, limit=quantity)
if prejoin_people:
- results = results.prejoin(['assignee', 'approver', 'drafter'])
+ results = self._preload_specifications_people(query)
+ else:
+ results = Store.of(self).find(
+ Specification,
+ SQL(query))
+ results.order_by(order)
+ if quantity is not None:
+ results = results[:quantity]
return results
def getSpecification(self, name):