launchpad-reviewers team mailing list archive: Message #14881
[Merge] lp:~stevenk/launchpad/moar-preload-distroseries-queue into lp:launchpad
Steve Kowalik has proposed merging lp:~stevenk/launchpad/moar-preload-distroseries-queue into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #835645 in Launchpad itself: "DistroSeries:+queue timeout paginating"
https://bugs.launchpad.net/launchpad/+bug/835645
Bug #1032176 in Launchpad itself: "DistroSeries.getPackageUploads timing out due to authorization checking"
https://bugs.launchpad.net/launchpad/+bug/1032176
For more details, see:
https://code.launchpad.net/~stevenk/launchpad/moar-preload-distroseries-queue/+merge/141581
Force the PackageUpload security adapter to use the cached properties on the upload, after making sure they are set carefully in add{Source,Build,Custom}.
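
The adapter now reads self.obj.sources and self.obj.builds directly, which is only safe because the add methods flush the new row before invalidating the property cache. A rough sketch of that ordering (simplified from the addSource() change in the diff below, with module paths assumed from the Launchpad tree; not the exact code):

    from storm.store import Store
    from lp.services.propertycache import get_property_cache
    from lp.soyuz.model.queue import PackageUploadSource

    def add_source(upload, spr):
        pus = PackageUploadSource(
            packageupload=upload, sourcepackagerelease=spr.id)
        # Flush first so the new row is queryable ...
        Store.of(upload).flush()
        # ... then drop the stale cached value; the security adapter's next
        # read of .sources repopulates the cache with the fresh row included.
        del get_property_cache(upload).sources
        return pus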
Do lots of preloading in IPackageUploadSet.getAll() and in the preloading helper behind DistroSeries:+queue: make ISourcePackageRelease.published_archives a cachedproperty and populate it in bulk, and expose changes_file_id so that all LibraryFileAliases for changes files can be preloaded.
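
Most of the win comes from the usual Launchpad preloading pattern: bulk-load the related rows once, then seed each object's property cache so later attribute access stays off the database. A hedged sketch of the published_archives part (condensed from prefill_packageupload_caches() in the diff):

    from lp.services.database.bulk import load_referencing, load_related
    from lp.services.propertycache import get_property_cache
    from lp.soyuz.model.archive import Archive
    from lp.soyuz.model.publishing import SourcePackagePublishingHistory

    def seed_published_archives(sprs):
        # One query for all publications, one for their archives.
        publications = load_referencing(
            SourcePackagePublishingHistory, sprs, ['sourcepackagereleaseID'])
        load_related(Archive, publications, ['archiveID'])
        # Seed the cachedproperty so spr.published_archives never queries.
        for spr in sprs:
            get_property_cache(spr).published_archives = []
        for publication in publications:
            cache = get_property_cache(publication.sourcepackagerelease)
            cache.published_archives.append(publication.archive)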
Write two query count tests: one for DistroSeries:+queue, and one for IDistroSeries.getPackageUploads() (which is a thin wrapper around IPackageUploadSet.getAll()).
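
Both tests use the standard query-count recipe: invalidate Storm's caches, exercise the view or API call inside a StormStatementRecorder, and pin the count with HasQueryCount. A minimal sketch of the shape (assert_render_query_count is a hypothetical helper, not part of this branch; the counts that matter are the ones asserted in the diff):

    from storm.store import Store
    from testtools.matchers import Equals

    from lp.testing import StormStatementRecorder
    from lp.testing.matchers import HasQueryCount

    def assert_render_query_count(test, obj, render, expected):
        # Drop cached rows so every object access issues a real query.
        Store.of(obj).invalidate()
        with StormStatementRecorder() as recorder:
            render()
        test.assertThat(recorder, HasQueryCount(Equals(expected)))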
--
https://code.launchpad.net/~stevenk/launchpad/moar-preload-distroseries-queue/+merge/141581
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~stevenk/launchpad/moar-preload-distroseries-queue into lp:launchpad.
=== modified file 'lib/lp/security.py'
--- lib/lp/security.py 2012-12-19 22:01:13 +0000
+++ lib/lp/security.py 2013-01-02 06:22:22 +0000
@@ -1,8 +1,6 @@
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# pylint: disable-msg=F0401
-
"""Security policies for using content objects."""
__metaclass__ = type
@@ -1836,10 +1834,10 @@
# We cannot use self.obj.sourcepackagerelease, as that causes
# interference with the property cache if we are called in the
# process of adding a source or a build.
- if not self.obj._sources.is_empty():
- spr = self.obj._sources[0].sourcepackagerelease
- elif not self.obj._builds.is_empty():
- spr = self.obj._builds[0].build.source_package_release
+ if self.obj.sources:
+ spr = self.obj.sources[0].sourcepackagerelease
+ elif self.obj.builds:
+ spr = self.obj.builds[0].build.source_package_release
else:
spr = None
if spr is not None:
=== modified file 'lib/lp/services/webapp/adapter.py'
--- lib/lp/services/webapp/adapter.py 2012-11-29 18:08:12 +0000
+++ lib/lp/services/webapp/adapter.py 2013-01-02 06:22:22 +0000
@@ -1,9 +1,6 @@
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# We use global in this module.
-# pylint: disable-msg=W0602
-
__metaclass__ = type
from functools import partial
@@ -442,7 +439,6 @@
for using connections from the main thread.
"""
# Record the ID of the main thread.
- # pylint: disable-msg=W0603
global _main_thread_id
_main_thread_id = thread.get_ident()
=== modified file 'lib/lp/soyuz/browser/queue.py'
--- lib/lp/soyuz/browser/queue.py 2012-12-12 03:35:48 +0000
+++ lib/lp/soyuz/browser/queue.py 2013-01-02 06:22:22 +0000
@@ -10,7 +10,7 @@
'QueueItemsView',
]
-import operator
+from operator import attrgetter
from lazr.delegates import delegates
from zope.component import getUtility
@@ -20,7 +20,7 @@
NotFoundError,
UnexpectedFormData,
)
-from lp.registry.model.person import Person
+from lp.registry.interfaces.person import IPersonSet
from lp.services.database.bulk import (
load_referencing,
load_related,
@@ -55,9 +55,15 @@
)
from lp.soyuz.interfaces.section import ISectionSet
from lp.soyuz.model.archive import Archive
+from lp.soyuz.model.component import Component
from lp.soyuz.model.packagecopyjob import PackageCopyJob
-from lp.soyuz.model.queue import PackageUploadSource
-from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
+from lp.soyuz.model.queue import (
+ PackageUploadBuild,
+ PackageUploadCustom,
+ PackageUploadSource,
+ prefill_packageupload_caches,
+ )
+from lp.soyuz.model.section import Section
QUEUE_SIZE = 30
@@ -124,8 +130,7 @@
build_ids = [binary_file.binarypackagerelease.build.id
for binary_file in binary_files]
upload_set = getUtility(IPackageUploadSet)
- package_upload_builds = upload_set.getBuildByBuildIDs(
- build_ids)
+ package_upload_builds = upload_set.getBuildByBuildIDs(build_ids)
package_upload_builds_dict = {}
for package_upload_build in package_upload_builds:
package_upload_builds_dict[
@@ -135,8 +140,8 @@
def binary_files_dict(self, package_upload_builds_dict, binary_files):
"""Build a dictionary of lists of binary files keyed by upload ID.
- To do this efficiently we need to get all the PacakgeUploadBuild
- records at once, otherwise the Ibuild.package_upload property
+ To do this efficiently we need to get all the PackageUploadBuild
+ records at once, otherwise the IBuild.package_upload property
causes one query per iteration of the loop.
"""
build_upload_files = {}
@@ -208,7 +213,9 @@
PackageCopyJob, uploads, ['package_copy_job_id'])
load_related(Archive, package_copy_jobs, ['source_archive_id'])
jobs = load_related(Job, package_copy_jobs, ['job_id'])
- load_related(Person, jobs, ['requester_id'])
+ person_ids = map(attrgetter('requester_id'), jobs)
+ list(getUtility(IPersonSet).getPrecachedPersonsFromIDs(
+ person_ids, need_validity=True))
def decoratedQueueBatch(self):
"""Return the current batch, converted to decorated objects.
@@ -224,20 +231,25 @@
upload_ids = [upload.id for upload in uploads]
binary_file_set = getUtility(IBinaryPackageFileSet)
- binary_files = binary_file_set.getByPackageUploadIDs(upload_ids)
+ binary_files = list(binary_file_set.getByPackageUploadIDs(upload_ids))
binary_file_set.loadLibraryFiles(binary_files)
packageuploadsources = load_referencing(
PackageUploadSource, uploads, ['packageuploadID'])
source_file_set = getUtility(ISourcePackageReleaseFileSet)
- source_files = source_file_set.getByPackageUploadIDs(upload_ids)
-
- source_sprs = load_related(
- SourcePackageRelease, packageuploadsources,
- ['sourcepackagereleaseID'])
+ source_files = list(source_file_set.getByPackageUploadIDs(upload_ids))
+
+ pubs = load_referencing(
+ PackageUploadBuild, uploads, ['packageuploadID'])
+ pucs = load_referencing(
+ PackageUploadCustom, uploads, ['packageuploadID'])
+ sprs = prefill_packageupload_caches(
+ uploads, packageuploadsources, pubs, pucs)
+
+ load_related(Section, sprs, ['sectionID'])
+ load_related(Component, sprs, ['componentID'])
# Get a dictionary of lists of binary files keyed by upload ID.
- package_upload_builds_dict = self.builds_dict(
- upload_ids, binary_files)
+ package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)
build_upload_files, binary_package_names = self.binary_files_dict(
package_upload_builds_dict, binary_files)
@@ -254,7 +266,7 @@
self.old_binary_packages = self.calculateOldBinaries(
binary_package_names)
- package_sets = self.getPackagesetsFor(source_sprs)
+ package_sets = self.getPackagesetsFor(sprs)
self.loadPackageCopyJobs(uploads)
@@ -461,7 +473,7 @@
sorted by their name.
"""
return sorted(
- self.context.sections, key=operator.attrgetter('name'))
+ self.context.sections, key=attrgetter('name'))
def priorities(self):
"""An iterable of priorities from PackagePublishingPriority."""
@@ -516,8 +528,6 @@
if self.contains_source:
self.sourcepackagerelease = self.sources[0].sourcepackagerelease
-
- if self.contains_source:
self.package_sets = package_sets.get(
self.sourcepackagerelease.sourcepackagenameID, [])
else:
@@ -561,8 +571,7 @@
if title is None:
title = alt
return structured(
- '<img alt="[%s]" src="/@@/%s" title="%s" />',
- alt, icon, title)
+ '<img alt="[%s]" src="/@@/%s" title="%s" />', alt, icon, title)
def composeIconList(self):
"""List icons that should be shown for this upload."""
@@ -599,9 +608,5 @@
icon_string = structured('\n'.join(['%s'] * len(icons)), *icons)
link = self.composeNameAndChangesLink()
return structured(
- """<div id="%s">
- %s
- %s
- (%s)
- </div>""",
+ """<div id="%s"> %s %s (%s)</div>""",
iconlist_id, icon_string, link, self.displayarchs).escapedtext
=== modified file 'lib/lp/soyuz/browser/tests/test_queue.py'
--- lib/lp/soyuz/browser/tests/test_queue.py 2012-12-12 04:59:52 +0000
+++ lib/lp/soyuz/browser/tests/test_queue.py 2013-01-02 06:22:22 +0000
@@ -6,6 +6,8 @@
__metaclass__ = type
from lxml import html
+from storm.store import Store
+from testtools.matchers import Equals
import transaction
from zope.component import (
getUtility,
@@ -26,12 +28,14 @@
login_person,
logout,
person_logged_in,
+ StormStatementRecorder,
TestCaseWithFactory,
)
from lp.testing.layers import (
LaunchpadFunctionalLayer,
LaunchpadZopelessLayer,
)
+from lp.testing.matchers import HasQueryCount
from lp.testing.sampledata import ADMIN_EMAIL
from lp.testing.views import create_initialized_view
@@ -361,14 +365,31 @@
self.assertIn(
upload.package_copy_job.job.requester.displayname, html_text)
+ def test_query_count(self):
+ login(ADMIN_EMAIL)
+ uploads = []
+ distroseries = self.factory.makeDistroSeries()
+ for i in range(5):
+ uploads.append(self.factory.makeSourcePackageUpload(distroseries))
+ uploads.append(self.factory.makeCustomPackageUpload(distroseries))
+ uploads.append(self.factory.makeCopyJobPackageUpload(distroseries))
+ for i in range(15):
+ uploads.append(self.factory.makeBuildPackageUpload(distroseries))
+ queue_admin = self.factory.makeArchiveAdmin(distroseries.main_archive)
+ Store.of(uploads[0]).invalidate()
+ with person_logged_in(queue_admin):
+ with StormStatementRecorder() as recorder:
+ view = self.makeView(distroseries, queue_admin)
+ view()
+ self.assertThat(recorder, HasQueryCount(Equals(61)))
+
class TestCompletePackageUpload(TestCaseWithFactory):
layer = LaunchpadZopelessLayer
def makeCompletePackageUpload(self, upload=None, build_upload_files=None,
- source_upload_files=None,
- package_sets=None):
+ source_upload_files=None, package_sets=None):
if upload is None:
upload = self.factory.makeSourcePackageUpload()
if build_upload_files is None:
=== modified file 'lib/lp/soyuz/configure.zcml'
--- lib/lp/soyuz/configure.zcml 2012-12-10 06:27:12 +0000
+++ lib/lp/soyuz/configure.zcml 2013-01-02 06:22:22 +0000
@@ -192,7 +192,8 @@
section_name
components
searchable_names
- searchable_versions"/>
+ searchable_versions
+ changes_file_id"/>
<require
permission="launchpad.Edit"
attributes="
=== modified file 'lib/lp/soyuz/model/archive.py'
--- lib/lp/soyuz/model/archive.py 2012-11-13 13:43:31 +0000
+++ lib/lp/soyuz/model/archive.py 2013-01-02 06:22:22 +0000
@@ -1554,7 +1554,7 @@
PackageUploadSource.sourcepackagereleaseID,
PackageUploadSource.packageuploadID == PackageUpload.id,
PackageUpload.status == PackageUploadStatus.DONE,
- PackageUpload.changesfileID == LibraryFileAlias.id,
+ PackageUpload.changes_file_id == LibraryFileAlias.id,
)
else:
raise NotFoundError(filename)
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py 2012-11-15 01:42:33 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py 2013-01-02 06:22:22 +0000
@@ -180,7 +180,7 @@
Join(PackageUpload,
PackageUploadBuild.packageuploadID == PackageUpload.id),
Join(LibraryFileAlias,
- LibraryFileAlias.id == PackageUpload.changesfileID),
+ LibraryFileAlias.id == PackageUpload.changes_file_id),
Join(LibraryFileContent,
LibraryFileContent.id == LibraryFileAlias.contentID),
]
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2012-11-15 23:28:13 +0000
+++ lib/lp/soyuz/model/publishing.py 2013-01-02 06:22:22 +0000
@@ -1,8 +1,6 @@
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# pylint: disable-msg=E0611,W0212
-
__metaclass__ = type
__all__ = [
@@ -1864,7 +1862,7 @@
(SourcePackagePublishingHistory, PackageUpload,
SourcePackageRelease, LibraryFileAlias, LibraryFileContent),
LibraryFileContent.id == LibraryFileAlias.contentID,
- LibraryFileAlias.id == PackageUpload.changesfileID,
+ LibraryFileAlias.id == PackageUpload.changes_file_id,
PackageUpload.id == PackageUploadSource.packageuploadID,
PackageUpload.status == PackageUploadStatus.DONE,
PackageUpload.distroseriesID ==
@@ -1889,7 +1887,7 @@
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
result_set = store.find(
LibraryFileAlias,
- LibraryFileAlias.id == PackageUpload.changesfileID,
+ LibraryFileAlias.id == PackageUpload.changes_file_id,
PackageUpload.status == PackageUploadStatus.DONE,
PackageUpload.distroseriesID == spr.upload_distroseries.id,
PackageUpload.archiveID == spr.upload_archive.id,
=== modified file 'lib/lp/soyuz/model/queue.py'
--- lib/lp/soyuz/model/queue.py 2012-12-26 01:04:05 +0000
+++ lib/lp/soyuz/model/queue.py 2013-01-02 06:22:22 +0000
@@ -9,6 +9,7 @@
'PackageUploadSource',
'PackageUploadCustom',
'PackageUploadSet',
+ 'prefill_packageupload_caches',
]
from itertools import chain
@@ -52,7 +53,10 @@
from lp.registry.model.sourcepackagename import SourcePackageName
from lp.services.auditor.client import AuditorClient
from lp.services.config import config
-from lp.services.database.bulk import load_referencing
+from lp.services.database.bulk import (
+ load_referencing,
+ load_related,
+ )
from lp.services.database.constants import UTC_NOW
from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.decoratedresultset import DecoratedResultSet
@@ -112,6 +116,12 @@
QueueStateWriteProtectedError,
)
from lp.soyuz.interfaces.section import ISectionSet
+from lp.soyuz.model.binarypackagename import BinaryPackageName
+from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
+from lp.soyuz.model.distroarchseries import DistroArchSeries
from lp.soyuz.pas import BuildDaemonPackagesArchSpecific
# There are imports below in PackageUploadCustom for various bits
@@ -174,8 +184,8 @@
dbName='pocket', unique=False, notNull=True,
schema=PackagePublishingPocket)
- changesfile = ForeignKey(
- dbName='changesfile', foreignKey="LibraryFileAlias", notNull=False)
+ changes_file_id = Int(name='changesfile')
+ changesfile = Reference(changes_file_id, 'LibraryFileAlias.id')
archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True)
@@ -814,15 +824,16 @@
def addSource(self, spr):
"""See `IPackageUpload`."""
- del get_property_cache(self).sources
self.addSearchableNames([spr.name])
self.addSearchableVersions([spr.version])
- return PackageUploadSource(
+ pus = PackageUploadSource(
packageupload=self, sourcepackagerelease=spr.id)
+ Store.of(self).flush()
+ del get_property_cache(self).sources
+ return pus
def addBuild(self, build):
"""See `IPackageUpload`."""
- del get_property_cache(self).builds
names = [build.source_package_release.name]
versions = []
for bpr in build.binarypackages:
@@ -830,15 +841,20 @@
versions.append(bpr.version)
self.addSearchableNames(names)
self.addSearchableVersions(versions)
- return PackageUploadBuild(packageupload=self, build=build.id)
+ pub = PackageUploadBuild(packageupload=self, build=build.id)
+ Store.of(self).flush()
+ del get_property_cache(self).builds
+ return pub
def addCustom(self, library_file, custom_type):
"""See `IPackageUpload`."""
- del get_property_cache(self).customfiles
self.addSearchableNames([library_file.filename])
- return PackageUploadCustom(
+ puc = PackageUploadCustom(
packageupload=self, libraryfilealias=library_file.id,
customformat=custom_type)
+ Store.of(self).flush()
+ del get_property_cache(self).customfiles
+ return puc
def isPPA(self):
"""See `IPackageUpload`."""
@@ -1670,13 +1686,7 @@
cache.sources = []
cache.builds = []
cache.customfiles = []
-
- for pus in puses:
- get_property_cache(pus.packageupload).sources.append(pus)
- for pub in pubs:
- get_property_cache(pub.packageupload).builds.append(pub)
- for puc in pucs:
- get_property_cache(puc.packageupload).customfiles.append(puc)
+ prefill_packageupload_caches(rows, puses, pubs, pucs)
return DecoratedResultSet(query, pre_iter_hook=preload_hook)
@@ -1704,3 +1714,34 @@
return IStore(PackageUpload).find(
PackageUpload,
PackageUpload.package_copy_job_id.is_in(pcj_ids))
+
+def prefill_packageupload_caches(uploads, puses, pubs, pucs):
+ # Circular imports.
+ from lp.soyuz.model.archive import Archive
+ from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
+ from lp.soyuz.model.publishing import SourcePackagePublishingHistory
+ from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
+ for pus in puses:
+ get_property_cache(pus.packageupload).sources.append(pus)
+ for pub in pubs:
+ get_property_cache(pub.packageupload).builds.append(pub)
+ for puc in pucs:
+ get_property_cache(puc.packageupload).customfiles.append(puc)
+ source_sprs = load_related(
+ SourcePackageRelease, puses, ['sourcepackagereleaseID'])
+ bpbs = load_related(BinaryPackageBuild, pubs, ['buildID'])
+ load_related(DistroArchSeries, bpbs, ['distro_arch_series_id'])
+ binary_sprs = load_related(
+ SourcePackageRelease, bpbs, ['source_package_release_id'])
+ sprs = source_sprs + binary_sprs
+ load_related(SourcePackageName, sprs, ['sourcepackagenameID'])
+ load_related(LibraryFileAlias, uploads, ['changes_file_id'])
+ publications = load_referencing(
+ SourcePackagePublishingHistory, sprs, ['sourcepackagereleaseID'])
+ load_related(Archive, publications, ['archiveID'])
+ for spr in sprs:
+ get_property_cache(spr).published_archives = []
+ for publication in publications:
+ spr = get_property_cache(publication.sourcepackagerelease)
+ spr.published_archives.append(publication.archive)
+ return sprs
=== modified file 'lib/lp/soyuz/model/sourcepackagerelease.py'
--- lib/lp/soyuz/model/sourcepackagerelease.py 2012-11-26 12:53:30 +0000
+++ lib/lp/soyuz/model/sourcepackagerelease.py 2013-01-02 06:22:22 +0000
@@ -1,8 +1,6 @@
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-# pylint: disable-msg=E0611,W0212
-
__metaclass__ = type
__all__ = [
'SourcePackageRelease',
@@ -291,18 +289,21 @@
@property
def current_publishings(self):
"""See ISourcePackageRelease."""
- from lp.soyuz.model.distroseriessourcepackagerelease \
- import DistroSeriesSourcePackageRelease
+ from lp.soyuz.model.distroseriessourcepackagerelease import (
+ DistroSeriesSourcePackageRelease)
return [DistroSeriesSourcePackageRelease(pub.distroseries, self)
for pub in self.publishings]
- @property
- def published_archives(self):
+ def _published_archives(self):
"""See `ISourcePackageRelease`."""
archives = set(
pub.archive for pub in self.publishings.prejoin(['archive']))
return sorted(archives, key=operator.attrgetter('id'))
+ @cachedproperty
+ def published_archives(self):
+ return list(self._published_archives())
+
def addFile(self, file):
"""See ISourcePackageRelease."""
return SourcePackageReleaseFile(
@@ -503,7 +504,7 @@
Join(PackageUpload,
PackageUploadSource.packageuploadID == PackageUpload.id),
Join(LibraryFileAlias,
- LibraryFileAlias.id == PackageUpload.changesfileID),
+ LibraryFileAlias.id == PackageUpload.changes_file_id),
Join(LibraryFileContent,
LibraryFileContent.id == LibraryFileAlias.contentID),
]
=== modified file 'lib/lp/soyuz/tests/test_packageupload.py'
--- lib/lp/soyuz/tests/test_packageupload.py 2012-12-17 05:10:29 +0000
+++ lib/lp/soyuz/tests/test_packageupload.py 2013-01-02 06:22:22 +0000
@@ -1292,3 +1292,16 @@
"customformat": "raw-translations",
}
self.assertEqual(expected_custom, ws_binaries[-1])
+
+ def test_getPackageUploads_query_count(self):
+ person = self.makeQueueAdmin([self.universe])
+ uploads = []
+ for i in range(5):
+ upload, _ = self.makeBinaryPackageUpload(
+ person, component=self.universe)
+ uploads.append(upload)
+ ws_distroseries = self.load(self.distroseries, person)
+ IStore(uploads[0].__class__).invalidate()
+ with StormStatementRecorder() as recorder:
+ ws_distroseries.getPackageUploads()
+ self.assertThat(recorder, HasQueryCount(Equals(27)))