[Merge] lp:~wgrant/launchpad/bug-675621-packages-binary-scaling into lp:launchpad
William Grant has proposed merging lp:~wgrant/launchpad/bug-675621-packages-binary-scaling into lp:launchpad.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
#669717 archive:+index timeout
https://bugs.launchpad.net/bugs/669717
#675621 Archive:+packages timeouts
https://bugs.launchpad.net/bugs/675621
Archive:+index and Archive:+packages have been timing out lately, issuing lots of queries for single BuildFarmJobs and PackageBuilds. This branch fixes the culprit, PublishingSet.getBuildStatusSummariesForSourceIdsAndArchive, to prepopulate the cache, eliminating those queries and making the pages' query count constant.
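As a rough illustration of the prejoin technique the fix relies on (a sketch only, not the branch's code: BinaryPackageBuild, PackageBuild and BuildFarmJob are the model classes from the diff below, and store is a storm.store.Store assumed to be in scope), selecting the related rows in the same find() primes Storm's object cache, so later per-build attribute access issues no extra queries:

def get_builds_with_farm_jobs(store, build_ids):
    # Including PackageBuild and BuildFarmJob in the find spec makes
    # Storm fetch them in the same query and cache the objects, so
    # build.package_build.build_farm_job later hits the cache instead
    # of issuing one query per build.
    return store.find(
        (BinaryPackageBuild, PackageBuild, BuildFarmJob),
        BinaryPackageBuild.package_build == PackageBuild.id,
        PackageBuild.build_farm_job == BuildFarmJob.id,
        BinaryPackageBuild.id.is_in(build_ids))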
I also fixed test_binary_query_counts to actually test scaling. This necessitated a change in the factory to coerce makeBinaryPackagePublishingHistory into creating a BinaryPackageBuild in the same context (builds from other contexts are omitted from the PPA index). The same factory bug was making the baseline artificially low, so it has been raised to the real value.
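The fixed test now follows the usual scaling pattern, sketched below with the QueryCollector and matcher helpers visible in the diff (one_package_url and many_packages_url are placeholders for the real setup): record the query count for a page with one package, then load a page with several packages and assert the count did not grow.

collector = QueryCollector()
collector.register()
self.addCleanup(collector.unregister)
browser.open(one_package_url)    # baseline: a single package published
baseline = collector.count
browser.open(many_packages_url)  # fresh objects, several packages
self.assertThat(collector, HasQueryCount(
    MatchesAny(LessThan(baseline), Equals(baseline))))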
And there are some lint fixes too.
--
https://code.launchpad.net/~wgrant/launchpad/bug-675621-packages-binary-scaling/+merge/42607
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/bug-675621-packages-binary-scaling into lp:launchpad.
=== modified file 'lib/lp/soyuz/browser/tests/test_archive_packages.py'
--- lib/lp/soyuz/browser/tests/test_archive_packages.py 2010-11-10 05:53:57 +0000
+++ lib/lp/soyuz/browser/tests/test_archive_packages.py 2010-12-03 11:47:00 +0000
@@ -133,9 +133,10 @@
viewer = self.factory.makePerson(password="test")
browser = self.getUserBrowser(user=viewer)
with person_logged_in(viewer):
- # The baseline has one package, because otherwise the short-circuit
- # prevents the packages iteration happening at all and we're not
- # actually measuring scaling appropriately.
+ # The baseline has one package, because otherwise the
+ # short-circuit prevents the packages iteration happening at
+ # all and we're not actually measuring scaling
+ # appropriately.
self.factory.makeSourcePackagePublishingHistory(archive=ppa)
url = canonical_url(ppa) + "/+packages"
browser.open(url)
@@ -144,11 +145,11 @@
# We scale with 1 query per distro series because of
# getCurrentSourceReleases.
expected_count += 1
- # We need a fuzz of one because if the test is the first to run a
+ # We need a fuzz of one because if the test is the first to run a
# credentials lookup is done as well (and accrued to the collector).
expected_count += 1
- # Use all new objects - avoids caching issues invalidating the gathered
- # metrics.
+ # Use all new objects - avoids caching issues invalidating the
+ # gathered metrics.
login(ADMIN_EMAIL)
ppa = self.factory.makeArchive()
viewer = self.factory.makePerson(password="test")
@@ -164,7 +165,7 @@
self.assertThat(collector, HasQueryCount(LessThan(expected_count)))
def test_binary_query_counts(self):
- query_baseline = 26
+ query_baseline = 40
# Assess the baseline.
collector = QueryCollector()
collector.register()
@@ -173,27 +174,27 @@
viewer = self.factory.makePerson(password="test")
browser = self.getUserBrowser(user=viewer)
with person_logged_in(viewer):
- # The baseline has one package, because otherwise the short-circuit
- # prevents the packages iteration happening at all and we're not
- # actually measuring scaling appropriately.
- self.factory.makeBinaryPackagePublishingHistory(archive=ppa)
+ # The baseline has one package, because otherwise the
+ # short-circuit prevents the packages iteration happening at
+ # all and we're not actually measuring scaling
+ # appropriately.
+ pkg = self.factory.makeBinaryPackagePublishingHistory(
+ archive=ppa)
url = canonical_url(ppa) + "/+packages"
browser.open(url)
self.assertThat(collector, HasQueryCount(
MatchesAny(LessThan(query_baseline), Equals(query_baseline))))
expected_count = collector.count
- # Use all new objects - avoids caching issues invalidating the gathered
- # metrics.
+ # Use all new objects - avoids caching issues invalidating the
+ # gathered metrics.
login(ADMIN_EMAIL)
ppa = self.factory.makeArchive()
viewer = self.factory.makePerson(password="test")
browser = self.getUserBrowser(user=viewer)
with person_logged_in(viewer):
- for i in range(2):
+ for i in range(3):
pkg = self.factory.makeBinaryPackagePublishingHistory(
- archive=ppa)
- self.factory.makeBinaryPackagePublishingHistory(archive=ppa,
- distroarchseries=pkg.distroarchseries)
+ archive=ppa, distroarchseries=pkg.distroarchseries)
url = canonical_url(ppa) + "/+packages"
browser.open(url)
self.assertThat(collector, HasQueryCount(
=== modified file 'lib/lp/soyuz/interfaces/publishing.py'
--- lib/lp/soyuz/interfaces/publishing.py 2010-11-10 04:25:52 +0000
+++ lib/lp/soyuz/interfaces/publishing.py 2010-12-03 11:47:00 +0000
@@ -561,7 +561,8 @@
It is changed only if the argument is not None.
Return the overridden publishing record, either a
- `ISourcePackagePublishingHistory` or `IBinaryPackagePublishingHistory`.
+ `ISourcePackagePublishingHistory` or
+ `IBinaryPackagePublishingHistory`.
"""
def copyTo(distroseries, pocket, archive):
@@ -788,7 +789,8 @@
It is changed only if the argument is not None.
Return the overridden publishing record, either a
- `ISourcePackagePublishingHistory` or `IBinaryPackagePublishingHistory`.
+ `ISourcePackagePublishingHistory` or
+ `IBinaryPackagePublishingHistory`.
"""
def copyTo(distroseries, pocket, archive):
@@ -896,7 +898,8 @@
binary publications.
"""
- def getBuildsForSourceIds(source_ids, archive=None, build_states=None):
+ def getBuildsForSourceIds(source_ids, archive=None, build_states=None,
+ need_build_farm_job=False):
"""Return all builds related with each given source publication.
The returned ResultSet contains entries with the wanted `Build`s
@@ -917,13 +920,17 @@
`SourcePackagePublishingHistory` object.
:type source_ids: ``list`` or `SourcePackagePublishingHistory`
:param archive: An optional archive with which to filter the source
- ids.
+ ids.
:type archive: `IArchive`
:param build_states: optional list of build states to which the
result will be limited. Defaults to all states if omitted.
:type build_states: ``list`` or None
+ :param need_build_farm_job: whether to include the `PackageBuild`
+ and `BuildFarmJob` in the result.
+ :type need_build_farm_job: bool
:return: a storm ResultSet containing tuples as
- (`SourcePackagePublishingHistory`, `Build`, `DistroArchSeries`)
+ (`SourcePackagePublishingHistory`, `Build`, `DistroArchSeries`,
+ [`PackageBuild`, `BuildFarmJob` if need_build_farm_job])
:rtype: `storm.store.ResultSet`.
"""
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2010-11-11 11:55:53 +0000
+++ lib/lp/soyuz/model/publishing.py 2010-12-03 11:47:00 +0000
@@ -506,7 +506,7 @@
@staticmethod
def _convertBuilds(builds_for_sources):
"""Convert from IPublishingSet getBuilds to SPPH getBuilds."""
- return [build for source, build, arch in builds_for_sources]
+ return [build[1] for build in builds_for_sources]
def getBuilds(self):
"""See `ISourcePackagePublishingHistory`."""
@@ -1331,7 +1331,8 @@
return pub
def getBuildsForSourceIds(
- self, source_publication_ids, archive=None, build_states=None):
+ self, source_publication_ids, archive=None, build_states=None,
+ need_build_farm_job=False):
"""See `IPublishingSet`."""
# Import Build and DistroArchSeries locally to avoid circular
# imports, since that Build uses SourcePackagePublishingHistory
@@ -1404,16 +1405,22 @@
SourcePackagePublishingHistory,
BinaryPackageBuild,
DistroArchSeries,
- )
+ ) + ((PackageBuild, BuildFarmJob) if need_build_farm_job else ())
# Storm doesn't let us do builds_union.values('id') -
# ('Union' object has no attribute 'columns'). So instead
# we have to instantiate the objects just to get the id.
build_ids = [build.id for build in builds_union]
+ prejoin_exprs = (
+ BinaryPackageBuild.package_build == PackageBuild.id,
+ PackageBuild.build_farm_job == BuildFarmJob.id,
+ ) if need_build_farm_job else ()
+
result_set = store.find(
find_spec, builds_for_distroseries_expr,
- BinaryPackageBuild.id.is_in(build_ids))
+ BinaryPackageBuild.id.is_in(build_ids),
+ *prejoin_exprs)
return result_set.order_by(
SourcePackagePublishingHistory.id,
@@ -1697,8 +1704,11 @@
return {}
store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
+ # Find relevant builds while also getting PackageBuilds and
+ # BuildFarmJobs into the cache. They're used later.
build_info = list(
- self.getBuildsForSourceIds(source_ids, archive=archive))
+ self.getBuildsForSourceIds(
+ source_ids, archive=archive, need_build_farm_job=True))
source_pubs = set()
found_source_ids = set()
for row in build_info:
@@ -1722,20 +1732,21 @@
source_build_statuses = {}
need_unpublished = set()
for source_pub in source_pubs:
- source_builds = [build for build in build_info
- if build[0].id == source_pub.id]
- builds = SourcePackagePublishingHistory._convertBuilds(source_builds)
+ source_builds = [
+ build for build in build_info if build[0].id == source_pub.id]
+ builds = SourcePackagePublishingHistory._convertBuilds(
+ source_builds)
summary = binarypackages.getStatusSummaryForBuilds(builds)
source_build_statuses[source_pub.id] = summary
# If:
# 1. the SPPH is in an active publishing state, and
# 2. all the builds are fully-built, and
- # 3. the SPPH is not being published in a rebuild/copy archive (in
- # which case the binaries are not published)
+ # 3. the SPPH is not being published in a rebuild/copy
+ # archive (in which case the binaries are not published)
# 4. There are unpublished builds
- # Then we augment the result with FULLYBUILT_PENDING and attach the
- # unpublished builds.
+ # Then we augment the result with FULLYBUILT_PENDING and
+ # attach the unpublished builds.
if (source_pub.status in active_publishing_status and
summary['status'] == BuildSetStatus.FULLYBUILT and
not source_pub.archive.is_copy):
@@ -1750,7 +1761,7 @@
for source_pub, builds in unpublished_per_source.items():
summary = {
'status': BuildSetStatus.FULLYBUILT_PENDING,
- 'builds': builds
+ 'builds': builds,
}
source_build_statuses[source_pub.id] = summary
=== modified file 'lib/lp/testing/factory.py'
--- lib/lp/testing/factory.py 2010-11-30 02:38:38 +0000
+++ lib/lp/testing/factory.py 2010-12-03 11:47:00 +0000
@@ -104,7 +104,6 @@
from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy
from lp.blueprints.enums import (
SpecificationDefinitionStatus,
- SpecificationGoalStatus,
SpecificationPriority,
)
from lp.blueprints.interfaces.specification import ISpecificationSet
@@ -2788,23 +2787,29 @@
archive = self.makeArchive()
else:
archive = source_package_release.upload_archive
+ if processor is None:
+ processor = self.makeProcessor()
+ if distroarchseries is None:
+ if source_package_release is not None:
+ distroseries = source_package_release.upload_distroseries
+ else:
+ distroseries = self.makeDistroSeries()
+ distroarchseries = self.makeDistroArchSeries(
+ distroseries=distroseries,
+ processorfamily=processor.family)
+ if pocket is None:
+ pocket = PackagePublishingPocket.RELEASE
if source_package_release is None:
multiverse = self.makeComponent(name='multiverse')
source_package_release = self.makeSourcePackageRelease(
- archive, component=multiverse)
+ archive, component=multiverse,
+ distroseries=distroarchseries.distroseries)
self.makeSourcePackagePublishingHistory(
distroseries=source_package_release.upload_distroseries,
- archive=archive, sourcepackagerelease=source_package_release)
- if processor is None:
- processor = self.makeProcessor()
- if distroarchseries is None:
- distroarchseries = self.makeDistroArchSeries(
- distroseries=source_package_release.upload_distroseries,
- processorfamily=processor.family)
+ archive=archive, sourcepackagerelease=source_package_release,
+ pocket=pocket)
if status is None:
status = BuildStatus.NEEDSBUILD
- if pocket is None:
- pocket = PackagePublishingPocket.RELEASE
if date_created is None:
date_created = self.getUniqueDate()
binary_package_build = getUtility(IBinaryPackageBuildSet).new(
@@ -2925,7 +2930,13 @@
priority = PackagePublishingPriority.OPTIONAL
if binarypackagerelease is None:
+ # Create a new BinaryPackageBuild and BinaryPackageRelease
+ # in the same archive and suite.
+ binarypackagebuild = self.makeBinaryPackageBuild(
+ archive=archive, distroarchseries=distroarchseries,
+ pocket=pocket)
binarypackagerelease = self.makeBinaryPackageRelease(
+ build=binarypackagebuild,
component=component,
section_name=section_name,
priority=priority)