[Merge] lp:~lifeless/launchpad/getBuildSummariesForSourceIds into lp:launchpad/devel


Robert Collins has proposed merging lp:~lifeless/launchpad/getBuildSummariesForSourceIds into lp:launchpad/devel.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)


Group up calls in Archive:EntryResource:getBuildSummariesForSourceIds rather than suffering death by a thousand queries. I haven't tuned the queries (which also needs doing), nor have I written a test for the query counts. For the former, I want the actual queries we should be doing to be visible, which is why I've refactored to a constant query count(ish). For the latter I haven't reached the activation energy needed to write one yet, and while I think it would be useful, I think the most useful thing to do right now is fix the OOPSes.
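
To make the grouping concrete, here is a minimal sketch of the before/after calling pattern (illustrative only, not code from the branch; publishing_set, source_pubs, source_ids and archive are assumed to be in scope):

    # Before: one query-heavy summary call per source publication.
    summaries = {}
    for source_pub in source_pubs:
        summaries[source_pub.id] = source_pub.getStatusSummaryForBuilds()

    # After: a single bulk build lookup, grouped per source in Python.
    build_info = list(
        publishing_set.getBuildsForSourceIds(source_ids, archive=archive))
    builds_per_source = {}
    for source_pub, build, arch in build_info:
        builds_per_source.setdefault(source_pub.id, []).append(build)
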
-- 
https://code.launchpad.net/~lifeless/launchpad/getBuildSummariesForSourceIds/+merge/40298
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~lifeless/launchpad/getBuildSummariesForSourceIds into lp:launchpad/devel.
=== modified file 'lib/lp/soyuz/adapters/archivesourcepublication.py'
--- lib/lp/soyuz/adapters/archivesourcepublication.py	2010-11-03 06:55:53 +0000
+++ lib/lp/soyuz/adapters/archivesourcepublication.py	2010-11-07 23:13:45 +0000
@@ -92,7 +92,8 @@
         # using the delegate as self - might not be possible without
         # duck-typing.
         return getUtility(
-            IPublishingSet).getBuildStatusSummaryForSourcePublication(self)
+            IPublishingSet).getBuildStatusSummaryForSourcePublication(self,
+            self.getBuilds)
 
 class ArchiveSourcePublications:
     """`ArchiveSourcePublication` iterator."""

=== modified file 'lib/lp/soyuz/interfaces/publishing.py'
--- lib/lp/soyuz/interfaces/publishing.py	2010-10-06 12:40:00 +0000
+++ lib/lp/soyuz/interfaces/publishing.py	2010-11-07 23:13:45 +0000
@@ -507,6 +507,10 @@
         """Return a resultset of `IBuild` objects in this context that are
         not published.
 
+        Note that this is convenience glue for
+        PublishingSet.getUnpublishedBuildsForSources - and that method should
+        be considered authoritative.
+
         :param build_states: list of build states to which the result should
             be limited. Defaults to BuildStatus.FULLYBUILT if none are
             specified.
@@ -1075,7 +1079,8 @@
             `IBinaryPackagePublishingHistory`.
         """
 
-    def getBuildStatusSummariesForSourceIdsAndArchive(source_ids, archive):
+    def getBuildStatusSummariesForSourceIdsAndArchive(source_ids, archive,
+        get_builds=None):
         """Return a summary of the build statuses for source publishing ids.
 
         This method collects all the builds for the provided source package
@@ -1090,6 +1095,9 @@
         :param archive: The archive which will be used to filter the source
                         ids.
         :type archive: `IArchive`
+        :param get_builds: Optional override that replaces the database query
+            for build records. This is used by the ArchiveSourcePublication
+            adapter code path and should be avoided where possible.
         :return: A dict consisting of the overall status summaries for the
             given ids that belong in the archive. For example:
                 {
@@ -1100,7 +1108,8 @@
         :rtype: ``dict``.
         """
 
-    def getBuildStatusSummaryForSourcePublication(source_publication):
+    def getBuildStatusSummaryForSourcePublication(source_publication,
+        get_builds=None):
         """Return a summary of the build statuses for this source
         publication.
 
@@ -1108,6 +1117,13 @@
         for details. The call is just proxied here so that it can also be
         used with an ArchiveSourcePublication passed in as
         the source_package_pub, allowing the use of the cached results.
+
+        :param get_builds: An optional callback that replaces the database
+            lookup of build records. This is used by some adapters as part of
+            a preloading strategy, but the preferred form is to have
+            getBuildStatusSummariesForSourceIdsAndArchive (which this call
+            delegates to) perform the initial and only loading of the build
+            records.
         """
 
     def getNearestAncestor(

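To show how the new get_builds override is meant to be used, a small hedged usage sketch (illustrative, not part of the diff; pub stands for any source publication object, and the cached-builds form mirrors the ArchiveSourcePublication adapter change above):

    from zope.component import getUtility

    from lp.soyuz.interfaces.publishing import IPublishingSet

    publishing_set = getUtility(IPublishingSet)

    # Default path: the publishing set loads the build records itself.
    summary = publishing_set.getBuildStatusSummaryForSourcePublication(pub)

    # Adapter path: hand in a callback that returns already-cached builds,
    # so the publishing set does not query the database for them again.
    summary = publishing_set.getBuildStatusSummaryForSourcePublication(
        pub, get_builds=pub.getBuilds)
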
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py	2010-11-03 06:04:16 +0000
+++ lib/lp/soyuz/model/publishing.py	2010-11-07 23:13:45 +0000
@@ -16,6 +16,7 @@
     ]
 
 
+from collections import defaultdict
 from datetime import datetime
 import operator
 import os
@@ -502,25 +503,23 @@
 
         return unique_binary_publications
 
+    @staticmethod
+    def _convertBuilds(builds_for_sources):
+        """Convert from IPublishingSet getBuilds to SPPH getBuilds."""
+        return [build for source, build, arch in builds_for_sources]
+
     def getBuilds(self):
         """See `ISourcePackagePublishingHistory`."""
         publishing_set = getUtility(IPublishingSet)
-        result_set = publishing_set.getBuildsForSources(self)
-
-        return [build for source, build, arch in result_set]
+        result_set = publishing_set.getBuildsForSources([self])
+        return SourcePackagePublishingHistory._convertBuilds(result_set)
 
     def getUnpublishedBuilds(self, build_states=None):
         """See `ISourcePackagePublishingHistory`."""
         publishing_set = getUtility(IPublishingSet)
         result_set = publishing_set.getUnpublishedBuildsForSources(
             self, build_states)
-
-        # Create a function that will just return the second item
-        # in the result tuple (the build).
-        def result_to_build(result):
-            return result[1]
-
-        return DecoratedResultSet(result_set, result_to_build)
+        return DecoratedResultSet(result_set, operator.itemgetter(1))
 
     def changesFileUrl(self):
         """See `ISourcePackagePublishingHistory`."""
@@ -1699,28 +1698,70 @@
             PackageUploadSource.sourcepackagereleaseID == spr.id)
         return result_set.one()
 
-    def getBuildStatusSummariesForSourceIdsAndArchive(self,
-                                                      source_ids,
-                                                      archive):
+    def getBuildStatusSummariesForSourceIdsAndArchive(self, source_ids,
+        archive, get_builds=None):
         """See `IPublishingSet`."""
         # source_ids can be None or an empty sequence.
         if not source_ids:
             return {}
 
-        store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
-        source_pubs = store.find(
-            SourcePackagePublishingHistory,
-            SourcePackagePublishingHistory.id.is_in(source_ids),
-            SourcePackagePublishingHistory.archive == archive)
-
+        if len(source_ids) == 1 and get_builds is not None:
+            # Fake out the DB access for the ArchiveSourcePublication adapter.
+            store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
+            source_pubs = list(store.find(
+                SourcePackagePublishingHistory,
+                SourcePackagePublishingHistory.id.is_in(source_ids),
+                SourcePackagePublishingHistory.archive == archive))
+            source = source_pubs[0]
+            build_info = [(source, build, None) for build in get_builds()]
+        else:
+            build_info = list(
+                self.getBuildsForSourceIds(source_ids, archive=archive))
+            source_pubs = set()
+            for row in build_info:
+                source_pubs.add(row[0])
+        # For each source_pub found, provide an aggregate summary of its
+        # builds.
+        binarypackages = getUtility(IBinaryPackageBuildSet)
         source_build_statuses = {}
+        need_unpublished = set()
         for source_pub in source_pubs:
-            status_summary = source_pub.getStatusSummaryForBuilds()
-            source_build_statuses[source_pub.id] = status_summary
+            source_builds = [build for build in build_info
+                if build[0].id == source_pub.id]
+            builds = SourcePackagePublishingHistory._convertBuilds(source_builds)
+            summary = binarypackages.getStatusSummaryForBuilds(builds)
+            source_build_statuses[source_pub.id] = summary
+
+            # If:
+            #   1. the SPPH is in an active publishing state, and
+            #   2. all the builds are fully-built, and
+            #   3. the SPPH is not being published in a rebuild/copy archive
+            #      (in which case the binaries are not published), and
+            #   4. there are unpublished builds (checked in bulk below),
+            # then we augment the result with FULLYBUILT_PENDING and attach
+            # the unpublished builds.
+            if (source_pub.status in active_publishing_status and
+                    summary['status'] == BuildSetStatus.FULLYBUILT and
+                    not source_pub.archive.is_copy):
+                need_unpublished.add(source_pub)
+
+        if need_unpublished:
+            unpublished = list(self.getUnpublishedBuildsForSources(
+                need_unpublished))
+            unpublished_per_source = defaultdict(list)
+            for source_pub, build, _ in unpublished:
+                unpublished_per_source[source_pub].append(build)
+            for source_pub, builds in unpublished_per_source.items():
+                summary = {
+                    'status': BuildSetStatus.FULLYBUILT_PENDING,
+                    'builds': builds
+                }
+                source_build_statuses[source_pub.id] = summary
 
         return source_build_statuses
 
-    def getBuildStatusSummaryForSourcePublication(self, source_publication):
+    def getBuildStatusSummaryForSourcePublication(self, source_publication,
+        get_builds=None):
         """See `ISourcePackagePublishingHistory`.getStatusSummaryForBuilds.
 
         This is provided here so it can be used by both the SPPH as well
@@ -1728,31 +1769,9 @@
         the same interface but uses cached results for builds and binaries
         used in the calculation.
         """
-        builds = source_publication.getBuilds()
-        summary = getUtility(
-            IBinaryPackageBuildSet).getStatusSummaryForBuilds(builds)
-
-        # We only augment the result if:
-        #   1. we (the SPPH) are ourselves in an active publishing state, and
-        #   2. all the builds are fully-built, and
-        #   3. we are not being published in a rebuild/copy archive (in
-        #      which case the binaries are not currently published anyway)
-        # In this case we check to see if they are all published, and if
-        # not we return FULLYBUILT_PENDING:
-        augmented_summary = summary
-        if (source_publication.status in active_publishing_status and
-                summary['status'] == BuildSetStatus.FULLYBUILT and
-                not source_publication.archive.is_copy):
-
-            unpublished_builds = list(
-                source_publication.getUnpublishedBuilds())
-
-            if unpublished_builds:
-                augmented_summary = {
-                    'status': BuildSetStatus.FULLYBUILT_PENDING,
-                    'builds': unpublished_builds
-                }
-        return augmented_summary
+        source_id = source_publication.id
+        return self.getBuildStatusSummariesForSourceIdsAndArchive([source_id],
+            source_publication.archive, get_builds=get_builds)[source_id]
 
     def requestDeletion(self, sources, removed_by, removal_comment=None):
         """See `IPublishingSet`."""