[Merge] lp:~wgrant/launchpad/trim-asp into lp:launchpad

 

William Grant has proposed merging lp:~wgrant/launchpad/trim-asp into lp:launchpad.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~wgrant/launchpad/trim-asp/+merge/54152

Several Archive views (primarily +index and +packages, but also +copy-packages and +delete-packages) decorate SPPH (SourcePackagePublishingHistory) collections with ArchiveSourcePublications, which preloads .changes files, builds, and build summaries. The build preloading was intended to speed up build summary calculation, but it no longer helps. This branch drops it, saving a couple of slow queries on each page load.
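In outline, the affected views iterate an ArchiveSourcePublications wrapper around the raw publications and read the preloaded data from each decorated row. A minimal sketch of that consumption path (class and method names are taken from lib/lp/soyuz/adapters/archivesourcepublication.py as it reads after this branch; the exact call sites in the views are an illustrative assumption, not a quote of them):

    from lp.soyuz.adapters.archivesourcepublication import (
        ArchiveSourcePublications,
        )

    # Decorate a batch of SourcePackagePublishingHistory rows in one pass.
    decorated = ArchiveSourcePublications(source_publications)
    for pub in decorated:
        # Served from data preloaded in bulk for the whole batch:
        summary = pub.getStatusSummaryForBuilds()
        spr = pub.sourcepackagerelease  # wraps the preloaded .changes file
        # After this branch, build lookups fall through to the delegated
        # ISourcePackagePublishingHistory context instead of a preloaded
        # cache that no longer speeds anything up.
        builds = pub.getBuilds()

The constructor now takes only the changes file and the precomputed status summary, so getBuilds() and getUnpublishedBuilds() behave exactly like the undecorated publication.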
-- 
https://code.launchpad.net/~wgrant/launchpad/trim-asp/+merge/54152
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~wgrant/launchpad/trim-asp into lp:launchpad.
=== modified file 'lib/lp/soyuz/adapters/archivesourcepublication.py'
--- lib/lp/soyuz/adapters/archivesourcepublication.py	2010-12-02 16:13:51 +0000
+++ lib/lp/soyuz/adapters/archivesourcepublication.py	2011-03-21 05:12:34 +0000
@@ -56,10 +56,8 @@
     """
     delegates(ISourcePackagePublishingHistory)
 
-    def __init__(self, context, unpublished_builds, builds, changesfile, status_summary):
+    def __init__(self, context, changesfile, status_summary):
         self.context = context
-        self._unpublished_builds = unpublished_builds
-        self._builds = builds
         self._changesfile = changesfile
         self._status_summary = status_summary
 
@@ -73,19 +71,6 @@
         return ArchiveSourcePackageRelease(
             self.context.sourcepackagerelease, changesfile)
 
-    def getUnpublishedBuilds(self, build_state='ignored'):
-        """See `ISourcePackagePublishingHistory`.
-
-        In this cached implementation, we ignore the build_state argument
-        and simply return the unpublished builds with which we were
-        created.
-        """
-        return self._unpublished_builds
-
-    def getBuilds(self):
-        """See `ISourcePackagePublishingHistory`."""
-        return self._builds
-
     def getStatusSummaryForBuilds(self):
         """See `ISourcePackagePublishingHistory`."""
         return self._status_summary
@@ -103,39 +88,6 @@
         """Whether or not there are sources to be processed."""
         return len(self._source_publications) > 0
 
-    def groupBySource(self, source_and_value_list):
-        """Group the give list of tuples as a dictionary.
-
-        This is a common internal task for this class, it groups the given
-        list of tuples, (source, related_object), as a dictionary keyed by
-        distinct sources and pointing to a list of `relates_object`s.
-
-        :return: a dictionary keyed by the distinct sources and pointing to
-            a list of `related_object`s in their original order.
-        """
-        source_and_values = {}
-        for source, value in source_and_value_list:
-            values = source_and_values.setdefault(source, [])
-            values.append(value)
-        return source_and_values
-
-    def getBuildsBySource(self):
-        """Builds for all source publications."""
-        build_set = getUtility(IPublishingSet).getBuildsForSources(
-            self._source_publications)
-        source_and_builds = [
-            (source, build) for source, build, arch in build_set]
-        return self.groupBySource(source_and_builds)
-
-    def getUnpublishedBuildsBySource(self):
-        """Unpublished builds for sources."""
-        publishing_set = getUtility(IPublishingSet)
-        build_set = publishing_set.getUnpublishedBuildsForSources(
-            self._source_publications)
-        source_and_builds = [
-            (source, build) for source, build, arch in build_set]
-        return self.groupBySource(source_and_builds)
-
     def getChangesFileBySource(self):
         """Map changesfiles by their corresponding source publications."""
         publishing_set = getUtility(IPublishingSet)
@@ -160,8 +112,6 @@
         # Load the extra-information for all source publications.
         # All of this code would be better on an object representing a set of
         # publications.
-        builds_by_source = self.getBuildsBySource()
-        unpublished_builds_by_source = self.getUnpublishedBuildsBySource()
         changesfiles_by_source = self.getChangesFileBySource()
         # Source package names are used by setNewerDistroSeriesVersions:
         # batch load the used source package names.
@@ -184,13 +134,10 @@
 
         # Build the decorated object with the information we have.
         for pub in self._source_publications:
-            builds = builds_by_source.get(pub, [])
-            unpublished_builds = unpublished_builds_by_source.get(pub, [])
             changesfile = changesfiles_by_source.get(pub, None)
             status_summary = status_summaries[pub.id]
             complete_pub = ArchiveSourcePublication(
-                pub, unpublished_builds=unpublished_builds, builds=builds,
-                changesfile=changesfile, status_summary=status_summary)
+                pub, changesfile=changesfile, status_summary=status_summary)
             results.append(complete_pub)
 
         return iter(results)

=== modified file 'lib/lp/soyuz/browser/tests/test_archive_packages.py'
--- lib/lp/soyuz/browser/tests/test_archive_packages.py	2011-03-04 05:54:40 +0000
+++ lib/lp/soyuz/browser/tests/test_archive_packages.py	2011-03-21 05:12:34 +0000
@@ -187,7 +187,7 @@
         self.assertIs(None, view.specified_name_filter)
 
     def test_source_query_counts(self):
-        query_baseline = 47
+        query_baseline = 43
         # Assess the baseline.
         collector = QueryCollector()
         collector.register()
@@ -228,7 +228,7 @@
         self.assertThat(collector, HasQueryCount(LessThan(expected_count)))
 
     def test_binary_query_counts(self):
-        query_baseline = 43
+        query_baseline = 40
         # Assess the baseline.
         collector = QueryCollector()
         collector.register()
@@ -245,8 +245,7 @@
                 archive=ppa)
             url = canonical_url(ppa) + "/+packages"
         browser.open(url)
-        self.assertThat(collector, HasQueryCount(
-            MatchesAny(LessThan(query_baseline), Equals(query_baseline))))
+        self.assertThat(collector, HasQueryCount(LessThan(query_baseline)))
         expected_count = collector.count
         # Use all new objects - avoids caching issues invalidating the
         # gathered metrics.
@@ -260,5 +259,4 @@
                     archive=ppa, distroarchseries=pkg.distroarchseries)
             url = canonical_url(ppa) + "/+packages"
         browser.open(url)
-        self.assertThat(collector, HasQueryCount(
-            MatchesAny(Equals(expected_count), LessThan(expected_count))))
+        self.assertThat(collector, HasQueryCount(Equals(expected_count)))
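The tightened assertions keep the existing two-pass pattern: the first page load must now come in strictly under the reduced baseline, and a second load over entirely new objects must cost exactly the same number of queries, showing the view issues no per-object queries. A condensed sketch of that pattern, using the QueryCollector and HasQueryCount helpers already used by this test module:

    collector = QueryCollector()
    collector.register()

    browser.open(url)
    # First pass: strictly under the (now lower) hard-coded baseline.
    self.assertThat(collector, HasQueryCount(LessThan(query_baseline)))
    expected_count = collector.count

    # Second pass over freshly created publications: the count must not
    # grow at all, i.e. queries stay constant in the number of packages.
    browser.open(url)
    self.assertThat(collector, HasQueryCount(Equals(expected_count)))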