launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:stormify-archivepublisher-xpph-queries into launchpad:master

Colin Watson has proposed merging ~cjwatson/launchpad:stormify-archivepublisher-xpph-queries into launchpad:master.

Commit message:
Convert archivepublisher publishing history queries to Storm

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/394585
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:stormify-archivepublisher-xpph-queries into launchpad:master.
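
For context, the conversion follows the same shape throughout the diff: SQLObject-style .select() calls that build SQL text with sqlvalues() are replaced by Storm expressions run through IStore().find(). A minimal sketch of the pattern (not code from this branch; `archive` is assumed to be an archive object already in scope):

    # Before: SQL text interpolated with sqlvalues() and passed to .select().
    from lp.services.database.sqlbase import sqlvalues
    from lp.soyuz.model.publishing import SourcePackagePublishingHistory

    pubs = SourcePackagePublishingHistory.select("""
        SourcePackagePublishingHistory.archive = %s AND
        SourcePackagePublishingHistory.dateremoved IS NULL
        """ % sqlvalues(archive), orderBy="id")

    # After: the same constraints as Storm expressions via IStore().find().
    from lp.services.database.interfaces import IStore

    pubs = IStore(SourcePackagePublishingHistory).find(
        SourcePackagePublishingHistory,
        SourcePackagePublishingHistory.archive == archive,
        SourcePackagePublishingHistory.dateremoved == None,
        ).order_by(SourcePackagePublishingHistory.id)

The correlated NOT EXISTS subqueries in deathrow.py use the same approach, with ClassAlias supplying the self-join alias and Not(Exists(Select(...))) replacing the hand-written subselect.
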
diff --git a/lib/lp/archivepublisher/deathrow.py b/lib/lp/archivepublisher/deathrow.py
index 82faacf..af6e090 100644
--- a/lib/lp/archivepublisher/deathrow.py
+++ b/lib/lp/archivepublisher/deathrow.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2011 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """
@@ -11,11 +11,22 @@ import logging
 import os
 
 import pytz
+from storm.expr import Exists
+from storm.locals import (
+    And,
+    ClassAlias,
+    Not,
+    Select,
+    )
 
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.diskpool import DiskPool
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.sqlbase import sqlvalues
+from lp.services.database.interfaces import IStore
+from lp.services.librarian.model import (
+    LibraryFileAlias,
+    LibraryFileContent,
+    )
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.publishing import (
     IBinaryPackagePublishingHistory,
@@ -24,6 +35,10 @@ from lp.soyuz.interfaces.publishing import (
     MissingSymlinkInPool,
     NotInPool,
     )
+from lp.soyuz.model.files import (
+    BinaryPackageFile,
+    SourcePackageReleaseFile,
+    )
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
@@ -108,35 +123,37 @@ class DeathRow:
 
         Both sources and binaries are lists.
         """
-        sources = SourcePackagePublishingHistory.select("""
-            SourcePackagePublishingHistory.archive = %s AND
-            SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
-            SourcePackagePublishingHistory.dateremoved IS NULL AND
-            NOT EXISTS (
-              SELECT 1 FROM sourcepackagepublishinghistory as spph
-              WHERE
-                  SourcePackagePublishingHistory.sourcepackagerelease =
-                      spph.sourcepackagerelease AND
-                  spph.archive = %s AND
-                  spph.status NOT IN %s)
-        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
-                        inactive_publishing_status), orderBy="id")
-        self.logger.debug("%d Sources" % sources.count())
-
-        binaries = BinaryPackagePublishingHistory.select("""
-            BinaryPackagePublishingHistory.archive = %s AND
-            BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
-            BinaryPackagePublishingHistory.dateremoved IS NULL AND
-            NOT EXISTS (
-              SELECT 1 FROM binarypackagepublishinghistory as bpph
-              WHERE
-                  BinaryPackagePublishingHistory.binarypackagerelease =
-                      bpph.binarypackagerelease AND
-                  bpph.archive = %s AND
-                  bpph.status NOT IN %s)
-        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
-                        inactive_publishing_status), orderBy="id")
-        self.logger.debug("%d Binaries" % binaries.count())
+        OtherSPPH = ClassAlias(SourcePackagePublishingHistory)
+        sources = list(IStore(SourcePackagePublishingHistory).find(
+            SourcePackagePublishingHistory,
+            SourcePackagePublishingHistory.archive == self.archive,
+            SourcePackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+            SourcePackagePublishingHistory.dateremoved == None,
+            Not(Exists(Select(
+                1, tables=[OtherSPPH],
+                where=And(
+                    SourcePackagePublishingHistory.sourcepackagereleaseID ==
+                        OtherSPPH.sourcepackagereleaseID,
+                    OtherSPPH.archiveID == self.archive.id,
+                    Not(OtherSPPH.status.is_in(inactive_publishing_status))),
+                )))).order_by(SourcePackagePublishingHistory.id))
+        self.logger.debug("%d Sources" % len(sources))
+
+        OtherBPPH = ClassAlias(BinaryPackagePublishingHistory)
+        binaries = list(IStore(BinaryPackagePublishingHistory).find(
+            BinaryPackagePublishingHistory,
+            BinaryPackagePublishingHistory.archive == self.archive,
+            BinaryPackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+            BinaryPackagePublishingHistory.dateremoved == None,
+            Not(Exists(Select(
+                1, tables=[OtherBPPH],
+                where=And(
+                    BinaryPackagePublishingHistory.binarypackagereleaseID ==
+                        OtherBPPH.binarypackagereleaseID,
+                    OtherBPPH.archiveID == self.archive.id,
+                    Not(OtherBPPH.status.is_in(inactive_publishing_status))),
+                )))).order_by(BinaryPackagePublishingHistory.id))
+        self.logger.debug("%d Binaries" % len(binaries))
 
         return (sources, binaries)
 
@@ -150,41 +167,34 @@ class DeathRow:
         Only allow removal of unnecessary files.
         """
         clauses = []
-        clauseTables = []
-
-        if ISourcePackagePublishingHistory.implementedBy(
-            publication_class):
-            clauses.append("""
-                SourcePackagePublishingHistory.archive = %s AND
-                SourcePackagePublishingHistory.dateremoved is NULL AND
-                SourcePackagePublishingHistory.sourcepackagerelease =
-                    SourcePackageReleaseFile.sourcepackagerelease AND
-                SourcePackageReleaseFile.libraryfile = LibraryFileAlias.id
-            """ % sqlvalues(self.archive))
-            clauseTables.append('SourcePackageReleaseFile')
-        elif IBinaryPackagePublishingHistory.implementedBy(
-            publication_class):
-            clauses.append("""
-                BinaryPackagePublishingHistory.archive = %s AND
-                BinaryPackagePublishingHistory.dateremoved is NULL AND
-                BinaryPackagePublishingHistory.binarypackagerelease =
-                    BinaryPackageFile.binarypackagerelease AND
-                BinaryPackageFile.libraryfile = LibraryFileAlias.id
-            """ % sqlvalues(self.archive))
-            clauseTables.append('BinaryPackageFile')
+
+        if ISourcePackagePublishingHistory.implementedBy(publication_class):
+            clauses.extend([
+                SourcePackagePublishingHistory.archive == self.archive,
+                SourcePackagePublishingHistory.dateremoved == None,
+                SourcePackagePublishingHistory.sourcepackagerelease ==
+                    SourcePackageReleaseFile.sourcepackagereleaseID,
+                SourcePackageReleaseFile.libraryfile == LibraryFileAlias.id,
+                ])
+        elif IBinaryPackagePublishingHistory.implementedBy(publication_class):
+            clauses.extend([
+                BinaryPackagePublishingHistory.archive == self.archive,
+                BinaryPackagePublishingHistory.dateremoved == None,
+                BinaryPackagePublishingHistory.binarypackagerelease ==
+                    BinaryPackageFile.binarypackagereleaseID,
+                BinaryPackageFile.libraryfile == LibraryFileAlias.id,
+                ])
         else:
             raise AssertionError("%r is not supported." % publication_class)
 
-        clauses.append("""
-           LibraryFileAlias.content = LibraryFileContent.id AND
-           LibraryFileAlias.filename = %s AND
-           LibraryFileContent.md5 = %s
-        """ % sqlvalues(filename, file_md5))
-        clauseTables.extend(
-            ['LibraryFileAlias', 'LibraryFileContent'])
+        clauses.extend([
+            LibraryFileAlias.content == LibraryFileContent.id,
+            LibraryFileAlias.filename == filename,
+            LibraryFileContent.md5 == file_md5,
+            ])
 
-        all_publications = publication_class.select(
-            " AND ".join(clauses), clauseTables=clauseTables)
+        all_publications = IStore(publication_class).find(
+            publication_class, *clauses)
 
         right_now = datetime.datetime.now(pytz.timezone('UTC'))
         for pub in all_publications:
diff --git a/lib/lp/archivepublisher/domination.py b/lib/lp/archivepublisher/domination.py
index 0e9a8b0..dc6f1cf 100644
--- a/lib/lp/archivepublisher/domination.py
+++ b/lib/lp/archivepublisher/domination.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2019 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """Archive Domination class.
@@ -79,10 +79,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import (
-    flush_database_updates,
-    sqlvalues,
-    )
+from lp.services.database.sqlbase import flush_database_updates
 from lp.services.orderingcheck import OrderingCheck
 from lp.soyuz.enums import (
     BinaryPackageFormat,
@@ -95,6 +92,7 @@ from lp.soyuz.interfaces.publishing import (
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.binarypackagename import BinaryPackageName
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
+from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import (
     BinaryPackagePublishingHistory,
     SourcePackagePublishingHistory,
@@ -537,34 +535,31 @@ class Dominator:
             # Attempt to find all binaries of this
             # SourcePackageRelease which are/have been in this
             # distroseries...
-            considered_binaries = BinaryPackagePublishingHistory.select("""
-            binarypackagepublishinghistory.distroarchseries =
-                distroarchseries.id AND
-            binarypackagepublishinghistory.scheduleddeletiondate IS NULL AND
-            binarypackagepublishinghistory.dateremoved IS NULL AND
-            binarypackagepublishinghistory.archive = %s AND
-            binarypackagebuild.source_package_release = %s AND
-            distroarchseries.distroseries = %s AND
-            binarypackagepublishinghistory.binarypackagerelease =
-            binarypackagerelease.id AND
-            binarypackagerelease.build = binarypackagebuild.id AND
-            binarypackagepublishinghistory.pocket = %s
-            """ % sqlvalues(self.archive, srcpkg_release,
-                            pub_record.distroseries, pub_record.pocket),
-            clauseTables=['DistroArchSeries', 'BinaryPackageRelease',
-                          'BinaryPackageBuild'])
+            considered_binaries = IStore(BinaryPackagePublishingHistory).find(
+                BinaryPackagePublishingHistory.distroarchseries ==
+                    DistroArchSeries.id,
+                BinaryPackagePublishingHistory.scheduleddeletiondate == None,
+                BinaryPackagePublishingHistory.dateremoved == None,
+                BinaryPackagePublishingHistory.archive == self.archive,
+                BinaryPackageBuild.source_package_release == srcpkg_release,
+                DistroArchSeries.distroseries == pub_record.distroseries,
+                BinaryPackagePublishingHistory.binarypackagerelease ==
+                    BinaryPackageRelease.id,
+                BinaryPackageRelease.build == BinaryPackageBuild.id,
+                BinaryPackagePublishingHistory.pocket == pub_record.pocket)
 
             # There is at least one non-removed binary to consider
             if not considered_binaries.is_empty():
                 # However we can still remove *this* record if there's
                 # at least one other PUBLISHED for the spr. This happens
                 # when a package is moved between components.
-                published = SourcePackagePublishingHistory.selectBy(
+                published = IStore(SourcePackagePublishingHistory).find(
+                    SourcePackagePublishingHistory,
                     distroseries=pub_record.distroseries,
                     pocket=pub_record.pocket,
                     status=PackagePublishingStatus.PUBLISHED,
                     archive=self.archive,
-                    sourcepackagereleaseID=srcpkg_release.id)
+                    sourcepackagerelease=srcpkg_release)
                 # Zero PUBLISHED for this spr, so nothing to take over
                 # for us, so leave it for consideration next time.
                 if published.is_empty():
@@ -857,30 +852,27 @@ class Dominator:
 
     def judge(self, distroseries, pocket):
         """Judge superseded sources and binaries."""
-        sources = SourcePackagePublishingHistory.select("""
-            sourcepackagepublishinghistory.distroseries = %s AND
-            sourcepackagepublishinghistory.archive = %s AND
-            sourcepackagepublishinghistory.pocket = %s AND
-            sourcepackagepublishinghistory.status IN %s AND
-            sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
-            sourcepackagepublishinghistory.dateremoved is NULL
-            """ % sqlvalues(
-                distroseries, self.archive, pocket,
-                inactive_publishing_status))
-
-        binaries = BinaryPackagePublishingHistory.select("""
-            binarypackagepublishinghistory.distroarchseries =
-                distroarchseries.id AND
-            distroarchseries.distroseries = %s AND
-            binarypackagepublishinghistory.archive = %s AND
-            binarypackagepublishinghistory.pocket = %s AND
-            binarypackagepublishinghistory.status IN %s AND
-            binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
-            binarypackagepublishinghistory.dateremoved is NULL
-            """ % sqlvalues(
-                distroseries, self.archive, pocket,
+        sources = IStore(SourcePackagePublishingHistory).find(
+            SourcePackagePublishingHistory,
+            SourcePackagePublishingHistory.distroseries == distroseries,
+            SourcePackagePublishingHistory.archive == self.archive,
+            SourcePackagePublishingHistory.pocket == pocket,
+            SourcePackagePublishingHistory.status.is_in(
+                inactive_publishing_status),
+            SourcePackagePublishingHistory.scheduleddeletiondate == None,
+            SourcePackagePublishingHistory.dateremoved == None)
+
+        binaries = IStore(BinaryPackagePublishingHistory).find(
+            BinaryPackagePublishingHistory,
+            BinaryPackagePublishingHistory.distroarchseries ==
+                DistroArchSeries.id,
+            DistroArchSeries.distroseries == distroseries,
+            BinaryPackagePublishingHistory.archive == self.archive,
+            BinaryPackagePublishingHistory.pocket == pocket,
+            BinaryPackagePublishingHistory.status.is_in(
                 inactive_publishing_status),
-            clauseTables=['DistroArchSeries'])
+            BinaryPackagePublishingHistory.scheduleddeletiondate == None,
+            BinaryPackagePublishingHistory.dateremoved == None)
 
         self._judgeSuperseded(sources, binaries)
 
diff --git a/lib/lp/archivepublisher/tests/test_processdeathrow.py b/lib/lp/archivepublisher/tests/test_processdeathrow.py
index 32d1bb2..e333727 100644
--- a/lib/lp/archivepublisher/tests/test_processdeathrow.py
+++ b/lib/lp/archivepublisher/tests/test_processdeathrow.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2018 Canonical Ltd.  This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """Functional tests for process-death-row.py script.
@@ -26,6 +26,7 @@ from zope.security.proxy import removeSecurityProxy
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.config import config
+from lp.services.database.interfaces import IStore
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
 from lp.testing import TestCaseWithFactory
@@ -148,25 +149,28 @@ class TestProcessDeathRow(TestCaseWithFactory):
 
     def probePublishingStatus(self, pubrec_ids, status):
         """Check if all source publishing records match the given status."""
+        store = IStore(SourcePackagePublishingHistory)
         for pubrec_id in pubrec_ids:
-            spph = SourcePackagePublishingHistory.get(pubrec_id)
+            spph = store.get(SourcePackagePublishingHistory, pubrec_id)
             self.assertEqual(
                 spph.status, status, "ID %s -> %s (expected %s)" % (
                 spph.id, spph.status.title, status.title))
 
     def probeRemoved(self, pubrec_ids):
         """Check if all source publishing records were removed."""
+        store = IStore(SourcePackagePublishingHistory)
         right_now = datetime.datetime.now(pytz.timezone('UTC'))
         for pubrec_id in pubrec_ids:
-            spph = SourcePackagePublishingHistory.get(pubrec_id)
+            spph = store.get(SourcePackagePublishingHistory, pubrec_id)
             self.assertTrue(
                 spph.dateremoved < right_now,
                 "ID %s -> not removed" % (spph.id))
 
     def probeNotRemoved(self, pubrec_ids):
         """Check if all source publishing records were not removed."""
+        store = IStore(SourcePackagePublishingHistory)
         for pubrec_id in pubrec_ids:
-            spph = SourcePackagePublishingHistory.get(pubrec_id)
+            spph = store.get(SourcePackagePublishingHistory, pubrec_id)
             self.assertTrue(
                 spph.dateremoved is None,
                 "ID %s -> removed" % (spph.id))