[Merge] ~cjwatson/launchpad:stormify-packageupload into launchpad:master

Colin Watson has proposed merging ~cjwatson/launchpad:stormify-packageupload into launchpad:master.

Commit message:
Convert PackageUpload and friends to Storm

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/424246
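
For reviewers less familiar with these conversions, the pattern applied
throughout is the usual SQLObject-to-Storm one. Here is a minimal standalone
sketch (Parent and Child are hypothetical stand-ins for PackageUpload and its
related tables, exercised against an in-memory SQLite store rather than
Launchpad's schema; it assumes a recent Storm release, as used by Launchpad):

    from storm.locals import (
        create_database,
        Int,
        Reference,
        ReferenceSet,
        Store,
        Storm,
        Unicode,
        )


    class Parent(Storm):
        """Stands in for PackageUpload: explicit table, id, and constructor."""

        __storm_table__ = "Parent"
        __storm_order__ = ["id"]

        id = Int(primary=True)
        name = Unicode(allow_none=False)

        # SQLMultipleJoin('Child', joinColumn='parent') becomes a
        # ReferenceSet keyed on the child's foreign-key column.
        children = ReferenceSet("id", "Child.parent_id")

        def __init__(self, name):
            super().__init__()
            self.name = name


    class Child(Storm):
        """Stands in for PackageUploadSource/Build/Custom."""

        __storm_table__ = "Child"

        id = Int(primary=True)

        # ForeignKey(dbName='parent', foreignKey='Parent') becomes an
        # explicit *_id Int column plus a Reference.
        parent_id = Int(name="parent", allow_none=False)
        parent = Reference(parent_id, "Parent.id")

        def __init__(self, parent):
            super().__init__()
            self.parent = parent


    # Exercise the pattern against an in-memory SQLite database.
    store = Store(create_database("sqlite:"))
    store.execute(
        "CREATE TABLE Parent (id INTEGER PRIMARY KEY, name TEXT NOT NULL)")
    store.execute(
        "CREATE TABLE Child (id INTEGER PRIMARY KEY, parent INTEGER NOT NULL)")
    parent = store.add(Parent("example"))
    child = store.add(Child(parent))
    store.flush()  # what used to be spelled obj.syncUpdate()
    assert list(parent.children) == [child]

The interface and doctest changes below follow the same last step: SQLObject's
obj.syncUpdate() becomes an explicit IStore(obj).flush().
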
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:stormify-packageupload into launchpad:master.
diff --git a/lib/lp/registry/browser/distroseries.py b/lib/lp/registry/browser/distroseries.py
index 0834c89..b9e6028 100644
--- a/lib/lp/registry/browser/distroseries.py
+++ b/lib/lp/registry/browser/distroseries.py
@@ -222,7 +222,14 @@ class DistroSeriesNavigation(GetitemNavigation, BugTargetTraversalMixin,
 
     @stepthrough('+upload')
     def traverse_queue(self, id):
-        return getUtility(IPackageUploadSet).get(id)
+        try:
+            queue_id = int(id)
+        except ValueError:
+            return None
+        try:
+            return getUtility(IPackageUploadSet).get(queue_id)
+        except NotFoundError:
+            return None
 
 
 class DistroSeriesBreadcrumb(Breadcrumb):
diff --git a/lib/lp/registry/model/distroseries.py b/lib/lp/registry/model/distroseries.py
index 78b3606..46fb381 100644
--- a/lib/lp/registry/model/distroseries.py
+++ b/lib/lp/registry/model/distroseries.py
@@ -1170,14 +1170,13 @@ class DistroSeries(SQLBase, BugTargetBase, HasSpecificationsMixin,
                 And(
                     PackageUpload.status == PackageUploadStatus.DONE,
                     PackageUpload.distroseries == self,
-                    PackageUpload.archiveID.is_in(
+                    PackageUpload.archive_id.is_in(
                         self.distribution.all_distro_archive_ids))))
         clauses = [
-            SourcePackageRelease.id ==
-                PackageUploadSource.sourcepackagereleaseID,
+            PackageUploadSource.sourcepackagerelease ==
+                SourcePackageRelease.id,
             SourcePackageRelease.sourcepackagenameID == SourcePackageName.id,
-            PackageUploadSource.packageuploadID == Column(
-                "id", RelevantUpload),
+            PackageUploadSource.packageupload == Column("id", RelevantUpload),
             ]
 
         last_uploads = DecoratedResultSet(
diff --git a/lib/lp/soyuz/browser/queue.py b/lib/lp/soyuz/browser/queue.py
index 3888c6d..ba6650a 100644
--- a/lib/lp/soyuz/browser/queue.py
+++ b/lib/lp/soyuz/browser/queue.py
@@ -222,8 +222,8 @@ class QueueItemsView(LaunchpadView):
         pubs = sum((removeSecurityProxy(u.builds) for u in uploads), [])
 
         source_sprs = load_related(
-            SourcePackageRelease, puses, ['sourcepackagereleaseID'])
-        bpbs = load_related(BinaryPackageBuild, pubs, ['buildID'])
+            SourcePackageRelease, puses, ['sourcepackagerelease_id'])
+        bpbs = load_related(BinaryPackageBuild, pubs, ['build_id'])
         bprs = load_referencing(BinaryPackageRelease, bpbs, ['buildID'])
         source_files = load_referencing(
             SourcePackageReleaseFile, source_sprs, ['sourcepackagereleaseID'])
@@ -242,7 +242,7 @@ class QueueItemsView(LaunchpadView):
         # Get a dictionary of lists of source files keyed by upload ID.
         package_upload_source_dict = {}
         for pus in puses:
-            package_upload_source_dict[pus.sourcepackagereleaseID] = pus
+            package_upload_source_dict[pus.sourcepackagerelease_id] = pus
         source_upload_files = self.source_files_dict(
             package_upload_source_dict, source_files)
 
diff --git a/lib/lp/soyuz/browser/tests/test_build_views.py b/lib/lp/soyuz/browser/tests/test_build_views.py
index 2f734d1..176b21e 100644
--- a/lib/lp/soyuz/browser/tests/test_build_views.py
+++ b/lib/lp/soyuz/browser/tests/test_build_views.py
@@ -29,7 +29,6 @@ from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.archivepermission import IArchivePermissionSet
 from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.packageset import IPackagesetSet
-from lp.soyuz.model.queue import PackageUploadBuild
 from lp.testing import (
     admin_logged_in,
     ANONYMOUS,
@@ -184,8 +183,8 @@ class TestBuildViews(TestCaseWithFactory):
         package_upload = build.distro_series.createQueueEntry(
             PackagePublishingPocket.UPDATES, build.archive,
             'changes.txt', b'my changes')
-        # Old SQL Object: creating it, adds it automatically to the store.
-        PackageUploadBuild(packageupload=package_upload, build=build)
+        with person_logged_in(self.admin):
+            package_upload.addBuild(build)
         self.assertEqual(package_upload.status.name, 'NEW')
         build_view = getMultiAdapter(
             (build, self.empty_request), name="+index")
diff --git a/lib/lp/soyuz/doc/distroseriesqueue.txt b/lib/lp/soyuz/doc/distroseriesqueue.txt
index 52e1d05..b8e3bda 100644
--- a/lib/lp/soyuz/doc/distroseriesqueue.txt
+++ b/lib/lp/soyuz/doc/distroseriesqueue.txt
@@ -85,10 +85,11 @@ upload can not have that state.
 
 XXX cprov 20051209: need to build a broken upload to test it properly
 
+    >>> from lp.services.database.interfaces import IStore
     >>> for item in new_queue:
     ...     try:
     ...         item.setAccepted()
-    ...         item.syncUpdate()
+    ...         IStore(item).flush()
     ...     except QueueInconsistentStateError as info:
     ...         print(info)
 
@@ -105,7 +106,6 @@ XXX cprov 20051209: need to build a broken upload to test it properly
 
 Confirm we can now find ed published in hoary.
 
-    >>> from lp.services.database.interfaces import IStore
     >>> from lp.soyuz.enums import PackagePublishingStatus
     >>> from lp.soyuz.model.publishing import SourcePackagePublishingHistory
     >>> for release in IStore(SourcePackagePublishingHistory).find(
@@ -388,7 +388,7 @@ The upload admin can not accept both since we check unique
 The database modification needs to be realised in the DB, otherwise
 the look up code won't be able to identify any duplications:
 
-    >>> dup_one.syncUpdate()
+    >>> IStore(dup_one).flush()
 
 As expected the second item acceptance will fail and the item will
 remain in the original queue
@@ -406,7 +406,7 @@ remain in the original queue
 The only available action will be rejection:
 
     >>> dup_two.setRejected()
-    >>> dup_one.syncUpdate()
+    >>> IStore(dup_one).flush()
     >>> dup_two.status.name
     'REJECTED'
 
@@ -415,14 +415,14 @@ test after the former accepted item was published (DONE queue)
 
     >>> dup_two.status = PassthroughStatusValue(
     ...     PackageUploadStatus.UNAPPROVED)
-    >>> dup_two.syncUpdate()
+    >>> IStore(dup_two).flush()
     >>> dup_two.status.name
     'UNAPPROVED'
 
     >>> dup_one.setDone()
     >>> dup_one.status == PackageUploadStatus.DONE
     True
-    >>> dup_one.syncUpdate()
+    >>> IStore(dup_one).flush()
 
 The protection code should also identify dups with items in DONE queue
 
diff --git a/lib/lp/soyuz/doc/packageupload-lookups.txt b/lib/lp/soyuz/doc/packageupload-lookups.txt
index 035e7f7..ad940ce 100644
--- a/lib/lp/soyuz/doc/packageupload-lookups.txt
+++ b/lib/lp/soyuz/doc/packageupload-lookups.txt
@@ -111,7 +111,7 @@ The `SourcePackageRelease` 'package_upload' and 'upload_changesfile'
 
     >>> original_source_upload = source.sourcepackagerelease.package_upload
     >>> print(original_source_upload)
-    <PackageUpload ...>
+    <lp.soyuz.model.queue.PackageUpload ...>
 
     >>> source_changesfile = source.sourcepackagerelease.upload_changesfile
     >>> original_source_upload.changesfile == source_changesfile
diff --git a/lib/lp/soyuz/doc/soyuz-upload.txt b/lib/lp/soyuz/doc/soyuz-upload.txt
index 475d449..c6f892d 100644
--- a/lib/lp/soyuz/doc/soyuz-upload.txt
+++ b/lib/lp/soyuz/doc/soyuz-upload.txt
@@ -375,13 +375,14 @@ Also check the upload folders contain all the files we uploaded.
 Now let's see if all of the valid uploads are in the Upload queue marked
 as NEW and RELEASE.
 
+    >>> from lp.services.database.interfaces import IStore
     >>> from lp.soyuz.model.queue import PackageUploadSource
     >>> for name in package_names:
     ...     print(name)
     ...     spn = SourcePackageName.selectOneBy(name=name)
     ...     spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id)
-    ...     us = PackageUploadSource.selectOneBy(
-    ...         sourcepackagereleaseID=spr.id)
+    ...     us = IStore(PackageUploadSource).find(
+    ...         PackageUploadSource, sourcepackagerelease=spr).one()
     ...     assert us.packageupload.status.name == 'NEW'
     ...     assert us.packageupload.pocket.name == 'RELEASE'
     drdsl
diff --git a/lib/lp/soyuz/interfaces/queue.py b/lib/lp/soyuz/interfaces/queue.py
index 3afd574..89d36a3 100644
--- a/lib/lp/soyuz/interfaces/queue.py
+++ b/lib/lp/soyuz/interfaces/queue.py
@@ -408,7 +408,7 @@ class IPackageUpload(Interface):
     @call_with(user=REQUEST_USER)
     @operation_for_version("devel")
     def acceptFromQueue(user=None):
-        """Call setAccepted, do a syncUpdate, and send notification email.
+        """Call setAccepted, do a flush, and send notification email.
 
          * Grant karma to people involved with the upload.
         """
@@ -419,7 +419,7 @@ class IPackageUpload(Interface):
     @call_with(user=REQUEST_USER)
     @operation_for_version("devel")
     def rejectFromQueue(user, comment=None):
-        """Call setRejected, do a syncUpdate, and send notification email."""
+        """Call setRejected, do a flush, and send notification email."""
 
     def realiseUpload(logger=None):
         """Take this ACCEPTED upload and create the publishing records for it
@@ -445,13 +445,6 @@ class IPackageUpload(Interface):
         the given custom type.
         """
 
-    def syncUpdate():
-        """Write updates made on this object to the database.
-
-        This should be used when you can't wait until the transaction is
-        committed to have some updates actually written to the database.
-        """
-
     def notify(status=None, summary_text=None, changes_file_object=None,
                logger=None):
         """Notify by email when there is a new distroseriesqueue entry.
diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py
index 76fbadf..a728a61 100644
--- a/lib/lp/soyuz/model/archive.py
+++ b/lib/lp/soyuz/model/archive.py
@@ -579,9 +579,9 @@ class Archive(SQLBase):
             # Pre-cache related `PackageUpload`s and `PackageUploadSource`s
             # which are immediatelly used in the API context for checking
             # permissions on the returned entries.
-            uploads = load_related(PackageUpload, rows, ['packageuploadID'])
+            uploads = load_related(PackageUpload, rows, ['packageupload_id'])
             pu_sources = load_referencing(
-                PackageUploadSource, uploads, ['packageuploadID'])
+                PackageUploadSource, uploads, ['packageupload_id'])
             for pu_source in pu_sources:
                 upload = pu_source.packageupload
                 get_property_cache(upload).sources = [pu_source]
@@ -1688,8 +1688,8 @@ class Archive(SQLBase):
             clauses = (
                 SourcePackagePublishingHistory.archive == self.id,
                 SourcePackagePublishingHistory.sourcepackagereleaseID ==
-                    PackageUploadSource.sourcepackagereleaseID,
-                PackageUploadSource.packageuploadID == PackageUpload.id,
+                    PackageUploadSource.sourcepackagerelease_id,
+                PackageUploadSource.packageupload_id == PackageUpload.id,
                 PackageUpload.status == PackageUploadStatus.DONE,
                 PackageUpload.changes_file_id == LibraryFileAlias.id,
                 )
diff --git a/lib/lp/soyuz/model/binarypackagebuild.py b/lib/lp/soyuz/model/binarypackagebuild.py
index d83ebc8..a685ea4 100644
--- a/lib/lp/soyuz/model/binarypackagebuild.py
+++ b/lib/lp/soyuz/model/binarypackagebuild.py
@@ -315,7 +315,7 @@ class BinaryPackageBuild(PackageBuildMixin, SQLBase):
         origin = [
             PackageUploadBuild,
             Join(PackageUpload,
-                 PackageUploadBuild.packageuploadID == PackageUpload.id),
+                 PackageUploadBuild.packageupload == PackageUpload.id),
             Join(LibraryFileAlias,
                  LibraryFileAlias.id == PackageUpload.changes_file_id),
             Join(LibraryFileContent,
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 7ae3765..7db85bd 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -33,6 +33,8 @@ from storm.expr import (
     Sum,
     )
 from storm.info import ClassAlias
+from storm.properties import Int
+from storm.references import Reference
 from storm.store import Store
 from storm.zope import IResultSet
 from storm.zope.interfaces import ISQLObjectResultSet
@@ -288,8 +290,8 @@ class SourcePackagePublishingHistory(SQLBase, ArchivePublisherBase):
     sponsor = ForeignKey(
         dbName='sponsor', foreignKey='Person',
         storm_validator=validate_public_person, notNull=False, default=None)
-    packageupload = ForeignKey(
-        dbName='packageupload', foreignKey='PackageUpload', default=None)
+    packageupload_id = Int(name="packageupload", allow_none=True, default=None)
+    packageupload = Reference(packageupload_id, "PackageUpload.id")
 
     @property
     def format(self):
@@ -1743,13 +1745,12 @@ class PublishingSet:
              SourcePackageRelease, LibraryFileAlias, LibraryFileContent),
             LibraryFileContent.id == LibraryFileAlias.contentID,
             LibraryFileAlias.id == PackageUpload.changes_file_id,
-            PackageUpload.id == PackageUploadSource.packageuploadID,
+            PackageUpload.id == PackageUploadSource.packageupload_id,
             PackageUpload.status == PackageUploadStatus.DONE,
-            PackageUpload.distroseriesID ==
+            PackageUpload.distroseries ==
                 SourcePackageRelease.upload_distroseriesID,
-            PackageUpload.archiveID ==
-                SourcePackageRelease.upload_archiveID,
-            PackageUploadSource.sourcepackagereleaseID ==
+            PackageUpload.archive == SourcePackageRelease.upload_archiveID,
+            PackageUploadSource.sourcepackagerelease ==
                 SourcePackageRelease.id,
             SourcePackageRelease.id ==
                 SourcePackagePublishingHistory.sourcepackagereleaseID,
@@ -1770,10 +1771,10 @@ class PublishingSet:
             LibraryFileAlias,
             LibraryFileAlias.id == PackageUpload.changes_file_id,
             PackageUpload.status == PackageUploadStatus.DONE,
-            PackageUpload.distroseriesID == spr.upload_distroseries.id,
-            PackageUpload.archiveID == spr.upload_archive.id,
-            PackageUpload.id == PackageUploadSource.packageuploadID,
-            PackageUploadSource.sourcepackagereleaseID == spr.id).one()
+            PackageUpload.distroseries == spr.upload_distroseries,
+            PackageUpload.archive == spr.upload_archive,
+            PackageUploadSource.packageupload == PackageUpload.id,
+            PackageUploadSource.sourcepackagerelease == spr).one()
 
     def getBuildStatusSummariesForSourceIdsAndArchive(self, source_ids,
         archive):
diff --git a/lib/lp/soyuz/model/queue.py b/lib/lp/soyuz/model/queue.py
index 712dd7a..4e85e04 100644
--- a/lib/lp/soyuz/model/queue.py
+++ b/lib/lp/soyuz/model/queue.py
@@ -24,6 +24,7 @@ from storm.locals import (
     Join,
     List,
     Reference,
+    ReferenceSet,
     SQL,
     Unicode,
     )
@@ -49,23 +50,13 @@ from lp.services.database.constants import (
     DEFAULT,
     UTC_NOW,
     )
-from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
 from lp.services.database.interfaces import (
     IMasterStore,
     IStore,
     )
-from lp.services.database.sqlbase import (
-    SQLBase,
-    sqlvalues,
-    )
-from lp.services.database.sqlobject import (
-    ForeignKey,
-    SQLMultipleJoin,
-    SQLObjectNotFound,
-    StringCol,
-    )
+from lp.services.database.sqlbase import sqlvalues
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import (
     Array,
@@ -143,12 +134,9 @@ def validate_status(self, attr, value):
     if isinstance(value, PassthroughStatusValue):
         return value.value
 
-    if self._SO_creating:
-        return value
-    else:
-        raise QueueStateWriteProtectedError(
-            'Directly write on queue status is forbidden use the '
-            'provided methods to set it.')
+    raise QueueStateWriteProtectedError(
+        'Directly write on queue status is forbidden use the '
+        'provided methods to set it.')
 
 
 @implementer(IPackageUploadQueue)
@@ -160,19 +148,23 @@ class PackageUploadQueue:
 
 
 @implementer(IPackageUpload)
-class PackageUpload(SQLBase):
+class PackageUpload(StormBase):
     """A Queue item for the archive uploader."""
 
-    _defaultOrder = ['id']
+    __storm_table__ = "PackageUpload"
+    __storm_order__ = ["id"]
+
+    id = Int(primary=True)
 
     status = DBEnum(
         name='status', allow_none=False,
         default=PackageUploadStatus.NEW, enum=PackageUploadStatus,
         validator=validate_status)
 
-    date_created = UtcDateTimeCol(notNull=False, default=UTC_NOW)
+    date_created = DateTime(allow_none=True, default=UTC_NOW, tzinfo=pytz.UTC)
 
-    distroseries = ForeignKey(dbName="distroseries", foreignKey='DistroSeries')
+    distroseries_id = Int(name="distroseries", allow_none=False)
+    distroseries = Reference(distroseries_id, "DistroSeries.id")
 
     pocket = DBEnum(
         name='pocket', allow_none=False, enum=PackagePublishingPocket)
@@ -180,7 +172,8 @@ class PackageUpload(SQLBase):
     changes_file_id = Int(name='changesfile')
     changesfile = Reference(changes_file_id, 'LibraryFileAlias.id')
 
-    archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True)
+    archive_id = Int(name="archive", allow_none=False)
+    archive = Reference(archive_id, "Archive.id")
 
     signing_key_owner_id = Int(name="signing_key_owner")
     signing_key_owner = Reference(signing_key_owner_id, 'Person.id')
@@ -189,23 +182,32 @@ class PackageUpload(SQLBase):
     package_copy_job_id = Int(name='package_copy_job', allow_none=True)
     package_copy_job = Reference(package_copy_job_id, 'PackageCopyJob.id')
 
-    searchable_names = StringCol(name='searchable_names', default='')
+    searchable_names = Unicode(name="searchable_names", default="")
     searchable_versions = List(type=Unicode(), default_factory=list)
 
     # XXX julian 2007-05-06:
-    # Sources should not be SQLMultipleJoin, there is only ever one
-    # of each at most.
+    # Sources should not be ReferenceSet, there is only ever one of each at
+    # most.
 
     # Join this table to the PackageUploadBuild and the
     # PackageUploadSource objects which are related.
-    _sources = SQLMultipleJoin('PackageUploadSource',
-                               joinColumn='packageupload')
+    _sources = ReferenceSet("id", "PackageUploadSource.packageupload_id")
     # Does not include source builds.
-    _builds = SQLMultipleJoin('PackageUploadBuild',
-                              joinColumn='packageupload')
+    _builds = ReferenceSet("id", "PackageUploadBuild.packageupload_id")
 
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, distroseries, pocket, archive,
+                 status=PackageUploadStatus.NEW, changesfile=None,
+                 signing_key_owner=None, signing_key_fingerprint=None,
+                 package_copy_job=None):
+        super().__init__()
+        self.distroseries = distroseries
+        self.pocket = pocket
+        self.archive = archive
+        self.status = PassthroughStatusValue(status)
+        self.changesfile = changesfile
+        self.signing_key_owner = signing_key_owner
+        self.signing_key_fingerprint = signing_key_fingerprint
+        self.package_copy_job = package_copy_job
         # searchable_{name,version}s are set for the other cases when
         # add{Source,Build,Custom} are called.
         if self.package_copy_job:
@@ -271,13 +273,12 @@ class PackageUpload(SQLBase):
             SourcePackageRecipeBuild,
             SourcePackageRecipeBuild.id ==
                 SourcePackageRelease.source_package_recipe_build_id,
-            SourcePackageRelease.id ==
-            PackageUploadSource.sourcepackagereleaseID,
+            PackageUploadSource.sourcepackagerelease ==
+                SourcePackageRelease.id,
             PackageUploadSource.packageupload == self.id).one()
 
     # Also the custom files associated with the build.
-    _customfiles = SQLMultipleJoin('PackageUploadCustom',
-                                   joinColumn='packageupload')
+    _customfiles = ReferenceSet("id", "PackageUploadCustom.packageupload_id")
 
     @cachedproperty
     def customfiles(self):
@@ -340,8 +341,7 @@ class PackageUpload(SQLBase):
         custom = Store.of(self).find(
             PackageUploadCustom,
             PackageUploadCustom.packageupload == self.id,
-            LibraryFileAlias.id ==
-                PackageUploadCustom.libraryfilealiasID,
+            PackageUploadCustom.libraryfilealias == LibraryFileAlias.id,
             LibraryFileAlias.filename == filename).one()
         if custom is not None:
             return custom.libraryfilealias
@@ -576,7 +576,7 @@ class PackageUpload(SQLBase):
         self.setAccepted()
 
         getUtility(IPackageUploadNotificationJobSource).create(self)
-        self.syncUpdate()
+        IStore(self).flush()
 
         # If this is a single source upload we can create the
         # publishing records now so that the user doesn't have to
@@ -628,7 +628,7 @@ class PackageUpload(SQLBase):
             summary_text = "Rejected by %s." % user.displayname
         getUtility(IPackageUploadNotificationJobSource).create(
             self, summary_text=summary_text)
-        self.syncUpdate()
+        IStore(self).flush()
 
     def _isSingleSourceUpload(self):
         """Return True if this upload contains only a single source."""
@@ -845,8 +845,7 @@ class PackageUpload(SQLBase):
         """See `IPackageUpload`."""
         self.addSearchableNames([spr.name])
         self.addSearchableVersions([spr.version])
-        pus = PackageUploadSource(
-            packageupload=self, sourcepackagerelease=spr.id)
+        pus = PackageUploadSource(packageupload=self, sourcepackagerelease=spr)
         Store.of(self).flush()
         del get_property_cache(self).sources
         return pus
@@ -860,7 +859,7 @@ class PackageUpload(SQLBase):
             versions.append(bpr.version)
         self.addSearchableNames(names)
         self.addSearchableVersions(versions)
-        pub = PackageUploadBuild(packageupload=self, build=build.id)
+        pub = PackageUploadBuild(packageupload=self, build=build)
         Store.of(self).flush()
         del get_property_cache(self).builds
         return pub
@@ -869,7 +868,7 @@ class PackageUpload(SQLBase):
         """See `IPackageUpload`."""
         self.addSearchableNames([library_file.filename])
         puc = PackageUploadCustom(
-            packageupload=self, libraryfilealias=library_file.id,
+            packageupload=self, libraryfilealias=library_file,
             customformat=custom_type)
         Store.of(self).flush()
         del get_property_cache(self).customfiles
@@ -1223,16 +1222,24 @@ class PackageUploadLog(StormBase):
 
 
 @implementer(IPackageUploadBuild)
-class PackageUploadBuild(SQLBase):
+class PackageUploadBuild(StormBase):
     """A Queue item's related builds."""
 
-    _defaultOrder = ['id']
+    __storm_table__ = "PackageUploadBuild"
+    __storm_order__ = ["id"]
 
-    packageupload = ForeignKey(
-        dbName='packageupload',
-        foreignKey='PackageUpload')
+    id = Int(primary=True)
+
+    packageupload_id = Int(name="packageupload", allow_none=False)
+    packageupload = Reference(packageupload_id, "PackageUpload.id")
 
-    build = ForeignKey(dbName='build', foreignKey='BinaryPackageBuild')
+    build_id = Int(name="build", allow_none=False)
+    build = Reference(build_id, "BinaryPackageBuild.id")
+
+    def __init__(self, packageupload, build):
+        super().__init__()
+        self.packageupload = packageupload
+        self.build = build
 
     @property
     def binaries(self):
@@ -1283,18 +1290,26 @@ class PackageUploadBuild(SQLBase):
 
 
 @implementer(IPackageUploadSource)
-class PackageUploadSource(SQLBase):
+class PackageUploadSource(StormBase):
     """A Queue item's related sourcepackagereleases."""
 
-    _defaultOrder = ['id']
+    __storm_table__ = "PackageUploadSource"
+    __storm_order__ = ["id"]
+
+    id = Int(primary=True)
+
+    packageupload_id = Int(name="packageupload", allow_none=False)
+    packageupload = Reference(packageupload_id, "PackageUpload.id")
 
-    packageupload = ForeignKey(
-        dbName='packageupload',
-        foreignKey='PackageUpload')
+    sourcepackagerelease_id = Int(
+        name="sourcepackagerelease", allow_none=False)
+    sourcepackagerelease = Reference(
+        sourcepackagerelease_id, "SourcePackageRelease.id")
 
-    sourcepackagerelease = ForeignKey(
-        dbName='sourcepackagerelease',
-        foreignKey='SourcePackageRelease')
+    def __init__(self, packageupload, sourcepackagerelease):
+        super().__init__()
+        self.packageupload = packageupload
+        self.sourcepackagerelease = sourcepackagerelease
 
     def getSourceAncestryForDiffs(self):
         """See `IPackageUploadSource`."""
@@ -1408,19 +1423,28 @@ class PackageUploadSource(SQLBase):
 
 
 @implementer(IPackageUploadCustom)
-class PackageUploadCustom(SQLBase):
+class PackageUploadCustom(StormBase):
     """A Queue item's related custom format uploads."""
 
-    _defaultOrder = ['id']
+    __storm_table__ = "PackageUploadCustom"
+    __storm_order__ = ["id"]
 
-    packageupload = ForeignKey(
-        dbName='packageupload', foreignKey='PackageUpload')
+    id = Int(primary=True)
+
+    packageupload_id = Int(name="packageupload", allow_none=False)
+    packageupload = Reference(packageupload_id, "PackageUpload.id")
 
     customformat = DBEnum(
         name='customformat', allow_none=False, enum=PackageUploadCustomFormat)
 
-    libraryfilealias = ForeignKey(
-        dbName='libraryfilealias', foreignKey="LibraryFileAlias", notNull=True)
+    libraryfilealias_id = Int(name="libraryfilealias", allow_none=False)
+    libraryfilealias = Reference(libraryfilealias_id, "LibraryFileAlias.id")
+
+    def __init__(self, packageupload, customformat, libraryfilealias):
+        super().__init__()
+        self.packageupload = packageupload
+        self.customformat = customformat
+        self.libraryfilealias = libraryfilealias
 
     def publish(self, logger=None):
         """See `IPackageUploadCustom`."""
@@ -1441,21 +1465,16 @@ class PackageUploadSet:
 
     def __iter__(self):
         """See `IPackageUploadSet`."""
-        return iter(PackageUpload.select())
+        return iter(IStore(PackageUpload).find(PackageUpload))
 
     def __getitem__(self, queue_id):
         """See `IPackageUploadSet`."""
-        try:
-            return PackageUpload.get(queue_id)
-        except SQLObjectNotFound:
+        package_upload = IStore(PackageUpload).get(PackageUpload, queue_id)
+        if package_upload is None:
             raise NotFoundError(queue_id)
+        return package_upload
 
-    def get(self, queue_id):
-        """See `IPackageUploadSet`."""
-        try:
-            return PackageUpload.get(queue_id)
-        except SQLObjectNotFound:
-            raise NotFoundError(queue_id)
+    get = __getitem__
 
     def findSourceUpload(self, name, version, archive, distribution):
         """See `IPackageUploadSet`."""
@@ -1467,12 +1486,12 @@ class PackageUploadSet:
         origin = (
             PackageUpload,
             Join(DistroSeries,
-                 DistroSeries.id == PackageUpload.distroseriesID),
+                 PackageUpload.distroseries == DistroSeries.id),
             Join(PackageUploadSource,
-                 PackageUploadSource.packageuploadID == PackageUpload.id),
+                 PackageUploadSource.packageupload == PackageUpload.id),
             Join(SourcePackageRelease,
-                 SourcePackageRelease.id ==
-                     PackageUploadSource.sourcepackagereleaseID),
+                 PackageUploadSource.sourcepackagerelease ==
+                     SourcePackageRelease.id),
             Join(SourcePackageName,
                  SourcePackageName.id ==
                      SourcePackageRelease.sourcepackagenameID),
@@ -1501,8 +1520,8 @@ class PackageUploadSet:
         archives = distroseries.distribution.getArchiveIDList()
         clauses = [
             PackageUpload.distroseries == distroseries,
-            PackageUpload.archiveID.is_in(archives),
-            PackageUploadBuild.packageuploadID == PackageUpload.id,
+            PackageUpload.archive_id.is_in(archives),
+            PackageUploadBuild.packageupload == PackageUpload.id,
             ]
 
         if status is not None:
@@ -1511,9 +1530,8 @@ class PackageUploadSet:
             clauses.append(PackageUpload.pocket.is_in(pockets))
         if names is not None:
             clauses.extend([
-                BinaryPackageBuild.id == PackageUploadBuild.buildID,
-                BinaryPackageBuild.source_package_name ==
-                    SourcePackageName.id,
+                PackageUploadBuild.build == BinaryPackageBuild.id,
+                BinaryPackageBuild.source_package_name == SourcePackageName.id,
                 SourcePackageName.name.is_in(names),
                 ])
 
@@ -1524,16 +1542,15 @@ class PackageUploadSet:
         """See `IPackageUploadSet`."""
         clauses = []
         if status:
-            clauses.append("status=%s" % sqlvalues(status))
+            clauses.append(PackageUpload.status == status)
 
         if distroseries:
-            clauses.append("distroseries=%s" % sqlvalues(distroseries))
+            clauses.append(PackageUpload.distroseries == distroseries)
 
         if pocket:
-            clauses.append("pocket=%s" % sqlvalues(pocket))
+            clauses.append(PackageUpload.pocket == pocket)
 
-        query = " AND ".join(clauses)
-        return PackageUpload.select(query).count()
+        return IStore(PackageUpload).find(PackageUpload, *clauses).count()
 
     def getAll(self, distroseries, created_since_date=None, status=None,
                archive=None, pocket=None, custom_type=None, name=None,
@@ -1558,7 +1575,7 @@ class PackageUploadSet:
             conditions.append(PackageUpload.status.is_in(status))
 
         archives = distroseries.distribution.getArchiveIDList(archive)
-        conditions.append(PackageUpload.archiveID.is_in(archives))
+        conditions.append(PackageUpload.archive_id.is_in(archives))
 
         if pocket is not None:
             pocket = dbitem_tuple(pocket)
@@ -1567,7 +1584,7 @@ class PackageUploadSet:
         if custom_type is not None:
             custom_type = dbitem_tuple(custom_type)
             joins.append(Join(PackageUploadCustom, And(
-                PackageUpload.id == PackageUploadCustom.packageuploadID,
+                PackageUploadCustom.packageupload == PackageUpload.id,
                 PackageUploadCustom.customformat.is_in(custom_type))))
 
         if name:
@@ -1591,11 +1608,11 @@ class PackageUploadSet:
 
         def preload_hook(rows):
             puses = load_referencing(
-                PackageUploadSource, rows, ["packageuploadID"])
+                PackageUploadSource, rows, ["packageupload_id"])
             pubs = load_referencing(
-                PackageUploadBuild, rows, ["packageuploadID"])
+                PackageUploadBuild, rows, ["packageupload_id"])
             pucs = load_referencing(
-                PackageUploadCustom, rows, ["packageuploadID"])
+                PackageUploadCustom, rows, ["packageupload_id"])
             logs = load_referencing(
                 PackageUploadLog, rows, ["package_upload_id"])
 
@@ -1607,8 +1624,8 @@ class PackageUploadSet:
         """See `IPackageUploadSet`."""
         if build_ids is None or len(build_ids) == 0:
             return []
-        return PackageUploadBuild.select(
-            "PackageUploadBuild.build IN %s" % sqlvalues(build_ids))
+        return IStore(PackageUploadBuild).find(
+            PackageUploadBuild, PackageUploadBuild.build_id.is_in(build_ids))
 
     def getByPackageCopyJobIDs(self, pcj_ids):
         """See `IPackageUploadSet`."""
@@ -1657,8 +1674,8 @@ def prefill_packageupload_caches(uploads, puses, pubs, pucs, logs):
         get_property_cache(puc.packageupload).customfiles.append(puc)
 
     source_sprs = load_related(
-        SourcePackageRelease, puses, ['sourcepackagereleaseID'])
-    bpbs = load_related(BinaryPackageBuild, pubs, ['buildID'])
+        SourcePackageRelease, puses, ['sourcepackagerelease_id'])
+    bpbs = load_related(BinaryPackageBuild, pubs, ['build_id'])
     load_related(DistroArchSeries, bpbs, ['distro_arch_series_id'])
     binary_sprs = load_related(
         SourcePackageRelease, bpbs, ['source_package_release_id'])
@@ -1676,7 +1693,7 @@ def prefill_packageupload_caches(uploads, puses, pubs, pucs, logs):
     diffs = getUtility(IPackageDiffSet).getDiffsToReleases(
         sprs, preload_for_display=True)
 
-    puc_lfas = load_related(LibraryFileAlias, pucs, ['libraryfilealiasID'])
+    puc_lfas = load_related(LibraryFileAlias, pucs, ['libraryfilealias_id'])
     load_related(LibraryFileContent, puc_lfas, ['contentID'])
 
     for spr_cache in sprs:
diff --git a/lib/lp/soyuz/model/sourcepackagerelease.py b/lib/lp/soyuz/model/sourcepackagerelease.py
index eaa5533..c68ad80 100644
--- a/lib/lp/soyuz/model/sourcepackagerelease.py
+++ b/lib/lp/soyuz/model/sourcepackagerelease.py
@@ -352,7 +352,7 @@ class SourcePackageRelease(SQLBase):
         origin = [
             PackageUploadSource,
             Join(PackageUpload,
-                 PackageUploadSource.packageuploadID == PackageUpload.id),
+                 PackageUploadSource.packageupload == PackageUpload.id),
             Join(LibraryFileAlias,
                  LibraryFileAlias.id == PackageUpload.changes_file_id),
             Join(LibraryFileContent,
diff --git a/lib/lp/soyuz/scripts/custom_uploads_copier.py b/lib/lp/soyuz/scripts/custom_uploads_copier.py
index 43a0128..a53afa1 100644
--- a/lib/lp/soyuz/scripts/custom_uploads_copier.py
+++ b/lib/lp/soyuz/scripts/custom_uploads_copier.py
@@ -76,7 +76,7 @@ class CustomUploadsCopier:
         """Find custom uploads that may need copying."""
         uploads = source_series.getPackageUploads(
             pocket=source_pocket, custom_type=list(self.copyable_types))
-        load_referencing(PackageUploadCustom, uploads, ['packageuploadID'])
+        load_referencing(PackageUploadCustom, uploads, ['packageupload_id'])
         customs = sum((list(upload.customfiles) for upload in uploads), [])
         return sorted(
             filter(self.isCopyable, customs),
diff --git a/lib/lp/soyuz/stories/soyuz/xx-binarypackagerelease-index.txt b/lib/lp/soyuz/stories/soyuz/xx-binarypackagerelease-index.txt
index 39f3f55..e449100 100644
--- a/lib/lp/soyuz/stories/soyuz/xx-binarypackagerelease-index.txt
+++ b/lib/lp/soyuz/stories/soyuz/xx-binarypackagerelease-index.txt
@@ -21,7 +21,6 @@ Next, we'll manually create a suitable package upload record for our
 build:
 XXX: noodles 2009-01-16 bug 317863: move this into the STP.
 
-    >>> from lp.soyuz.model.queue import PackageUploadBuild
     >>> from lp.soyuz.interfaces.binarypackagebuild import (
     ...     IBinaryPackageBuildSet)
     >>> from lp.registry.interfaces.pocket import (
@@ -45,9 +44,7 @@ manually insert one so that we can check how it's rendered.
     >>> package_upload = build.distro_series.createQueueEntry(
     ...     PackagePublishingPocket.UPDATES, build.archive,
     ...    'changes.txt', b'my changes')
-    >>> package_upload_build = PackageUploadBuild(
-    ...     packageupload =package_upload,
-    ...     build=build)
+    >>> _ = package_upload.addBuild(build)
     >>> package_upload.setDone()
     >>> logout()
 
diff --git a/lib/lp/soyuz/tests/test_packageupload.py b/lib/lp/soyuz/tests/test_packageupload.py
index 3e88b0e..b3e887e 100644
--- a/lib/lp/soyuz/tests/test_packageupload.py
+++ b/lib/lp/soyuz/tests/test_packageupload.py
@@ -370,9 +370,9 @@ class PackageUploadTestCase(TestCaseWithFactory):
             component="main", version="1.0"))
         transaction.commit()
         upload_one.setUnapproved()
-        upload_one.syncUpdate()
+        IStore(upload_one).flush()
         upload_two.setUnapproved()
-        upload_two.syncUpdate()
+        IStore(upload_two).flush()
 
         # There are now duplicate uploads in UNAPPROVED.
         unapproved = distroseries.getPackageUploads(