launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:stormify-libraryfile into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:stormify-libraryfile into launchpad:master.

Commit message:
Convert LibraryFile* to Storm

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/451762

Some interfaces, particularly those in `lp.services.librarianserver.db.Library`, passed around bare IDs in a way that's awkward to express with Storm.  I refactored them to pass around model objects instead, which is also harder to get wrong by accident.
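
For example, in `LibraryFileUpload.store` the storage layer used to get a bare content ID back from `Library.add` and feed it to `Library.addAlias`; it now gets the `LibraryFileContent` itself and only pulls the IDs out where they are still needed.  Abridged from the diff below:

    # Before: the library returned and accepted raw integer IDs.
    contentID = self.storage.library.add(
        dstDigest,
        self.size,
        self.md5_digester.hexdigest(),
        self.sha256_digester.hexdigest(),
    )
    aliasID = self.storage.library.addAlias(
        contentID, self.filename, self.mimetype, self.expires
    )

    # After: the library returns and accepts model objects, and callers
    # take .id only where they still need the raw ID.
    content = self.storage.library.add(
        dstDigest,
        self.size,
        self.md5_digester.hexdigest(),
        self.sha256_digester.hexdigest(),
    )
    contentID = content.id
    aliasID = self.storage.library.addAlias(
        content, self.filename, self.mimetype, self.expires
    ).id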

`LibraryFileAlias.products` and `LibraryFileAlias.sourcepackages` were defined twice for no good reason, but also don't seem to be used anywhere, so I just removed them.

With this commit, there are finally no more users of `SQLBase`!
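
The conversion itself follows the familiar pattern: SQLObject-style column declarations become explicit Storm properties plus a constructor, and creation sites gain explicit `store.add()`/`flush()` calls where `SQLBase` used to add and flush objects implicitly.  Abridged, for `LibraryFileContent`:

    # Before (SQLBase)
    class LibraryFileContent(SQLBase):
        _table = "LibraryFileContent"

        datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
        filesize = IntCol(notNull=True)
        sha256 = StringCol()
        sha1 = StringCol(notNull=True)
        md5 = StringCol(notNull=True)

    # After (StormBase)
    class LibraryFileContent(StormBase):
        __storm_table__ = "LibraryFileContent"

        id = Int(primary=True)
        datecreated = DateTime(
            allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
        )
        filesize = Int(allow_none=False)
        sha256 = Unicode()
        sha1 = Unicode(allow_none=False)
        md5 = Unicode(allow_none=False)

        def __init__(self, filesize, md5, sha1, sha256, id=None):
            super().__init__()
            if id is not None:
                self.id = id
            self.filesize = filesize
            self.md5 = md5
            self.sha1 = sha1
            self.sha256 = sha256
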
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:stormify-libraryfile into launchpad:master.
diff --git a/lib/lp/bugs/model/bug.py b/lib/lp/bugs/model/bug.py
index 5da7f9b..989abe8 100644
--- a/lib/lp/bugs/model/bug.py
+++ b/lib/lp/bugs/model/bug.py
@@ -2610,7 +2610,7 @@ class Bug(StormBase, InformationTypeMixin):
                 ),
                 LeftJoin(
                     LibraryFileContent,
-                    LibraryFileContent.id == LibraryFileAlias.contentID,
+                    LibraryFileContent.id == LibraryFileAlias.content_id,
                 ),
             )
             .find(
@@ -2618,7 +2618,7 @@ class Bug(StormBase, InformationTypeMixin):
                 BugAttachment.bug == self,
                 Or(
                     BugAttachment.url != None,
-                    LibraryFileAlias.contentID != None,
+                    LibraryFileAlias.content_id != None,
                 ),
             )
             .order_by(BugAttachment.id)
diff --git a/lib/lp/charms/model/charmrecipebuild.py b/lib/lp/charms/model/charmrecipebuild.py
index 6024b31..e63743e 100644
--- a/lib/lp/charms/model/charmrecipebuild.py
+++ b/lib/lp/charms/model/charmrecipebuild.py
@@ -377,7 +377,7 @@ class CharmRecipeBuild(PackageBuildMixin, StormBase):
             (CharmFile, LibraryFileAlias, LibraryFileContent),
             CharmFile.build == self.id,
             LibraryFileAlias.id == CharmFile.library_file_id,
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
         )
         return result.order_by([LibraryFileAlias.filename, CharmFile.id])
 
@@ -520,7 +520,7 @@ class CharmRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
 
         load_related(Person, builds, ["requester_id"])
         lfas = load_related(LibraryFileAlias, builds, ["log_id"])
-        load_related(LibraryFileContent, lfas, ["contentID"])
+        load_related(LibraryFileContent, lfas, ["content_id"])
         distroarchserieses = load_related(
             DistroArchSeries, builds, ["distro_arch_series_id"]
         )
diff --git a/lib/lp/code/model/cibuild.py b/lib/lp/code/model/cibuild.py
index 8c32605..f6faff6 100644
--- a/lib/lp/code/model/cibuild.py
+++ b/lib/lp/code/model/cibuild.py
@@ -901,7 +901,7 @@ class CIBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def preloadBuildsData(self, builds):
         lfas = load_related(LibraryFileAlias, builds, ["log_id"])
-        load_related(LibraryFileContent, lfas, ["contentID"])
+        load_related(LibraryFileContent, lfas, ["content_id"])
         distroarchseries = load_related(
             DistroArchSeries, builds, ["distro_arch_series_id"]
         )
diff --git a/lib/lp/oci/model/ocirecipebuild.py b/lib/lp/oci/model/ocirecipebuild.py
index b1548a5..fbc5b61 100644
--- a/lib/lp/oci/model/ocirecipebuild.py
+++ b/lib/lp/oci/model/ocirecipebuild.py
@@ -282,7 +282,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase):
             (OCIFile, LibraryFileAlias, LibraryFileContent),
             OCIFile.build == self.id,
             LibraryFileAlias.id == OCIFile.library_file_id,
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
         )
         return result.order_by([LibraryFileAlias.filename, OCIFile.id])
 
@@ -421,7 +421,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase):
                 (OCIFile, LibraryFileAlias, LibraryFileContent),
                 OCIFile.build == self.id,
                 LibraryFileAlias.id == OCIFile.library_file_id,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
                 OCIFile.layer_file_digest == layer_file_digest,
             )
             .one()
@@ -593,7 +593,7 @@ class OCIRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
 
         load_related(Person, builds, ["requester_id"])
         lfas = load_related(LibraryFileAlias, builds, ["log_id"])
-        load_related(LibraryFileContent, lfas, ["contentID"])
+        load_related(LibraryFileContent, lfas, ["content_id"])
         recipes = load_related(OCIRecipe, builds, ["recipe_id"])
         getUtility(IOCIRecipeSet).preloadDataForOCIRecipes(recipes)
         # XXX twom 2019-12-05 This needs to be extended to include
diff --git a/lib/lp/registry/model/productrelease.py b/lib/lp/registry/model/productrelease.py
index 8a87785..a2dd584 100644
--- a/lib/lp/registry/model/productrelease.py
+++ b/lib/lp/registry/model/productrelease.py
@@ -365,7 +365,7 @@ class ProductReleaseSet:
                 ),
                 LeftJoin(
                     LibraryFileContent,
-                    LibraryFileAlias.contentID == LibraryFileContent.id,
+                    LibraryFileAlias.content_id == LibraryFileContent.id,
                 ),
                 Join(
                     ProductRelease,
diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py
index b3c8ad7..71912a9 100644
--- a/lib/lp/services/librarian/client.py
+++ b/lib/lp/services/librarian/client.py
@@ -24,7 +24,6 @@ from urllib.request import urlopen
 
 import six
 from lazr.restful.utils import get_current_browser_request
-from storm.store import Store
 from zope.interface import implementer
 
 from lp.services.config import config, dbconfig
@@ -249,6 +248,7 @@ class FileUploadClient:
                 sha1=sha1_digester.hexdigest(),
                 md5=md5_digester.hexdigest(),
             )
+            store.add(content)
             LibraryFileAlias(
                 id=aliasID,
                 content=content,
@@ -258,7 +258,7 @@ class FileUploadClient:
                 restricted=self.restricted,
             )
 
-            Store.of(content).flush()
+            store.flush()
 
             assert isinstance(aliasID, int), "aliasID %r not an integer" % (
                 aliasID,
diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py
index 9c517c5..25692c8 100644
--- a/lib/lp/services/librarian/model.py
+++ b/lib/lp/services/librarian/model.py
@@ -15,7 +15,16 @@ from datetime import datetime, timezone
 from urllib.parse import urlparse
 
 from lazr.delegates import delegate_to
-from storm.locals import Date, Desc, Int, Reference, ReferenceSet, Store
+from storm.locals import (
+    Bool,
+    Date,
+    DateTime,
+    Desc,
+    Int,
+    Reference,
+    Store,
+    Unicode,
+)
 from zope.component import adapter, getUtility
 from zope.interface import Interface, implementer
 
@@ -23,16 +32,8 @@ from lp.app.errors import NotFoundError
 from lp.registry.errors import InvalidFilename
 from lp.services.config import config
 from lp.services.database.constants import DEFAULT, UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.interfaces import IPrimaryStore, IStore
-from lp.services.database.sqlbase import SQLBase, session_store
-from lp.services.database.sqlobject import (
-    BoolCol,
-    ForeignKey,
-    IntCol,
-    SQLRelatedJoin,
-    StringCol,
-)
+from lp.services.database.sqlbase import session_store
 from lp.services.database.stormbase import StormBase
 from lp.services.librarian.interfaces import (
     ILibraryFileAlias,
@@ -52,48 +53,65 @@ from lp.services.tokens import create_token
 
 
 @implementer(ILibraryFileContent)
-class LibraryFileContent(SQLBase):
+class LibraryFileContent(StormBase):
     """A pointer to file content in the librarian."""
 
-    _table = "LibraryFileContent"
+    __storm_table__ = "LibraryFileContent"
+
+    id = Int(primary=True)
+    datecreated = DateTime(
+        allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
+    )
+    filesize = Int(allow_none=False)
+    sha256 = Unicode()
+    sha1 = Unicode(allow_none=False)
+    md5 = Unicode(allow_none=False)
 
-    datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
-    filesize = IntCol(notNull=True)
-    sha256 = StringCol()
-    sha1 = StringCol(notNull=True)
-    md5 = StringCol(notNull=True)
+    def __init__(self, filesize, md5, sha1, sha256, id=None):
+        super().__init__()
+        if id is not None:
+            self.id = id
+        self.filesize = filesize
+        self.md5 = md5
+        self.sha1 = sha1
+        self.sha256 = sha256
 
 
 @implementer(ILibraryFileAlias)
-class LibraryFileAlias(SQLBase):
+class LibraryFileAlias(StormBase):
     """A filename and mimetype that we can serve some given content with."""
 
-    _table = "LibraryFileAlias"
-    date_created = UtcDateTimeCol(notNull=False, default=DEFAULT)
-    content = ForeignKey(
-        foreignKey="LibraryFileContent",
-        dbName="content",
-        notNull=False,
-    )
-    filename = StringCol(notNull=True)
-    mimetype = StringCol(notNull=True)
-    expires = UtcDateTimeCol(notNull=False, default=None)
-    restricted = BoolCol(notNull=True, default=False)
-    hits = IntCol(notNull=True, default=0)
-
-    products = SQLRelatedJoin(
-        "ProductRelease",
-        joinColumn="libraryfile",
-        otherColumn="productrelease",
-        intermediateTable="ProductReleaseFile",
-    )
+    __storm_table__ = "LibraryFileAlias"
 
-    sourcepackages = ReferenceSet(
-        "id",
-        "SourcePackageReleaseFile.libraryfile_id",
-        "SourcePackageReleaseFile.sourcepackagerelease_id",
-        "SourcePackageRelease.id",
+    id = Int(primary=True)
+    date_created = DateTime(
+        allow_none=True, default=DEFAULT, tzinfo=timezone.utc
     )
+    content_id = Int(name="content", allow_none=True)
+    content = Reference(content_id, "LibraryFileContent.id")
+    filename = Unicode(allow_none=False)
+    mimetype = Unicode(allow_none=False)
+    expires = DateTime(allow_none=True, default=None, tzinfo=timezone.utc)
+    restricted = Bool(allow_none=False, default=False)
+    hits = Int(allow_none=False, default=0)
+
+    def __init__(
+        self,
+        content,
+        filename,
+        mimetype,
+        id=None,
+        expires=None,
+        restricted=False,
+    ):
+        super().__init__()
+        if id is not None:
+            self.id = id
+        self.content = content
+        self.filename = filename
+        self.mimetype = mimetype
+        self.expires = expires
+        self.restricted = restricted
 
     @property
     def client(self):
@@ -199,23 +217,9 @@ class LibraryFileAlias(SQLBase):
             entry.count += count
         self.hits += count
 
-    products = SQLRelatedJoin(
-        "ProductRelease",
-        joinColumn="libraryfile",
-        otherColumn="productrelease",
-        intermediateTable="ProductReleaseFile",
-    )
-
-    sourcepackages = ReferenceSet(
-        "id",
-        "SourcePackageReleaseFile.libraryfile_id",
-        "SourcePackageReleaseFile.sourcepackagerelease_id",
-        "SourcePackageRelease.id",
-    )
-
     @property
     def deleted(self):
-        return self.contentID is None
+        return self.content_id is None
 
     def __storm_invalidated__(self):
         """Make sure that the file is closed across transaction boundary."""
@@ -327,7 +331,7 @@ class LibraryFileAliasSet:
 
 
 @implementer(ILibraryFileDownloadCount)
-class LibraryFileDownloadCount(SQLBase):
+class LibraryFileDownloadCount(StormBase):
     """See `ILibraryFileDownloadCount`"""
 
     __storm_table__ = "LibraryFileDownloadCount"
@@ -340,16 +344,23 @@ class LibraryFileDownloadCount(SQLBase):
     country_id = Int(name="country", allow_none=True)
     country = Reference(country_id, "Country.id")
 
+    def __init__(self, libraryfilealias, day, count, country=None):
+        super().__init__()
+        self.libraryfilealias = libraryfilealias
+        self.day = day
+        self.count = count
+        self.country = country
+
 
 class TimeLimitedToken(StormBase):
     """A time limited access token for accessing a private file."""
 
     __storm_table__ = "TimeLimitedToken"
 
-    created = UtcDateTimeCol(notNull=True, default=UTC_NOW)
-    path = StringCol(notNull=True)
+    created = DateTime(allow_none=False, default=UTC_NOW, tzinfo=timezone.utc)
+    path = Unicode(allow_none=False)
     # The hex SHA-256 hash of the token.
-    token = StringCol(notNull=True)
+    token = Unicode(allow_none=False)
 
     __storm_primary__ = ("path", "token")
 
diff --git a/lib/lp/services/librarianserver/db.py b/lib/lp/services/librarianserver/db.py
index 1ffc2f6..21631b2 100644
--- a/lib/lp/services/librarianserver/db.py
+++ b/lib/lp/services/librarianserver/db.py
@@ -147,7 +147,7 @@ class Library:
     def getAliases(self, fileid):
         results = IStore(LibraryFileAlias).find(
             LibraryFileAlias,
-            LibraryFileAlias.contentID == LibraryFileContent.id,
+            LibraryFileAlias.content_id == LibraryFileContent.id,
             LibraryFileAlias.restricted == self.restricted,
             LibraryFileContent.id == fileid,
         )
@@ -156,20 +156,22 @@ class Library:
     # the following methods are used for adding to the library
 
     def add(self, digest, size, md5_digest, sha256_digest):
+        store = IStore(LibraryFileContent)
         lfc = LibraryFileContent(
             filesize=size, sha1=digest, md5=md5_digest, sha256=sha256_digest
         )
-        return lfc.id
-
-    def addAlias(self, fileid, filename, mimetype, expires=None):
-        """Add an alias, and return its ID.
-
-        If a matching alias already exists, it will return that ID instead.
-        """
-        return LibraryFileAlias(
-            contentID=fileid,
+        store.add(lfc)
+        store.flush()
+        return lfc
+
+    def addAlias(self, content, filename, mimetype, expires=None):
+        """Add an alias and return it."""
+        lfa = LibraryFileAlias(
+            content=content,
             filename=filename,
             mimetype=mimetype,
             expires=expires,
             restricted=self.restricted,
-        ).id
+        )
+        IStore(LibraryFileAlias).flush()
+        return lfa
diff --git a/lib/lp/services/librarianserver/storage.py b/lib/lp/services/librarianserver/storage.py
index 2c2437b..cabded8 100644
--- a/lib/lp/services/librarianserver/storage.py
+++ b/lib/lp/services/librarianserver/storage.py
@@ -270,15 +270,16 @@ class LibraryFileUpload:
             # If we haven't got a contentID, we need to create one and return
             # it to the client.
             if self.contentID is None:
-                contentID = self.storage.library.add(
+                content = self.storage.library.add(
                     dstDigest,
                     self.size,
                     self.md5_digester.hexdigest(),
                     self.sha256_digester.hexdigest(),
                 )
+                contentID = content.id
                 aliasID = self.storage.library.addAlias(
-                    contentID, self.filename, self.mimetype, self.expires
-                )
+                    content, self.filename, self.mimetype, self.expires
+                ).id
                 self.debugLog.append(
                     "created contentID: %r, aliasID: %r."
                     % (contentID, aliasID)
diff --git a/lib/lp/services/librarianserver/testing/fake.py b/lib/lp/services/librarianserver/testing/fake.py
index e265e6a..9898dc2 100644
--- a/lib/lp/services/librarianserver/testing/fake.py
+++ b/lib/lp/services/librarianserver/testing/fake.py
@@ -25,6 +25,7 @@ from transaction.interfaces import ISynchronizer
 from zope.interface import implementer
 
 from lp.services.config import config
+from lp.services.database.interfaces import IStore
 from lp.services.librarian.client import get_libraryfilealias_download_path
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
 from lp.services.librarian.interfaces.client import (
@@ -102,7 +103,7 @@ class FakeLibrarian(Fixture):
             )
 
         file_ref = self._makeLibraryFileContent(content)
-        alias = self._makeAlias(file_ref.id, name, content, contentType)
+        alias = self._makeAlias(file_ref, name, content, contentType)
         self.aliases[alias.id] = alias
         return alias
 
@@ -142,12 +143,13 @@ class FakeLibrarian(Fixture):
         for alias in self.aliases.values():
             alias.file_committed = True
 
-    def _makeAlias(self, file_id, name, content, content_type):
+    def _makeAlias(self, lfc, name, content, content_type):
         """Create a `LibraryFileAlias`."""
         alias = InstrumentedLibraryFileAlias(
-            contentID=file_id, filename=name, mimetype=content_type
+            content=lfc, filename=name, mimetype=content_type
         )
         alias.content_bytes = content
+        IStore(LibraryFileAlias).flush()
         return alias
 
     def _makeLibraryFileContent(self, content):
@@ -160,6 +162,7 @@ class FakeLibrarian(Fixture):
         content_object = LibraryFileContent(
             filesize=size, md5=md5, sha1=sha1, sha256=sha256
         )
+        IStore(LibraryFileContent).add(content_object)
         return content_object
 
     def create(
diff --git a/lib/lp/services/librarianserver/tests/test_db.py b/lib/lp/services/librarianserver/tests/test_db.py
index f43d918..48d980b 100644
--- a/lib/lp/services/librarianserver/tests/test_db.py
+++ b/lib/lp/services/librarianserver/tests/test_db.py
@@ -42,21 +42,20 @@ class DBTestCase(TestCase):
         self.assertEqual([], library.lookupBySHA1("deadbeef"))
 
         # Add a file, check it is found by lookupBySHA1
-        fileID = library.add("deadbeef", 1234, "abababab", "babababa")
-        self.assertEqual([fileID], library.lookupBySHA1("deadbeef"))
+        content = library.add("deadbeef", 1234, "abababab", "babababa")
+        self.assertEqual([content.id], library.lookupBySHA1("deadbeef"))
 
         # Add a new file with the same digest
-        newFileID = library.add("deadbeef", 1234, "abababab", "babababa")
+        new_content = library.add("deadbeef", 1234, "abababab", "babababa")
         # Check it gets a new ID anyway
-        self.assertNotEqual(fileID, newFileID)
+        self.assertNotEqual(content.id, new_content.id)
         # Check it is found by lookupBySHA1
         self.assertEqual(
-            sorted([fileID, newFileID]),
+            sorted([content.id, new_content.id]),
             sorted(library.lookupBySHA1("deadbeef")),
         )
 
-        aliasID = library.addAlias(fileID, "file1", "text/unknown")
-        alias = library.getAlias(aliasID, None, "/")
+        alias = library.addAlias(content, "file1", "text/unknown")
         self.assertEqual("file1", alias.filename)
         self.assertEqual("text/unknown", alias.mimetype)
 
@@ -97,7 +96,9 @@ class TestLibrarianStuff(TestCase):
         super().setUp()
         switch_dbuser("librarian")
         self.store = IStore(LibraryFileContent)
-        self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba")
+        self.content_id = (
+            db.Library().add("deadbeef", 1234, "abababab", "ba").id
+        )
         self.file_content = self._getTestFileContent()
         transaction.commit()
 
diff --git a/lib/lp/services/librarianserver/tests/test_gc.py b/lib/lp/services/librarianserver/tests/test_gc.py
index 0d2a163..90f2039 100644
--- a/lib/lp/services/librarianserver/tests/test_gc.py
+++ b/lib/lp/services/librarianserver/tests/test_gc.py
@@ -131,7 +131,7 @@ class TestLibrarianGarbageCollectionBase:
 
         # Make sure the duplicates really are distinct
         self.assertNotEqual(f1_id, f2_id)
-        self.assertNotEqual(f1.contentID, f2.contentID)
+        self.assertNotEqual(f1.content_id, f2.content_id)
 
         f1.date_created = self.ancient_past
         f2.date_created = self.ancient_past
@@ -140,8 +140,8 @@ class TestLibrarianGarbageCollectionBase:
 
         # Set the time on disk to match the database timestamp.
         utime = calendar.timegm(self.ancient_past.utctimetuple())
-        os.utime(librariangc.get_file_path(f1.contentID), (utime, utime))
-        os.utime(librariangc.get_file_path(f2.contentID), (utime, utime))
+        os.utime(librariangc.get_file_path(f1.content_id), (utime, utime))
+        os.utime(librariangc.get_file_path(f2.content_id), (utime, utime))
 
         del f1, f2
 
@@ -166,7 +166,7 @@ class TestLibrarianGarbageCollectionBase:
         self.ztm.begin()
         f1 = self.store.get(LibraryFileAlias, self.f1_id)
         f2 = self.store.get(LibraryFileAlias, self.f2_id)
-        self.assertEqual(f1.contentID, f2.contentID)
+        self.assertEqual(f1.content_id, f2.content_id)
 
     def test_DeleteUnreferencedAliases(self):
         self.ztm.begin()
@@ -176,8 +176,8 @@ class TestLibrarianGarbageCollectionBase:
         f2 = self.store.get(LibraryFileAlias, self.f2_id)
         # Grab the content IDs related to these
         # unreferenced LibraryFileAliases
-        c1_id = f1.contentID
-        c2_id = f2.contentID
+        c1_id = f1.content_id
+        c2_id = f2.content_id
         del f1, f2
         self.ztm.abort()
 
@@ -630,7 +630,7 @@ class TestDiskLibrarianGarbageCollection(
                 "foo.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        id_aborted = lfa.contentID
+        id_aborted = lfa.content_id
         # Roll back the database changes, leaving the file on disk.
         transaction.abort()
 
@@ -641,7 +641,7 @@ class TestDiskLibrarianGarbageCollection(
             ),
         )
         transaction.commit()
-        id_committed = lfa.contentID
+        id_committed = lfa.content_id
 
         switch_dbuser(config.librarian_gc.dbuser)
 
@@ -818,7 +818,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "foo.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        big1_id = big1_lfa.contentID
+        big1_id = big1_lfa.content_id
 
         big2_lfa = self.store.get(
             LibraryFileAlias,
@@ -826,7 +826,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "bar.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        big2_id = big2_lfa.contentID
+        big2_id = big2_lfa.content_id
         transaction.commit()
 
         for lfc_id in (big1_id, big2_id):
@@ -881,7 +881,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "foo.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        f1_id = f1_lfa.contentID
+        f1_id = f1_lfa.content_id
 
         f2_lfa = self.store.get(
             LibraryFileAlias,
@@ -889,7 +889,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "bar.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        f2_id = f2_lfa.contentID
+        f2_id = f2_lfa.content_id
         transaction.commit()
 
         for lfc_id in (f1_id, f2_id):
@@ -948,7 +948,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "foo.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        f1_id = f1_lfa.contentID
+        f1_id = f1_lfa.content_id
 
         f2_lfa = self.store.get(
             LibraryFileAlias,
@@ -956,7 +956,7 @@ class TestSwiftLibrarianGarbageCollection(
                 "bar.txt", len(content), io.BytesIO(content), "text/plain"
             ),
         )
-        f2_id = f2_lfa.contentID
+        f2_id = f2_lfa.content_id
         transaction.commit()
 
         for lfc_id in (f1_id, f2_id):
@@ -1032,7 +1032,7 @@ class TestTwoSwiftsLibrarianGarbageCollection(
             )
             for _ in range(12)
         ]
-        lfc_ids = [lfa.contentID for lfa in lfas]
+        lfc_ids = [lfa.content_id for lfa in lfas]
         transaction.commit()
 
         # Simulate a migration in progress.  Some files are only in the old
@@ -1119,7 +1119,7 @@ class TestTwoSwiftsLibrarianGarbageCollection(
             )
             for _ in range(12)
         ]
-        lfc_ids = [lfa.contentID for lfa in lfas]
+        lfc_ids = [lfa.content_id for lfa in lfas]
         transaction.commit()
 
         for lfc_id in lfc_ids:
diff --git a/lib/lp/services/librarianserver/tests/test_storage.py b/lib/lp/services/librarianserver/tests/test_storage.py
index f5100a1..bd0daa4 100644
--- a/lib/lp/services/librarianserver/tests/test_storage.py
+++ b/lib/lp/services/librarianserver/tests/test_storage.py
@@ -111,17 +111,22 @@ class LibrarianStorageTestCase(unittest.TestCase):
         self.assertEqual(sha256, lfc.sha256)
 
 
+class StubLibraryFileContent:
+    def __init__(self, id):
+        self.id = id
+
+
 class StubLibrary:
     # Used by test_multipleFilesInOnePrefixedDirectory
 
     def lookupBySHA1(self, digest):
         return []
 
-    def addAlias(self, fileid, filename, mimetype):
+    def addAlias(self, content, filename, mimetype):
         pass
 
     id = 0x11111110
 
     def add(self, digest, size):
         self.id += 1
-        return self.id
+        return StubLibraryFileContent(self.id)
diff --git a/lib/lp/services/librarianserver/tests/test_storage_db.py b/lib/lp/services/librarianserver/tests/test_storage_db.py
index b2d0b1b..05723f1 100644
--- a/lib/lp/services/librarianserver/tests/test_storage_db.py
+++ b/lib/lp/services/librarianserver/tests/test_storage_db.py
@@ -10,6 +10,7 @@ from testtools.testcase import ExpectedException
 from testtools.twistedsupport import AsynchronousDeferredRunTest
 from twisted.internet import defer
 
+from lp.services.database.interfaces import IStore
 from lp.services.database.sqlbase import flush_database_updates
 from lp.services.features.testing import FeatureFixture
 from lp.services.librarian.model import LibraryFileContent
@@ -129,11 +130,16 @@ class LibrarianStorageDBTests(TestCase):
         fileid2, aliasid2 = newfile2.store()
 
         # Create rows in the database for these files.
-        LibraryFileContent(
-            filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661
+        store = IStore(LibraryFileContent)
+        store.add(
+            LibraryFileContent(
+                filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661
+            )
         )
-        LibraryFileContent(
-            filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662
+        store.add(
+            LibraryFileContent(
+                filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662
+            )
         )
 
         flush_database_updates()
diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py
index a84b74d..19ebe56 100644
--- a/lib/lp/services/librarianserver/tests/test_web.py
+++ b/lib/lp/services/librarianserver/tests/test_web.py
@@ -342,7 +342,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
 
         # Delete the on-disk file.
         storage = LibrarianStorage(config.librarian_server.root, None)
-        os.remove(storage._fileLocation(file_alias.contentID))
+        os.remove(storage._fileLocation(file_alias.content_id))
 
         # The URL now 500s, since the DB says it should exist.
         response = requests.get(url)
diff --git a/lib/lp/services/librarianserver/web.py b/lib/lp/services/librarianserver/web.py
index ef9b6eb..efa7f55 100644
--- a/lib/lp/services/librarianserver/web.py
+++ b/lib/lp/services/librarianserver/web.py
@@ -130,7 +130,7 @@ class LibraryFileAliasResource(resource.Resource):
         try:
             alias = self.storage.getFileAlias(aliasID, token, path)
             return (
-                alias.contentID,
+                alias.content_id,
                 alias.filename,
                 alias.mimetype,
                 alias.date_created,
diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py
index c742fc2..a9a0128 100644
--- a/lib/lp/snappy/model/snap.py
+++ b/lib/lp/snappy/model/snap.py
@@ -1143,7 +1143,7 @@ class Snap(StormBase, WebhookTargetMixin):
 
         # Prefetch data to keep DB query count constant
         lfas = load_related(LibraryFileAlias, builds, ["log_id"])
-        load_related(LibraryFileContent, lfas, ["contentID"])
+        load_related(LibraryFileContent, lfas, ["content_id"])
 
         for build in builds:
             if build.date is not None:
diff --git a/lib/lp/snappy/model/snapbuild.py b/lib/lp/snappy/model/snapbuild.py
index 0f4f041..df2e2c2 100644
--- a/lib/lp/snappy/model/snapbuild.py
+++ b/lib/lp/snappy/model/snapbuild.py
@@ -334,7 +334,7 @@ class SnapBuild(PackageBuildMixin, StormBase):
             (SnapFile, LibraryFileAlias, LibraryFileContent),
             SnapFile.snapbuild == self.id,
             LibraryFileAlias.id == SnapFile.libraryfile_id,
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
         )
         return result.order_by([LibraryFileAlias.filename, SnapFile.id])
 
@@ -602,7 +602,7 @@ class SnapBuildSet(SpecificBuildFarmJobSourceMixin):
 
         load_related(Person, builds, ["requester_id"])
         lfas = load_related(LibraryFileAlias, builds, ["log_id"])
-        load_related(LibraryFileContent, lfas, ["contentID"])
+        load_related(LibraryFileContent, lfas, ["content_id"])
         archives = load_related(Archive, builds, ["archive_id"])
         load_related(Person, archives, ["owner_id"])
         distroarchseries = load_related(
diff --git a/lib/lp/soyuz/browser/queue.py b/lib/lp/soyuz/browser/queue.py
index 5112ea7..ff942aa 100644
--- a/lib/lp/soyuz/browser/queue.py
+++ b/lib/lp/soyuz/browser/queue.py
@@ -233,7 +233,7 @@ class QueueItemsView(LaunchpadView):
         file_lfas = load_related(
             LibraryFileAlias, source_files + binary_files, ["libraryfile_id"]
         )
-        load_related(LibraryFileContent, file_lfas, ["contentID"])
+        load_related(LibraryFileContent, file_lfas, ["content_id"])
 
         # Get a dictionary of lists of binary files keyed by upload ID.
         package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)
diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py
index fe36739..c7749ba 100644
--- a/lib/lp/soyuz/model/archive.py
+++ b/lib/lp/soyuz/model/archive.py
@@ -957,7 +957,7 @@ class Archive(StormBase):
             SourcePackagePublishingHistory.sourcepackagerelease_id
             == SourcePackageReleaseFile.sourcepackagerelease_id,
             SourcePackageReleaseFile.libraryfile_id == LibraryFileAlias.id,
-            LibraryFileAlias.contentID == LibraryFileContent.id,
+            LibraryFileAlias.content_id == LibraryFileContent.id,
         )
 
         # Note: we can't use the LFC.sha1 instead of LFA.filename above
@@ -1246,7 +1246,7 @@ class Archive(StormBase):
             BinaryPackagePublishingHistory.binarypackagerelease_id
             == BinaryPackageFile.binarypackagerelease_id,
             BinaryPackageFile.libraryfile_id == LibraryFileAlias.id,
-            LibraryFileAlias.contentID == LibraryFileContent.id,
+            LibraryFileAlias.content_id == LibraryFileContent.id,
         )
         # See `IArchive.sources_size`.
         result = result.config(distinct=True)
@@ -2956,7 +2956,7 @@ class Archive(StormBase):
                 == SourcePackageReleaseFile.sourcepackagerelease_id,
                 LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
                 LibraryFileAlias.filename.is_in(source_files),
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             )
             .config(distinct=True)
         )
diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
index f9bbe36..215d0f5 100644
--- a/lib/lp/soyuz/model/archivefile.py
+++ b/lib/lp/soyuz/model/archivefile.py
@@ -143,7 +143,7 @@ class ArchiveFileSet:
             clauses.extend(
                 [
                     ArchiveFile.library_file == LibraryFileAlias.id,
-                    LibraryFileAlias.contentID == LibraryFileContent.id,
+                    LibraryFileAlias.content_id == LibraryFileContent.id,
                     LibraryFileContent.sha256 == sha256,
                 ]
             )
@@ -189,7 +189,7 @@ class ArchiveFileSet:
 
         def eager_load(rows):
             lfas = load_related(LibraryFileAlias, rows, ["library_file_id"])
-            load_related(LibraryFileContent, lfas, ["contentID"])
+            load_related(LibraryFileContent, lfas, ["content_id"])
 
         if eager_load:
             return DecoratedResultSet(archive_files, pre_iter_hook=eager_load)
diff --git a/lib/lp/soyuz/model/binarypackagebuild.py b/lib/lp/soyuz/model/binarypackagebuild.py
index 4616feb..a801a6a 100644
--- a/lib/lp/soyuz/model/binarypackagebuild.py
+++ b/lib/lp/soyuz/model/binarypackagebuild.py
@@ -341,7 +341,7 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
             ),
             Join(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
         ]
         results = store.using(*origin).find(
@@ -483,7 +483,7 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
             BinaryPackageRelease.id
             == BinaryPackageFile.binarypackagerelease_id,
             LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
         )
         return result.order_by(
             [LibraryFileAlias.filename, BinaryPackageRelease.id]
@@ -1352,7 +1352,7 @@ class BinaryPackageBuildSet(SpecificBuildFarmJobSourceMixin):
             ),
             LeftJoin(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
             LeftJoin(Builder, Builder.id == BinaryPackageBuild.builder_id),
         )
diff --git a/lib/lp/soyuz/model/livefsbuild.py b/lib/lp/soyuz/model/livefsbuild.py
index f7312a9..3c4f567 100644
--- a/lib/lp/soyuz/model/livefsbuild.py
+++ b/lib/lp/soyuz/model/livefsbuild.py
@@ -277,7 +277,7 @@ class LiveFSBuild(PackageBuildMixin, StormBase):
             (LiveFSFile, LibraryFileAlias, LibraryFileContent),
             LiveFSFile.livefsbuild == self.id,
             LibraryFileAlias.id == LiveFSFile.libraryfile_id,
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
         )
         return result.order_by([LibraryFileAlias.filename, LiveFSFile.id])
 
diff --git a/lib/lp/soyuz/model/packagediff.py b/lib/lp/soyuz/model/packagediff.py
index e21de8b..14dd76c 100644
--- a/lib/lp/soyuz/model/packagediff.py
+++ b/lib/lp/soyuz/model/packagediff.py
@@ -346,7 +346,7 @@ class PackageDiffSet:
 
         def preload_hook(rows):
             lfas = load(LibraryFileAlias, (pd.diff_content_id for pd in rows))
-            load(LibraryFileContent, (lfa.contentID for lfa in lfas))
+            load(LibraryFileContent, (lfa.content_id for lfa in lfas))
             sprs = load(
                 SourcePackageRelease,
                 itertools.chain.from_iterable(
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 3f52ac0..5c32866 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -550,7 +550,7 @@ class SourcePackagePublishingHistory(StormBase, ArchivePublisherBase):
 
         files = self.sourcepackagerelease.files
         lfas = bulk.load_related(LibraryFileAlias, files, ["libraryfile_id"])
-        bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+        bulk.load_related(LibraryFileContent, lfas, ["content_id"])
         return files
 
     def getSourceAndBinaryLibraryFiles(self):
@@ -731,7 +731,7 @@ class SourcePackagePublishingHistory(StormBase, ArchivePublisherBase):
         """See `ISourcePackagePublishingHistory`."""
         sources = Store.of(self).find(
             (LibraryFileAlias, LibraryFileContent),
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
             LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
             SourcePackageReleaseFile.sourcepackagerelease
             == self.sourcepackagerelease_id,
@@ -947,7 +947,7 @@ class BinaryPackagePublishingHistory(StormBase, ArchivePublisherBase):
         """See `IPublishing`."""
         files = self.binarypackagerelease.files
         lfas = bulk.load_related(LibraryFileAlias, files, ["libraryfile_id"])
-        bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+        bulk.load_related(LibraryFileContent, lfas, ["content_id"])
         return files
 
     @property
@@ -1367,7 +1367,7 @@ class BinaryPackagePublishingHistory(StormBase, ArchivePublisherBase):
         """See `IBinaryPackagePublishingHistory`."""
         binaries = Store.of(self).find(
             (LibraryFileAlias, LibraryFileContent),
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
             LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
             BinaryPackageFile.binarypackagerelease
             == self.binarypackagerelease_id,
@@ -1976,7 +1976,7 @@ class PublishingSet:
                 LibraryFileAlias,
                 LibraryFileContent,
             ),
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
             LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
             BinaryPackageFile.binarypackagerelease == BinaryPackageRelease.id,
             BinaryPackageRelease.build_id == BinaryPackageBuild.id,
@@ -2004,7 +2004,7 @@ class PublishingSet:
                 LibraryFileAlias,
                 LibraryFileContent,
             ),
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
             LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
             SourcePackageReleaseFile.sourcepackagerelease
             == SourcePackagePublishingHistory.sourcepackagerelease_id,
@@ -2150,7 +2150,7 @@ class PublishingSet:
             lfas = bulk.load_related(
                 LibraryFileAlias, sprfs, ["libraryfile_id"]
             )
-            bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+            bulk.load_related(LibraryFileContent, lfas, ["content_id"])
 
         return DecoratedResultSet(spphs, pre_iter_hook=eager_load)
 
@@ -2204,7 +2204,7 @@ class PublishingSet:
             lfas = bulk.load_related(
                 LibraryFileAlias, bpfs, ["libraryfile_id"]
             )
-            bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+            bulk.load_related(LibraryFileContent, lfas, ["content_id"])
             bulk.load_related(
                 SourcePackageName, sprs, ["sourcepackagename_id"]
             )
@@ -2231,7 +2231,7 @@ class PublishingSet:
                 LibraryFileAlias,
                 LibraryFileContent,
             ),
-            LibraryFileContent.id == LibraryFileAlias.contentID,
+            LibraryFileContent.id == LibraryFileAlias.content_id,
             LibraryFileAlias.id == PackageUpload.changes_file_id,
             PackageUpload.id == PackageUploadSource.packageupload_id,
             PackageUpload.status == PackageUploadStatus.DONE,
diff --git a/lib/lp/soyuz/model/queue.py b/lib/lp/soyuz/model/queue.py
index f330355..0d477d7 100644
--- a/lib/lp/soyuz/model/queue.py
+++ b/lib/lp/soyuz/model/queue.py
@@ -1899,7 +1899,7 @@ def prefill_packageupload_caches(uploads, puses, pubs, pucs, logs):
     )
 
     puc_lfas = load_related(LibraryFileAlias, pucs, ["libraryfilealias_id"])
-    load_related(LibraryFileContent, puc_lfas, ["contentID"])
+    load_related(LibraryFileContent, puc_lfas, ["content_id"])
 
     for spr_cache in sprs:
         get_property_cache(spr_cache).published_archives = []
diff --git a/lib/lp/soyuz/model/sourcepackagerelease.py b/lib/lp/soyuz/model/sourcepackagerelease.py
index 6e1222e..1f7b00f 100644
--- a/lib/lp/soyuz/model/sourcepackagerelease.py
+++ b/lib/lp/soyuz/model/sourcepackagerelease.py
@@ -480,7 +480,7 @@ class SourcePackageRelease(StormBase):
             ),
             Join(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
         ]
         results = store.using(*origin).find(
diff --git a/lib/lp/soyuz/tests/test_packagediff.py b/lib/lp/soyuz/tests/test_packagediff.py
index b86a26f..c5db107 100644
--- a/lib/lp/soyuz/tests/test_packagediff.py
+++ b/lib/lp/soyuz/tests/test_packagediff.py
@@ -102,7 +102,7 @@ class TestPackageDiffs(TestCaseWithFactory):
         if expire:
             update_map[LibraryFileAlias.expires] = datetime.now(timezone.utc)
         if delete:
-            update_map[LibraryFileAlias.contentID] = None
+            update_map[LibraryFileAlias.content_id] = None
         with dbuser("launchpad"):
             IStore(LibraryFileAlias).execute(
                 BulkUpdate(
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 60cb62e..7605ed7 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -3283,6 +3283,7 @@ class LaunchpadObjectFactory(ObjectFactory):
                 sha1=hashlib.sha1(content).hexdigest(),
                 md5=hashlib.md5(content).hexdigest(),
             )
+            IStore(LibraryFileContent).add(lfc)
             lfa = ProxyFactory(
                 LibraryFileAlias(
                     content=lfc,
@@ -3292,6 +3293,7 @@ class LaunchpadObjectFactory(ObjectFactory):
                     restricted=restricted,
                 )
             )
+            IStore(LibraryFileAlias).flush()
         else:
             lfa = getUtility(ILibraryFileAliasSet).create(
                 filename,
diff --git a/lib/lp/translations/model/translationgroup.py b/lib/lp/translations/model/translationgroup.py
index c8dc2dd..5471b10 100644
--- a/lib/lp/translations/model/translationgroup.py
+++ b/lib/lp/translations/model/translationgroup.py
@@ -178,7 +178,7 @@ class TranslationGroup(StormBase):
             LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Person.icon_id),
             LeftJoin(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
         ]
         tables = (
@@ -223,7 +223,7 @@ class TranslationGroup(StormBase):
         )
         get_precached_products(products, need_licences=True)
         icons = bulk.load_related(LibraryFileAlias, products, ["icon_id"])
-        bulk.load_related(LibraryFileContent, icons, ["contentID"])
+        bulk.load_related(LibraryFileContent, icons, ["content_id"])
         return products
 
     def fetchProjectGroupsForDisplay(self):
@@ -238,7 +238,7 @@ class TranslationGroup(StormBase):
             ),
             LeftJoin(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
         ]
         tables = (
@@ -271,7 +271,7 @@ class TranslationGroup(StormBase):
             ),
             LeftJoin(
                 LibraryFileContent,
-                LibraryFileContent.id == LibraryFileAlias.contentID,
+                LibraryFileContent.id == LibraryFileAlias.content_id,
             ),
         ]
         tables = (