← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~cjwatson/launchpad:rename-master-store into launchpad:master

 

Colin Watson has proposed merging ~cjwatson/launchpad:rename-master-store into launchpad:master.

Commit message:
Rename IMasterStore to IPrimaryStore

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/432028

Also `IMasterObject` to `IPrimaryObject`, and some other references to the "master" database.  This is in line with an earlier renaming of the flavor.

This is long, but the bulk of it was mechanical search-and-replace.  I've run the full test suite to verify it.
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:rename-master-store into launchpad:master.
diff --git a/cronscripts/update-database-stats.py b/cronscripts/update-database-stats.py
index 3e63626..60192d1 100755
--- a/cronscripts/update-database-stats.py
+++ b/cronscripts/update-database-stats.py
@@ -7,7 +7,7 @@
 import _pythonpath  # noqa: F401
 
 from lp.registry.model.person import Person
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.scripts import db_options
 from lp.services.scripts.base import LaunchpadCronScript
 
@@ -17,7 +17,7 @@ class UpdateDatabaseStats(LaunchpadCronScript):
 
     def main(self):
         "Run UpdateDatabaseTableStats." ""
-        store = IMasterStore(Person)
+        store = IPrimaryStore(Person)
 
         # The logic is in a stored procedure because we want to run
         # ps(1) on the database server rather than the host this script
diff --git a/lib/lp/answers/model/faq.py b/lib/lp/answers/model/faq.py
index 0ce5c30..5ce44c3 100644
--- a/lib/lp/answers/model/faq.py
+++ b/lib/lp/answers/model/faq.py
@@ -25,7 +25,7 @@ from lp.registry.interfaces.person import IPerson, validate_public_person
 from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.projectgroup import IProjectGroup
 from lp.services.database.constants import DEFAULT
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.nl_search import nl_phrase_search
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import fti_search, rank_by_fti
@@ -144,7 +144,7 @@ class FAQ(StormBase):
             product=product,
             distribution=distribution,
         )
-        store = IMasterStore(FAQ)
+        store = IPrimaryStore(FAQ)
         store.add(faq)
         store.flush()
         notify(ObjectCreatedEvent(faq))
diff --git a/lib/lp/answers/model/questionjob.py b/lib/lp/answers/model/questionjob.py
index 3a7277c..cca5853 100644
--- a/lib/lp/answers/model/questionjob.py
+++ b/lib/lp/answers/model/questionjob.py
@@ -26,7 +26,7 @@ from lp.answers.model.question import Question
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import Job
 from lp.services.job.runner import BaseRunnableJob
@@ -124,7 +124,7 @@ class QuestionEmailJob(BaseRunnableJob):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        store = IMasterStore(QuestionJob)
+        store = IPrimaryStore(QuestionJob)
         jobs = store.find(
             QuestionJob,
             And(
diff --git a/lib/lp/app/stories/basics/xx-dbpolicy.rst b/lib/lp/app/stories/basics/xx-dbpolicy.rst
index 919336f..e553a75 100644
--- a/lib/lp/app/stories/basics/xx-dbpolicy.rst
+++ b/lib/lp/app/stories/basics/xx-dbpolicy.rst
@@ -2,13 +2,13 @@ Application Server Database Policy
 ==================================
 
 The database policy chooses the default Storm store to use. Its goal
-is to distribute load away from the master databases to read only
+is to distribute load away from the primary databases to read only
 stores where possible. It will benefit old code - new code should
-explicitly select objects from the master or standby stores as needed.
+explicitly select objects from the primary or standby stores as needed.
 
 To test this policy, lets point the MAIN STANDBY store to a Launchpad
 database with a different name. This makes it easy to check if a
-request is querying the master or standby database.
+request is querying the primary or standby database.
 
     >>> from lp.services.config import config
     >>> from textwrap import dedent
@@ -22,13 +22,13 @@ request is querying the master or standby database.
 
     >>> from lp.registry.model.person import Person
     >>> from lp.services.database.interfaces import (
-    ...     IMasterStore,
+    ...     IPrimaryStore,
     ...     IStandbyStore,
     ... )
     >>> from lp.testing.layers import DatabaseLayer
-    >>> master = IMasterStore(Person)
+    >>> primary = IPrimaryStore(Person)
     >>> dbname = DatabaseLayer._db_fixture.dbname
-    >>> dbname == master.execute("SELECT current_database()").get_one()[0]
+    >>> dbname == primary.execute("SELECT current_database()").get_one()[0]
     True
     >>> standby = IStandbyStore(Person)
     >>> print(standby.execute("SELECT current_database()").get_one()[0])
@@ -37,10 +37,10 @@ request is querying the master or standby database.
 We should confirm that the empty database is as empty as we hope it is.
 
     >>> standby_store = IStandbyStore(Person)
-    >>> master_store = IMasterStore(Person)
+    >>> primary_store = IPrimaryStore(Person)
     >>> standby_store.find(Person).is_empty()
     True
-    >>> master_store.find(Person).is_empty()
+    >>> primary_store.find(Person).is_empty()
     False
 
 This helper parses the output of the +whichdb view (which unfortunately
@@ -49,7 +49,7 @@ needs to be created externally to this pagetest).
     >>> def whichdb(browser):
     ...     dbname = extract_text(find_tag_by_id(browser.contents, "dbname"))
     ...     if dbname == DatabaseLayer._db_fixture.dbname:
-    ...         return "MASTER"
+    ...         return "PRIMARY"
     ...     elif dbname == "launchpad_empty":
     ...         return "STANDBY"
     ...     else:
@@ -63,12 +63,12 @@ Store by default.
     >>> print(whichdb(browser))
     STANDBY
 
-POST requests might make updates, so they use the MAIN MASTER
+POST requests might make updates, so they use the MAIN PRIMARY
 Store by default.
 
     >>> browser.getControl("Do Post").click()
     >>> print(whichdb(browser))
-    MASTER
+    PRIMARY
 
 This is an unauthenticated browser.  These typically have no session, unless
 special dispensation has been made. Without a session, subsequent requests
@@ -81,17 +81,17 @@ will then immediately return to using the STANDBY.
 However, if the request has a session (that is, is authenticated; or is
 unauthenticated, but under special dispensation to have a session), once a
 POST request has been made, further GET and HEAD requests from the same client
-continue to use the MAIN MASTER Store by default for 5 minutes. This ensures
+continue to use the MAIN PRIMARY Store by default for 5 minutes. This ensures
 that a user will see any changes they have made immediately, even though the
-standby databases may lag some time behind the master database.
+standby databases may lag some time behind the primary database.
 
    >>> browser.addHeader("Authorization", "Basic mark@example.com:test")
     >>> browser.getControl("Do Post").click()  # POST request
     >>> print(whichdb(browser))
-    MASTER
+    PRIMARY
    >>> browser.open("http://launchpad.test/+whichdb")  # GET request
     >>> print(whichdb(browser))
-    MASTER
+    PRIMARY
 
 GET and HEAD requests from other clients are unaffected though
 and use the MAIN STANDBY Store by default.
@@ -105,7 +105,7 @@ and use the MAIN STANDBY Store by default.
 
 If no more POST requests are made for 5 minutes, GET and HEAD
 requests will once again be using the MAIN STANDBY store as we
-can assume that any changes made to the master database have
+can assume that any changes made to the primary database have
 propagated to the standbys.
 
 To test this, first we need to wind forward the database policy's clock.
@@ -120,7 +120,7 @@ To test this, first we need to wind forward the database policy's clock.
 
    >>> browser.open("http://launchpad.test/+whichdb")
     >>> print(whichdb(browser))
-    MASTER
+    PRIMARY
 
     >>> dbpolicy._now = _future_now  # Install the time machine.
 
@@ -142,14 +142,14 @@ on the standbys allowing them to catch up.
     >>> dbpolicy._test_lag = timedelta(minutes=10)
    >>> anon_browser.open("http://launchpad.test/+whichdb")
     >>> print(whichdb(anon_browser))
-    MASTER
+    PRIMARY
     >>> dbpolicy._test_lag = None
 
 
 A 404 error page is shown when code raises a LookupError. If a standby
 database is being used, this might have been caused by replication lag
 if the missing data was only recently created. To fix this surprising
-error, requests are always retried using the master database before
+error, requests are always retried using the primary database before
 returning a 404 error to the user.
 
     >>> anon_browser.handleErrors = True
@@ -161,13 +161,13 @@ returning a 404 error to the user.
     STANDBY
 
     # The standby database contains no data, but we don't get
-    # a 404 page - the request is retried against the MASTER.
+    # a 404 page - the request is retried against the PRIMARY.
    >>> anon_browser.open("http://launchpad.test/~stub")
     >>> anon_browser.headers["Status"]
     '200 Ok'
 
     # 404s are still returned though if the data doesn't exist in the
-    # MASTER database either.
+    # PRIMARY database either.
    >>> anon_browser.open("http://launchpad.test/~does-not-exist")
     >>> anon_browser.headers["Status"]
     '404 Not Found'
diff --git a/lib/lp/archivepublisher/model/publisherconfig.py b/lib/lp/archivepublisher/model/publisherconfig.py
index 3f05833..e5cd108 100644
--- a/lib/lp/archivepublisher/model/publisherconfig.py
+++ b/lib/lp/archivepublisher/model/publisherconfig.py
@@ -15,7 +15,7 @@ from lp.archivepublisher.interfaces.publisherconfig import (
     IPublisherConfig,
     IPublisherConfigSet,
 )
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 
 
 @implementer(IPublisherConfig)
@@ -44,7 +44,7 @@ class PublisherConfigSet:
 
     def new(self, distribution, root_dir, base_url, copy_base_url):
         """Make and return a new `PublisherConfig`."""
-        store = IMasterStore(PublisherConfig)
+        store = IPrimaryStore(PublisherConfig)
         pubconf = PublisherConfig()
         pubconf.distribution = distribution
         pubconf.root_dir = root_dir
diff --git a/lib/lp/archivepublisher/tests/test_publish_ftpmaster.py b/lib/lp/archivepublisher/tests/test_publish_ftpmaster.py
index a5a1d0f..5f686ad 100644
--- a/lib/lp/archivepublisher/tests/test_publish_ftpmaster.py
+++ b/lib/lp/archivepublisher/tests/test_publish_ftpmaster.py
@@ -34,7 +34,7 @@ from lp.archivepublisher.scripts.publish_ftpmaster import (
 from lp.archivepublisher.tests.test_run_parts import RunPartsMixin
 from lp.registry.interfaces.pocket import PackagePublishingPocket, pocketsuffix
 from lp.registry.interfaces.series import SeriesStatus
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.log.logger import BufferLogger, DevNullLogger
 from lp.services.osutils import write_file
 from lp.services.scripts.base import LaunchpadScriptFailure
@@ -1156,7 +1156,7 @@ class TestCreateDistroSeriesIndexes(TestCaseWithFactory, HelpersMixin):
         # to publish such distributions.
         series = self.makeDistroSeriesNeedingIndexes()
         pub_config = get_pub_config(series.distribution)
-        IMasterStore(pub_config).remove(pub_config)
+        IPrimaryStore(pub_config).remove(pub_config)
         script = self.makeScript(series.distribution)
         self.assertEqual([], script.listSuitesNeedingIndexes(series))
 
diff --git a/lib/lp/bugs/doc/bugsummary.rst b/lib/lp/bugs/doc/bugsummary.rst
index 93d8b2a..83daaf4 100644
--- a/lib/lp/bugs/doc/bugsummary.rst
+++ b/lib/lp/bugs/doc/bugsummary.rst
@@ -29,12 +29,12 @@ sourcepackagename, tag, milestone, status, importance, has_patch,
 viewed_by and the count. viewed_by is a team reference and used to
 query private bug counts.
 
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
     >>> from lp.bugs.interfaces.bugtask import BugTaskStatus
     >>> from lp.bugs.model.bugsummary import BugSummary
     >>> from lp.testing import login_celebrity
     >>> me = login_celebrity("admin")
-    >>> store = IMasterStore(BugSummary)
+    >>> store = IPrimaryStore(BugSummary)
 
     >>> def name(object_or_none):
     ...     if object_or_none is None:
diff --git a/lib/lp/bugs/model/tests/test_bugsummary.py b/lib/lp/bugs/model/tests/test_bugsummary.py
index 8ad15e4..f231c35 100644
--- a/lib/lp/bugs/model/tests/test_bugsummary.py
+++ b/lib/lp/bugs/model/tests/test_bugsummary.py
@@ -17,7 +17,7 @@ from lp.bugs.model.bug import BugTag
 from lp.bugs.model.bugsummary import BugSummary, get_bugsummary_filter_for_user
 from lp.bugs.model.bugtask import BugTask
 from lp.registry.enums import SharingPermission
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.testing import TestCaseWithFactory
 from lp.testing.dbuser import switch_dbuser
 from lp.testing.layers import LaunchpadZopelessLayer
@@ -34,7 +34,7 @@ class TestBugSummary(TestCaseWithFactory):
         # but might happen from the SQL command line.
         switch_dbuser("testadmin")
 
-        self.store = IMasterStore(BugSummary)
+        self.store = IPrimaryStore(BugSummary)
 
     def getCount(self, person, **kw_find_expr):
         self._maybe_rollup()
diff --git a/lib/lp/bugs/scripts/bugtasktargetnamecaches.py b/lib/lp/bugs/scripts/bugtasktargetnamecaches.py
index 647f004..70a82d4 100644
--- a/lib/lp/bugs/scripts/bugtasktargetnamecaches.py
+++ b/lib/lp/bugs/scripts/bugtasktargetnamecaches.py
@@ -16,7 +16,7 @@ from lp.registry.model.ociproject import OCIProject
 from lp.registry.model.product import Product
 from lp.registry.model.productseries import ProductSeries
 from lp.registry.model.sourcepackagename import SourcePackageName
-from lp.services.database.interfaces import IMasterStore, IStandbyStore
+from lp.services.database.interfaces import IPrimaryStore, IStandbyStore
 from lp.services.looptuner import ITunableLoop, LoopTuner
 
 # These two tuples must be in the same order. They specify the ID
@@ -91,7 +91,7 @@ class BugTaskTargetNameCachesTunableLoop:
         chunk = self.candidates[start:end]
 
         self.transaction.begin()
-        store = IMasterStore(BugTask)
+        store = IPrimaryStore(BugTask)
 
         # Transpose the target rows into lists of object IDs to retrieve.
         ids_to_cache = list(zip(*(target for (target, names) in chunk)))
diff --git a/lib/lp/bugs/scripts/checkwatches/scheduler.py b/lib/lp/bugs/scripts/checkwatches/scheduler.py
index 7d176b9..103b9ba 100644
--- a/lib/lp/bugs/scripts/checkwatches/scheduler.py
+++ b/lib/lp/bugs/scripts/checkwatches/scheduler.py
@@ -11,7 +11,7 @@ import transaction
 
 from lp.bugs.interfaces.bugwatch import BUG_WATCH_ACTIVITY_SUCCESS_STATUSES
 from lp.bugs.model.bugwatch import BugWatch
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import sqlvalues
 from lp.services.looptuner import TunableLoop
 
@@ -36,7 +36,7 @@ class BugWatchScheduler(TunableLoop):
     ):
         super().__init__(log, abort_time)
         self.transaction = transaction
-        self.store = IMasterStore(BugWatch)
+        self.store = IPrimaryStore(BugWatch)
 
         if max_delay_days is None:
             max_delay_days = MAX_DELAY_DAYS
diff --git a/lib/lp/buildmaster/model/buildfarmjob.py b/lib/lp/buildmaster/model/buildfarmjob.py
index 160d635..2a8dc30 100644
--- a/lib/lp/buildmaster/model/buildfarmjob.py
+++ b/lib/lp/buildmaster/model/buildfarmjob.py
@@ -28,7 +28,7 @@ from lp.buildmaster.interfaces.buildfarmjob import (
 )
 from lp.buildmaster.model.buildqueue import BuildQueue
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.services.statsd.interfaces.statsd_client import IStatsdClient
 
@@ -109,7 +109,7 @@ class BuildFarmJob(Storm):
         build_farm_job = BuildFarmJob(
             job_type, status, date_created, builder, archive
         )
-        store = IMasterStore(BuildFarmJob)
+        store = IPrimaryStore(BuildFarmJob)
         store.add(build_farm_job)
         return build_farm_job
 
diff --git a/lib/lp/charms/model/charmbase.py b/lib/lp/charms/model/charmbase.py
index 959b26c..76ef34f 100644
--- a/lib/lp/charms/model/charmbase.py
+++ b/lib/lp/charms/model/charmbase.py
@@ -20,7 +20,7 @@ from lp.charms.interfaces.charmbase import (
     NoSuchCharmBase,
 )
 from lp.services.database.constants import DEFAULT
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 
 
 @implementer(ICharmBase)
@@ -123,7 +123,7 @@ class CharmBaseSet:
             pass
         else:
             raise DuplicateCharmBase(distro_series)
-        store = IMasterStore(CharmBase)
+        store = IPrimaryStore(CharmBase)
         charm_base = CharmBase(
             registrant,
             distro_series,
diff --git a/lib/lp/charms/model/charmrecipe.py b/lib/lp/charms/model/charmrecipe.py
index b2d9be5..566b61c 100644
--- a/lib/lp/charms/model/charmrecipe.py
+++ b/lib/lp/charms/model/charmrecipe.py
@@ -111,7 +111,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import (
     Greatest,
@@ -944,7 +944,7 @@ class CharmRecipeSet:
         if not self.isValidInformationType(information_type, owner, git_ref):
             raise CharmRecipePrivacyMismatch
 
-        store = IMasterStore(CharmRecipe)
+        store = IPrimaryStore(CharmRecipe)
         recipe = CharmRecipe(
             registrant,
             owner,
diff --git a/lib/lp/charms/model/charmrecipebuild.py b/lib/lp/charms/model/charmrecipebuild.py
index c7f830f..b07521e 100644
--- a/lib/lp/charms/model/charmrecipebuild.py
+++ b/lib/lp/charms/model/charmrecipebuild.py
@@ -64,7 +64,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
@@ -406,7 +406,7 @@ class CharmRecipeBuild(PackageBuildMixin, StormBase):
     def addFile(self, lfa):
         """See `ICharmRecipeBuild`."""
         charm_file = CharmFile(build=self, library_file=lfa)
-        IMasterStore(CharmFile).add(charm_file)
+        IPrimaryStore(CharmFile).add(charm_file)
         return charm_file
 
     def verifySuccessfulUpload(self) -> bool:
@@ -471,7 +471,7 @@ class CharmRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
         date_created=DEFAULT,
     ):
         """See `ICharmRecipeBuildSet`."""
-        store = IMasterStore(CharmRecipeBuild)
+        store = IPrimaryStore(CharmRecipeBuild)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
             CharmRecipeBuild.job_type, BuildStatus.NEEDSBUILD, date_created
         )
@@ -495,7 +495,7 @@ class CharmRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def getByID(self, build_id):
         """See `ISpecificBuildFarmJobSource`."""
-        store = IMasterStore(CharmRecipeBuild)
+        store = IPrimaryStore(CharmRecipeBuild)
         return store.get(CharmRecipeBuild, build_id)
 
     def getByBuildFarmJob(self, build_farm_job):
diff --git a/lib/lp/charms/model/charmrecipebuildjob.py b/lib/lp/charms/model/charmrecipebuildjob.py
index 41638e9..d923485 100644
--- a/lib/lp/charms/model/charmrecipebuildjob.py
+++ b/lib/lp/charms/model/charmrecipebuildjob.py
@@ -38,7 +38,7 @@ from lp.charms.interfaces.charmrecipebuildjob import (
 from lp.charms.mail.charmrecipebuild import CharmRecipeBuildMailer
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -136,7 +136,7 @@ class CharmRecipeBuildJobDerived(
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(CharmRecipeBuildJob).find(
+        jobs = IPrimaryStore(CharmRecipeBuildJob).find(
             CharmRecipeBuildJob,
             CharmRecipeBuildJob.job_type == cls.class_job_type,
             CharmRecipeBuildJob.job == Job.id,
diff --git a/lib/lp/charms/model/charmrecipejob.py b/lib/lp/charms/model/charmrecipejob.py
index a54e0c5..6971b4d 100644
--- a/lib/lp/charms/model/charmrecipejob.py
+++ b/lib/lp/charms/model/charmrecipejob.py
@@ -35,7 +35,7 @@ from lp.services.config import config
 from lp.services.database.bulk import load_related
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -130,7 +130,7 @@ class CharmRecipeJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(CharmRecipeJob).find(
+        jobs = IPrimaryStore(CharmRecipeJob).find(
             CharmRecipeJob,
             CharmRecipeJob.job_type == cls.class_job_type,
             CharmRecipeJob.job == Job.id,
diff --git a/lib/lp/code/model/branch.py b/lib/lp/code/model/branch.py
index 63d1b13..71c7fc9 100644
--- a/lib/lp/code/model/branch.py
+++ b/lib/lp/code/model/branch.py
@@ -134,7 +134,7 @@ from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, sqlvalues
 from lp.services.database.sqlobject import ForeignKey, IntCol, StringCol
 from lp.services.database.stormexpr import Array, ArrayAgg, ArrayIntersects
@@ -1253,7 +1253,7 @@ class Branch(SQLBase, WebhookTargetMixin, BzrIdentityMixin):
         """See `IBranch`."""
         if isinstance(revision_ids, str):
             revision_ids = [revision_ids]
-        IMasterStore(BranchRevision).find(
+        IPrimaryStore(BranchRevision).find(
             BranchRevision,
             BranchRevision.branch == self,
             BranchRevision.revision_id.is_in(
diff --git a/lib/lp/code/model/branchjob.py b/lib/lp/code/model/branchjob.py
index 083f3d7..291c1e7 100644
--- a/lib/lp/code/model/branchjob.py
+++ b/lib/lp/code/model/branchjob.py
@@ -74,7 +74,7 @@ from lp.registry.interfaces.productseries import IProductSeriesSet
 from lp.scripts.helpers import TransactionFreeOperation
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.locking import (
     AdvisoryLockHeld,
     LockType,
@@ -262,7 +262,7 @@ class BranchJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IRevisionMailJobSource`."""
-        jobs = IMasterStore(Branch).find(
+        jobs = IPrimaryStore(Branch).find(
             (BranchJob),
             And(
                 BranchJob.job_type == cls.class_job_type,
@@ -1041,7 +1041,7 @@ class RosettaUploadJob(BranchJobDerived):
     def iterReady():
         """See `IRosettaUploadJobSource`."""
         jobs = (
-            IMasterStore(BranchJob)
+            IPrimaryStore(BranchJob)
             .using(BranchJob, Job, Branch)
             .find(
                 (BranchJob),
@@ -1060,7 +1060,7 @@ class RosettaUploadJob(BranchJobDerived):
     @staticmethod
     def findUnfinishedJobs(branch, since=None):
         """See `IRosettaUploadJobSource`."""
-        store = IMasterStore(BranchJob)
+        store = IPrimaryStore(BranchJob)
         match = And(
             Job.id == BranchJob.job_id,
             BranchJob.branch == branch,
diff --git a/lib/lp/code/model/branchmergeproposal.py b/lib/lp/code/model/branchmergeproposal.py
index 20e0cea..884e943 100644
--- a/lib/lp/code/model/branchmergeproposal.py
+++ b/lib/lp/code/model/branchmergeproposal.py
@@ -84,7 +84,7 @@ from lp.services.database.bulk import load, load_referencing, load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, quote
 from lp.services.database.sqlobject import ForeignKey, IntCol, StringCol
 from lp.services.helpers import shortlist
@@ -1365,7 +1365,7 @@ class BranchMergeProposal(SQLBase, BugLinkTargetMixin):
         incremental_diff.branch_merge_proposal = self
         incremental_diff.old_revision = old_revision
         incremental_diff.new_revision = new_revision
-        IMasterStore(IncrementalDiff).add(incremental_diff)
+        IPrimaryStore(IncrementalDiff).add(incremental_diff)
         return incremental_diff
 
     def getIncrementalDiffs(self, revision_list):
diff --git a/lib/lp/code/model/branchmergeproposaljob.py b/lib/lp/code/model/branchmergeproposaljob.py
index 35c8bdf..7019c54 100644
--- a/lib/lp/code/model/branchmergeproposaljob.py
+++ b/lib/lp/code/model/branchmergeproposaljob.py
@@ -63,7 +63,7 @@ from lp.codehosting.vfs import get_ro_server
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlobject import SQLObjectNotFound
 from lp.services.database.stormbase import StormBase
 from lp.services.job.interfaces.job import JobStatus
@@ -261,7 +261,7 @@ class BranchMergeProposalJobDerived(
         """Iterate through all ready BranchMergeProposalJobs."""
         from lp.code.model.branch import Branch
 
-        jobs = IMasterStore(Branch).find(
+        jobs = IPrimaryStore(Branch).find(
             (BranchMergeProposalJob),
             And(
                 BranchMergeProposalJob.job_type == klass.class_job_type,
@@ -676,7 +676,7 @@ class BranchMergeProposalJobSource(BaseRunnableJobSource):
         ]
         if job_type is not None:
             clauses.append(BranchMergeProposalJob.job_type == job_type)
-        jobs = IMasterStore(BranchMergeProposalJob).find(
+        jobs = IPrimaryStore(BranchMergeProposalJob).find(
             (BranchMergeProposalJob, Job, BranchMergeProposal), And(*clauses)
         )
         # Order by the job status first (to get running before waiting), then
diff --git a/lib/lp/code/model/cibuild.py b/lib/lp/code/model/cibuild.py
index 3f08eb7..0629fd7 100644
--- a/lib/lp/code/model/cibuild.py
+++ b/lib/lp/code/model/cibuild.py
@@ -64,7 +64,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
@@ -580,7 +580,7 @@ class CIBuildSet(SpecificBuildFarmJobSourceMixin):
         date_created=DEFAULT,
     ):
         """See `ICIBuildSet`."""
-        store = IMasterStore(CIBuild)
+        store = IPrimaryStore(CIBuild)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
             CIBuild.job_type, BuildStatus.NEEDSBUILD, date_created
         )
@@ -736,7 +736,7 @@ class CIBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def getByID(self, build_id):
         """See `ISpecificBuildFarmJobSource`."""
-        store = IMasterStore(CIBuild)
+        store = IPrimaryStore(CIBuild)
         return store.get(CIBuild, build_id)
 
     def getByBuildFarmJob(self, build_farm_job):
diff --git a/lib/lp/code/model/directbranchcommit.py b/lib/lp/code/model/directbranchcommit.py
index 886a479..cae7d70 100644
--- a/lib/lp/code/model/directbranchcommit.py
+++ b/lib/lp/code/model/directbranchcommit.py
@@ -19,7 +19,7 @@ from breezy.transform import ROOT_PARENT
 from lp.code.errors import StaleLastMirrored
 from lp.codehosting.bzrutils import get_branch_info, get_stacked_on_url
 from lp.services.config import config
-from lp.services.database.interfaces import IMasterObject
+from lp.services.database.interfaces import IPrimaryObject
 from lp.services.mail.sendmail import format_address_for_person
 from lp.services.osutils import override_environ
 
@@ -248,7 +248,7 @@ class DirectBranchCommit:
                     self.merge_parents,
                     committer=committer_id,
                 )
-            IMasterObject(self.db_branch).branchChanged(
+            IPrimaryObject(self.db_branch).branchChanged(
                 get_stacked_on_url(self.bzrbranch),
                 None if new_rev_id is None else new_rev_id.decode(),
                 self.db_branch.control_format,
diff --git a/lib/lp/code/model/gitjob.py b/lib/lp/code/model/gitjob.py
index f37b9d5..566c728 100644
--- a/lib/lp/code/model/gitjob.py
+++ b/lib/lp/code/model/gitjob.py
@@ -35,7 +35,7 @@ from lp.code.mail.branch import BranchMailer
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.locking import (
     AdvisoryLockHeld,
     LockType,
@@ -157,7 +157,7 @@ class GitJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(GitJob).find(
+        jobs = IPrimaryStore(GitJob).find(
             GitJob,
             GitJob.job_type == cls.class_job_type,
             GitJob.job == Job.id,
diff --git a/lib/lp/code/model/revision.py b/lib/lp/code/model/revision.py
index b0738ed..9d1a631 100644
--- a/lib/lp/code/model/revision.py
+++ b/lib/lp/code/model/revision.py
@@ -47,7 +47,7 @@ from lp.registry.interfaces.projectgroup import IProjectGroup
 from lp.services.database.bulk import create
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.helpers import shortlist
 from lp.services.identity.interfaces.emailaddress import (
@@ -295,7 +295,7 @@ class RevisionSet:
         if "@" not in email_address:
             email_address = None
 
-        store = IMasterStore(RevisionAuthor)
+        store = IPrimaryStore(RevisionAuthor)
         author = RevisionAuthor(name=revision_author, email=email_address)
         store.add(author)
         author.linkToLaunchpadPerson()
@@ -357,7 +357,7 @@ class RevisionSet:
             foo@xxxxxxx (Foo Bar)
         :return: a dict of name -> RevisionAuthor
         """
-        store = IMasterStore(Revision)
+        store = IPrimaryStore(Revision)
         author_names = set(author_names)
         authors = {}
         for author in store.find(
@@ -693,7 +693,7 @@ class RevisionSet:
         """See `IRevisionSet`."""
         # Storm doesn't handle removing a limited result set:
         #    FeatureError: Can't remove a sliced result set
-        store = IMasterStore(RevisionCache)
+        store = IPrimaryStore(RevisionCache)
         epoch = datetime.now(tz=pytz.UTC) - timedelta(days=30)
         subquery = Select(
             [RevisionCache.id],
diff --git a/lib/lp/code/model/seriessourcepackagebranch.py b/lib/lp/code/model/seriessourcepackagebranch.py
index 95ab392..110df7d 100644
--- a/lib/lp/code/model/seriessourcepackagebranch.py
+++ b/lib/lp/code/model/seriessourcepackagebranch.py
@@ -20,7 +20,7 @@ from lp.code.interfaces.seriessourcepackagebranch import (
 )
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 
 
 @implementer(ISeriesSourcePackageBranch)
@@ -101,7 +101,7 @@ class SeriesSourcePackageBranchSet:
             registrant,
             date_created,
         )
-        IMasterStore(SeriesSourcePackageBranch).add(sspb)
+        IPrimaryStore(SeriesSourcePackageBranch).add(sspb)
         DistributionSourcePackageCache.updateOfficialBranches(
             distroseries.distribution, [sourcepackagename]
         )
@@ -155,7 +155,7 @@ class SeriesSourcePackageBranchSet:
         distroseries = sourcepackage.distroseries
         sourcepackagename = sourcepackage.sourcepackagename
         return (
-            IMasterStore(SeriesSourcePackageBranch)
+            IPrimaryStore(SeriesSourcePackageBranch)
             .find(
                 SeriesSourcePackageBranch,
                 SeriesSourcePackageBranch.distroseries == distroseries.id,
diff --git a/lib/lp/code/model/sourcepackagerecipe.py b/lib/lp/code/model/sourcepackagerecipe.py
index 5cfa59a..38ea8f6 100644
--- a/lib/lp/code/model/sourcepackagerecipe.py
+++ b/lib/lp/code/model/sourcepackagerecipe.py
@@ -47,7 +47,7 @@ from lp.registry.model.distroseries import DistroSeries
 from lp.services.database.bulk import load_referencing
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormexpr import Greatest, NullsLast
 from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.soyuz.model.archive import Archive
@@ -213,7 +213,7 @@ class SourcePackageRecipe(Storm):
         date_created=DEFAULT,
     ):
         """See `ISourcePackageRecipeSource.new`."""
-        store = IMasterStore(SourcePackageRecipe)
+        store = IPrimaryStore(SourcePackageRecipe)
         sprecipe = SourcePackageRecipe()
         builder_recipe, recipe_branch_type = getUtility(
             IRecipeBranchSource
@@ -264,7 +264,7 @@ class SourcePackageRecipe(Storm):
     @staticmethod
     def exists(owner, name):
         """See `ISourcePackageRecipeSource.new`."""
-        store = IMasterStore(SourcePackageRecipe)
+        store = IPrimaryStore(SourcePackageRecipe)
         recipe = store.find(
             SourcePackageRecipe,
             SourcePackageRecipe.owner == owner,
diff --git a/lib/lp/code/model/sourcepackagerecipebuild.py b/lib/lp/code/model/sourcepackagerecipebuild.py
index c550eec..11d0658 100644
--- a/lib/lp/code/model/sourcepackagerecipebuild.py
+++ b/lib/lp/code/model/sourcepackagerecipebuild.py
@@ -43,7 +43,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.soyuz.interfaces.archive import CannotUploadToArchive
 from lp.soyuz.model.archive import Archive
@@ -217,7 +217,7 @@ class SourcePackageRecipeBuild(
         duration=None,
     ):
         """See `ISourcePackageRecipeBuildSource`."""
-        store = IMasterStore(SourcePackageRecipeBuild)
+        store = IPrimaryStore(SourcePackageRecipeBuild)
         if pocket is None:
             pocket = PackagePublishingPocket.RELEASE
         if date_created is None:
@@ -307,7 +307,7 @@ class SourcePackageRecipeBuild(
     @classmethod
     def getByID(cls, build_id):
         """See `ISourcePackageRecipeBuildSource`."""
-        store = IMasterStore(SourcePackageRecipeBuild)
+        store = IPrimaryStore(SourcePackageRecipeBuild)
         return store.find(cls, cls.id == build_id).one()
 
     @classmethod
diff --git a/lib/lp/code/model/tests/test_branchjob.py b/lib/lp/code/model/tests/test_branchjob.py
index 08b9d16..2f66299 100644
--- a/lib/lp/code/model/tests/test_branchjob.py
+++ b/lib/lp/code/model/tests/test_branchjob.py
@@ -57,7 +57,7 @@ from lp.codehosting.vfs import branch_id_to_path
 from lp.scripts.helpers import TransactionFreeOperation
 from lp.services.config import config
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.features.testing import FeatureFixture
 from lp.services.identity.interfaces.emailaddress import EmailAddressStatus
 from lp.services.job.interfaces.job import JobStatus
@@ -205,7 +205,7 @@ class TestBranchScanJob(TestCaseWithFactory):
         job = BranchScanJob.create(db_branch)
 
         def mock_run(*args):
-            IMasterStore(BranchJob).execute("SELECT '" + "x" * 1000 + "'")
+            IPrimaryStore(BranchJob).execute("SELECT '" + "x" * 1000 + "'")
             raise Exception("boom")
 
         self.useFixture(
@@ -467,7 +467,7 @@ class TestRevisionsAddedJob(TestCaseWithFactory):
             except bzr_errors.NoSuchRevision:
                 revno = None
             if existing is not None:
-                branchrevision = IMasterStore(branch).find(
+                branchrevision = IPrimaryStore(branch).find(
                     BranchRevision,
                     BranchRevision.branch_id == branch.id,
                     BranchRevision.revision_id == revision.id,
diff --git a/lib/lp/oci/model/ocirecipe.py b/lib/lp/oci/model/ocirecipe.py
index 53392f4..ed46c81 100644
--- a/lib/lp/oci/model/ocirecipe.py
+++ b/lib/lp/oci/model/ocirecipe.py
@@ -94,7 +94,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormexpr import (
     Array,
     ArrayAgg,
@@ -930,7 +930,7 @@ class OCIRecipeSet:
         if build_path is None:
             build_path = "."
 
-        store = IMasterStore(OCIRecipe)
+        store = IPrimaryStore(OCIRecipe)
         oci_recipe = OCIRecipe(
             name,
             registrant,
diff --git a/lib/lp/oci/model/ocirecipebuild.py b/lib/lp/oci/model/ocirecipebuild.py
index e62d8b8..a3aa27e 100644
--- a/lib/lp/oci/model/ocirecipebuild.py
+++ b/lib/lp/oci/model/ocirecipebuild.py
@@ -60,7 +60,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
@@ -437,7 +437,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase):
         oci_file = OCIFile(
             build=self, library_file=lfa, layer_file_digest=layer_file_digest
         )
-        IMasterStore(OCIFile).add(oci_file)
+        IPrimaryStore(OCIFile).add(oci_file)
         return oci_file
 
     @cachedproperty
@@ -572,7 +572,7 @@ class OCIRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
             or recipe.require_virtualized
         )
 
-        store = IMasterStore(OCIRecipeBuild)
+        store = IPrimaryStore(OCIRecipeBuild)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
             OCIRecipeBuild.job_type, BuildStatus.NEEDSBUILD, date_created
         )
@@ -605,7 +605,7 @@ class OCIRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def getByID(self, build_id):
         """See `ISpecificBuildFarmJobSource`."""
-        store = IMasterStore(OCIRecipeBuild)
+        store = IPrimaryStore(OCIRecipeBuild)
         return store.get(OCIRecipeBuild, build_id)
 
     def getByBuildFarmJob(self, build_farm_job):
diff --git a/lib/lp/oci/model/ocirecipejob.py b/lib/lp/oci/model/ocirecipejob.py
index f628f43..735e516 100644
--- a/lib/lp/oci/model/ocirecipejob.py
+++ b/lib/lp/oci/model/ocirecipejob.py
@@ -37,7 +37,7 @@ from lp.services.config import config
 from lp.services.database.bulk import load_related
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -126,7 +126,7 @@ class OCIRecipeJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(OCIRecipeJob).find(
+        jobs = IPrimaryStore(OCIRecipeJob).find(
             OCIRecipeJob,
             OCIRecipeJob.job_type == cls.class_job_type,
             OCIRecipeJob.job == Job.id,
diff --git a/lib/lp/registry/doc/person.rst b/lib/lp/registry/doc/person.rst
index 33d1811..f90db65 100644
--- a/lib/lp/registry/doc/person.rst
+++ b/lib/lp/registry/doc/person.rst
@@ -146,8 +146,8 @@ using the createPersonAndEmail() method.
     <DBItem AccountStatus.NOACCOUNT...
 
     >>> from lp.services.identity.model.account import Account
-    >>> from lp.services.database.interfaces import IMasterStore
-    >>> account = IMasterStore(Account).get(Account, p.accountID)
+    >>> from lp.services.database.interfaces import IPrimaryStore
+    >>> account = IPrimaryStore(Account).get(Account, p.accountID)
     >>> account.reactivate("Activated by doc test.")
     >>> p.account_status
     <DBItem AccountStatus.ACTIVE...
diff --git a/lib/lp/registry/model/announcement.py b/lib/lp/registry/model/announcement.py
index fbb6a7b..82d4be6 100644
--- a/lib/lp/registry/model/announcement.py
+++ b/lib/lp/registry/model/announcement.py
@@ -22,7 +22,7 @@ from lp.registry.interfaces.person import validate_public_person
 from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.projectgroup import IProjectGroup
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.utils import utc_now
 
@@ -91,7 +91,7 @@ class Announcement(StormBase):
         self.distribution = distribution
 
     def destroySelf(self):
-        IMasterStore(self).remove(self)
+        IPrimaryStore(self).remove(self)
 
     def modify(self, title, summary, url):
         title = str(title) if title is not None else None
@@ -260,7 +260,7 @@ class MakesAnnouncements(HasAnnouncements):
             projectgroup=projectgroup,
             distribution=distribution,
         )
-        store = IMasterStore(Announcement)
+        store = IPrimaryStore(Announcement)
         store.add(announcement)
         store.flush()
 
diff --git a/lib/lp/registry/model/distroseriesdifference.py b/lib/lp/registry/model/distroseriesdifference.py
index 6921c07..d7b942a 100644
--- a/lib/lp/registry/model/distroseriesdifference.py
+++ b/lib/lp/registry/model/distroseriesdifference.py
@@ -47,7 +47,7 @@ from lp.registry.model.teammembership import TeamParticipation
 from lp.services.database import bulk
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlobject import StringCol
 from lp.services.database.stormbase import StormBase
 from lp.services.messages.model.message import Message, MessageChunk
@@ -403,7 +403,7 @@ class DistroSeriesDifference(StormBase):
         if dsp is None:
             raise NotADerivedSeriesError()
 
-        store = IMasterStore(DistroSeriesDifference)
+        store = IPrimaryStore(DistroSeriesDifference)
         diff = DistroSeriesDifference()
         diff.derived_series = derived_series
         diff.parent_series = parent_series
diff --git a/lib/lp/registry/model/distroseriesdifferencecomment.py b/lib/lp/registry/model/distroseriesdifferencecomment.py
index 103c522..d882fa0 100644
--- a/lib/lp/registry/model/distroseriesdifferencecomment.py
+++ b/lib/lp/registry/model/distroseriesdifferencecomment.py
@@ -17,7 +17,7 @@ from lp.registry.interfaces.distroseriesdifferencecomment import (
     IDistroSeriesDifferenceCommentSource,
 )
 from lp.registry.model.sourcepackagename import SourcePackageName
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.messages.model.message import Message, MessageChunk
 
 
@@ -72,7 +72,7 @@ class DistroSeriesDifferenceComment(Storm):
         )
         MessageChunk(message=message, content=comment, sequence=1)
 
-        store = IMasterStore(DistroSeriesDifferenceComment)
+        store = IPrimaryStore(DistroSeriesDifferenceComment)
         dsd_comment = DistroSeriesDifferenceComment()
         dsd_comment.distro_series_difference = distro_series_difference
         dsd_comment.message = message
diff --git a/lib/lp/registry/model/distroseriesparent.py b/lib/lp/registry/model/distroseriesparent.py
index 7ceb1de..94adc35 100644
--- a/lib/lp/registry/model/distroseriesparent.py
+++ b/lib/lp/registry/model/distroseriesparent.py
@@ -17,7 +17,7 @@ from lp.registry.interfaces.distroseriesparent import (
 )
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 
 
 @implementer(IDistroSeriesParent)
@@ -67,7 +67,7 @@ class DistroSeriesParentSet:
         ordering=1,
     ):
         """Make and return a new `DistroSeriesParent`."""
-        store = IMasterStore(DistroSeriesParent)
+        store = IPrimaryStore(DistroSeriesParent)
         dsp = DistroSeriesParent()
         dsp.derived_series = derived_series
         dsp.parent_series = parent_series
diff --git a/lib/lp/registry/model/mailinglist.py b/lib/lp/registry/model/mailinglist.py
index 7b8d5a5..9c9831a 100644
--- a/lib/lp/registry/model/mailinglist.py
+++ b/lib/lp/registry/model/mailinglist.py
@@ -59,7 +59,7 @@ from lp.services.config import config
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import Concatenate
 from lp.services.identity.interfaces.account import AccountStatus
@@ -808,7 +808,7 @@ class MailingListSet:
             Person.id == MailingList.team_id,
             EmailAddress.email.endswith(old_suffix),
         ]
-        addresses = IMasterStore(EmailAddress).find(
+        addresses = IPrimaryStore(EmailAddress).find(
             EmailAddress,
             EmailAddress.id.is_in(Select(EmailAddress.id, And(*clauses))),
         )
@@ -876,10 +876,10 @@ class MessageApprovalSet:
 
     def getHeldMessagesWithStatus(self, status):
         """See `IMessageApprovalSet`."""
-        # Use the master store as the messages will also be acknowledged and
+        # Use the primary store as the messages will also be acknowledged and
         # we want to make sure we are acknowledging the same messages that we
         # iterate over.
-        return IMasterStore(MessageApproval).find(
+        return IPrimaryStore(MessageApproval).find(
             (Message.rfc822msgid, Person.name),
             MessageApproval.status == status,
             MessageApproval.message == Message.id,
@@ -900,7 +900,7 @@ class MessageApprovalSet:
             next_state = transitions[status]
         except KeyError:
             raise AssertionError("Not an acknowledgeable state: %s" % status)
-        approvals = IMasterStore(MessageApproval).find(
+        approvals = IPrimaryStore(MessageApproval).find(
             MessageApproval, MessageApproval.status == status
         )
         approvals.set(status=next_state)
diff --git a/lib/lp/registry/model/ociproject.py b/lib/lp/registry/model/ociproject.py
index 1bdda8b..a686b17 100644
--- a/lib/lp/registry/model/ociproject.py
+++ b/lib/lp/registry/model/ociproject.py
@@ -50,7 +50,7 @@ from lp.registry.model.ociprojectseries import OCIProjectSeries
 from lp.registry.model.person import Person
 from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 
 
@@ -418,7 +418,7 @@ class OCIProjectSet:
         """See `IOCIProjectSet`."""
         if isinstance(name, str):
             name = getUtility(IOCIProjectNameSet).getOrCreateByName(name)
-        store = IMasterStore(OCIProject)
+        store = IPrimaryStore(OCIProject)
         target = OCIProject()
         target.date_created = date_created
         target.date_last_modified = date_created
diff --git a/lib/lp/registry/model/ociprojectname.py b/lib/lp/registry/model/ociprojectname.py
index a35ea93..7117d6a 100644
--- a/lib/lp/registry/model/ociprojectname.py
+++ b/lib/lp/registry/model/ociprojectname.py
@@ -18,7 +18,7 @@ from lp.registry.interfaces.ociprojectname import (
     IOCIProjectName,
     IOCIProjectNameSet,
 )
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 
 
@@ -61,7 +61,7 @@ class OCIProjectNameSet:
 
     def new(self, name):
         """See `IOCIProjectNameSet`."""
-        store = IMasterStore(OCIProjectName)
+        store = IPrimaryStore(OCIProjectName)
         project_name = OCIProjectName(name=name)
         store.add(project_name)
         return project_name
diff --git a/lib/lp/registry/model/persontransferjob.py b/lib/lp/registry/model/persontransferjob.py
index 78bf6a9..bf6bb53 100644
--- a/lib/lp/registry/model/persontransferjob.py
+++ b/lib/lp/registry/model/persontransferjob.py
@@ -53,7 +53,7 @@ from lp.registry.scripts.closeaccount import close_account
 from lp.services.config import config
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -154,7 +154,7 @@ class PersonTransferJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """Iterate through all ready PersonTransferJobs."""
-        store = IMasterStore(PersonTransferJob)
+        store = IPrimaryStore(PersonTransferJob)
         jobs = store.find(
             PersonTransferJob,
             And(
diff --git a/lib/lp/registry/model/productjob.py b/lib/lp/registry/model/productjob.py
index 107ffac..2b45425 100644
--- a/lib/lp/registry/model/productjob.py
+++ b/lib/lp/registry/model/productjob.py
@@ -48,7 +48,7 @@ from lp.registry.model.product import Product
 from lp.services.config import config
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import Job
 from lp.services.job.runner import BaseRunnableJob
@@ -191,7 +191,7 @@ class ProductJobDerived(BaseRunnableJob):
     @classmethod
     def iterReady(cls):
         """Iterate through all ready ProductJobs."""
-        store = IMasterStore(ProductJob)
+        store = IPrimaryStore(ProductJob)
         jobs = store.find(
             ProductJob,
             And(
diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py
index bf12cb6..fce1289 100644
--- a/lib/lp/registry/scripts/closeaccount.py
+++ b/lib/lp/registry/scripts/closeaccount.py
@@ -31,7 +31,7 @@ from lp.registry.model.productrelease import ProductRelease, ProductReleaseFile
 from lp.registry.model.productseries import ProductSeries
 from lp.services.database import postgresql
 from lp.services.database.constants import DEFAULT
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import cursor
 from lp.services.identity.interfaces.account import (
     AccountCreationRationale,
@@ -51,7 +51,7 @@ def close_account(username, log):
 
     Return True on success, or log an error message and return False
     """
-    store = IMasterStore(Person)
+    store = IPrimaryStore(Person)
     janitor = getUtility(ILaunchpadCelebrities).janitor
 
     cur = cursor()
diff --git a/lib/lp/registry/scripts/teamparticipation.py b/lib/lp/registry/scripts/teamparticipation.py
index c41a29f..0e23a1d 100644
--- a/lib/lp/registry/scripts/teamparticipation.py
+++ b/lib/lp/registry/scripts/teamparticipation.py
@@ -18,7 +18,7 @@ import transaction
 
 from lp.registry.interfaces.teammembership import ACTIVE_STATES
 from lp.registry.model.teammembership import TeamParticipation
-from lp.services.database.interfaces import IMasterStore, IStandbyStore
+from lp.services.database.interfaces import IPrimaryStore, IStandbyStore
 from lp.services.database.sqlbase import quote, sqlvalues
 from lp.services.scripts.base import LaunchpadScriptFailure
 
@@ -207,7 +207,7 @@ def fix_teamparticipation_consistency(log, errors):
          WHERE team = %(team)s
            AND person IN %(people)s
         """
-    store = IMasterStore(TeamParticipation)
+    store = IPrimaryStore(TeamParticipation)
     for error in errors:
         if error.type == "missing":
             for person in error.people:
diff --git a/lib/lp/registry/tests/test_personset.py b/lib/lp/registry/tests/test_personset.py
index e4e4548..1469c89 100644
--- a/lib/lp/registry/tests/test_personset.py
+++ b/lib/lp/registry/tests/test_personset.py
@@ -39,7 +39,7 @@ from lp.registry.interfaces.ssh import SSHKeyAdditionError, SSHKeyType
 from lp.registry.model.codeofconduct import SignedCodeOfConduct
 from lp.registry.model.person import Person
 from lp.scripts.garbo import PopulateLatestPersonSourcePackageReleaseCache
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import cursor, flush_database_caches
 from lp.services.identity.interfaces.account import (
     AccountCreationRationale,
@@ -333,7 +333,7 @@ class TestPersonSetCreateByOpenId(TestCaseWithFactory):
     def setUp(self):
         super().setUp()
         self.person_set = getUtility(IPersonSet)
-        self.store = IMasterStore(Account)
+        self.store = IPrimaryStore(Account)
 
         # Generate some valid test data.
         self.account = self.makeAccount()
diff --git a/lib/lp/registry/tests/test_team.py b/lib/lp/registry/tests/test_team.py
index 3d6ed85..91ac467 100644
--- a/lib/lp/registry/tests/test_team.py
+++ b/lib/lp/registry/tests/test_team.py
@@ -25,7 +25,7 @@ from lp.registry.interfaces.mailinglist import MailingListStatus
 from lp.registry.interfaces.person import IPersonSet, ITeamPublic
 from lp.registry.interfaces.teammembership import TeamMembershipStatus
 from lp.registry.model.persontransferjob import PersonTransferJob
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.features.testing import FeatureFixture
 from lp.services.identity.interfaces.emailaddress import IEmailAddressSet
 from lp.services.identity.model.emailaddress import EmailAddress
@@ -64,7 +64,7 @@ class TestTeamContactAddress(TestCaseWithFactory):
 
         self.team = self.factory.makeTeam(name="alpha")
         self.address = self.factory.makeEmail("team@xxxxxxxxxxx", self.team)
-        self.store = IMasterStore(self.address)
+        self.store = IPrimaryStore(self.address)
 
     def test_setContactAddress_from_none(self):
         self.team.setContactAddress(self.address)
diff --git a/lib/lp/scripts/garbo.py b/lib/lp/scripts/garbo.py
index 3b974a8..4531f82 100644
--- a/lib/lp/scripts/garbo.py
+++ b/lib/lp/scripts/garbo.py
@@ -76,7 +76,7 @@ from lp.services.config import config
 from lp.services.database import postgresql
 from lp.services.database.bulk import create, dbify_value, load_related
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import (
     convert_storm_clause_to_string,
     cursor,
@@ -160,7 +160,7 @@ ONE_DAY_IN_SECONDS = 24 * 60 * 60
 def load_garbo_job_state(job_name):
     # Load the json state data for the given job name.
     job_data = (
-        IMasterStore(Person)
+        IPrimaryStore(Person)
         .execute(
             "SELECT json_data FROM GarboJobState WHERE name = ?",
             params=(six.ensure_text(job_name),),
@@ -174,7 +174,7 @@ def load_garbo_job_state(job_name):
 
 def save_garbo_job_state(job_name, job_data):
     # Save the json state data for the given job name.
-    store = IMasterStore(Person)
+    store = IPrimaryStore(Person)
     json_data = json.dumps(job_data, ensure_ascii=False)
     result = store.execute(
         "UPDATE GarboJobState SET json_data = ? WHERE name = ?",
@@ -230,11 +230,11 @@ class BulkPruner(TunableLoop):
     maximum_chunk_size = 10000
 
     def getStore(self):
-        """The master Store for the table we are pruning.
+        """The primary Store for the table we are pruning.
 
         May be overridden.
         """
-        return IMasterStore(self.target_table_class)
+        return IPrimaryStore(self.target_table_class)
 
     _unique_counter = 0
 
@@ -449,7 +449,7 @@ class BugSummaryJournalRollup(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(Bug)
+        self.store = IPrimaryStore(Bug)
 
     def isDone(self):
         has_more = self.store.execute(
@@ -478,7 +478,7 @@ class PopulateDistributionSourcePackageCache(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(DistributionSourcePackageCache)
+        self.store = IPrimaryStore(DistributionSourcePackageCache)
         # Keep a record of the processed source publication ID so we know
         # where the job got up to.
         self.last_spph_id = 0
@@ -603,7 +603,7 @@ class PopulateLatestPersonSourcePackageReleaseCache(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(LatestPersonSourcePackageReleaseCache)
+        self.store = IPrimaryStore(LatestPersonSourcePackageReleaseCache)
         # Keep a record of the processed source package release id and data
         # type (creator or maintainer) so we know where the job got up to.
         self.last_spph_id = 0
@@ -852,7 +852,7 @@ class OpenIDConsumerNoncePruner(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(OpenIDConsumerNonce)
+        self.store = IPrimaryStore(OpenIDConsumerNonce)
         self.earliest_timestamp = self.store.find(
             Min(OpenIDConsumerNonce.timestamp)
         ).one()
@@ -893,7 +893,7 @@ class OpenIDConsumerAssociationPruner(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(OpenIDConsumerNonce)
+        self.store = IPrimaryStore(OpenIDConsumerNonce)
 
     def __call__(self, chunksize):
         result = self.store.execute(
@@ -923,7 +923,7 @@ class RevisionCachePruner(TunableLoop):
     def isDone(self):
         """We are done when there are no old revisions to delete."""
         epoch = datetime.now(pytz.UTC) - timedelta(days=30)
-        store = IMasterStore(RevisionCache)
+        store = IPrimaryStore(RevisionCache)
         results = store.find(
             RevisionCache, RevisionCache.revision_date < epoch
         )
@@ -987,8 +987,8 @@ class RevisionAuthorEmailLinker(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.author_store = IMasterStore(RevisionAuthor)
-        self.email_store = IMasterStore(EmailAddress)
+        self.author_store = IPrimaryStore(RevisionAuthor)
+        self.email_store = IPrimaryStore(EmailAddress)
 
         (self.min_author_id, self.max_author_id) = self.author_store.find(
             (Min(RevisionAuthor.id), Max(RevisionAuthor.id))
@@ -1052,7 +1052,7 @@ class PersonPruner(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.offset = 1
-        self.store = IMasterStore(Person)
+        self.store = IPrimaryStore(Person)
         self.log.debug("Creating LinkedPeople temporary table.")
         self.store.execute(
             "CREATE TEMPORARY TABLE LinkedPeople(person integer primary key)"
@@ -1348,7 +1348,7 @@ class WebhookJobPruner(TunableLoop):
     @property
     def old_jobs(self):
         return (
-            IMasterStore(WebhookJob)
+            IPrimaryStore(WebhookJob)
             .using(WebhookJob, Job)
             .find(
                 (WebhookJob.job_id,),
@@ -1380,7 +1380,7 @@ class BugHeatUpdater(TunableLoop):
         self.is_done = False
         self.offset = 0
 
-        self.store = IMasterStore(Bug)
+        self.store = IPrimaryStore(Bug)
 
     @property
     def _outdated_bugs(self):
@@ -1415,7 +1415,7 @@ class BugHeatUpdater(TunableLoop):
         # Storm Bug #820290.
         outdated_bug_ids = [bug.id for bug in outdated_bugs]
         self.log.debug("Updating heat for %s bugs", len(outdated_bug_ids))
-        IMasterStore(Bug).find(Bug, Bug.id.is_in(outdated_bug_ids)).set(
+        IPrimaryStore(Bug).find(Bug, Bug.id.is_in(outdated_bug_ids)).set(
             heat=SQL("calculate_bug_heat(Bug.id)"), heat_last_updated=UTC_NOW
         )
         transaction.commit()
@@ -1582,7 +1582,7 @@ class UnusedPOTMsgSetPruner(TunableLoop):
             """
             % constraints
         )
-        store = IMasterStore(POTMsgSet)
+        store = IPrimaryStore(POTMsgSet)
         results = store.execute(query)
         ids_to_remove = {id for (id,) in results.get_all()}
         return list(ids_to_remove)
@@ -1595,7 +1595,7 @@ class UnusedPOTMsgSetPruner(TunableLoop):
         msgset_ids = self.msgset_ids_to_remove[self.offset :][:chunk_size]
         msgset_ids_to_remove = self._get_msgset_ids_to_remove(msgset_ids)
         # Remove related TranslationTemplateItems.
-        store = IMasterStore(POTMsgSet)
+        store = IPrimaryStore(POTMsgSet)
         related_ttis = store.find(
             TranslationTemplateItem,
             In(TranslationTemplateItem.potmsgsetID, msgset_ids_to_remove),
@@ -1620,7 +1620,7 @@ class UnusedProductAccessPolicyPruner(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(Product)
+        self.store = IPrimaryStore(Product)
 
     def findProducts(self):
         return self.store.find(Product, Product.id >= self.start_at).order_by(
@@ -1646,7 +1646,7 @@ class UnusedDistributionAccessPolicyPruner(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(Distribution)
+        self.store = IPrimaryStore(Distribution)
 
     def findDistributions(self):
         return self.store.find(
@@ -1672,7 +1672,7 @@ class ProductVCSPopulator(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(Product)
+        self.store = IPrimaryStore(Product)
 
     def findProducts(self):
         products = self.store.find(
@@ -1788,7 +1788,7 @@ class GitRepositoryPruner(TunableLoop):
 
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
-        self.store = IMasterStore(GitRepository)
+        self.store = IPrimaryStore(GitRepository)
 
     def findRepositories(self):
         min_date = UTC_NOW - Cast(self.repository_creation_timeout, "interval")
@@ -1999,7 +1999,7 @@ class PopulateSnapBuildStoreRevision(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(SnapBuild)
+        self.store = IPrimaryStore(SnapBuild)
 
     def findSnapBuilds(self):
         origin = [
@@ -2057,7 +2057,7 @@ class ArchiveArtifactoryColumnsPopulator(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(Archive)
+        self.store = IPrimaryStore(Archive)
 
     def findArchives(self):
         return self.store.find(
@@ -2103,7 +2103,7 @@ class SourcePackagePublishingHistoryFormatPopulator(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(SourcePackagePublishingHistory)
+        self.store = IPrimaryStore(SourcePackagePublishingHistory)
 
     def findPublications(self):
         return self.store.find(
@@ -2146,7 +2146,7 @@ class BinaryPackagePublishingHistoryFormatPopulator(TunableLoop):
     def __init__(self, log, abort_time=None):
         super().__init__(log, abort_time)
         self.start_at = 1
-        self.store = IMasterStore(BinaryPackagePublishingHistory)
+        self.store = IPrimaryStore(BinaryPackagePublishingHistory)
 
     def findPublications(self):
         return self.store.find(
diff --git a/lib/lp/scripts/harness.py b/lib/lp/scripts/harness.py
index 4bf8084..a9ccdd8 100644
--- a/lib/lp/scripts/harness.py
+++ b/lib/lp/scripts/harness.py
@@ -38,7 +38,7 @@ from lp.registry.model.person import Person
 from lp.registry.model.product import Product
 from lp.registry.model.projectgroup import ProjectGroup
 from lp.services.config import dbconfig
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.scripts import execute_zcml_for_scripts
 from lp.services.webapp import canonical_url
 from lp.testing.factory import LaunchpadObjectFactory
@@ -69,7 +69,7 @@ def _get_locals():
     if startup:
         with open(startup) as f:
             exec(f.read(), globals())
-    store = IMasterStore(Person)
+    store = IPrimaryStore(Person)
 
     if dbuser == "launchpad":
         # Create a few variables "in case they come in handy."
diff --git a/lib/lp/scripts/tests/test_garbo.py b/lib/lp/scripts/tests/test_garbo.py
index b33159e..dbf67dd 100644
--- a/lib/lp/scripts/tests/test_garbo.py
+++ b/lib/lp/scripts/tests/test_garbo.py
@@ -94,7 +94,7 @@ from lp.services.database.constants import (
     THIRTY_DAYS_AGO,
     UTC_NOW,
 )
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.features.model import FeatureFlag
 from lp.services.features.testing import FeatureFixture
 from lp.services.identity.interfaces.account import AccountStatus
@@ -198,7 +198,7 @@ class TestBulkPruner(TestCase):
     def setUp(self):
         super().setUp()
 
-        self.store = IMasterStore(CommercialSubscription)
+        self.store = IPrimaryStore(CommercialSubscription)
         self.store.execute("CREATE TABLE BulkFoo (id serial PRIMARY KEY)")
 
         for i in range(10):
@@ -269,7 +269,7 @@ class TestSessionPruner(TestCase):
 
         # Session database isn't reset between tests. We need to do this
         # manually.
-        nuke_all_sessions = IMasterStore(SessionData).find(SessionData).remove
+        nuke_all_sessions = IPrimaryStore(SessionData).find(SessionData).remove
         nuke_all_sessions()
         self.addCleanup(nuke_all_sessions)
 
@@ -290,7 +290,7 @@ class TestSessionPruner(TestCase):
         session_data = SessionData()
         session_data.client_id = client_id
         session_data.last_accessed = accessed
-        IMasterStore(SessionData).add(session_data)
+        IPrimaryStore(SessionData).add(session_data)
 
         if authenticated:
             # Add login time information.
@@ -299,7 +299,7 @@ class TestSessionPruner(TestCase):
             session_pkg_data.product_id = "launchpad.authenticateduser"
             session_pkg_data.key = "logintime"
             session_pkg_data.pickle = b"value is ignored"
-            IMasterStore(SessionPkgData).add(session_pkg_data)
+            IPrimaryStore(SessionPkgData).add(session_pkg_data)
 
             # Add authenticated as information.
             session_pkg_data = SessionPkgData()
@@ -309,10 +309,10 @@ class TestSessionPruner(TestCase):
             # Normally Account.id, but the session pruning works
             # at the SQL level and doesn't unpickle anything.
             session_pkg_data.pickle = authenticated
-            IMasterStore(SessionPkgData).add(session_pkg_data)
+            IPrimaryStore(SessionPkgData).add(session_pkg_data)
 
     def sessionExists(self, client_id):
-        store = IMasterStore(SessionData)
+        store = IPrimaryStore(SessionData)
         return not store.find(
             SessionData, SessionData.client_id == client_id
         ).is_empty()
@@ -336,7 +336,7 @@ class TestSessionPruner(TestCase):
         }
 
         found_sessions = set(
-            IMasterStore(SessionData).find(SessionData.client_id)
+            IPrimaryStore(SessionData).find(SessionData.client_id)
         )
 
         self.assertEqual(expected_sessions, found_sessions)
@@ -360,7 +360,7 @@ class TestSessionPruner(TestCase):
         }
 
         found_sessions = set(
-            IMasterStore(SessionData).find(SessionData.client_id)
+            IPrimaryStore(SessionData).find(SessionData.client_id)
         )
 
         self.assertEqual(expected_sessions, found_sessions)
@@ -406,7 +406,7 @@ class TestSessionPruner(TestCase):
             pruner.cleanUp()
 
         found_sessions = set(
-            IMasterStore(SessionData).find(SessionData.client_id)
+            IPrimaryStore(SessionData).find(SessionData.client_id)
         )
 
         self.assertEqual(expected_sessions, found_sessions)
@@ -482,7 +482,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         ]
         switch_dbuser("testadmin")
 
-        store = IMasterStore(OpenIDConsumerNonce)
+        store = IPrimaryStore(OpenIDConsumerNonce)
 
         # Make sure we start with 0 nonces.
         self.assertEqual(store.find(OpenIDConsumerNonce).count(), 0)
@@ -497,7 +497,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Run the garbage collector.
         self.runFrequently(maximum_chunk_size=60)  # 1 minute maximum chunks.
 
-        store = IMasterStore(OpenIDConsumerNonce)
+        store = IPrimaryStore(OpenIDConsumerNonce)
 
         # We should now have 2 nonces.
         self.assertEqual(store.find(OpenIDConsumerNonce).count(), 2)
@@ -510,7 +510,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
 
     def test_CodeImportResultPruner(self):
         now = datetime.now(UTC)
-        store = IMasterStore(CodeImportResult)
+        store = IPrimaryStore(CodeImportResult)
 
         results_to_keep_count = config.codeimport.consecutive_failure_limit - 1
 
@@ -541,21 +541,21 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
 
         # Nothing is removed, because we always keep the
         # ``results_to_keep_count`` latest.
-        store = IMasterStore(CodeImportResult)
+        store = IPrimaryStore(CodeImportResult)
         self.assertEqual(
             results_to_keep_count, store.find(CodeImportResult).count()
         )
 
         new_code_import_result(now - timedelta(days=31))
         self.runDaily()
-        store = IMasterStore(CodeImportResult)
+        store = IPrimaryStore(CodeImportResult)
         self.assertEqual(
             results_to_keep_count, store.find(CodeImportResult).count()
         )
 
         new_code_import_result(now - timedelta(days=29))
         self.runDaily()
-        store = IMasterStore(CodeImportResult)
+        store = IPrimaryStore(CodeImportResult)
         self.assertEqual(
             results_to_keep_count, store.find(CodeImportResult).count()
         )
@@ -570,7 +570,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
 
     def test_CodeImportEventPruner(self):
         now = datetime.now(UTC)
-        store = IMasterStore(CodeImportResult)
+        store = IPrimaryStore(CodeImportResult)
 
         switch_dbuser("testadmin")
         machine = self.factory.makeCodeImportMachine()
@@ -606,7 +606,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         pruner = OpenIDConsumerAssociationPruner
         table_name = pruner.table_name
         switch_dbuser("testadmin")
-        store = IMasterStore(CommercialSubscription)
+        store = IPrimaryStore(CommercialSubscription)
         now = time.time()
         # Create some associations in the past with lifetimes
         for delta in range(0, 20):
@@ -635,7 +635,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         self.runFrequently()
 
         switch_dbuser("testadmin")
-        store = IMasterStore(CommercialSubscription)
+        store = IPrimaryStore(CommercialSubscription)
         # Confirm all the rows we know should have been expired have
         # been expired. These are the ones that would be expired using
         # the test start time as 'now'.
@@ -710,7 +710,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         switch_dbuser("testadmin")
         diff_id = removeSecurityProxy(self.factory.makeDiff()).id
         self.runDaily()
-        store = IMasterStore(Diff)
+        store = IPrimaryStore(Diff)
         self.assertContentEqual([], store.find(Diff, Diff.id == diff_id))
 
     def test_RevisionAuthorEmailLinker(self):
@@ -825,7 +825,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
                 reason_body="Whatever",
             )
 
-        store = IMasterStore(BugNotification)
+        store = IPrimaryStore(BugNotification)
 
         # Ensure we are at a known starting point.
         num_unsent = store.find(
@@ -869,7 +869,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Garbo should remove answer contacts for accounts with given 'status'
         # which was set more than 'interval' days ago.
         switch_dbuser("testadmin")
-        store = IMasterStore(AnswerContact)
+        store = IPrimaryStore(AnswerContact)
 
         person = self.factory.makePerson()
         person.addLanguage(getUtility(ILanguageSet)["en"])
@@ -933,7 +933,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
     def test_BranchJobPruner(self):
         # Garbo should remove jobs completed over 30 days ago.
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         db_branch = self.factory.makeAnyBranch()
         db_branch.branch_format = BranchFormat.BZR_BRANCH_5
@@ -959,7 +959,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Check to make sure the garbo doesn't remove jobs that aren't more
         # than thirty days old.
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         db_branch = self.factory.makeAnyBranch(
             branch_format=BranchFormat.BZR_BRANCH_5,
@@ -985,7 +985,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
     def test_GitJobPruner(self):
         # Garbo should remove jobs completed over 30 days ago.
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         db_repository = self.factory.makeGitRepository()
         Store.of(db_repository).flush()
@@ -1009,7 +1009,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Check to make sure the garbo doesn't remove jobs that aren't more
         # than thirty days old.
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         db_repository = self.factory.makeGitRepository()
 
@@ -1028,7 +1028,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Garbo removes jobs completed over 30 days ago.
         self.useFixture(FeatureFixture(SNAP_TESTING_FLAGS))
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         snapbuild = self.factory.makeSnapBuild()
         snapbuild_job = SnapStoreUploadJob.create(snapbuild)
@@ -1046,7 +1046,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Garbo doesn't remove jobs under thirty days old.
         self.useFixture(FeatureFixture(SNAP_TESTING_FLAGS))
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         snapbuild = self.factory.makeSnapBuild()
         snapbuild_job = SnapStoreUploadJob.create(snapbuild)
@@ -1070,7 +1070,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Garbo doesn't remove the most recent job for a build.
         self.useFixture(FeatureFixture(SNAP_TESTING_FLAGS))
         switch_dbuser("testadmin")
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
 
         snapbuild = self.factory.makeSnapBuild()
         snapbuild_job = SnapStoreUploadJob.create(snapbuild)
@@ -1087,7 +1087,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
         # Garbo removes files that haven't been used in 7 days
         switch_dbuser("testadmin")
-        store = IMasterStore(OCIFile)
+        store = IPrimaryStore(OCIFile)
         ocifile = self.factory.makeOCIFile()
         removeSecurityProxy(ocifile).date_last_used = THIRTY_DAYS_AGO
         self.assertEqual(1, store.find(OCIFile).count())
@@ -1101,7 +1101,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
         # Garbo removes files that haven't been used in 7 days
         switch_dbuser("testadmin")
-        store = IMasterStore(OCIFile)
+        store = IPrimaryStore(OCIFile)
         self.factory.makeOCIFile()
         self.assertEqual(1, store.find(OCIFile).count())
 
@@ -1114,7 +1114,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
         # Garbo removes files that haven't been used in 7 days
         switch_dbuser("testadmin")
-        store = IMasterStore(OCIFile)
+        store = IPrimaryStore(OCIFile)
         ocifile = self.factory.makeOCIFile()
         removeSecurityProxy(ocifile).date_last_used = THIRTY_DAYS_AGO
         self.factory.makeOCIFile()
@@ -1129,7 +1129,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # Garbo removes GitRepository with status = CREATING for too long.
         self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: "on"}))
         switch_dbuser("testadmin")
-        store = IMasterStore(GitRepository)
+        store = IPrimaryStore(GitRepository)
         now = datetime.now(UTC)
         recently = now - timedelta(minutes=2)
         long_ago = now - timedelta(minutes=65)
@@ -1279,7 +1279,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         self.runDaily()
 
         (count,) = (
-            IMasterStore(CommercialSubscription)
+            IPrimaryStore(CommercialSubscription)
             .execute(
                 """
             SELECT count(*)
@@ -1295,7 +1295,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
 
     def test_BugSummaryJournalRollup(self):
         switch_dbuser("testadmin")
-        store = IMasterStore(CommercialSubscription)
+        store = IPrimaryStore(CommercialSubscription)
 
         # Generate a load of entries in BugSummaryJournal.
         store.execute("UPDATE BugTask SET status=42")
@@ -1325,7 +1325,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         )
         translation_message.potmsgset.setSequence(pofile.potemplate, 0)
         transaction.commit()
-        store = IMasterStore(POTMsgSet)
+        store = IPrimaryStore(POTMsgSet)
         obsolete_msgsets = store.find(
             POTMsgSet,
             TranslationTemplateItem.potmsgset == POTMsgSet.id,
@@ -1348,7 +1348,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
             translation_message.potmsgset.setSequence(pofile.potemplate, 0)
             potmsgset_pofile[translation_message.potmsgset.id] = pofile.id
         transaction.commit()
-        store = IMasterStore(POTMsgSet)
+        store = IPrimaryStore(POTMsgSet)
         test_ids = list(potmsgset_pofile)
         obsolete_msgsets = store.find(
             POTMsgSet,
@@ -1383,7 +1383,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         switch_dbuser("testadmin")
         potmsgset = self.factory.makePOTMsgSet()
         # Cheekily drop any references to the POTMsgSet we just created.
-        store = IMasterStore(POTMsgSet)
+        store = IPrimaryStore(POTMsgSet)
         store.execute(
             "DELETE FROM TranslationTemplateItem WHERE potmsgset = %s"
             % potmsgset.id
@@ -1413,7 +1413,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         old_update = now - timedelta(days=2)
         naked_bug = removeSecurityProxy(bug)
         naked_bug.heat_last_updated = old_update
-        IMasterStore(FeatureFlag).add(
+        IPrimaryStore(FeatureFlag).add(
             FeatureFlag(
                 "default",
                 0,
@@ -1701,7 +1701,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         transaction.commit()
         self.runFrequently()
 
-        store = IMasterStore(LatestPersonSourcePackageReleaseCache)
+        store = IPrimaryStore(LatestPersonSourcePackageReleaseCache)
         # Check that the garbo state table has data.
         self.assertIsNotNone(
             store.execute(
@@ -1811,7 +1811,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         now = datetime.now(UTC)
         switch_dbuser("testadmin")
         self.useFixture(FeatureFixture({LIVEFS_FEATURE_FLAG: "on"}))
-        store = IMasterStore(LiveFSFile)
+        store = IPrimaryStore(LiveFSFile)
         initial_count = store.find(LiveFSFile).count()
 
         livefsbuild_kwargs = dict(livefsbuild_kwargs)
@@ -1895,7 +1895,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # exists.
         switch_dbuser("testadmin")
         self.useFixture(FeatureFixture({LIVEFS_FEATURE_FLAG: "on"}))
-        store = IMasterStore(LiveFSFile)
+        store = IPrimaryStore(LiveFSFile)
         other_build = self.factory.makeLiveFSBuild(
             status=BuildStatus.FULLYBUILT, duration=timedelta(minutes=10)
         )
@@ -1925,7 +1925,7 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
         # ago.
         now = datetime.now(UTC)
         switch_dbuser("testadmin")
-        store = IMasterStore(SnapFile)
+        store = IPrimaryStore(SnapFile)
 
         db_build = self.factory.makeSnapBuild(
             date_created=now - timedelta(days=interval, minutes=15),
@@ -2562,7 +2562,7 @@ class TestGarboTasks(TestCaseWithFactory):
     layer = LaunchpadZopelessLayer
 
     def test_LoginTokenPruner(self):
-        store = IMasterStore(LoginToken)
+        store = IPrimaryStore(LoginToken)
         now = datetime.now(UTC)
         switch_dbuser("testadmin")
 
diff --git a/lib/lp/services/auth/model.py b/lib/lp/services/auth/model.py
index 3cb6cb2..a629fcc 100644
--- a/lib/lp/services/auth/model.py
+++ b/lib/lp/services/auth/model.py
@@ -25,7 +25,7 @@ from lp.registry.model.teammembership import TeamParticipation
 from lp.services.auth.enums import AccessTokenScope
 from lp.services.auth.interfaces import IAccessToken, IAccessTokenSet
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 
 
@@ -100,7 +100,7 @@ class AccessToken(StormBase):
 
     def updateLastUsed(self):
         """See `IAccessToken`."""
-        store = IMasterStore(AccessToken)
+        store = IPrimaryStore(AccessToken)
         store.execute(
             Update(
                 {AccessToken.date_last_used: UTC_NOW},
diff --git a/lib/lp/services/config/__init__.py b/lib/lp/services/config/__init__.py
index 859a38a..d76835e 100644
--- a/lib/lp/services/config/__init__.py
+++ b/lib/lp/services/config/__init__.py
@@ -309,16 +309,6 @@ class DatabaseConfig:
     def main_standby(self):
         return random.choice(self.rw_main_standby.split(","))
 
-    # XXX cjwatson 2021-10-01: Remove these once Launchpad's store flavors
-    # have been renamed.
-    @property
-    def main_master(self):
-        return self.main_primary
-
-    @property
-    def main_slave(self):
-        return self.main_standby
-
     def override(self, **kwargs):
         """Override one or more config attributes.
 
diff --git a/lib/lp/services/database/doc/db-policy.rst b/lib/lp/services/database/doc/db-policy.rst
index 01bd33f..ddd48e7 100644
--- a/lib/lp/services/database/doc/db-policy.rst
+++ b/lib/lp/services/database/doc/db-policy.rst
@@ -12,12 +12,12 @@ If you know your code needs to change data, or must have the latest possible
 information, you retrieve objects from the primary database that stores
 the data for your database class.
 
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
     >>> from lp.registry.model.person import Person
     >>> import transaction
 
     >>> writable_janitor = (
-    ...     IMasterStore(Person).find(Person, Person.name == "janitor").one()
+    ...     IPrimaryStore(Person).find(Person, Person.name == "janitor").one()
     ... )
 
     >>> writable_janitor.display_name = "Jack the Janitor"
@@ -84,7 +84,7 @@ resources.
     >>> from lp.services.database.policy import StandbyOnlyDatabasePolicy
     >>> with StandbyOnlyDatabasePolicy():
     ...     whoops = (
-    ...         IMasterStore(Person)
+    ...         IPrimaryStore(Person)
     ...         .find(Person, Person.name == "janitor")
     ...         .one()
     ...     )
@@ -132,6 +132,6 @@ unsure if the object is writable or not, you can easily cast it
 to a writable copy. This is a noop if the object is already writable
 so is good defensive programming.
 
-    >>> from lp.services.database.interfaces import IMasterObject
-    >>> IMasterObject(ro_janitor) is writable_janitor
+    >>> from lp.services.database.interfaces import IPrimaryObject
+    >>> IPrimaryObject(ro_janitor) is writable_janitor
     True
diff --git a/lib/lp/services/database/doc/storm-store-reset.rst b/lib/lp/services/database/doc/storm-store-reset.rst
index efa287c..d068943 100644
--- a/lib/lp/services/database/doc/storm-store-reset.rst
+++ b/lib/lp/services/database/doc/storm-store-reset.rst
@@ -8,7 +8,7 @@ the stores these caches may end up being carried from one request to
 another.
 
 However, we have many tests that want to use an object after we've
-committed a transaction, so we decided not to reset MASTER stores when
+committed a transaction, so we decided not to reset PRIMARY stores when
 running the test suite.
 
 Since the web app is not run in the main thread (unlike the test suite)
diff --git a/lib/lp/services/database/doc/storm.rst b/lib/lp/services/database/doc/storm.rst
index 1e4038b..e9b84c9 100644
--- a/lib/lp/services/database/doc/storm.rst
+++ b/lib/lp/services/database/doc/storm.rst
@@ -4,15 +4,15 @@ back into the main replication set as part of login server separation.
 -- StuartBishop 20100222
 
 In addition to what Storm provides, we also have some Launchpad
-specific Storm tools to cope with our master and standby store arrangement.
+specific Storm tools to cope with our primary and standby store arrangement.
 
     >>> from lp.services.identity.interfaces.emailaddress import (
     ...     EmailAddressStatus,
     ...     IEmailAddressSet,
     ... )
     >>> from lp.services.database.interfaces import (
-    ...     IMasterObject,
-    ...     IMasterStore,
+    ...     IPrimaryObject,
+    ...     IPrimaryStore,
     ...     IStandbyStore,
     ...     IStore,
     ... )
@@ -22,30 +22,30 @@ specific Storm tools to cope with our master and standby store arrangement.
     >>> from lp.registry.model.person import Person
 
 
-You need to use the correct master Store to make changes to
+You need to use the correct primary Store to make changes to
 a Launchpad database object. You can use adapters to
 retrieve the correct Store.
 
-    >>> main_master = IMasterStore(Person)
+    >>> main_primary = IPrimaryStore(Person)
 
 You can detect if a store is writable by checking what interfaces it
 provides.
 
-    >>> IMasterStore.providedBy(main_master)
+    >>> IPrimaryStore.providedBy(main_primary)
     True
-    >>> IStandbyStore.providedBy(main_master)
+    >>> IStandbyStore.providedBy(main_primary)
     False
 
 
-Changes to the standby Stores will lag behind the master Stores. If
+Changes to the standby Stores will lag behind the primary Stores. If
 you only need to read an object but require it to be in sync with the
-master, you should use the default Store. Launchpad will give you the
+primary, you should use the default Store. Launchpad will give you the
 standby store if it is sure all your recent changes have been replicated.
-Otherwise, it gives you the master. See IStoreSelector for details.
+Otherwise, it gives you the primary. See IStoreSelector for details.
 
     >>> main_default = IStore(Person)
     >>> main_standby = IStandbyStore(Person)
-    >>> main_default is main_master
+    >>> main_default is main_primary
     True
     >>> main_default is main_standby
     False
@@ -57,16 +57,16 @@ this is less generally useful.
     >>> janitor = IStandbyStore(Person).find(Person, name="janitor").one()
     >>> IStandbyStore(janitor) is IStandbyStore(Person)
     True
-    >>> IMasterStore(janitor) is IMasterStore(Person)
+    >>> IPrimaryStore(janitor) is IPrimaryStore(Person)
     True
-    >>> IMasterStore(janitor) is IStandbyStore(Person)
+    >>> IPrimaryStore(janitor) is IStandbyStore(Person)
     False
 
 
-If we need the master copy of an object, we can adapt it to IMasterObject.
+If we need the primary copy of an object, we can adapt it to IPrimaryObject.
 Good defensive programming is to use this adapter if you want to make
 changes to an object, just in case you have been passed an instance
-from a store other than the correct Master.
+from a store other than the correct Primary.
 
     >>> main_standby = IStandbyStore(Person)
     >>> t = transaction.begin()
@@ -80,11 +80,11 @@ from a store other than the correct Master.
     >>> transaction.abort()
     >>> t = transaction.begin()
     >>> person = main_standby.find(Person, name="mark").one()
-    >>> IMasterObject(person).display_name = "Can change"
+    >>> IPrimaryObject(person).display_name = "Can change"
     >>> transaction.commit()
 
 
-If the adapted object was security proxied, the master copy is
+If the adapted object was security proxied, the primary copy is
 similarly wrapped.
 
     >>> from zope.security.proxy import removeSecurityProxy
@@ -98,7 +98,7 @@ similarly wrapped.
     ...
     zope.security.interfaces.Unauthorized: ...
 
-    >>> person = IMasterObject(person)
+    >>> person = IPrimaryObject(person)
     >>> removeSecurityProxy(person) is person
     False
     >>> print(person.displayname)
@@ -108,7 +108,7 @@ similarly wrapped.
     ...
     zope.security.interfaces.Unauthorized: ...
 
-    >>> person = IMasterObject(removeSecurityProxy(person))
+    >>> person = IPrimaryObject(removeSecurityProxy(person))
     >>> removeSecurityProxy(person) is person
     True
     >>> print(person.displayname)
@@ -118,8 +118,8 @@ similarly wrapped.
 Our objects may compare equal even if they have come from different
 stores.
 
-    >>> master_email = (
-    ...     IMasterStore(EmailAddress)
+    >>> primary_email = (
+    ...     IPrimaryStore(EmailAddress)
     ...     .find(
     ...         EmailAddress,
     ...         Person.name == "janitor",
@@ -136,25 +136,25 @@ stores.
     ...     )
     ...     .one()
     ... )
-    >>> master_email is standby_email
+    >>> primary_email is standby_email
     False
-    >>> master_email == standby_email
+    >>> primary_email == standby_email
     True
-    >>> master_email != standby_email
+    >>> primary_email != standby_email
     False
 
 Comparison works for security wrapped objects too.
 
     >>> wrapped_email = getUtility(IEmailAddressSet).getByEmail(
-    ...     master_email.email
+    ...     primary_email.email
     ... )
-    >>> removeSecurityProxy(wrapped_email) is master_email
+    >>> removeSecurityProxy(wrapped_email) is primary_email
     True
-    >>> wrapped_email is master_email
+    >>> wrapped_email is primary_email
     False
-    >>> wrapped_email == master_email
+    >>> wrapped_email == primary_email
     True
-    >>> wrapped_email != master_email
+    >>> wrapped_email != primary_email
     False
 
 Objects not yet flushed to the database also compare equal.
@@ -178,8 +178,8 @@ Objects not yet flushed to the database also compare equal.
 
 Objects differing by class never compare equal.
 
-    >>> email_one = IMasterStore(EmailAddress).get(EmailAddress, 1)
-    >>> person_one = IMasterStore(Person).get(Person, 1)
+    >>> email_one = IPrimaryStore(EmailAddress).get(EmailAddress, 1)
+    >>> person_one = IPrimaryStore(Person).get(Person, 1)
     >>> email_one == person_one
     False
     >>> email_one != person_one
diff --git a/lib/lp/services/database/interfaces.py b/lib/lp/services/database/interfaces.py
index 3f9a9af..808e25b 100644
--- a/lib/lp/services/database/interfaces.py
+++ b/lib/lp/services/database/interfaces.py
@@ -6,8 +6,8 @@ __all__ = [
     "DisallowedStore",
     "IDatabasePolicy",
     "IDBObject",
-    "IMasterObject",
-    "IMasterStore",
+    "IPrimaryObject",
+    "IPrimaryStore",
     "IRequestExpired",
     "ISQLBase",
     "IStandbyStore",
@@ -153,7 +153,7 @@ class IStore(Interface):
         """See storm.store.Store."""
 
 
-class IMasterStore(IStore):
+class IPrimaryStore(IStore):
     """A writeable Storm Stores."""
 
 
@@ -165,5 +165,5 @@ class IDBObject(Interface):
     """A Storm database object."""
 
 
-class IMasterObject(IDBObject):
-    """A Storm database object associated with its master Store."""
+class IPrimaryObject(IDBObject):
+    """A Storm database object associated with its primary Store."""
diff --git a/lib/lp/services/database/policy.py b/lib/lp/services/database/policy.py
index 97ef62d..39e5c2a 100644
--- a/lib/lp/services/database/policy.py
+++ b/lib/lp/services/database/policy.py
@@ -30,7 +30,7 @@ from lp.services.database.interfaces import (
     STANDBY_FLAVOR,
     DisallowedStore,
     IDatabasePolicy,
-    IMasterStore,
+    IPrimaryStore,
     IStandbyStore,
     IStoreSelector,
 )
@@ -142,7 +142,7 @@ class BaseDatabasePolicy:
 
             # Attach our marker interfaces so our adapters don't lie.
             if flavor == PRIMARY_FLAVOR:
-                alsoProvides(store, IMasterStore)
+                alsoProvides(store, IPrimaryStore)
             else:
                 alsoProvides(store, IStandbyStore)
 
diff --git a/lib/lp/services/database/sqlbase.py b/lib/lp/services/database/sqlbase.py
index 36473d5..419b0c7 100644
--- a/lib/lp/services/database/sqlbase.py
+++ b/lib/lp/services/database/sqlbase.py
@@ -54,8 +54,8 @@ from lp.services.database.interfaces import (
     DEFAULT_FLAVOR,
     MAIN_STORE,
     DisallowedStore,
-    IMasterObject,
-    IMasterStore,
+    IPrimaryObject,
+    IPrimaryStore,
     ISQLBase,
     IStore,
     IStoreSelector,
@@ -164,20 +164,20 @@ class SQLBase(storm.sqlobject.SQLObjectBase):
     def __init__(self, *args, **kwargs):
         """Extended version of the SQLObjectBase constructor.
 
-        We force use of the master Store.
+        We force use of the primary Store.
 
         We refetch any parameters from different stores from the
-        correct master Store.
+        correct primary Store.
         """
         # Make it simple to write dumb-invalidators - initialized
         # _cached_properties to a valid list rather than just-in-time
         # creation.
         self._cached_properties = []
-        store = IMasterStore(self.__class__)
+        store = IPrimaryStore(self.__class__)
 
         # The constructor will fail if objects from a different Store
         # are passed in. We need to refetch these objects from the correct
-        # master Store if necessary so the foreign key references can be
+        # primary Store if necessary so the foreign key references can be
         # constructed.
         # XXX StuartBishop 2009-03-02 bug=336867: We probably want to remove
         # this code - there are enough other places developers have to be
@@ -216,11 +216,11 @@ class SQLBase(storm.sqlobject.SQLObjectBase):
         return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
 
     def destroySelf(self):
-        my_master = IMasterObject(self)
-        if self is my_master:
+        my_primary = IPrimaryObject(self)
+        if self is my_primary:
             super().destroySelf()
         else:
-            my_master.destroySelf()
+            my_primary.destroySelf()
 
     def __eq__(self, other):
         """Equality operator.
@@ -569,7 +569,7 @@ def reset_store(func):
 
 
 def connect(user=None, dbname=None, isolation=ISOLATION_LEVEL_DEFAULT):
-    """Return a fresh DB-API connection to the MAIN MASTER database.
+    """Return a fresh DB-API connection to the MAIN PRIMARY database.
 
     Can be used without first setting up the Component Architecture,
     unlike the usual stores.
diff --git a/lib/lp/services/database/tests/test_bulk.py b/lib/lp/services/database/tests/test_bulk.py
index 4422d64..ba5972a 100644
--- a/lib/lp/services/database/tests/test_bulk.py
+++ b/lib/lp/services/database/tests/test_bulk.py
@@ -25,7 +25,11 @@ from lp.code.model.branchjob import (
 from lp.code.model.branchsubscription import BranchSubscription
 from lp.registry.model.person import Person
 from lp.services.database import bulk
-from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
+from lp.services.database.interfaces import (
+    IPrimaryStore,
+    IStandbyStore,
+    IStore,
+)
 from lp.services.database.sqlbase import (
     convert_storm_clause_to_string,
     get_transaction_timestamp,
@@ -110,13 +114,13 @@ class TestLoaders(TestCaseWithFactory):
         # store even for the same object type.
         db_object = self.factory.makeComponent()
         db_object_type = bulk.get_type(db_object)
-        # Commit so the database object is available in both master
+        # Commit so the database object is available in both primary
         # and standby stores.
         transaction.commit()
         # Use a list, since objects corresponding to the same DB row from
         # different stores compare equal.
         db_objects = [
-            IMasterStore(db_object).get(db_object_type, db_object.id),
+            IPrimaryStore(db_object).get(db_object_type, db_object.id),
             IStandbyStore(db_object).get(db_object_type, db_object.id),
         ]
         db_object_ids = {id(obj) for obj in db_objects}
@@ -226,15 +230,15 @@ class TestLoaders(TestCaseWithFactory):
     def test_load_with_store(self):
         # load() can use an alternative store.
         db_object = self.factory.makeComponent()
-        # Commit so the database object is available in both master
+        # Commit so the database object is available in both primary
         # and standby stores.
         transaction.commit()
-        # Master store.
-        master_store = IMasterStore(db_object)
-        [db_object_from_master] = bulk.load(
-            Component, [db_object.id], store=master_store
+        # Primary store.
+        primary_store = IPrimaryStore(db_object)
+        [db_object_from_primary] = bulk.load(
+            Component, [db_object.id], store=primary_store
         )
-        self.assertEqual(Store.of(db_object_from_master), master_store)
+        self.assertEqual(Store.of(db_object_from_primary), primary_store)
         # Standby store.
         standby_store = IStandbyStore(db_object)
         [db_object_from_standby] = bulk.load(
diff --git a/lib/lp/services/database/transaction_policy.py b/lib/lp/services/database/transaction_policy.py
index 14bd41d..d7c7cd0 100644
--- a/lib/lp/services/database/transaction_policy.py
+++ b/lib/lp/services/database/transaction_policy.py
@@ -11,7 +11,7 @@ import transaction
 from psycopg2.extensions import TRANSACTION_STATUS_IDLE
 
 from lp.registry.model.person import Person
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.isolation import TransactionInProgress
 from lp.services.database.sqlbase import quote
 
@@ -68,13 +68,13 @@ class DatabaseTransactionPolicy:
         Merely creating a policy has no effect.  Use it with "with" to affect
         writability of database transactions.
 
-        :param store: The store to set policy on.  Defaults to the main master
-            store.  You don't want to use this on a standby store!
+        :param store: The store to set policy on.  Defaults to the main
+            primary store.  You don't want to use this on a standby store!
         :param read_only: Is this policy read-only?
         """
         self.read_only = read_only
         if store is None:
-            self.store = IMasterStore(Person)
+            self.store = IPrimaryStore(Person)
         else:
             self.store = store
 
diff --git a/lib/lp/services/doc/collection.rst b/lib/lp/services/doc/collection.rst
index 1134578..06bf9c4 100644
--- a/lib/lp/services/doc/collection.rst
+++ b/lib/lp/services/doc/collection.rst
@@ -15,9 +15,9 @@ them.
 
     >>> from storm.locals import Count, Int, Storm
     >>> from lp.registry.model.product import Product
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
 
-    >>> store = IMasterStore(Product)
+    >>> store = IPrimaryStore(Product)
     >>> ok = store.execute("CREATE TEMP TABLE Kumquat(id integer UNIQUE)")
     >>> class Kumquat(Storm):
     ...     __storm_table__ = "Kumquat"
diff --git a/lib/lp/services/identity/model/account.py b/lib/lp/services/identity/model/account.py
index 4a92e42..5412f60 100644
--- a/lib/lp/services/identity/model/account.py
+++ b/lib/lp/services/identity/model/account.py
@@ -16,7 +16,7 @@ from zope.interface import implementer
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase
 from lp.services.database.sqlobject import StringCol
 from lp.services.helpers import backslashreplace
@@ -119,7 +119,7 @@ class AccountSet:
             identifier = OpenIdIdentifier()
             identifier.account = account
             identifier.identifier = openid_identifier
-            IMasterStore(OpenIdIdentifier).add(identifier)
+            IPrimaryStore(OpenIdIdentifier).add(identifier)
 
         return account
 
diff --git a/lib/lp/services/identity/model/emailaddress.py b/lib/lp/services/identity/model/emailaddress.py
index 14da414..e6557e5 100644
--- a/lib/lp/services/identity/model/emailaddress.py
+++ b/lib/lp/services/identity/model/emailaddress.py
@@ -18,7 +18,7 @@ from zope.interface import implementer
 
 from lp.app.validators.email import valid_email
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, sqlvalues
 from lp.services.database.sqlobject import ForeignKey, StringCol
 from lp.services.identity.interfaces.emailaddress import (
@@ -83,7 +83,7 @@ class EmailAddress(SQLBase, HasOwnerMixin):
         # XXX 2009-05-04 jamesh bug=371567: This function should not
         # be responsible for removing subscriptions, since the SSO
         # server can't write to that table.
-        store = IMasterStore(MailingListSubscription)
+        store = IPrimaryStore(MailingListSubscription)
         for subscription in store.find(
             MailingListSubscription, email_address=self
         ):
diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py
index 35ea151..7be460f 100644
--- a/lib/lp/services/librarian/client.py
+++ b/lib/lp/services/librarian/client.py
@@ -28,7 +28,7 @@ from storm.store import Store
 from zope.interface import implementer
 
 from lp.services.config import config, dbconfig
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.postgresql import ConnectionString
 from lp.services.database.sqlobject import SQLObjectNotFound
 from lp.services.librarian.interfaces.client import (
@@ -176,7 +176,7 @@ class FileUploadClient:
             # Get the name of the database the client is using, so that
             # the server can check that the client is using the same
             # database as the server.
-            store = IMasterStore(LibraryFileAlias)
+            store = IPrimaryStore(LibraryFileAlias)
             databaseName = self._getDatabaseName(store)
 
             # Generate new content and alias IDs.
@@ -274,7 +274,7 @@ class FileUploadClient:
         name = six.ensure_binary(name)
         self._connect()
         try:
-            database_name = ConnectionString(dbconfig.main_master).dbname
+            database_name = ConnectionString(dbconfig.main_primary).dbname
             self._sendLine(b"STORE %d %s" % (size, name))
             self._sendHeader("Database-Name", database_name)
             self._sendHeader("Content-Type", str(contentType))
diff --git a/lib/lp/services/librarian/doc/librarian.rst b/lib/lp/services/librarian/doc/librarian.rst
index 00824ba..39099b6 100644
--- a/lib/lp/services/librarian/doc/librarian.rst
+++ b/lib/lp/services/librarian/doc/librarian.rst
@@ -641,12 +641,12 @@ We need some files to test different ways of accessing them.
     # record. Such records are considered as being deleted.
 
     >>> from lp.services.librarian.model import LibraryFileAlias
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
 
     >>> deleted_file = LibraryFileAlias(
     ...     content=None, filename="deleted.txt", mimetype="text/plain"
     ... )
-    >>> ignore = IMasterStore(LibraryFileAlias).add(deleted_file)
+    >>> ignore = IPrimaryStore(LibraryFileAlias).add(deleted_file)
 
 Commit the just-created files.
 
diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py
index f51a293..210ffcf 100644
--- a/lib/lp/services/librarian/model.py
+++ b/lib/lp/services/librarian/model.py
@@ -24,7 +24,7 @@ from lp.registry.errors import InvalidFilename
 from lp.services.config import config
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, session_store
 from lp.services.database.sqlobject import (
     BoolCol,
@@ -270,7 +270,7 @@ class LibraryFileAliasSet:
             allow_zero_length=allow_zero_length,
         )
         lfa = (
-            IMasterStore(LibraryFileAlias)
+            IPrimaryStore(LibraryFileAlias)
             .find(LibraryFileAlias, LibraryFileAlias.id == fid)
             .one()
         )
diff --git a/lib/lp/services/librarianserver/tests/test_gc.py b/lib/lp/services/librarianserver/tests/test_gc.py
index ab3117f..613b53a 100644
--- a/lib/lp/services/librarianserver/tests/test_gc.py
+++ b/lib/lp/services/librarianserver/tests/test_gc.py
@@ -23,7 +23,7 @@ from swiftclient import client as swiftclient
 from testtools.matchers import AnyMatch, Equals, MatchesListwise, MatchesRegex
 
 from lp.services.config import config
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import (
     ISOLATION_LEVEL_AUTOCOMMIT,
     connect,
@@ -75,7 +75,7 @@ class TestLibrarianGarbageCollectionBase:
         # Make sure that every file the database knows about exists on disk.
         # We manually remove them for tests that need to cope with missing
         # library items.
-        store = IMasterStore(LibraryFileContent)
+        store = IPrimaryStore(LibraryFileContent)
         for content in store.find(LibraryFileContent):
             path = librariangc.get_file_path(content.id)
             if not os.path.exists(path):
diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py
index 555891f..1127213 100644
--- a/lib/lp/services/librarianserver/tests/test_web.py
+++ b/lib/lp/services/librarianserver/tests/test_web.py
@@ -22,7 +22,7 @@ from zope.security.proxy import removeSecurityProxy
 from lp.buildmaster.enums import BuildStatus
 from lp.services.config import config
 from lp.services.config.fixture import ConfigUseFixture
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import (
     cursor,
     flush_database_updates,
@@ -79,7 +79,7 @@ class LibrarianWebTestMixin:
         url = client.getURLForAlias(fileAlias)
         # Now that we have a url which talks to the public librarian, make the
         # file restricted.
-        IMasterStore(LibraryFileAlias).find(
+        IPrimaryStore(LibraryFileAlias).find(
             LibraryFileAlias, LibraryFileAlias.id == fileAlias
         ).set(restricted=True)
         self.commit()
@@ -292,7 +292,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
 
         # Change the date_created to a known value that doesn't match
         # the disk timestamp. The timestamp on disk cannot be trusted.
-        file_alias = IMasterStore(LibraryFileAlias).get(
+        file_alias = IPrimaryStore(LibraryFileAlias).get(
             LibraryFileAlias, file_alias_id
         )
         file_alias.date_created = datetime(
@@ -332,7 +332,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
 
         # Change the date_created to a known value that doesn't match
         # the disk timestamp. The timestamp on disk cannot be trusted.
-        file_alias = IMasterStore(LibraryFileAlias).get(
+        file_alias = IPrimaryStore(LibraryFileAlias).get(
             LibraryFileAlias, file_alias_id
         )
 
@@ -473,7 +473,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
         fileAlias, url = self.get_restricted_file_and_public_url()
         token = TimeLimitedToken.allocate(url)
         # Change the date_created to a known value for testing.
-        file_alias = IMasterStore(LibraryFileAlias).get(
+        file_alias = IPrimaryStore(LibraryFileAlias).get(
             LibraryFileAlias, fileAlias
         )
         file_alias.date_created = datetime(
@@ -527,7 +527,7 @@ class LibrarianWebMacaroonTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
 
     def test_restricted_with_macaroon(self):
         fileAlias, url = self.get_restricted_file_and_public_url()
-        lfa = IMasterStore(LibraryFileAlias).get(LibraryFileAlias, fileAlias)
+        lfa = IPrimaryStore(LibraryFileAlias).get(LibraryFileAlias, fileAlias)
         with dbuser("testadmin"):
             build = self.factory.makeBinaryPackageBuild(
                 archive=self.factory.makeArchive(private=True)
@@ -547,7 +547,7 @@ class LibrarianWebMacaroonTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
 
     def test_restricted_with_invalid_macaroon(self):
         fileAlias, url = self.get_restricted_file_and_public_url()
-        lfa = IMasterStore(LibraryFileAlias).get(LibraryFileAlias, fileAlias)
+        lfa = IPrimaryStore(LibraryFileAlias).get(LibraryFileAlias, fileAlias)
         with dbuser("testadmin"):
             build = self.factory.makeBinaryPackageBuild(
                 archive=self.factory.makeArchive(private=True)
diff --git a/lib/lp/services/looptuner.py b/lib/lp/services/looptuner.py
index 40c0882..766a765 100644
--- a/lib/lp/services/looptuner.py
+++ b/lib/lp/services/looptuner.py
@@ -19,7 +19,7 @@ from zope.interface import Interface, implementer
 
 import lp.services.scripts
 from lp.services.database import activity_cols
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 
 
 class ITunableLoop(Interface):
@@ -298,10 +298,10 @@ class DBLoopTuner(LoopTuner):
 
     def _blockWhenLagged(self):
         """When database replication lag is high, block until it drops."""
-        # Lag is most meaningful on the master.
+        # Lag is most meaningful on the primary.
         from lp.services.librarian.model import LibraryFileAlias
 
-        store = IMasterStore(LibraryFileAlias)
+        store = IPrimaryStore(LibraryFileAlias)
         msg_counter = 0
         while not self._isTimedOut():
             lag = store.execute("SELECT replication_lag()").get_one()[0]
@@ -328,7 +328,7 @@ class DBLoopTuner(LoopTuner):
             return
         from lp.services.librarian.model import LibraryFileAlias
 
-        store = IMasterStore(LibraryFileAlias)
+        store = IPrimaryStore(LibraryFileAlias)
         msg_counter = 0
         while not self._isTimedOut():
             results = list(
diff --git a/lib/lp/services/oauth/model.py b/lib/lp/services/oauth/model.py
index ca1e8a5..9736101 100644
--- a/lib/lp/services/oauth/model.py
+++ b/lib/lp/services/oauth/model.py
@@ -26,7 +26,7 @@ from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.projectgroup import IProjectGroup
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.stormbase import StormBase
 from lp.services.oauth.interfaces import (
     IOAuthAccessToken,
@@ -53,12 +53,12 @@ class OAuthBase:
     def _getStore(cls):
         """Return the correct store for this class.
 
-        We want all OAuth classes to be retrieved from the master flavour.  If
-        they are retrieved from the standby, there will be problems in the
+        We want all OAuth classes to be retrieved from the primary flavour.
+        If they are retrieved from the standby, there will be problems in the
         authorization exchange, since it will be done across applications that
         won't share the session cookies.
         """
-        return IMasterStore(cls)
+        return IPrimaryStore(cls)
 
 
 def sha256_digest(data: str):
diff --git a/lib/lp/services/openid/model/baseopenidstore.py b/lib/lp/services/openid/model/baseopenidstore.py
index 4e40494..e024175 100644
--- a/lib/lp/services/openid/model/baseopenidstore.py
+++ b/lib/lp/services/openid/model/baseopenidstore.py
@@ -18,7 +18,7 @@ from openid.store import nonce
 from openid.store.interface import OpenIDStore
 from storm.properties import Bytes, Int, Unicode
 
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 
 
 class BaseStormOpenIDAssociation:
@@ -86,7 +86,7 @@ class BaseStormOpenIDStore(OpenIDStore):
 
     def storeAssociation(self, server_url, association):
         """See `OpenIDStore`."""
-        store = IMasterStore(self.Association)
+        store = IPrimaryStore(self.Association)
         db_assoc = store.get(
             self.Association,
             (
@@ -102,7 +102,7 @@ class BaseStormOpenIDStore(OpenIDStore):
 
     def getAssociation(self, server_url, handle=None):
         """See `OpenIDStore`."""
-        store = IMasterStore(self.Association)
+        store = IPrimaryStore(self.Association)
         server_url = six.ensure_text(server_url)
         if handle is None:
             result = store.find(self.Association, server_url=server_url)
@@ -128,7 +128,7 @@ class BaseStormOpenIDStore(OpenIDStore):
 
     def removeAssociation(self, server_url, handle):
         """See `OpenIDStore`."""
-        store = IMasterStore(self.Association)
+        store = IPrimaryStore(self.Association)
         assoc = store.get(
             self.Association,
             (six.ensure_text(server_url), six.ensure_text(handle, "ASCII")),
@@ -147,7 +147,7 @@ class BaseStormOpenIDStore(OpenIDStore):
         server_url = six.ensure_text(server_url)
         salt = six.ensure_text(salt, "ASCII")
 
-        store = IMasterStore(self.Nonce)
+        store = IPrimaryStore(self.Nonce)
         old_nonce = store.get(self.Nonce, (server_url, timestamp, salt))
         if old_nonce is not None:
             # The nonce has already been seen, so reject it.
@@ -158,7 +158,7 @@ class BaseStormOpenIDStore(OpenIDStore):
 
     def cleanupAssociations(self):
         """See `OpenIDStore`."""
-        store = IMasterStore(self.Association)
+        store = IPrimaryStore(self.Association)
         now = int(time.time())
         expired = store.find(
             self.Association,
diff --git a/lib/lp/services/openid/tests/test_baseopenidstore.py b/lib/lp/services/openid/tests/test_baseopenidstore.py
index ff491f8..74eebdc 100644
--- a/lib/lp/services/openid/tests/test_baseopenidstore.py
+++ b/lib/lp/services/openid/tests/test_baseopenidstore.py
@@ -13,7 +13,7 @@ import unittest
 from openid.association import Association
 from openid.store import nonce
 
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.openid.model.baseopenidstore import BaseStormOpenIDStore
 
 
@@ -28,7 +28,7 @@ class BaseStormOpenIDStoreTestsMixin:
             "server-url\xA9",
             Association(b"handle", b"secret", 42, 600, "HMAC-SHA1"),
         )
-        db_assoc = IMasterStore(self.store.Association).get(
+        db_assoc = IPrimaryStore(self.store.Association).get(
             self.store.Association, ("server-url\xA9", "handle")
         )
         self.assertEqual(db_assoc.server_url, "server-url\xA9")
@@ -43,7 +43,7 @@ class BaseStormOpenIDStoreTestsMixin:
             "server-url",
             Association(b"handle", b"secret", 42, 600, "HMAC-SHA1"),
         )
-        db_assoc = IMasterStore(self.store.Association).get(
+        db_assoc = IPrimaryStore(self.store.Association).get(
             self.store.Association, ("server-url", "handle")
         )
         self.assertNotEqual(db_assoc, None)
@@ -91,7 +91,7 @@ class BaseStormOpenIDStoreTestsMixin:
         assoc = self.store.getAssociation("server-url", "handle")
         self.assertEqual(assoc, None)
 
-        store = IMasterStore(self.store.Association)
+        store = IPrimaryStore(self.store.Association)
         db_assoc = store.get(self.store.Association, ("server-url", "handle"))
         self.assertEqual(db_assoc, None)
 
@@ -137,7 +137,7 @@ class BaseStormOpenIDStoreTestsMixin:
         self.assertEqual(
             self.store.useNonce("server-url", timestamp, "salt"), True
         )
-        storm_store = IMasterStore(self.store.Nonce)
+        storm_store = IPrimaryStore(self.store.Nonce)
         new_nonce = storm_store.get(
             self.store.Nonce, ("server-url", timestamp, "salt")
         )
diff --git a/lib/lp/services/scripts/base.py b/lib/lp/services/scripts/base.py
index 59afada..24018e0 100644
--- a/lib/lp/services/scripts/base.py
+++ b/lib/lp/services/scripts/base.py
@@ -345,7 +345,7 @@ class LaunchpadScript:
         """
         dbuser = self.dbuser
         if dbuser is None:
-            connstr = ConnectionString(dbconfig.main_master)
+            connstr = ConnectionString(dbconfig.main_primary)
             dbuser = connstr.user or dbconfig.dbuser
         dbconfig.override(dbuser=dbuser, isolation_level=isolation)
         self.txn = transaction
diff --git a/lib/lp/services/session/adapters.py b/lib/lp/services/session/adapters.py
index 52c2407..f40145c 100644
--- a/lib/lp/services/session/adapters.py
+++ b/lib/lp/services/session/adapters.py
@@ -9,15 +9,19 @@ __all__ = []
 from zope.component import adapter
 from zope.interface import implementer
 
-from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
+from lp.services.database.interfaces import (
+    IPrimaryStore,
+    IStandbyStore,
+    IStore,
+)
 from lp.services.database.sqlbase import session_store
 from lp.services.session.interfaces import IUseSessionStore
 
 
 @adapter(IUseSessionStore)
-@implementer(IMasterStore)
-def session_master_store(cls):
-    """Adapt a Session database object to an `IMasterStore`."""
+@implementer(IPrimaryStore)
+def session_primary_store(cls):
+    """Adapt a Session database object to an `IPrimaryStore`."""
     return session_store()
 
 
diff --git a/lib/lp/services/session/configure.zcml b/lib/lp/services/session/configure.zcml
index 8c3c399..0bb0d0c 100644
--- a/lib/lp/services/session/configure.zcml
+++ b/lib/lp/services/session/configure.zcml
@@ -6,7 +6,7 @@
    xmlns:browser="http://namespaces.zope.org/browser"
    xmlns:i18n="http://namespaces.zope.org/i18n"
     i18n_domain="launchpad">
-    <adapter factory=".adapters.session_master_store" />
+    <adapter factory=".adapters.session_primary_store" />
     <adapter factory=".adapters.session_standby_store" />
     <adapter factory=".adapters.session_default_store" />
 </configure>
diff --git a/lib/lp/services/session/tests/test_session.py b/lib/lp/services/session/tests/test_session.py
index ef8cb4b..77a47a8 100644
--- a/lib/lp/services/session/tests/test_session.py
+++ b/lib/lp/services/session/tests/test_session.py
@@ -3,7 +3,11 @@
 
 """Session tests."""
 
-from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
+from lp.services.database.interfaces import (
+    IPrimaryStore,
+    IStandbyStore,
+    IStore,
+)
 from lp.services.session.model import SessionData, SessionPkgData
 from lp.testing import TestCase
 from lp.testing.layers import DatabaseFunctionalLayer
@@ -13,7 +17,7 @@ class TestSessionModelAdapters(TestCase):
     layer = DatabaseFunctionalLayer
 
     def test_adapters(self):
-        for adapter in [IMasterStore, IStandbyStore, IStore]:
+        for adapter in [IPrimaryStore, IStandbyStore, IStore]:
             for cls in [SessionData, SessionPkgData]:
                 for obj in [cls, cls()]:
                     store = adapter(obj)
diff --git a/lib/lp/services/signing/model/signingkey.py b/lib/lp/services/signing/model/signingkey.py
index 66e889e..868b634 100644
--- a/lib/lp/services/signing/model/signingkey.py
+++ b/lib/lp/services/signing/model/signingkey.py
@@ -16,7 +16,7 @@ from zope.interface import implementer, provider
 
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.signing.enums import SigningKeyType, SigningMode
 from lp.services.signing.interfaces.signingkey import (
@@ -96,7 +96,7 @@ class SigningKey(StormBase):
             public_key=generated_key["public-key"],
             description=description,
         )
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         store.add(signing_key)
         return signing_key
 
@@ -110,7 +110,7 @@ class SigningKey(StormBase):
         )
         fingerprint = generated_key["fingerprint"]
 
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         # Check if the key is already saved in the database.
         db_key = store.find(
             SigningKey,
@@ -188,7 +188,7 @@ class ArchiveSigningKey(StormBase):
 class ArchiveSigningKeySet:
     @classmethod
     def create(cls, archive, earliest_distro_series, signing_key):
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         obj = ArchiveSigningKey(archive, earliest_distro_series, signing_key)
         store.add(obj)
         return obj
diff --git a/lib/lp/services/signing/tests/test_signingkey.py b/lib/lp/services/signing/tests/test_signingkey.py
index 5785619..74c5d5b 100644
--- a/lib/lp/services/signing/tests/test_signingkey.py
+++ b/lib/lp/services/signing/tests/test_signingkey.py
@@ -18,7 +18,7 @@ from testtools.matchers import (
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.signing.enums import SigningKeyType, SigningMode
 from lp.services.signing.interfaces.signingkey import (
     IArchiveSigningKeySet,
@@ -75,7 +75,7 @@ class TestSigningKey(TestCaseWithFactory, TestWithFixtures):
         key = SigningKey.generate(SigningKeyType.UEFI, "this is my key")
         self.assertIsInstance(key, SigningKey)
 
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         store.invalidate()
 
         rs = store.find(SigningKey)
@@ -109,7 +109,7 @@ class TestSigningKey(TestCaseWithFactory, TestWithFixtures):
         )
         self.assertIsInstance(key, SigningKey)
 
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         store.invalidate()
 
         rs = store.find(SigningKey)
@@ -141,7 +141,7 @@ class TestSigningKey(TestCaseWithFactory, TestWithFixtures):
         )
         self.assertIsInstance(key, SigningKey)
 
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         store.flush()
 
         # This should give back the same key
diff --git a/lib/lp/services/verification/model/logintoken.py b/lib/lp/services/verification/model/logintoken.py
index 405f9f8..f9175b0 100644
--- a/lib/lp/services/verification/model/logintoken.py
+++ b/lib/lp/services/verification/model/logintoken.py
@@ -22,7 +22,7 @@ from lp.services.config import config
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, sqlvalues
 from lp.services.database.sqlobject import (
     ForeignKey,
@@ -302,7 +302,7 @@ class LoginTokenSet:
 
         # It's important to always use the PRIMARY_FLAVOR store here
         # because we don't want replication lag to cause a 404 error.
-        return IMasterStore(LoginToken).find(LoginToken, conditions)
+        return IPrimaryStore(LoginToken).find(LoginToken, conditions)
 
     def deleteByEmailRequesterAndType(self, email, requester, type):
         """See ILoginTokenSet."""
@@ -333,7 +333,7 @@ class LoginTokenSet:
 
         # It's important to always use the PRIMARY_FLAVOR store here
         # because we don't want replication lag to cause a 404 error.
-        return IMasterStore(LoginToken).find(LoginToken, conditions)
+        return IPrimaryStore(LoginToken).find(LoginToken, conditions)
 
     def getPendingGPGKeys(self, requesterid=None):
         """See ILoginTokenSet."""
diff --git a/lib/lp/services/webapp/adapter.py b/lib/lp/services/webapp/adapter.py
index a2585dd..d7fe40d 100644
--- a/lib/lp/services/webapp/adapter.py
+++ b/lib/lp/services/webapp/adapter.py
@@ -41,8 +41,8 @@ from lp.services.database.interfaces import (
     MAIN_STORE,
     PRIMARY_FLAVOR,
     STANDBY_FLAVOR,
-    IMasterObject,
-    IMasterStore,
+    IPrimaryObject,
+    IPrimaryStore,
     IRequestExpired,
     IStoreSelector,
 )
@@ -754,7 +754,7 @@ class StoreSelector:
         return db_policy.getStore(name, flavor)
 
 
-# We want to be able to adapt a Storm class to an IStore, IMasterStore or
+# We want to be able to adapt a Storm class to an IStore, IPrimaryStore or
 # IStandbyStore. Unfortunately, the component architecture provides no
 # way for us to declare that a class, and all its subclasses, provides
 # a given interface. This means we need to use an global adapter.
@@ -769,8 +769,8 @@ def get_store(storm_class, flavor=DEFAULT_FLAVOR):
         return None
 
 
-def get_master_store(storm_class):
-    """Return the master Store for the given database class."""
+def get_primary_store(storm_class):
+    """Return the primary Store for the given database class."""
     return get_store(storm_class, PRIMARY_FLAVOR)
 
 
@@ -779,20 +779,20 @@ def get_standby_store(storm_class):
     return get_store(storm_class, STANDBY_FLAVOR)
 
 
-def get_object_from_master_store(obj):
-    """Return a copy of the given object retrieved from its master Store.
+def get_object_from_primary_store(obj):
+    """Return a copy of the given object retrieved from its primary Store.
 
-    Returns the object if it already comes from the relevant master Store.
+    Returns the object if it already comes from the relevant primary Store.
 
     Registered as a trusted adapter, so if the input is security wrapped,
     so is the result. Otherwise an unwrapped object is returned.
     """
-    master_store = IMasterStore(obj)
-    if master_store is not Store.of(obj):
-        obj = master_store.get(obj.__class__, obj.id)
+    primary_store = IPrimaryStore(obj)
+    if primary_store is not Store.of(obj):
+        obj = primary_store.get(obj.__class__, obj.id)
         if obj is None:
             return None
-    alsoProvides(obj, IMasterObject)
+    alsoProvides(obj, IPrimaryObject)
     return obj
 
 
diff --git a/lib/lp/services/webapp/database.zcml b/lib/lp/services/webapp/database.zcml
index c283ee1..4c9bd10 100644
--- a/lib/lp/services/webapp/database.zcml
+++ b/lib/lp/services/webapp/database.zcml
@@ -41,9 +41,9 @@
         factory="lp.services.webapp.adapter.get_store"
         />
     <adapter
-        provides="lp.services.database.interfaces.IMasterStore"
+        provides="lp.services.database.interfaces.IPrimaryStore"
         for="zope.interface.Interface"
-        factory="lp.services.webapp.adapter.get_master_store"
+        factory="lp.services.webapp.adapter.get_primary_store"
         />
     <adapter
         provides="lp.services.database.interfaces.IStandbyStore"
@@ -54,10 +54,10 @@
          We have no way of specifying that all subclasses of
          storm.locals.Storm implement an Interface. -->
     <adapter
-        provides="lp.services.database.interfaces.IMasterObject"
+        provides="lp.services.database.interfaces.IPrimaryObject"
         for="zope.interface.Interface"
         trusted="yes"
-        factory="lp.services.webapp.adapter.get_object_from_master_store"
+        factory="lp.services.webapp.adapter.get_object_from_primary_store"
         />
     <class class="storm.store.Store">
         <implements interface="lp.services.database.interfaces.IStore" />
diff --git a/lib/lp/services/webapp/doc/webapp-publication.rst b/lib/lp/services/webapp/doc/webapp-publication.rst
index c478f92..984c038 100644
--- a/lib/lp/services/webapp/doc/webapp-publication.rst
+++ b/lib/lp/services/webapp/doc/webapp-publication.rst
@@ -1013,13 +1013,13 @@ In the default implementation, the following database modification will
 be automatically reverted in a GET request.
 
     >>> from lp.services.identity.model.emailaddress import EmailAddress
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
     >>> from lp.registry.model.person import Person
     >>> login("foo.bar@xxxxxxxxxxxxx")
     >>> txn = transaction.begin()
     >>> def get_foo_bar_person():
     ...     return (
-    ...         IMasterStore(Person)
+    ...         IPrimaryStore(Person)
     ...         .find(
     ...             Person,
     ...             Person.id == EmailAddress.personID,
diff --git a/lib/lp/services/webapp/tests/test_dbpolicy.py b/lib/lp/services/webapp/tests/test_dbpolicy.py
index 43e0862..aa08442 100644
--- a/lib/lp/services/webapp/tests/test_dbpolicy.py
+++ b/lib/lp/services/webapp/tests/test_dbpolicy.py
@@ -27,7 +27,7 @@ from lp.services.database.interfaces import (
     STANDBY_FLAVOR,
     DisallowedStore,
     IDatabasePolicy,
-    IMasterStore,
+    IPrimaryStore,
     IStandbyStore,
     IStoreSelector,
 )
@@ -58,7 +58,7 @@ class ImplicitDatabasePolicyTestCase(TestCase):
         for store in ALL_STORES:
             self.assertProvides(
                 getUtility(IStoreSelector).get(store, DEFAULT_FLAVOR),
-                IMasterStore,
+                IPrimaryStore,
             )
 
     def test_dbusers(self):
@@ -108,7 +108,7 @@ class StandbyDatabasePolicyTestCase(BaseDatabasePolicyTestCase):
         for store in ALL_STORES:
             self.assertProvides(
                 getUtility(IStoreSelector).get(store, PRIMARY_FLAVOR),
-                IMasterStore,
+                IPrimaryStore,
             )
 
 
@@ -270,35 +270,35 @@ class PrimaryFallbackTestCase(TestCase):
 
     def test_can_shutdown_standby_only(self):
         """Confirm that this TestCase's test infrastructure works as needed."""
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
 
         # Both Stores work when pgbouncer is up.
-        master_store.get(Person, 1)
+        primary_store.get(Person, 1)
         standby_store.get(Person, 1)
 
-        # Standby Store breaks when pgbouncer is torn down. Master Store
+        # Standby Store breaks when pgbouncer is torn down.  Primary Store
         # is fine.
         self.pgbouncer_fixture.stop()
-        master_store.get(Person, 2)
+        primary_store.get(Person, 2)
         self.assertRaises(DisconnectionError, standby_store.get, Person, 2)
 
     def test_startup_with_no_standby(self):
         """An attempt is made for the first time to connect to a standby."""
         self.pgbouncer_fixture.stop()
 
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
 
-        # The master and standby Stores are the same object.
-        self.assertIs(master_store, standby_store)
+        # The primary and standby Stores are the same object.
+        self.assertIs(primary_store, standby_store)
 
     def test_standby_shutdown_during_transaction(self):
         """Standby is shutdown while running, but we can recover."""
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
 
-        self.assertIsNot(master_store, standby_store)
+        self.assertIsNot(primary_store, standby_store)
 
         self.pgbouncer_fixture.stop()
 
@@ -309,50 +309,50 @@ class PrimaryFallbackTestCase(TestCase):
 
         transaction.abort()
 
-        # But in the next transaction, we get the master Store if we ask
+        # But in the next transaction, we get the primary Store if we ask
         # for the standby Store so we can continue.
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
 
-        self.assertIs(master_store, standby_store)
+        self.assertIs(primary_store, standby_store)
 
     def test_standby_shutdown_between_transactions(self):
         """Standby is shutdown in between transactions."""
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
-        self.assertIsNot(master_store, standby_store)
+        self.assertIsNot(primary_store, standby_store)
 
         transaction.abort()
         self.pgbouncer_fixture.stop()
 
         # The process doesn't notice the standby going down, and things
         # will fail the next time the standby is used.
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
-        self.assertIsNot(master_store, standby_store)
+        self.assertIsNot(primary_store, standby_store)
         self.assertRaises(DisconnectionError, standby_store.get, Person, 1)
 
         # But now it has been discovered the socket is no longer
-        # connected to anything, next transaction we get a master
+        # connected to anything, next transaction we get a primary
         # Store when we ask for a standby.
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
-        self.assertIs(master_store, standby_store)
+        self.assertIs(primary_store, standby_store)
 
     def test_standby_reconnect_after_outage(self):
         """The standby is again used once it becomes available."""
         self.pgbouncer_fixture.stop()
 
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
-        self.assertIs(master_store, standby_store)
+        self.assertIs(primary_store, standby_store)
 
         self.pgbouncer_fixture.start()
         transaction.abort()
 
-        master_store = IMasterStore(Person)
+        primary_store = IPrimaryStore(Person)
         standby_store = IStandbyStore(Person)
-        self.assertIsNot(master_store, standby_store)
+        self.assertIsNot(primary_store, standby_store)
 
 
 class TestFastDowntimeRollout(TestCase):
@@ -482,9 +482,9 @@ class TestFastDowntimeRollout(TestCase):
     def test_primary_standby_fast_downtime_rollout(self):
         """Parts of your app can keep working during a fast downtime update."""
         # Everything is running happily.
-        master_store = IMasterStore(Person)
-        self.assertTrue(self.store_is_primary(master_store))
-        self.assertTrue(self.store_is_working(master_store))
+        primary_store = IPrimaryStore(Person)
+        self.assertTrue(self.store_is_primary(primary_store))
+        self.assertTrue(self.store_is_working(primary_store))
 
         standby_store = IStandbyStore(Person)
         self.assertTrue(self.store_is_standby(standby_store))
@@ -504,7 +504,7 @@ class TestFastDowntimeRollout(TestCase):
         self.assertTrue(self.store_is_working(standby_store))
 
         # But attempts to use a primary store will fail.
-        self.assertFalse(self.store_is_working(master_store))
+        self.assertFalse(self.store_is_working(primary_store))
         transaction.abort()
 
         # After schema updates have been made to the primary, it is
@@ -518,9 +518,9 @@ class TestFastDowntimeRollout(TestCase):
         self.pgbouncer_cur.execute("KILL %s" % self.standby_dbname)
 
         # The primary store is working again.
-        master_store = IMasterStore(Person)
-        self.assertTrue(self.store_is_primary(master_store))
-        self.assertTrue(self.store_is_working(master_store))
+        primary_store = IPrimaryStore(Person)
+        self.assertTrue(self.store_is_primary(primary_store))
+        self.assertTrue(self.store_is_working(primary_store))
 
         # The next attempt at accessing the standby store will fail
         # with a DisconnectionError.
@@ -544,9 +544,9 @@ class TestFastDowntimeRollout(TestCase):
 
         # And next transaction, we are back to normal.
         transaction.abort()
-        master_store = IMasterStore(Person)
-        self.assertTrue(self.store_is_primary(master_store))
-        self.assertTrue(self.store_is_working(master_store))
+        primary_store = IPrimaryStore(Person)
+        self.assertTrue(self.store_is_primary(primary_store))
+        self.assertTrue(self.store_is_working(primary_store))
 
         standby_store = IStandbyStore(Person)
         self.assertTrue(self.store_is_standby(standby_store))
diff --git a/lib/lp/services/webapp/tests/test_publication.py b/lib/lp/services/webapp/tests/test_publication.py
index 0496ab4..ef68112 100644
--- a/lib/lp/services/webapp/tests/test_publication.py
+++ b/lib/lp/services/webapp/tests/test_publication.py
@@ -18,7 +18,7 @@ from zope.security.management import thread_local as zope_security_thread_local
 
 import lp.services.webapp.adapter as dbadapter
 from lp.services.auth.interfaces import IAccessTokenVerifiedRequest
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.identity.model.emailaddress import EmailAddress
 from lp.services.oauth.interfaces import IOAuthConsumerSet, IOAuthSignedRequest
 from lp.services.statsd.tests import StatsMixin
@@ -168,7 +168,7 @@ class TestWebServicePublication(TestCaseWithFactory):
         dbadapter.set_request_started()
 
         # Disconnect a store
-        store = IMasterStore(EmailAddress)
+        store = IPrimaryStore(EmailAddress)
         store._connection._state = STATE_DISCONNECTED
 
         # Invoke the endRequest hook.
diff --git a/lib/lp/services/webhooks/model.py b/lib/lp/services/webhooks/model.py
index 2cff028..69cd705 100644
--- a/lib/lp/services/webhooks/model.py
+++ b/lib/lp/services/webhooks/model.py
@@ -36,7 +36,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -391,7 +391,7 @@ class WebhookJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     def iterReady(cls):
         """See `IJobSource`."""
         jobs = (
-            IMasterStore(WebhookJob)
+            IPrimaryStore(WebhookJob)
             .find(
                 WebhookJob,
                 WebhookJob.job_type == cls.class_job_type,
diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py
index e60040f..35c54d3 100644
--- a/lib/lp/snappy/model/snap.py
+++ b/lib/lp/snappy/model/snap.py
@@ -123,7 +123,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormexpr import (
     Array,
     ArrayAgg,
@@ -1582,7 +1582,7 @@ class SnapSet:
         ):
             raise SnapPrivacyMismatch
 
-        store = IMasterStore(Snap)
+        store = IPrimaryStore(Snap)
         snap = Snap(
             registrant,
             owner,
diff --git a/lib/lp/snappy/model/snapbase.py b/lib/lp/snappy/model/snapbase.py
index a67159b..c8c200d 100644
--- a/lib/lp/snappy/model/snapbase.py
+++ b/lib/lp/snappy/model/snapbase.py
@@ -31,7 +31,7 @@ from lp.buildmaster.model.processor import Processor
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.person import Person
 from lp.services.database.constants import DEFAULT
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.snappy.interfaces.snapbase import (
     CannotDeleteSnapBase,
     ISnapBase,
@@ -251,7 +251,7 @@ class SnapBaseSet:
         date_created=DEFAULT,
     ):
         """See `ISnapBaseSet`."""
-        store = IMasterStore(SnapBase)
+        store = IPrimaryStore(SnapBase)
         snap_base = SnapBase(
             registrant,
             name,
diff --git a/lib/lp/snappy/model/snapbuild.py b/lib/lp/snappy/model/snapbuild.py
index 7c38a91..05eddc9 100644
--- a/lib/lp/snappy/model/snapbuild.py
+++ b/lib/lp/snappy/model/snapbuild.py
@@ -55,7 +55,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
 from lp.services.librarian.browser import ProxiedLibraryFileAlias
@@ -364,7 +364,7 @@ class SnapBuild(PackageBuildMixin, Storm):
     def addFile(self, lfa):
         """See `ISnapBuild`."""
         snapfile = SnapFile(snapbuild=self, libraryfile=lfa)
-        IMasterStore(SnapFile).add(snapfile)
+        IPrimaryStore(SnapFile).add(snapfile)
         return snapfile
 
     def verifySuccessfulUpload(self) -> bool:
@@ -568,7 +568,7 @@ class SnapBuildSet(SpecificBuildFarmJobSourceMixin):
         target_architectures=None,
     ):
         """See `ISnapBuildSet`."""
-        store = IMasterStore(SnapBuild)
+        store = IPrimaryStore(SnapBuild)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
             SnapBuild.job_type,
             BuildStatus.NEEDSBUILD,
@@ -600,7 +600,7 @@ class SnapBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def getByID(self, build_id):
         """See `ISpecificBuildFarmJobSource`."""
-        store = IMasterStore(SnapBuild)
+        store = IPrimaryStore(SnapBuild)
         return store.get(SnapBuild, build_id)
 
     def getByBuildFarmJob(self, build_farm_job):
diff --git a/lib/lp/snappy/model/snapbuildjob.py b/lib/lp/snappy/model/snapbuildjob.py
index ad34729..7ebd3f0 100644
--- a/lib/lp/snappy/model/snapbuildjob.py
+++ b/lib/lp/snappy/model/snapbuildjob.py
@@ -24,7 +24,7 @@ from zope.interface.interfaces import ObjectEvent
 from lp.app.errors import NotFoundError
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -133,7 +133,7 @@ class SnapBuildJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(SnapBuildJob).find(
+        jobs = IPrimaryStore(SnapBuildJob).find(
             SnapBuildJob,
             SnapBuildJob.job_type == cls.class_job_type,
             SnapBuildJob.job == Job.id,
diff --git a/lib/lp/snappy/model/snapjob.py b/lib/lp/snappy/model/snapjob.py
index 078b55f..5ec3be9 100644
--- a/lib/lp/snappy/model/snapjob.py
+++ b/lib/lp/snappy/model/snapjob.py
@@ -27,7 +27,7 @@ from lp.services.config import config
 from lp.services.database.bulk import load_related
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -132,7 +132,7 @@ class SnapJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """See `IJobSource`."""
-        jobs = IMasterStore(SnapJob).find(
+        jobs = IPrimaryStore(SnapJob).find(
             SnapJob,
             SnapJob.job_type == cls.class_job_type,
             SnapJob.job == Job.id,
diff --git a/lib/lp/snappy/model/snappyseries.py b/lib/lp/snappy/model/snappyseries.py
index 45b3cf0..fec61f1 100644
--- a/lib/lp/snappy/model/snappyseries.py
+++ b/lib/lp/snappy/model/snappyseries.py
@@ -26,7 +26,7 @@ from lp.registry.interfaces.series import SeriesStatus
 from lp.registry.model.distroseries import DistroSeries
 from lp.services.database.constants import DEFAULT
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.propertycache import cachedproperty, get_property_cache
 from lp.snappy.interfaces.snappyseries import (
     ISnappyDistroSeries,
@@ -256,7 +256,7 @@ class SnappySeriesSet:
         date_created=DEFAULT,
     ):
         """See `ISnappySeriesSet`."""
-        store = IMasterStore(SnappySeries)
+        store = IPrimaryStore(SnappySeries)
         snappy_series = SnappySeries(
             registrant,
             name,
diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
index fe4ba4c..670cc96 100644
--- a/lib/lp/soyuz/model/archivefile.py
+++ b/lib/lp/soyuz/model/archivefile.py
@@ -20,7 +20,7 @@ from zope.interface import implementer
 from lp.services.database.bulk import load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import convert_storm_clause_to_string
 from lp.services.database.stormexpr import BulkUpdate, RegexpMatch
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
@@ -77,7 +77,7 @@ class ArchiveFileSet:
     def new(archive, container, path, library_file):
         """See `IArchiveFileSet`."""
         archive_file = ArchiveFile(archive, container, path, library_file)
-        IMasterStore(ArchiveFile).add(archive_file)
+        IPrimaryStore(ArchiveFile).add(archive_file)
         return archive_file
 
     @classmethod
@@ -159,7 +159,7 @@ class ArchiveFileSet:
             LibraryFileContent.sha256,
         ]
         return list(
-            IMasterStore(ArchiveFile).execute(
+            IPrimaryStore(ArchiveFile).execute(
                 Returning(
                     BulkUpdate(
                         {ArchiveFile.scheduled_deletion_date: new_date},
@@ -188,7 +188,7 @@ class ArchiveFileSet:
             LibraryFileContent.sha256,
         ]
         return list(
-            IMasterStore(ArchiveFile).execute(
+            IPrimaryStore(ArchiveFile).execute(
                 Returning(
                     BulkUpdate(
                         {ArchiveFile.scheduled_deletion_date: None},
@@ -230,7 +230,7 @@ class ArchiveFileSet:
             clauses.append(ArchiveFile.container == container)
         where = convert_storm_clause_to_string(And(*clauses))
         return list(
-            IMasterStore(ArchiveFile).execute(
+            IPrimaryStore(ArchiveFile).execute(
                 """
             DELETE FROM ArchiveFile
             USING LibraryFileAlias, LibraryFileContent
diff --git a/lib/lp/soyuz/model/archivejob.py b/lib/lp/soyuz/model/archivejob.py
index f73cec1..50c6b8f 100644
--- a/lib/lp/soyuz/model/archivejob.py
+++ b/lib/lp/soyuz/model/archivejob.py
@@ -36,7 +36,7 @@ from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.interfaces.sourcepackage import SourcePackageFileType
 from lp.services.config import config
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import EnumeratedSubclass, Job
 from lp.services.job.runner import BaseRunnableJob
@@ -124,7 +124,7 @@ class ArchiveJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
     @classmethod
     def iterReady(cls):
         """Iterate through all ready ArchiveJobs."""
-        store = IMasterStore(ArchiveJob)
+        store = IPrimaryStore(ArchiveJob)
         jobs = store.find(
             ArchiveJob,
             And(
diff --git a/lib/lp/soyuz/model/archivepermission.py b/lib/lp/soyuz/model/archivepermission.py
index 3eafe57..86ca48c 100644
--- a/lib/lp/soyuz/model/archivepermission.py
+++ b/lib/lp/soyuz/model/archivepermission.py
@@ -38,7 +38,7 @@ from lp.registry.interfaces.sourcepackagename import (
 from lp.registry.model.teammembership import TeamParticipation
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import sqlvalues
 from lp.services.database.stormbase import StormBase
 from lp.soyuz.enums import ArchivePermissionType
@@ -611,7 +611,7 @@ class ArchivePermissionSet:
         self, archive, person, packageset, explicit=False
     ):
         """See `IArchivePermissionSet`."""
-        store = IMasterStore(ArchivePermission)
+        store = IPrimaryStore(ArchivePermission)
 
         # First see whether we have a matching permission in the database
         # already.
@@ -681,7 +681,7 @@ class ArchivePermissionSet:
         self, archive, person, packageset, explicit=False
     ):
         """See `IArchivePermissionSet`."""
-        store = IMasterStore(ArchivePermission)
+        store = IPrimaryStore(ArchivePermission)
 
         # Do we have the permission the user wants removed in the database?
         permission = store.find(
diff --git a/lib/lp/soyuz/model/distroarchseriesfilter.py b/lib/lp/soyuz/model/distroarchseriesfilter.py
index 219aa8d..ac48343 100644
--- a/lib/lp/soyuz/model/distroarchseriesfilter.py
+++ b/lib/lp/soyuz/model/distroarchseriesfilter.py
@@ -14,7 +14,7 @@ from zope.security.proxy import removeSecurityProxy
 
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.soyuz.enums import DistroArchSeriesFilterSense
 from lp.soyuz.interfaces.distroarchseriesfilter import (
     IDistroArchSeriesFilter,
@@ -105,7 +105,7 @@ class DistroArchSeriesFilterSet:
         The caller must check that the creator has suitable permissions on
         `distroarchseries`.
         """
-        store = IMasterStore(DistroArchSeriesFilter)
+        store = IPrimaryStore(DistroArchSeriesFilter)
         dasf = DistroArchSeriesFilter(
             distroarchseries,
             packageset,
diff --git a/lib/lp/soyuz/model/distroseriesdifferencejob.py b/lib/lp/soyuz/model/distroseriesdifferencejob.py
index 7aef9bc..9420cb6 100644
--- a/lib/lp/soyuz/model/distroseriesdifferencejob.py
+++ b/lib/lp/soyuz/model/distroseriesdifferencejob.py
@@ -19,7 +19,7 @@ from lp.registry.model.distroseriesdifference import DistroSeriesDifference
 from lp.registry.model.sourcepackagename import SourcePackageName
 from lp.services.config import config
 from lp.services.database import bulk
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.job.model.job import Job
 from lp.soyuz.interfaces.distributionjob import (
     DistributionJobType,
@@ -60,7 +60,7 @@ def create_job(derived_series, sourcepackagename, parent_series):
         job_type=DistributionJobType.DISTROSERIESDIFFERENCE,
         metadata=make_metadata(sourcepackagename.id, parent_series.id),
     )
-    IMasterStore(DistributionJob).add(db_job)
+    IPrimaryStore(DistributionJob).add(db_job)
     job = DistroSeriesDifferenceJob(db_job)
     job.celeryRunOnCommit()
     return job
@@ -117,9 +117,9 @@ def find_waiting_jobs(derived_series, sourcepackagename, parent_series):
     # redundant jobs occasionally.
     json_metadata = make_metadata(sourcepackagename.id, parent_series.id)
 
-    # Use master store because we don't like outdated information
+    # Use primary store because we don't like outdated information
     # here.
-    store = IMasterStore(DistributionJob)
+    store = IPrimaryStore(DistributionJob)
 
     candidates = store.find(
         DistributionJob,
@@ -315,7 +315,7 @@ class DistroSeriesDifferenceJob(DistributionJobDerived):
         """Find an existing `DistroSeriesDifference` for this difference."""
         spn_id = self.metadata["sourcepackagename"]
         parent_id = self.metadata["parent_series"]
-        store = IMasterStore(DistroSeriesDifference)
+        store = IPrimaryStore(DistroSeriesDifference)
         search = store.find(
             DistroSeriesDifference,
             DistroSeriesDifference.derived_series == self.derived_series,
diff --git a/lib/lp/soyuz/model/initializedistroseriesjob.py b/lib/lp/soyuz/model/initializedistroseriesjob.py
index 077a442..2068ac5 100644
--- a/lib/lp/soyuz/model/initializedistroseriesjob.py
+++ b/lib/lp/soyuz/model/initializedistroseriesjob.py
@@ -9,7 +9,7 @@ from zope.interface import implementer, provider
 
 from lp.registry.model.distroseries import DistroSeries
 from lp.services.config import config
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
 from lp.soyuz.interfaces.distributionjob import (
@@ -79,7 +79,7 @@ class InitializeDistroSeriesJob(DistributionJobDerived):
             corresponding exactly to each parent.  The  name *must* be set
             if the corresponding overlays boolean is True.
         """
-        store = IMasterStore(DistributionJob)
+        store = IPrimaryStore(DistributionJob)
         # Only one InitializeDistroSeriesJob can be present at a time.
         distribution_job = store.find(
             DistributionJob,
diff --git a/lib/lp/soyuz/model/livefs.py b/lib/lp/soyuz/model/livefs.py
index 2be9368..1f19e00 100644
--- a/lib/lp/soyuz/model/livefs.py
+++ b/lib/lp/soyuz/model/livefs.py
@@ -42,7 +42,7 @@ from lp.registry.interfaces.person import IPersonSet, NoSuchPerson
 from lp.registry.interfaces.role import IHasOwner
 from lp.registry.model.person import Person, get_person_visibility_terms
 from lp.services.database.constants import DEFAULT, UTC_NOW
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormexpr import Greatest, IsDistinctFrom, NullsLast
 from lp.services.features import getFeatureFlag
 from lp.services.webapp.interfaces import ILaunchBag
@@ -343,7 +343,7 @@ class LiveFSSet:
         if self.exists(owner, distro_series, name):
             raise DuplicateLiveFSName
 
-        store = IMasterStore(LiveFS)
+        store = IPrimaryStore(LiveFS)
         livefs = LiveFS(
             registrant,
             owner,
diff --git a/lib/lp/soyuz/model/livefsbuild.py b/lib/lp/soyuz/model/livefsbuild.py
index 3ca8bde..f912b5d 100644
--- a/lib/lp/soyuz/model/livefsbuild.py
+++ b/lib/lp/soyuz/model/livefsbuild.py
@@ -40,7 +40,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import DEFAULT
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.features import getFeatureFlag
 from lp.services.librarian.browser import ProxiedLibraryFileAlias
 from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
@@ -307,7 +307,7 @@ class LiveFSBuild(PackageBuildMixin, Storm):
     def addFile(self, lfa):
         """See `ILiveFSBuild`."""
         livefsfile = LiveFSFile(livefsbuild=self, libraryfile=lfa)
-        IMasterStore(LiveFSFile).add(livefsfile)
+        IPrimaryStore(LiveFSFile).add(livefsfile)
         return livefsfile
 
     def verifySuccessfulUpload(self) -> bool:
@@ -382,7 +382,7 @@ class LiveFSBuildSet(SpecificBuildFarmJobSourceMixin):
         date_created=DEFAULT,
     ):
         """See `ILiveFSBuildSet`."""
-        store = IMasterStore(LiveFSBuild)
+        store = IPrimaryStore(LiveFSBuild)
         build_farm_job = getUtility(IBuildFarmJobSource).new(
             LiveFSBuild.job_type,
             BuildStatus.NEEDSBUILD,
@@ -411,7 +411,7 @@ class LiveFSBuildSet(SpecificBuildFarmJobSourceMixin):
 
     def getByID(self, build_id):
         """See `ISpecificBuildFarmJobSource`."""
-        store = IMasterStore(LiveFSBuild)
+        store = IPrimaryStore(LiveFSBuild)
         return store.get(LiveFSBuild, build_id)
 
     def getByBuildFarmJob(self, build_farm_job):
diff --git a/lib/lp/soyuz/model/packagecopyjob.py b/lib/lp/soyuz/model/packagecopyjob.py
index aee140f..fa87145 100644
--- a/lib/lp/soyuz/model/packagecopyjob.py
+++ b/lib/lp/soyuz/model/packagecopyjob.py
@@ -37,7 +37,7 @@ from lp.services.config import config
 from lp.services.database import bulk
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.locking import (
     AdvisoryLockHeld,
     LockType,
@@ -352,7 +352,7 @@ class PlainPackageCopyJob(PackageCopyJobDerived):
             metadata=metadata,
             requester=requester,
         )
-        IMasterStore(PackageCopyJob).add(job)
+        IPrimaryStore(PackageCopyJob).add(job)
         derived = cls(job)
         derived.celeryRunOnCommit()
         return derived
@@ -420,7 +420,7 @@ class PlainPackageCopyJob(PackageCopyJobDerived):
         move=False,
     ):
         """See `IPlainPackageCopyJobSource`."""
-        store = IMasterStore(Job)
+        store = IPrimaryStore(Job)
         job_ids = Job.createMultiple(store, len(copy_tasks), requester)
         job_contents = [
             cls._composeJobInsertionTuple(
diff --git a/lib/lp/soyuz/model/packageset.py b/lib/lp/soyuz/model/packageset.py
index cb68aff..7b4ee88 100644
--- a/lib/lp/soyuz/model/packageset.py
+++ b/lib/lp/soyuz/model/packageset.py
@@ -15,7 +15,7 @@ from lp.registry.interfaces.sourcepackagename import (
     ISourcePackageNameSet,
 )
 from lp.registry.model.sourcepackagename import SourcePackageName
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.soyuz.interfaces.packageset import (
     DuplicatePackagesetName,
     IPackageset,
@@ -81,7 +81,7 @@ class Packageset(Storm):
             interface a datum should implement and the second is the handler
             to invoke in that case respectively.
         """
-        store = IMasterStore(Packageset)
+        store = IPrimaryStore(Packageset)
         if not isinstance(data, (list, tuple)):
             data = list(data)
         count = len(data)
@@ -299,7 +299,7 @@ class Packageset(Storm):
 
     def _api_add_or_remove(self, clauses, handler):
         """Look up the data to be added/removed and call the handler."""
-        store = IMasterStore(Packageset)
+        store = IPrimaryStore(Packageset)
         data = list(store.find(*clauses))
         if len(data) > 0:
             handler(data, store)
@@ -361,7 +361,7 @@ class PackagesetSet:
 
     def new(self, name, description, owner, distroseries, related_set=None):
         """See `IPackagesetSet`."""
-        store = IMasterStore(Packageset)
+        store = IPrimaryStore(Packageset)
 
         try:
             self.getByName(distroseries, name)
diff --git a/lib/lp/soyuz/model/processacceptedbugsjob.py b/lib/lp/soyuz/model/processacceptedbugsjob.py
index f5a67d7..c4a1515 100644
--- a/lib/lp/soyuz/model/processacceptedbugsjob.py
+++ b/lib/lp/soyuz/model/processacceptedbugsjob.py
@@ -22,7 +22,7 @@ from lp.bugs.interfaces.bugtask import BugTaskStatus
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.distroseries import DistroSeries
 from lp.services.config import config
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.stormbase import StormBase
 from lp.services.job.model.job import Job
 from lp.services.job.runner import BaseRunnableJob
@@ -259,7 +259,7 @@ class ProcessAcceptedBugsJob(StormBase, BaseRunnableJob):
         job = ProcessAcceptedBugsJob(
             distroseries, sourcepackagerelease, bug_ids
         )
-        IMasterStore(ProcessAcceptedBugsJob).add(job)
+        IPrimaryStore(ProcessAcceptedBugsJob).add(job)
         job.celeryRunOnCommit()
         return job
 
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 9db21bc..b4611ac 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -43,7 +43,7 @@ from lp.services.database.constants import UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase
 from lp.services.database.sqlobject import ForeignKey, IntCol, StringCol
 from lp.services.database.stormexpr import IsDistinctFrom
@@ -942,7 +942,7 @@ class BinaryPackagePublishingHistory(SQLBase, ArchivePublisherBase):
         available_architectures = [
             das.id for das in self.distroarchseries.distroseries.architectures
         ]
-        return IMasterStore(BinaryPackagePublishingHistory).find(
+        return IPrimaryStore(BinaryPackagePublishingHistory).find(
             BinaryPackagePublishingHistory,
             BinaryPackagePublishingHistory.status.is_in(
                 active_publishing_status
@@ -1349,7 +1349,7 @@ class PublishingSet:
             for das, bpr, overrides in expanded
         )
         already_published = (
-            IMasterStore(BinaryPackagePublishingHistory)
+            IPrimaryStore(BinaryPackagePublishingHistory)
             .find(
                 (
                     BinaryPackagePublishingHistory.distroarchseriesID,
@@ -2237,7 +2237,7 @@ class PublishingSet:
         else:
             removed_by_id = removed_by.id
 
-        affected_pubs = IMasterStore(publication_class).find(
+        affected_pubs = IPrimaryStore(publication_class).find(
             publication_class, publication_class.id.is_in(ids)
         )
         affected_pubs.set(
@@ -2255,7 +2255,7 @@ class PublishingSet:
                     affected_pubs
                 )
             ]
-            IMasterStore(publication_class).find(
+            IPrimaryStore(publication_class).find(
                 BinaryPackagePublishingHistory,
                 BinaryPackagePublishingHistory.id.is_in(debug_ids),
             ).set(
@@ -2283,7 +2283,7 @@ class PublishingSet:
             ),
         ]
         return (
-            IMasterStore(debug_bpph)
+            IPrimaryStore(debug_bpph)
             .using(*origin)
             .find(
                 debug_bpph,
diff --git a/lib/lp/soyuz/model/queue.py b/lib/lp/soyuz/model/queue.py
index f99e2e9..0e20f49 100644
--- a/lib/lp/soyuz/model/queue.py
+++ b/lib/lp/soyuz/model/queue.py
@@ -43,7 +43,7 @@ from lp.services.database.bulk import load_referencing, load_related
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import sqlvalues
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import Array, ArrayContains
@@ -1635,7 +1635,7 @@ class PackageUploadSet:
         from lp.registry.model.distroseries import DistroSeries
         from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
-        store = IMasterStore(PackageUpload)
+        store = IPrimaryStore(PackageUpload)
         origin = (
             PackageUpload,
             Join(DistroSeries, PackageUpload.distroseries == DistroSeries.id),
diff --git a/lib/lp/soyuz/model/sourcepackageformat.py b/lib/lp/soyuz/model/sourcepackageformat.py
index eda9ef8..49dd49a 100644
--- a/lib/lp/soyuz/model/sourcepackageformat.py
+++ b/lib/lp/soyuz/model/sourcepackageformat.py
@@ -10,7 +10,7 @@ from storm.locals import Int, Reference, Storm
 from zope.interface import implementer
 
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.soyuz.enums import SourcePackageFormat
 from lp.soyuz.interfaces.sourcepackageformat import (
     ISourcePackageFormatSelection,
@@ -53,4 +53,4 @@ class SourcePackageFormatSelectionSet:
         spfs = SourcePackageFormatSelection()
         spfs.distroseries = distroseries
         spfs.format = format
-        return IMasterStore(SourcePackageFormatSelection).add(spfs)
+        return IPrimaryStore(SourcePackageFormatSelection).add(spfs)
diff --git a/lib/lp/soyuz/scripts/initialize_distroseries.py b/lib/lp/soyuz/scripts/initialize_distroseries.py
index d2681d0..5ffe47d 100644
--- a/lib/lp/soyuz/scripts/initialize_distroseries.py
+++ b/lib/lp/soyuz/scripts/initialize_distroseries.py
@@ -22,7 +22,7 @@ from lp.registry.interfaces.distroseriesparent import IDistroSeriesParentSet
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.distroseries import DistroSeries
 from lp.services.database import bulk
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.database.sqlbase import sqlvalues
 from lp.services.scripts import log
 from lp.soyuz.adapters.packagelocation import PackageLocation
@@ -135,7 +135,7 @@ class InitializeDistroSeries:
         self.overlays = overlays
         self.overlay_pockets = overlay_pockets
         self.overlay_components = overlay_components
-        self._store = IMasterStore(DistroSeries)
+        self._store = IPrimaryStore(DistroSeries)
 
         self.first_derivation = (
             not self.distroseries.distribution.has_published_sources
diff --git a/lib/lp/soyuz/stories/ppa/xx-ppa-files.rst b/lib/lp/soyuz/stories/ppa/xx-ppa-files.rst
index d777962..525a1f1 100644
--- a/lib/lp/soyuz/stories/ppa/xx-ppa-files.rst
+++ b/lib/lp/soyuz/stories/ppa/xx-ppa-files.rst
@@ -472,9 +472,9 @@ immediately deleted in case of reported ToS violation.
     # Attach an existing file (the 'test-pkg_1.0.dsc') to a deleted
     # LibraryFileContent.
     >>> from lp.soyuz.model.archive import Archive
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
     >>> login("foo.bar@xxxxxxxxxxxxx")
-    >>> IMasterStore(Archive).commit()
+    >>> IPrimaryStore(Archive).commit()
     >>> from zope.security.proxy import removeSecurityProxy
     >>> removeSecurityProxy(dsc_file).content = None
     >>> transaction.commit()
diff --git a/lib/lp/soyuz/stories/soyuz/xx-person-packages.rst b/lib/lp/soyuz/stories/soyuz/xx-person-packages.rst
index f1f1bc1..f876e04 100644
--- a/lib/lp/soyuz/stories/soyuz/xx-person-packages.rst
+++ b/lib/lp/soyuz/stories/soyuz/xx-person-packages.rst
@@ -172,10 +172,10 @@ records.
     >>> from lp.services.log.logger import FakeLogger
     >>> from lp.testing.dbuser import switch_dbuser
     >>> from lp.soyuz.model.archive import Archive
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
 
     >>> def update_cached_records(delete_all=False):
-    ...     store = IMasterStore(Archive)
+    ...     store = IPrimaryStore(Archive)
     ...     if delete_all:
     ...         store.execute(
     ...             "delete from latestpersonsourcepackagereleasecache"
diff --git a/lib/lp/soyuz/tests/test_distroseriesdifferencejob.py b/lib/lp/soyuz/tests/test_distroseriesdifferencejob.py
index b9fb66c..6f372ff 100644
--- a/lib/lp/soyuz/tests/test_distroseriesdifferencejob.py
+++ b/lib/lp/soyuz/tests/test_distroseriesdifferencejob.py
@@ -17,7 +17,7 @@ from lp.registry.enums import (
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.registry.model.distroseriesdifference import DistroSeriesDifference
 from lp.services.database import bulk
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.features.testing import FeatureFixture
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.tests import block_on_job
@@ -51,7 +51,7 @@ def find_dsd_for(dsp, package):
     :param dsp: `DistroSeriesParent`.
     :param package: `SourcePackageName`.
     """
-    store = IMasterStore(DistroSeriesDifference)
+    store = IPrimaryStore(DistroSeriesDifference)
     return store.find(
         DistroSeriesDifference,
         DistroSeriesDifference.derived_series == dsp.derived_series,
@@ -695,7 +695,7 @@ class TestDistroSeriesDifferenceJobEndToEnd(TestCaseWithFactory):
 
     def setUp(self):
         super().setUp()
-        self.store = IMasterStore(DistroSeriesDifference)
+        self.store = IPrimaryStore(DistroSeriesDifference)
 
     def getJobSource(self):
         return getUtility(IDistroSeriesDifferenceJobSource)
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index e25375f..0a13183 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -216,7 +216,7 @@ from lp.services.compat import message_as_bytes
 from lp.services.config import config
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.interfaces import (
-    IMasterStore,
+    IPrimaryStore,
     IStore,
     IStoreSelector,
 )
@@ -335,18 +335,18 @@ from lp.translations.utilities.sanitize import sanitize_translations_from_webui
 SPACE = " "
 
 
-def default_master_store(func):
-    """Decorator to temporarily set the default Store to the master.
+def default_primary_store(func):
+    """Decorator to temporarily set the default Store to the primary.
 
     In some cases, such as in the middle of a page test story,
     we might be calling factory methods with the default Store set
     to the standby which breaks stuff. For instance, if we set an account's
-    password that needs to happen on the master store and this is forced.
+    password that needs to happen on the primary store and this is forced.
     However, if we then read it back the default Store has to be used.
     """
 
     @wraps(func)
-    def with_default_master_store(*args, **kw):
+    def with_default_primary_store(*args, **kw):
         try:
             store_selector = getUtility(IStoreSelector)
         except ComponentLookupError:
@@ -358,7 +358,7 @@ def default_master_store(func):
         finally:
             store_selector.pop()
 
-    return mergeFunctionMetadata(func, with_default_master_store)
+    return mergeFunctionMetadata(func, with_default_primary_store)
 
 
 # We use this for default parameters where None has a specific meaning. For
@@ -382,7 +382,7 @@ class ObjectFactory(metaclass=AutoDecorateMetaClass):
     # This allocates process-wide unique integers.  We count on Python doing
     # only cooperative threading to make this safe across threads.
 
-    __decorators = (default_master_store,)
+    __decorators = (default_primary_store,)
 
     _unique_int_counter = count(100000)
 
@@ -575,7 +575,7 @@ class LaunchpadObjectFactory(ObjectFactory):
         # Identifier needed to be created, it will not be usable in the
         # production environments so access to execute this stored
         # procedure cannot be used to compromise accounts.
-        IMasterStore(OpenIdIdentifier).execute(
+        IPrimaryStore(OpenIdIdentifier).execute(
             "SELECT add_test_openid_identifier(%s)", (account.id,)
         )
 
@@ -674,7 +674,7 @@ class LaunchpadObjectFactory(ObjectFactory):
         # To make the person someone valid in Launchpad, validate the
         # email.
         if email_address_status == EmailAddressStatus.PREFERRED:
-            account = IMasterStore(Account).get(Account, person.accountID)
+            account = IPrimaryStore(Account).get(Account, person.accountID)
             account.status = AccountStatus.ACTIVE
             person.setPreferredEmail(email)
 
@@ -750,7 +750,7 @@ class LaunchpadObjectFactory(ObjectFactory):
         if set_preferred_email:
             # setPreferredEmail no longer activates the account
             # automatically.
-            account = IMasterStore(Account).get(Account, person.accountID)
+            account = IPrimaryStore(Account).get(Account, person.accountID)
             account.reactivate("Activated by factory.makePersonByName")
             person.setPreferredEmail(email)
 
@@ -761,7 +761,7 @@ class LaunchpadObjectFactory(ObjectFactory):
                 person.mailing_list_auto_subscribe_policy = (
                     MailingListAutoSubscribePolicy.NEVER
                 )
-        account = IMasterStore(Account).get(Account, person.accountID)
+        account = IPrimaryStore(Account).get(Account, person.accountID)
         getUtility(IEmailAddressSet).new(
             alternative_address, person, EmailAddressStatus.VALIDATED
         )
@@ -5365,7 +5365,7 @@ class LaunchpadObjectFactory(ObjectFactory):
             fingerprint = self.getUniqueUnicode("fingerprint")
         if public_key is None:
             public_key = self.getUniqueHexString(64).encode("ASCII")
-        store = IMasterStore(SigningKey)
+        store = IPrimaryStore(SigningKey)
         signing_key = SigningKey(
             key_type=key_type,
             fingerprint=fingerprint,
diff --git a/lib/lp/testing/tests/test_fixture.py b/lib/lp/testing/tests/test_fixture.py
index 42ee9d4..015787a 100644
--- a/lib/lp/testing/tests/test_fixture.py
+++ b/lib/lp/testing/tests/test_fixture.py
@@ -16,7 +16,7 @@ from zope.sendmail.interfaces import IMailDelivery
 
 from lp.registry.model.person import Person
 from lp.services.config import config, dbconfig
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.messaging import rabbit
 from lp.services.webapp.errorlog import globalErrorUtility, notify_publisher
 from lp.testing import TestCase
@@ -161,7 +161,7 @@ class TestPGBouncerFixtureWithCA(TestCase):
         transaction.abort()
 
         try:
-            IMasterStore(Person).find(Person).first()
+            IPrimaryStore(Person).find(Person).first()
             return True
         except DisconnectionError:
             return False
diff --git a/lib/lp/testing/tests/test_layers_functional.py b/lib/lp/testing/tests/test_layers_functional.py
index 17a3412..3a0ae41 100644
--- a/lib/lp/testing/tests/test_layers_functional.py
+++ b/lib/lp/testing/tests/test_layers_functional.py
@@ -221,7 +221,7 @@ class BaseTestCase(TestCase):
             self.assertFalse(
                 want_librarian_working, "Librarian should be fully operational"
             )
-        # Since we use IMasterStore that doesn't throw either AttributeError
+        # Since we use IPrimaryStore that doesn't throw either AttributeError
         # or ComponentLookupError.
         except TypeError:
             self.assertFalse(
diff --git a/lib/lp/translations/browser/tests/test_baseexportview.py b/lib/lp/translations/browser/tests/test_baseexportview.py
index 9ac120f..9260b82 100644
--- a/lib/lp/translations/browser/tests/test_baseexportview.py
+++ b/lib/lp/translations/browser/tests/test_baseexportview.py
@@ -6,7 +6,7 @@ from datetime import timedelta
 
 import transaction
 
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.webapp.servers import LaunchpadTestRequest
 from lp.testing import TestCaseWithFactory
 from lp.testing.layers import ZopelessDatabaseLayer
@@ -24,7 +24,7 @@ from lp.translations.model.poexportrequest import POExportRequest
 
 def wipe_queue(queue):
     """Erase all export queue entries."""
-    IMasterStore(POExportRequest).execute("DELETE FROM POExportRequest")
+    IPrimaryStore(POExportRequest).execute("DELETE FROM POExportRequest")
 
 
 class BaseExportViewMixin(TestCaseWithFactory):
diff --git a/lib/lp/translations/doc/poexportqueue-replication-lag.rst b/lib/lp/translations/doc/poexportqueue-replication-lag.rst
index 9c3b12f..840137c 100644
--- a/lib/lp/translations/doc/poexportqueue-replication-lag.rst
+++ b/lib/lp/translations/doc/poexportqueue-replication-lag.rst
@@ -2,20 +2,20 @@ Replication Lag and the Export Queue
 ====================================
 
 Due to replication lag it's possible for the export queue to see a
-request on the standby store that it actually just removed from the master
+request on the standby store that it actually just removed from the primary
 store.
 
 We start our story with an empty export queue.
 
     >>> import transaction
     >>> from zope.component import getUtility
-    >>> from lp.services.database.interfaces import IMasterStore
+    >>> from lp.services.database.interfaces import IPrimaryStore
     >>> from lp.translations.interfaces.poexportrequest import (
     ...     IPOExportRequestSet,
     ... )
     >>> from lp.translations.interfaces.pofile import IPOFile
     >>> from lp.translations.model.poexportrequest import POExportRequest
-    >>> query = IMasterStore(POExportRequest).execute(
+    >>> query = IPrimaryStore(POExportRequest).execute(
     ...     "DELETE FROM POExportRequest"
     ... )
 
@@ -31,7 +31,7 @@ We have somebody making an export request.
     >>> pofile1_be = factory.makePOFile("be", potemplate=template1)
     >>> pofile1_ja = factory.makePOFile("ja", potemplate=template1)
     >>> queue.addRequest(requester, template1, [pofile1_be, pofile1_ja])
-    >>> query = IMasterStore(POExportRequest).execute(
+    >>> query = IPrimaryStore(POExportRequest).execute(
     ...     "UPDATE POExportRequest SET date_created = '2010-01-10'::date"
     ... )
 
@@ -75,7 +75,7 @@ were to ask again.
     be
     ja
 
-The first request is removed from the master store after processing, but
+The first request is removed from the primary store after processing, but
 not yet from the standby store.  (Since this test is all one session, we
 can reproduce this by not committing the removal).  The second request
 is still technically on the queue, but no longer "live."
diff --git a/lib/lp/translations/model/poexportrequest.py b/lib/lp/translations/model/poexportrequest.py
index 04ec1d2..b7a7aaf 100644
--- a/lib/lp/translations/model/poexportrequest.py
+++ b/lib/lp/translations/model/poexportrequest.py
@@ -13,7 +13,11 @@ from zope.interface import implementer
 from lp.registry.interfaces.person import validate_public_person
 from lp.services.database.constants import DEFAULT
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
+from lp.services.database.interfaces import (
+    IPrimaryStore,
+    IStandbyStore,
+    IStore,
+)
 from lp.services.database.sqlbase import quote
 from lp.services.database.stormbase import StormBase
 from lp.translations.interfaces.poexportrequest import (
@@ -79,7 +83,7 @@ class POExportRequestSet:
             "pofiles": pofile_ids,
         }
 
-        store = IMasterStore(POExportRequest)
+        store = IPrimaryStore(POExportRequest)
 
         if potemplates:
             # Create requests for all these templates, insofar as the same
@@ -124,13 +128,13 @@ class POExportRequestSet:
             )
 
     def _getOldestLiveRequest(self):
-        """Return the oldest live request on the master store.
+        """Return the oldest live request on the primary store.
 
-        Due to replication lag, the master store is always a little
+        Due to replication lag, the primary store is always a little
         ahead of the standby store that exports come from.
         """
-        master_store = IMasterStore(POExportRequest)
-        sorted_by_id = master_store.find(POExportRequest).order_by(
+        primary_store = IPrimaryStore(POExportRequest)
+        sorted_by_id = primary_store.find(POExportRequest).order_by(
             POExportRequest.id
         )
         return sorted_by_id.first()
@@ -139,8 +143,8 @@ class POExportRequestSet:
         """Return oldest request on the queue."""
         # Due to replication lag, it's possible that the standby store
         # still has copies of requests that have already been completed
-        # and deleted from the master store.  So first get the oldest
-        # request that is "live," i.e. still present on the master
+        # and deleted from the primary store.  So first get the oldest
+        # request that is "live," i.e. still present on the primary
         # store.
         oldest_live = self._getOldestLiveRequest()
         if oldest_live is None:
@@ -157,7 +161,7 @@ class POExportRequestSet:
         # Exports happen off the standby store.  To ensure that export
         # does not happen until requests have been replicated to the
         # standby, they are read primarily from the standby even though they
-        # are deleted on the master afterwards.
+        # are deleted on the primary afterwards.
         head = self._getHeadRequest()
         if head is None:
             return None, None, None, None
@@ -186,7 +190,7 @@ class POExportRequestSet:
     def removeRequest(self, request_ids):
         """See `IPOExportRequestSet`."""
         if request_ids:
-            IMasterStore(POExportRequest).find(
+            IPrimaryStore(POExportRequest).find(
                 POExportRequest, POExportRequest.id.is_in(request_ids)
             ).remove()
 
diff --git a/lib/lp/translations/model/pofile.py b/lib/lp/translations/model/pofile.py
index af6f85d..06a70ce 100644
--- a/lib/lp/translations/model/pofile.py
+++ b/lib/lp/translations/model/pofile.py
@@ -40,7 +40,7 @@ from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.registry.interfaces.person import validate_public_person
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.datetimecol import UtcDateTimeCol
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, flush_database_updates, quote
 from lp.services.database.sqlobject import (
     BoolCol,
@@ -260,7 +260,9 @@ class POFileMixIn(RosettaStats):
         be among `origin_tables`.
         """
         results = (
-            IMasterStore(POTMsgSet).using(origin_tables).find(POTMsgSet, query)
+            IPrimaryStore(POTMsgSet)
+            .using(origin_tables)
+            .find(POTMsgSet, query)
         )
         return results.order_by(TranslationTemplateItem.sequence)
 
@@ -1654,7 +1656,7 @@ class POFileSet:
             )
             clauses.append(POTemplate.id == POFile.potemplateID)
             clauses.append(Not(Exists(message_select)))
-        result = IMasterStore(POFile).find((POFile, POTMsgSet), clauses)
+        result = IPrimaryStore(POFile).find((POFile, POTMsgSet), clauses)
         return result.order_by("POFile.id")
 
     def getPOFilesTouchedSince(self, date):
@@ -1664,7 +1666,7 @@ class POFileSet:
         from lp.registry.model.productseries import ProductSeries
         from lp.translations.model.potemplate import POTemplate
 
-        store = IMasterStore(POTemplate)
+        store = IPrimaryStore(POTemplate)
 
         # Find a matching POTemplate and its ProductSeries
         # and DistroSeries, if they are defined.
diff --git a/lib/lp/translations/model/potemplate.py b/lib/lp/translations/model/potemplate.py
index f52cb6a..67f74c9 100644
--- a/lib/lp/translations/model/potemplate.py
+++ b/lib/lp/translations/model/potemplate.py
@@ -37,7 +37,7 @@ from lp.services.database.constants import DEFAULT
 from lp.services.database.datetimecol import UtcDateTimeCol
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.database.sqlbase import SQLBase, flush_database_updates
 from lp.services.database.sqlobject import (
     BoolCol,
@@ -1541,7 +1541,7 @@ class POTemplateSet:
     def wipeSuggestivePOTemplatesCache(self):
         """See `IPOTemplateSet`."""
         return (
-            IMasterStore(POTemplate)
+            IPrimaryStore(POTemplate)
             .execute("DELETE FROM SuggestivePOTemplate")
             .rowcount
         )
@@ -1549,7 +1549,7 @@ class POTemplateSet:
     def removeFromSuggestivePOTemplatesCache(self, potemplate):
         """See `IPOTemplateSet`."""
         rowcount = (
-            IMasterStore(POTemplate)
+            IPrimaryStore(POTemplate)
             .execute(
                 "DELETE FROM SuggestivePOTemplate " "WHERE potemplate = ?",
                 params=(potemplate.id,),
@@ -1561,7 +1561,7 @@ class POTemplateSet:
     def populateSuggestivePOTemplatesCache(self):
         """See `IPOTemplateSet`."""
         return (
-            IMasterStore(POTemplate)
+            IPrimaryStore(POTemplate)
             .execute(
                 """
             INSERT INTO SuggestivePOTemplate (
diff --git a/lib/lp/translations/model/translationimportqueue.py b/lib/lp/translations/model/translationimportqueue.py
index f7fa1b8..0b7cc8c 100644
--- a/lib/lp/translations/model/translationimportqueue.py
+++ b/lib/lp/translations/model/translationimportqueue.py
@@ -36,7 +36,11 @@ from lp.registry.interfaces.series import SeriesStatus
 from lp.registry.interfaces.sourcepackage import ISourcePackage
 from lp.services.database.constants import DEFAULT, UTC_NOW
 from lp.services.database.enumcol import DBEnum
-from lp.services.database.interfaces import IMasterStore, IStandbyStore, IStore
+from lp.services.database.interfaces import (
+    IPrimaryStore,
+    IStandbyStore,
+    IStore,
+)
 from lp.services.database.sqlbase import quote
 from lp.services.database.stormbase import StormBase
 from lp.services.database.stormexpr import IsFalse
@@ -1086,7 +1090,7 @@ class TranslationImportQueue:
                 TranslationImportQueueEntry.productseries_id
                 == productseries.id
             )
-        store = IMasterStore(TranslationImportQueueEntry)
+        store = IPrimaryStore(TranslationImportQueueEntry)
         entries = store.find(TranslationImportQueueEntry, *clauses)
         entries = list(
             entries.order_by(
@@ -1206,7 +1210,7 @@ class TranslationImportQueue:
         except TranslationImportQueueConflictError:
             return None
 
-        store = IMasterStore(TranslationImportQueueEntry)
+        store = IPrimaryStore(TranslationImportQueueEntry)
         if entry is None:
             # It's a new row.
             entry = TranslationImportQueueEntry(
@@ -1755,7 +1759,7 @@ class TranslationImportQueue:
 
     def cleanUpQueue(self):
         """See `ITranslationImportQueue`."""
-        store = IMasterStore(TranslationImportQueueEntry)
+        store = IPrimaryStore(TranslationImportQueueEntry)
 
         return (
             self._cleanUpObsoleteEntries(store)
@@ -1765,4 +1769,4 @@ class TranslationImportQueue:
 
     def remove(self, entry):
         """See ITranslationImportQueue."""
-        IMasterStore(TranslationImportQueueEntry).remove(entry)
+        IPrimaryStore(TranslationImportQueueEntry).remove(entry)
diff --git a/lib/lp/translations/model/vpoexport.py b/lib/lp/translations/model/vpoexport.py
index 384cc92..33111ff 100644
--- a/lib/lp/translations/model/vpoexport.py
+++ b/lib/lp/translations/model/vpoexport.py
@@ -73,7 +73,7 @@ class VPOExportSet:
         # Use the standby store.  We may want to write to the distroseries
         # to register a language pack, but not to the translation data
         # we retrieve for it.
-        # XXX wgrant 2017-03-21: Moved to master to avoid termination
+        # XXX wgrant 2017-03-21: Moved to primary to avoid termination
         # due to long transactions.
         query = IStore(POFile).using(*tables).find(POFile, And(*conditions))
 
diff --git a/lib/lp/translations/scripts/migrate_current_flag.py b/lib/lp/translations/scripts/migrate_current_flag.py
index adaee46..9b5bb46 100644
--- a/lib/lp/translations/scripts/migrate_current_flag.py
+++ b/lib/lp/translations/scripts/migrate_current_flag.py
@@ -13,7 +13,7 @@ from zope.interface import implementer
 
 from lp.registry.model.product import Product
 from lp.registry.model.productseries import ProductSeries
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.looptuner import DBLoopTuner, ITunableLoop
 from lp.translations.model.potemplate import POTemplate
 from lp.translations.model.translationmessage import TranslationMessage
@@ -36,7 +36,7 @@ class TranslationMessageImportedFlagUpdater:
         self.logger.info(
             "Fixing up a total of %d TranslationMessages." % (self.total)
         )
-        self.store = IMasterStore(Product)
+        self.store = IPrimaryStore(Product)
 
     def isDone(self):
         """See `ITunableLoop`."""
@@ -126,7 +126,7 @@ class MigrateCurrentFlagProcess:
         self.logger = logger
         if logger is None:
             self.logger = logging.getLogger("migrate-current-flag")
-        self.store = IMasterStore(Product)
+        self.store = IPrimaryStore(Product)
 
     def getProductsWithTemplates(self):
         """Get Product.ids for projects with any translations templates."""
diff --git a/lib/lp/translations/scripts/translations_to_branch.py b/lib/lp/translations/scripts/translations_to_branch.py
index e3d57b6..abbb8fa 100644
--- a/lib/lp/translations/scripts/translations_to_branch.py
+++ b/lib/lp/translations/scripts/translations_to_branch.py
@@ -30,7 +30,7 @@ from lp.code.model.directbranchcommit import (
 )
 from lp.codehosting.vfs import get_rw_server
 from lp.services.config import config
-from lp.services.database.interfaces import IMasterStore, IStandbyStore
+from lp.services.database.interfaces import IPrimaryStore, IStandbyStore
 from lp.services.helpers import shortlist
 from lp.services.mail.helpers import (
     get_contact_email_addresses,
@@ -175,12 +175,12 @@ class ExportTranslationsToBranch(LaunchpadCronScript):
         try:
             committer = self._makeDirectBranchCommit(branch)
         except StaleLastMirrored as e:
-            # Request a rescan of the branch.  Do this on the master
+            # Request a rescan of the branch.  Do this on the primary
             # store, or we won't be able to modify the branch object.
-            # (The master copy may also be more recent, in which case
+            # (The primary copy may also be more recent, in which case
             # the rescan won't be necessary).
-            master_branch = IMasterStore(branch).get(Branch, branch.id)
-            master_branch.branchChanged(**get_db_branch_info(**e.info))
+            primary_branch = IPrimaryStore(branch).get(Branch, branch.id)
+            primary_branch.branchChanged(**get_db_branch_info(**e.info))
             self.logger.warning(
                 "Skipped %s due to stale DB info, and scheduled a new scan.",
                 branch.bzr_identity,
diff --git a/lib/lp/translations/tests/test_autoapproval.py b/lib/lp/translations/tests/test_autoapproval.py
index 6f88742..5fbd6ba 100644
--- a/lib/lp/translations/tests/test_autoapproval.py
+++ b/lib/lp/translations/tests/test_autoapproval.py
@@ -25,7 +25,7 @@ from lp.registry.model.sourcepackagename import (
     SourcePackageName,
     SourcePackageNameSet,
 )
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.worlddata.interfaces.language import ILanguageSet
 from lp.testing import TestCaseWithFactory, verifyObject
 from lp.testing.fakemethod import FakeMethod
@@ -840,7 +840,7 @@ class TestTemplateGuess(TestCaseWithFactory, GardenerDbUserMixin):
         self.assertEqual(RosettaImportStatus.NEEDS_REVIEW, old_entry.status)
         self.assertIs(None, old_entry.potemplate)
         self.assertEqual(template, new_entry.potemplate)
-        IMasterStore(old_entry).flush()
+        IPrimaryStore(old_entry).flush()
 
         # The approver deals with the problem by skipping the entry.
         queue._attemptToApprove(old_entry)
@@ -1014,7 +1014,7 @@ class TestCleanup(TestCaseWithFactory, GardenerDbUserMixin):
     def setUp(self):
         super().setUp()
         self.queue = TranslationImportQueue()
-        self.store = IMasterStore(TranslationImportQueueEntry)
+        self.store = IPrimaryStore(TranslationImportQueueEntry)
 
     def _makeProductEntry(self, path="foo.pot", status=None):
         """Simulate upload for a product."""
@@ -1301,10 +1301,10 @@ class TestAutoBlocking(TestCaseWithFactory):
     def setUp(self):
         super().setUp()
         self.queue = TranslationImportQueue()
-        # Our test queue operates on the master store instead of the
+        # Our test queue operates on the primary store instead of the
         # standby store so we don't have to synchronize stores.
-        master_store = IMasterStore(TranslationImportQueueEntry)
-        self.queue._getStandbyStore = FakeMethod(result=master_store)
+        primary_store = IPrimaryStore(TranslationImportQueueEntry)
+        self.queue._getStandbyStore = FakeMethod(result=primary_store)
 
     def _copyTargetFromEntry(self, entry):
         """Return a dict representing `entry`'s translation target.
diff --git a/scripts/fix-translations-opening.py b/scripts/fix-translations-opening.py
index 09d16f3..ad30977 100755
--- a/scripts/fix-translations-opening.py
+++ b/scripts/fix-translations-opening.py
@@ -10,7 +10,7 @@ from zope.interface import implementer
 
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.model.distroseries import DistroSeries
-from lp.services.database.interfaces import IMasterStore
+from lp.services.database.interfaces import IPrimaryStore
 from lp.services.looptuner import DBLoopTuner, ITunableLoop
 from lp.services.scripts.base import LaunchpadScript
 
@@ -115,7 +115,7 @@ class ExecuteLoop:
             self.statement.splitlines()[0],
             chunk_size,
         )
-        store = IMasterStore(DistroSeries)
+        store = IPrimaryStore(DistroSeries)
         result = store.execute(
             self.statement,
             (
diff --git a/utilities/soyuz-sampledata-setup.py b/utilities/soyuz-sampledata-setup.py
index ed70561..68aa035 100755
--- a/utilities/soyuz-sampledata-setup.py
+++ b/utilities/soyuz-sampledata-setup.py
@@ -38,7 +38,7 @@ from lp.registry.interfaces.codeofconduct import ISignedCodeOfConductSet
 from lp.registry.interfaces.person import IPersonSet
 from lp.registry.interfaces.series import SeriesStatus
 from lp.registry.model.codeofconduct import SignedCodeOfConduct
-from lp.services.database.interfaces import IMasterStore, IStandbyStore
+from lp.services.database.interfaces import IPrimaryStore, IStandbyStore
 from lp.services.scripts.base import LaunchpadScript
 from lp.soyuz.enums import SourcePackageFormat
 from lp.soyuz.interfaces.component import IComponentSet
@@ -154,7 +154,7 @@ def add_architecture(distroseries, architecture_name):
         official=True,
         architecturetag=architecture_name,
     )
-    IMasterStore(DistroArchSeries).add(archseries)
+    IPrimaryStore(DistroArchSeries).add(archseries)
 
 
 def create_sections(distroseries):
@@ -334,7 +334,7 @@ def create_sample_series(original_series, log):
 def add_series_component(series):
     """Permit a component in the given series."""
     component = getUtility(IComponentSet)["main"]
-    IMasterStore(ComponentSelection).add(
+    IPrimaryStore(ComponentSelection).add(
         ComponentSelection(distroseries=series, component=component)
     )