launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #30507
[Merge] launchpad:master into launchpad:db-devel
Ines Almeida has proposed merging launchpad:master into launchpad:db-devel.
Commit message:
Merge branch 'master' into db-devel
This was done in an attempt to stop the tests in the buildbot from failing as consistently (there is a test fix merged into master, and a few other changes that could potentially help with the tests).
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
Related bugs:
Bug #2037033 in Launchpad itself: "The +new-snap text does mention snappy and points to an invalid (404) URL for documentation"
https://bugs.launchpad.net/launchpad/+bug/2037033
For more details, see:
https://code.launchpad.net/~launchpad/launchpad/+git/launchpad/+merge/452145
--
Your team Launchpad code reviewers is requested to review the proposed merge of launchpad:master into launchpad:db-devel.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index eafc49d..12369b7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -35,8 +35,8 @@ repos:
lib/contrib/.*
|utilities/community-contributions\.py
)$
-- repo: https://github.com/psf/black
- rev: 23.3.0
+- repo: https://github.com/psf/black-pre-commit-mirror
+ rev: 23.9.1
hooks:
- id: black
exclude: |
@@ -65,6 +65,9 @@ repos:
hooks:
- id: eslint
args: [--quiet]
+ # 20.7.0 upgraded from npm 9.8.1 to 10.1.0, which appears to fail to
+ # use the proxy correctly in Launchpad CI builds.
+ language_version: "20.6.1"
- repo: https://github.com/keewis/blackdoc
rev: v0.3.8
hooks:
diff --git a/charm/launchpad-ppa-publisher/charmcraft.yaml b/charm/launchpad-ppa-publisher/charmcraft.yaml
index 76dc78c..13d0d15 100644
--- a/charm/launchpad-ppa-publisher/charmcraft.yaml
+++ b/charm/launchpad-ppa-publisher/charmcraft.yaml
@@ -73,3 +73,4 @@ parts:
- CHARM_INTERFACES_DIR: $CRAFT_STAGE/layers/interface
- PIP_NO_INDEX: "true"
- PIP_FIND_LINKS: $CRAFT_STAGE/charm-wheels
+ reactive-charm-build-arguments: [--binary-wheels-from-source]
diff --git a/charm/launchpad-ppa-uploader/charmcraft.yaml b/charm/launchpad-ppa-uploader/charmcraft.yaml
index 4ff69d8..392d3ad 100644
--- a/charm/launchpad-ppa-uploader/charmcraft.yaml
+++ b/charm/launchpad-ppa-uploader/charmcraft.yaml
@@ -61,3 +61,4 @@ parts:
- CHARM_INTERFACES_DIR: $CRAFT_STAGE/layers/interface
- PIP_NO_INDEX: "true"
- PIP_FIND_LINKS: $CRAFT_STAGE/charm-wheels
+ reactive-charm-build-arguments: [--binary-wheels-from-source]
diff --git a/cronscripts/parse-librarian-apache-access-logs.py b/cronscripts/parse-librarian-apache-access-logs.py
index 935f12e..1a724b5 100755
--- a/cronscripts/parse-librarian-apache-access-logs.py
+++ b/cronscripts/parse-librarian-apache-access-logs.py
@@ -16,9 +16,9 @@ updating the counts of every LFA, in order to get through the backlog.
import _pythonpath # noqa: F401
-from storm.sqlobject import SQLObjectNotFound
from zope.component import getUtility
+from lp.app.errors import NotFoundError
from lp.services.apachelogparser.script import ParseApacheLogs
from lp.services.config import config
from lp.services.librarian.interfaces import ILibraryFileAliasSet
@@ -47,10 +47,11 @@ class ParseLibrarianApacheLogs(ParseApacheLogs):
def getDownloadCountUpdater(self, file_id):
"""See `ParseApacheLogs`."""
try:
- return self.libraryfilealias_set[file_id].updateDownloadCount
- except SQLObjectNotFound:
- # This file has been deleted from the librarian, so don't
- # try to store download counters for it.
+ return self.libraryfilealias_set[int(file_id)].updateDownloadCount
+ except (ValueError, NotFoundError):
+ # Either this isn't a valid file ID or this file has been
+ # deleted from the librarian, so don't try to store download
+ # counters for it.
return None
diff --git a/database/replication/Makefile b/database/replication/Makefile
index 8f63dc0..7cbfa77 100644
--- a/database/replication/Makefile
+++ b/database/replication/Makefile
@@ -31,9 +31,18 @@ STAGING_CONFIG=staging-db # For swapping fresh db into place.
STAGING_DUMP=launchpad.dump # Dumpfile to build new staging from.
STAGING_TABLESPACE=pg_default # 'pg_default' for default
STAGING_LOGDIR=/srv/staging.launchpad.net/staging-logs
-DOGFOOD_DBNAME=launchpad_dogfood
+STAGING_POSTGRESQL_VERSION=10
DOGFOOD_DUMP=launchpad.dump
+# Names of underlying PostgreSQL databases.
+STAGING_DBNAME_MAIN=lpmain_staging
+STAGING_DBNAME_SESSION=session_staging
+DOGFOOD_DBNAME=launchpad_dogfood
+
+# Names in pgbouncer.ini's `[databases]` section.
+STAGING_PGBOUNCER_MAIN=launchpad_staging launchpad_staging_slave
+STAGING_PGBOUNCER_SESSION=session_staging
+
STAGING_PGBOUNCER=psql -p 6432 -U pgbouncer -d pgbouncer
PGMASSACRE=../../utilities/pgmassacre.py
@@ -60,28 +69,27 @@ stagingsetup:
grep -v -E 'TRIGGER public [^ ]+ _sl_' > ${DUMPLIST}
# Deny new connections to the main DBs and kill any leftovers.
- ${STAGING_PGBOUNCER} -c 'DISABLE launchpad_staging'
- ${STAGING_PGBOUNCER} -c 'DISABLE launchpad_staging_slave'
- ${STAGING_PGBOUNCER} -c 'KILL launchpad_staging'
- ${STAGING_PGBOUNCER} -c 'KILL launchpad_staging_slave'
- ${STAGING_PGBOUNCER} -c 'RESUME launchpad_staging'
- ${STAGING_PGBOUNCER} -c 'RESUME launchpad_staging_slave'
- -${PGMASSACRE} lpmain_staging
+ set -e; for verb in DISABLE KILL RESUME; do \
+ for db in ${STAGING_PGBOUNCER_MAIN}; do \
+ ${STAGING_PGBOUNCER} -c "$$verb $$db"; \
+ done; \
+ done
+ -${PGMASSACRE} ${STAGING_DBNAME_MAIN}
# Quickly clear out the session DB. No need to DISABLE here, as
# we bring the DB back quickly.
- ${STAGING_PGBOUNCER} -c 'KILL session_staging'
- psql -d session_staging -c 'TRUNCATE sessiondata CASCADE;'
- ${STAGING_PGBOUNCER} -c 'RESUME session_staging'
+ ${STAGING_PGBOUNCER} -c 'KILL ${STAGING_PGBOUNCER_SESSION}'
+ psql -d ${STAGING_DBNAME_SESSION} -c 'TRUNCATE sessiondata CASCADE;'
+ ${STAGING_PGBOUNCER} -c 'RESUME ${STAGING_PGBOUNCER_SESSION}'
# Create the DB with the desired default tablespace.
- ${CREATEDB} --tablespace ${STAGING_TABLESPACE} lpmain_staging
+ ${CREATEDB} --tablespace ${STAGING_TABLESPACE} ${STAGING_DBNAME_MAIN}
# Restore the database. We need to restore permissions, despite
# later running security.py, to pull in permissions granted on
# production to users not maintained by security.py.
cat ${STAGING_DUMP} \
- | ./walblock.py -n 5000 -d /var/lib/postgresql/10/staging/pg_wal \
- | pg_restore --dbname=lpmain_staging --no-owner ${EXIT_ON_ERROR} \
+ | ./walblock.py -n 5000 -d /var/lib/postgresql/${STAGING_POSTGRESQL_VERSION}/staging/pg_wal \
+ | pg_restore --dbname=${STAGING_DBNAME_MAIN} --no-owner ${EXIT_ON_ERROR} \
--use-list=${DUMPLIST} -v
rm ${DUMPLIST}
# Apply database patches.
@@ -92,10 +100,11 @@ stagingsetup:
LPCONFIG=${STAGING_CONFIG} ${SHHH} ../schema/security.py \
--log-file=INFO:${STAGING_LOGDIR}/dbupgrade.log
@echo Setting feature flags
- psql -d lpmain_staging -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('profiling.enabled', 'team:launchpad', 0, 'on') ON CONFLICT DO NOTHING"
- psql -d lpmain_staging -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('librarian.swift.enabled', 'default', 0, 'on') ON CONFLICT DO NOTHING"
- ${STAGING_PGBOUNCER} -c 'ENABLE launchpad_staging'
- ${STAGING_PGBOUNCER} -c 'ENABLE launchpad_staging_slave'
+ psql -d ${STAGING_DBNAME_MAIN} -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('profiling.enabled', 'team:launchpad', 0, 'on') ON CONFLICT DO NOTHING"
+ psql -d ${STAGING_DBNAME_MAIN} -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('librarian.swift.enabled', 'default', 0, 'on') ON CONFLICT DO NOTHING"
+ set -e; for db in ${STAGING_PGBOUNCER_MAIN}; do \
+ ${STAGING_PGBOUNCER} -c "ENABLE $$db"; \
+ done
stagingswitch:
echo Nothing to do. Staging already built inplace.
diff --git a/database/schema/security.cfg b/database/schema/security.cfg
index ab71472..50328ff 100644
--- a/database/schema/security.cfg
+++ b/database/schema/security.cfg
@@ -3,9 +3,9 @@
#
# Possible permissions: SELECT, INSERT, UPDATE, EXECUTE
#
-# Note that we cannot have INSERT only tables if we are using SQLObject, as it
-# creates new entries by first doing an insert (to get the id) and then
-# issuing an update
+# Note that we cannot have INSERT only tables if we are using Storm, as it
+# sometimes creates new entries by first doing an insert (to get the id) and
+# then issuing an update.
[DEFAULT]
public_schemas=
diff --git a/doc/reference/python.rst b/doc/reference/python.rst
index e3a76ab..a616784 100644
--- a/doc/reference/python.rst
+++ b/doc/reference/python.rst
@@ -264,21 +264,6 @@ passes and returns them easier to debug.
Database-related
================
-Storm
------
-
-We use two database ORM (object-relational mapper) APIs in Launchpad, the
-older and deprecated SQLObject API and the new and improved `Storm
-<https://storm.canonical.com>`_ API. All new code should use the Storm API,
-and you are encouraged to convert existing code to Storm as part of your
-tech-debt payments.
-
-.. note::
-
- The SQLObject and Storm ``ResultSet`` interfaces are not compatible, so
- e.g. if you need to ``UNION`` between these two, you will run into
- trouble. We are looking into ways to address this.
-
Field attributes
----------------
@@ -298,7 +283,7 @@ queries or fragments, e.g.:
FROM TeamParticipation
INNER JOIN Person ON TeamParticipation.team = Person.id
WHERE TeamParticipation.person = %s
- """ % sqlvalues(personID)
+ """ % sqlvalues(person_id)
This is also easy to cut-and-paste into ``psql`` for interactive testing,
unlike if you use several lines of single quoted strings.
diff --git a/lib/lp/answers/browser/questiontarget.py b/lib/lp/answers/browser/questiontarget.py
index ed993fb..e9556fc 100644
--- a/lib/lp/answers/browser/questiontarget.py
+++ b/lib/lp/answers/browser/questiontarget.py
@@ -510,7 +510,7 @@ class SearchQuestionsView(UserSupportLanguagesMixin, LaunchpadFormView):
to question or mdash if there is no related source package.
"""
# XXX sinzui 2007-11-27 bug=164435:
- # SQLObject can refetch the question, so we are comparing ids.
+ # Storm can refetch the question, so we are comparing ids.
assert self.context.id == question.distribution.id, (
"The question.distribution (%s) must be equal to the context (%s)"
% (question.distribution, self.context)
diff --git a/lib/lp/answers/interfaces/question.py b/lib/lp/answers/interfaces/question.py
index dea2c45..00f27ff 100644
--- a/lib/lp/answers/interfaces/question.py
+++ b/lib/lp/answers/interfaces/question.py
@@ -482,8 +482,8 @@ class IQuestion(IHasOwner):
Return the created IQuestionMessage.
- (Note this method is named expireQuestion and not expire because of
- conflicts with SQLObject.)
+ (Note this method is named expireQuestion and not expire because it
+ used to conflict with SQLObject.)
This method should fire an IObjectCreatedEvent for the created
IQuestionMessage and an IObjectModifiedEvent for the question.
diff --git a/lib/lp/answers/interfaces/questionmessage.py b/lib/lp/answers/interfaces/questionmessage.py
index a35a787..e9f8610 100644
--- a/lib/lp/answers/interfaces/questionmessage.py
+++ b/lib/lp/answers/interfaces/questionmessage.py
@@ -10,7 +10,7 @@ __all__ = [
from lazr.restful.declarations import exported, exported_as_webservice_entry
from lazr.restful.fields import Reference
from zope.interface import Interface
-from zope.schema import Bool, Choice, Int
+from zope.schema import Choice, Int
from lp import _
from lp.answers.enums import QuestionAction, QuestionStatus
@@ -74,14 +74,6 @@ class IQuestionMessageView(IMessageView):
),
exported_as="index",
)
- visible = exported(
- Bool(
- title=_("Message visibility."),
- description=_("Whether or not the message is visible."),
- readonly=True,
- ),
- as_of="devel",
- )
@exported_as_webservice_entry(as_of="devel")
diff --git a/lib/lp/app/doc/batch-navigation.rst b/lib/lp/app/doc/batch-navigation.rst
index 25274f1..ab762aa 100644
--- a/lib/lp/app/doc/batch-navigation.rst
+++ b/lib/lp/app/doc/batch-navigation.rst
@@ -8,8 +8,7 @@ This documents and tests the Launchpad-specific elements of its usage.
Note that our use of the batching code relies on the registration of
lp.services.webapp.batching.FiniteSequenceAdapter for
-storm.zope.interfaces.IResultSet and
-storm.zope.interfaces.ISQLObjectResultSet.
+storm.zope.interfaces.IResultSet.
Batch navigation provides a way to navigate batch results in a web
page by providing URL links to the next, previous and numbered pages
diff --git a/lib/lp/app/validators/README.txt b/lib/lp/app/validators/README.txt
index 2efe490..bae777f 100644
--- a/lib/lp/app/validators/README.txt
+++ b/lib/lp/app/validators/README.txt
@@ -1,4 +1,2 @@
-Validators in this directory are either simple functions that correspond
-to database constraints such as valid_name(name), or they can be
-subclasses of sqlobject.include.validators.Validator such as
-PersonValidatorBase.
+Validators in this directory are simple functions that correspond to
+database constraints such as valid_name(name).
diff --git a/lib/lp/archivepublisher/tests/test_sync_signingkeys.py b/lib/lp/archivepublisher/tests/test_sync_signingkeys.py
index 3c73754..7e8a1bb 100644
--- a/lib/lp/archivepublisher/tests/test_sync_signingkeys.py
+++ b/lib/lp/archivepublisher/tests/test_sync_signingkeys.py
@@ -53,7 +53,10 @@ from lp.testing.script import run_script
class TestSyncSigningKeysScript(TestCaseWithFactory):
layer = ZopelessDatabaseLayer
- run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30)
+ # A timeout of 30 seconds is slightly too short and can lead to
+ # non-relevant test failures. 45 seconds is a value estimated from trial
+ # and error.
+ run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=45)
def setUp(self):
super().setUp()
diff --git a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
index 1b27830..f163b4f 100644
--- a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
+++ b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py
@@ -12,6 +12,7 @@ from lp.archiveuploader.nascentupload import NascentUpload
from lp.archiveuploader.tests import datadir, getPolicy
from lp.archiveuploader.uploadpolicy import ArchiveUploadType
from lp.registry.interfaces.distribution import IDistributionSet
+from lp.services.database.interfaces import IStore
from lp.services.librarian.model import LibraryFileAlias
from lp.services.log.logger import DevNullLogger
from lp.soyuz.interfaces.component import IComponentSet
@@ -76,7 +77,7 @@ def prepareHoaryForUploads(test):
ComponentSelection(distroseries=hoary, component=universe)
# Create a fake hoary/i386 chroot.
- fake_chroot = LibraryFileAlias.get(1)
+ fake_chroot = IStore(LibraryFileAlias).get(LibraryFileAlias, 1)
hoary["i386"].addOrUpdateChroot(fake_chroot)
LaunchpadZopelessLayer.txn.commit()
diff --git a/lib/lp/archiveuploader/uploadprocessor.py b/lib/lp/archiveuploader/uploadprocessor.py
index f7cd1ae..9093bdc 100644
--- a/lib/lp/archiveuploader/uploadprocessor.py
+++ b/lib/lp/archiveuploader/uploadprocessor.py
@@ -77,7 +77,6 @@ from lp.code.interfaces.sourcepackagerecipebuild import (
from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuild
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.person import IPersonSet
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.log.logger import BufferLogger
from lp.services.statsd.interfaces.statsd_client import IStatsdClient
from lp.services.webapp.adapter import (
@@ -1020,9 +1019,8 @@ def parse_upload_path(relative_path):
elif first_path.isdigit():
# This must be a binary upload from a build worker.
- try:
- archive = getUtility(IArchiveSet).get(int(first_path))
- except SQLObjectNotFound:
+ archive = getUtility(IArchiveSet).get(int(first_path))
+ if archive is None:
raise UploadPathError(
"Could not find archive with id=%s." % first_path
)
diff --git a/lib/lp/blueprints/doc/specification.rst b/lib/lp/blueprints/doc/specification.rst
index 7ba3314..ede5c54 100644
--- a/lib/lp/blueprints/doc/specification.rst
+++ b/lib/lp/blueprints/doc/specification.rst
@@ -21,11 +21,12 @@ IMilestoneSet can be accessed as a utility.
To create a new Specification, use ISpecificationSet.new:
>>> from lp.registry.interfaces.product import IProductSet
+ >>> from lp.registry.model.person import Person
+ >>> from lp.services.database.interfaces import IStore
>>> productset = getUtility(IProductSet)
>>> upstream_firefox = productset.get(4)
- >>> from lp.registry.model.person import Person
- >>> mark = Person.byName("mark")
+ >>> mark = IStore(Person).find(Person, name="mark").one()
>>> newspec = specset.new(
... "mng",
... "Support MNG Format",
@@ -78,7 +79,7 @@ We attach now a spec to a distribution.
>>> from lp.app.interfaces.launchpad import ILaunchpadCelebrities
>>> ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
- >>> mark = Person.byName("mark")
+ >>> mark = IStore(Person).find(Person, name="mark").one()
>>> ubuspec = specset.new(
... "fix-spec-permissions",
... "Fix Specification Permissions",
@@ -99,7 +100,7 @@ member, and therefore should be able to edit any spec attached to it
>>> print(ubuntu.owner.name)
ubuntu-team
- >>> jdub = Person.byName("jdub")
+ >>> jdub = IStore(Person).find(Person, name="jdub").one()
>>> jdub.inTeam(ubuntu.owner)
True
diff --git a/lib/lp/blueprints/model/specificationworkitem.py b/lib/lp/blueprints/model/specificationworkitem.py
index 79f9316..23ac31b 100644
--- a/lib/lp/blueprints/model/specificationworkitem.py
+++ b/lib/lp/blueprints/model/specificationworkitem.py
@@ -5,7 +5,9 @@ __all__ = [
"SpecificationWorkItem",
]
-from storm.locals import Bool, Int, Reference, Unicode
+from datetime import timezone
+
+from storm.locals import Bool, DateTime, Int, Reference, Unicode
from storm.store import Store
from zope.interface import implementer
@@ -16,7 +18,6 @@ from lp.blueprints.interfaces.specificationworkitem import (
)
from lp.registry.interfaces.person import validate_public_person
from lp.services.database.constants import DEFAULT
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.enumcol import DBEnum
from lp.services.database.stormbase import StormBase
from lp.services.helpers import backslashreplace
@@ -40,7 +41,9 @@ class SpecificationWorkItem(StormBase):
allow_none=False,
default=SpecificationWorkItemStatus.TODO,
)
- date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
+ date_created = DateTime(
+ allow_none=False, default=DEFAULT, tzinfo=timezone.utc
+ )
sequence = Int(allow_none=False)
deleted = Bool(allow_none=False, default=False)
diff --git a/lib/lp/blueprints/model/sprint.py b/lib/lp/blueprints/model/sprint.py
index 3a45f35..15f20ea 100644
--- a/lib/lp/blueprints/model/sprint.py
+++ b/lib/lp/blueprints/model/sprint.py
@@ -425,12 +425,7 @@ class HasSprintsMixin:
Subclasses must overwrite this method if it doesn't suit them.
"""
- try:
- table = getattr(self, "__storm_table__")
- except AttributeError:
- # XXX cjwatson 2020-09-10: Remove this once all inheritors have
- # been converted from SQLObject to Storm.
- table = getattr(self, "_table")
+ table = getattr(self, "__storm_table__")
return [
getattr(Specification, table.lower()) == self,
Specification.id == SprintSpecification.specification_id,
diff --git a/lib/lp/blueprints/model/sprintattendance.py b/lib/lp/blueprints/model/sprintattendance.py
index 693d4f3..82b677b 100644
--- a/lib/lp/blueprints/model/sprintattendance.py
+++ b/lib/lp/blueprints/model/sprintattendance.py
@@ -3,12 +3,13 @@
__all__ = ["SprintAttendance"]
-from storm.locals import Bool, Int, Reference
+from datetime import timezone
+
+from storm.locals import Bool, DateTime, Int, Reference
from zope.interface import implementer
from lp.blueprints.interfaces.sprintattendance import ISprintAttendance
from lp.registry.interfaces.person import validate_public_person
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.stormbase import StormBase
@@ -26,8 +27,8 @@ class SprintAttendance(StormBase):
attendeeID = Int(name="attendee", validator=validate_public_person)
attendee = Reference(attendeeID, "Person.id")
- time_starts = UtcDateTimeCol(notNull=True)
- time_ends = UtcDateTimeCol(notNull=True)
+ time_starts = DateTime(allow_none=False, tzinfo=timezone.utc)
+ time_ends = DateTime(allow_none=False, tzinfo=timezone.utc)
_is_physical = Bool(name="is_physical", default=True)
def __init__(self, sprint, attendee):
diff --git a/lib/lp/blueprints/vocabularies/specificationdependency.py b/lib/lp/blueprints/vocabularies/specificationdependency.py
index d458c05..f6edb5f 100644
--- a/lib/lp/blueprints/vocabularies/specificationdependency.py
+++ b/lib/lp/blueprints/vocabularies/specificationdependency.py
@@ -168,7 +168,7 @@ class SpecificationDepCandidatesVocabulary(StormVocabularyBase):
raise LookupError(token)
def search(self, query, vocab_filter=None):
- """See `SQLObjectVocabularyBase.search`.
+ """See `StormVocabularyBase.search`.
We find specs where query is in the text of name or title, or matches
the full text index and then filter out ineligible specs using
diff --git a/lib/lp/bugs/browser/tests/buglinktarget-views.rst b/lib/lp/bugs/browser/tests/buglinktarget-views.rst
index d3a139a..caebb44 100644
--- a/lib/lp/bugs/browser/tests/buglinktarget-views.rst
+++ b/lib/lp/bugs/browser/tests/buglinktarget-views.rst
@@ -94,7 +94,7 @@ IBugLinkTarget.
>>> print(view.cancel_url)
http://bugs.launchpad.test/bugs/cve/2005-2730
-After removing the bugs, it sends a SQLObjectModified event.
+After removing the bugs, it sends an ObjectModifiedEvent.
>>> request = LaunchpadTestRequest(
... method="POST",
diff --git a/lib/lp/bugs/model/bug.py b/lib/lp/bugs/model/bug.py
index 5da7f9b..989abe8 100644
--- a/lib/lp/bugs/model/bug.py
+++ b/lib/lp/bugs/model/bug.py
@@ -2610,7 +2610,7 @@ class Bug(StormBase, InformationTypeMixin):
),
LeftJoin(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
)
.find(
@@ -2618,7 +2618,7 @@ class Bug(StormBase, InformationTypeMixin):
BugAttachment.bug == self,
Or(
BugAttachment.url != None,
- LibraryFileAlias.contentID != None,
+ LibraryFileAlias.content_id != None,
),
)
.order_by(BugAttachment.id)
diff --git a/lib/lp/bugs/model/bugtask.py b/lib/lp/bugs/model/bugtask.py
index d66c62a..2d9845e 100644
--- a/lib/lp/bugs/model/bugtask.py
+++ b/lib/lp/bugs/model/bugtask.py
@@ -932,7 +932,7 @@ class BugTask(StormBase):
for synched_attr in self._CONJOINED_ATTRIBUTES:
replica_attr_value = getattr(conjoined_replica, synched_attr)
# Bypass our checks that prevent setting attributes on
- # conjoined primaries by calling the underlying sqlobject
+ # conjoined primaries by calling the underlying Storm
# setter methods directly.
setattr(self, synched_attr, PassthroughValue(replica_attr_value))
diff --git a/lib/lp/bugs/model/bugtasksearch.py b/lib/lp/bugs/model/bugtasksearch.py
index 8de8c55..d8170b6 100644
--- a/lib/lp/bugs/model/bugtasksearch.py
+++ b/lib/lp/bugs/model/bugtasksearch.py
@@ -29,8 +29,9 @@ from storm.expr import (
Row,
Select,
Union,
+ With,
)
-from storm.info import ClassAlias
+from storm.info import ClassAlias, get_cls_info
from storm.references import Reference
from zope.component import getUtility
from zope.security.proxy import isinstance as zope_isinstance
@@ -77,10 +78,6 @@ from lp.registry.model.teammembership import TeamParticipation
from lp.services.database.bulk import load
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import (
- convert_storm_clause_to_string,
- sqlvalues,
-)
from lp.services.database.stormexpr import (
ArrayAgg,
ArrayIntersects,
@@ -224,10 +221,10 @@ def search_bugs(pre_iter_hook, alternatives, just_bug_ids=False):
clauseTables,
bugtask_decorator,
join_tables,
- with_clause,
+ with_clauses,
] = _build_query(alternatives[0])
- if with_clause:
- store = store.with_(with_clause)
+ if with_clauses:
+ store = store.with_(with_clauses)
decorators.append(bugtask_decorator)
origin = _build_origin(
join_tables + orderby_joins, clauseTables, start
@@ -242,12 +239,12 @@ def search_bugs(pre_iter_hook, alternatives, just_bug_ids=False):
clauseTables,
decorator,
join_tables,
- with_clause,
+ with_clauses,
] = _build_query(params)
origin = _build_origin(join_tables, clauseTables, start)
localstore = store
- if with_clause:
- localstore = store.with_(with_clause)
+ if with_clauses:
+ localstore = store.with_(with_clauses)
next_result = localstore.using(*origin).find(BugTaskFlat, query)
results.append(next_result)
# NB: assumes the decorators are all compatible.
@@ -337,8 +334,8 @@ def _build_query(params):
# * a searchbuilder.any object, representing a set of acceptable
# filter values
# * a searchbuilder.NULL object
- # * an sqlobject
- # * a dbschema item
+ # * a Storm instance
+ # * a `DBItem`
# * None (meaning no filter criteria specified for that arg_name)
#
# XXX: kiko 2006-03-16:
@@ -492,9 +489,16 @@ def _build_query(params):
if params.structural_subscriber is not None:
with_clauses.append(
- """ss as (SELECT * from StructuralSubscription
- WHERE StructuralSubscription.subscriber = %s)"""
- % sqlvalues(params.structural_subscriber)
+ With(
+ "ss",
+ Select(
+ get_cls_info(StructuralSubscription).columns,
+ where=(
+ StructuralSubscription.subscriber
+ == params.structural_subscriber
+ ),
+ ),
+ )
)
class StructuralSubscriptionCTE(StructuralSubscription):
@@ -761,27 +765,23 @@ def _build_query(params):
)
store = IStore(Bug)
with_clauses.append(
- convert_storm_clause_to_string(
- WithMaterialized(
- "commented_bug_ids",
- store,
- Union(commented_messages, commented_activities),
- )
+ WithMaterialized(
+ "commented_bug_ids",
+ store,
+ Union(commented_messages, commented_activities),
)
)
with_clauses.append(
- convert_storm_clause_to_string(
- WithMaterialized(
- "commented_bugtask_ids",
- store,
- Select(
- BugTaskFlat.bugtask_id,
- tables=[BugTaskFlat],
- where=BugTaskFlat.bug_id.is_in(
- Select(Column("bug", "commented_bug_ids"))
- ),
+ WithMaterialized(
+ "commented_bugtask_ids",
+ store,
+ Select(
+ BugTaskFlat.bugtask_id,
+ tables=[BugTaskFlat],
+ where=BugTaskFlat.bug_id.is_in(
+ Select(Column("bug", "commented_bug_ids"))
),
- )
+ ),
)
)
extra_clauses.append(
@@ -921,11 +921,7 @@ def _build_query(params):
obj = decor(obj)
return obj
- if with_clauses:
- with_clause = SQL(", ".join(with_clauses))
- else:
- with_clause = None
- return (query, clauseTables, decorator, join_tables, with_clause)
+ return (query, clauseTables, decorator, join_tables, with_clauses)
def _process_order_by(params):
diff --git a/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst b/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst
index 116baaa..9b64266 100644
--- a/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst
+++ b/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst
@@ -15,7 +15,7 @@ We'll start by adding some attachments to the bug:
>>> from lp.services.database.sqlbase import flush_database_updates
>>> from lp.testing import login, logout
>>> login("foo.bar@xxxxxxxxxxxxx")
- >>> mark = Person.selectOneBy(name="mark")
+ >>> mark = IStore(Person).find(Person, name="mark").one()
>>> mark.display_name = "M\xe1rk Sh\xfattlew\xf2rth"
>>> bug = IStore(Bug).get(Bug, 1)
>>> content = BytesIO(b"<html><body>bogus</body></html>")
diff --git a/lib/lp/bugs/stories/webservice/xx-bug.rst b/lib/lp/bugs/stories/webservice/xx-bug.rst
index 0bc2373..0aa7da2 100644
--- a/lib/lp/bugs/stories/webservice/xx-bug.rst
+++ b/lib/lp/bugs/stories/webservice/xx-bug.rst
@@ -387,6 +387,51 @@ We don't have to submit a subject when we add a new message.
subject: 'Re: Firefox install instructions should be complete'
web_link: '...'
+The "visible" field is exported in the "devel" version of the web service API
+and it defaults to True.
+
+ >>> response = webservice.get("/bugs/5/messages", api_version="devel")
+ >>> messages = response.jsonBody()["entries"]
+ >>> pprint_entry(messages[0])
+ bug_attachments_collection_link:
+ 'http://.../firefox/+bug/5/comments/0/bug_attachments'
+ content: 'All ways of downloading firefox should provide...'
+ date_created: '2005-01-14T17:27:03.702622+00:00'
+ date_deleted: None
+ date_last_edited: None
+ owner_link: 'http://.../~name12'
+ parent_link: None
+ resource_type_link: 'http://.../#message'
+ revisions_collection_link: 'http://.../firefox/+bug/5/comments/0/revisions'
+ self_link: 'http://.../firefox/+bug/5/comments/0'
+ subject: 'Firefox install instructions should be complete'
+ visible: True
+ web_link: 'http://bugs.../firefox/+bug/5/comments/0'
+
+The "visible" field will be False when a comment is hidden.
+
+ >>> response = webservice.named_post(
+ ... "/bugs/5", "setCommentVisibility", comment_number=0, visible=False
+ ... )
+ >>> response.status
+ 200
+ >>> response = webservice.get("/bugs/5/messages", api_version="devel")
+ >>> messages = response.jsonBody()["entries"]
+ >>> pprint_entry(messages[0])
+ bug_attachments_collection_link:
+ 'http://.../firefox/+bug/5/comments/0/bug_attachments'
+ content: 'All ways of downloading firefox should provide...'
+ date_created: '2005-01-14T17:27:03.702622+00:00'
+ date_deleted: None
+ date_last_edited: None
+ owner_link: 'http://.../~name12'
+ parent_link: None
+ resource_type_link: 'http://.../#message'
+ revisions_collection_link: 'http://.../firefox/+bug/5/comments/0/revisions'
+ self_link: 'http://.../firefox/+bug/5/comments/0'
+ subject: 'Firefox install instructions should be complete'
+ visible: False
+ web_link: 'http://bugs.../firefox/+bug/5/comments/0'
Bug tasks
---------
diff --git a/lib/lp/buildmaster/interfaces/processor.py b/lib/lp/buildmaster/interfaces/processor.py
index 67f0a58..b9bb853 100644
--- a/lib/lp/buildmaster/interfaces/processor.py
+++ b/lib/lp/buildmaster/interfaces/processor.py
@@ -40,7 +40,7 @@ class ProcessorNotFound(NameLookupFailed):
# 'devel' as their version.
@exported_as_webservice_entry(publish_web_link=False, as_of="beta")
class IProcessor(Interface):
- """The SQLObject Processor Interface"""
+ """The Storm Processor Interface"""
id = Attribute("The Processor ID")
name = exported(
diff --git a/lib/lp/charms/model/charmrecipebuild.py b/lib/lp/charms/model/charmrecipebuild.py
index 6024b31..e63743e 100644
--- a/lib/lp/charms/model/charmrecipebuild.py
+++ b/lib/lp/charms/model/charmrecipebuild.py
@@ -377,7 +377,7 @@ class CharmRecipeBuild(PackageBuildMixin, StormBase):
(CharmFile, LibraryFileAlias, LibraryFileContent),
CharmFile.build == self.id,
LibraryFileAlias.id == CharmFile.library_file_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
return result.order_by([LibraryFileAlias.filename, CharmFile.id])
@@ -520,7 +520,7 @@ class CharmRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
load_related(Person, builds, ["requester_id"])
lfas = load_related(LibraryFileAlias, builds, ["log_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
distroarchserieses = load_related(
DistroArchSeries, builds, ["distro_arch_series_id"]
)
diff --git a/lib/lp/code/doc/codeimport-machine.rst b/lib/lp/code/doc/codeimport-machine.rst
index 1b7b523..22fedca 100644
--- a/lib/lp/code/doc/codeimport-machine.rst
+++ b/lib/lp/code/doc/codeimport-machine.rst
@@ -213,7 +213,7 @@ setQuiescing methods must fail.
Since our scripts and daemons run at "READ COMMITTED" isolation level,
there are races that we cannot easily detect within the limitation of
-SQLObject, when the watchdog process and the controller daemon
+Storm, when the watchdog process and the controller daemon
concurrently call setOffline. Those undetected races will lead to the
creation of redundant OFFLINE events with different reason values, where
one of the reasons will be WATCHDOG. Those races should not have any
diff --git a/lib/lp/code/interfaces/branch.py b/lib/lp/code/interfaces/branch.py
index 0bf790f..16db888 100644
--- a/lib/lp/code/interfaces/branch.py
+++ b/lib/lp/code/interfaces/branch.py
@@ -1045,7 +1045,7 @@ class IBranchView(
:param notification_levels: An iterable of
`BranchSubscriptionNotificationLevel`s
- :return: An SQLObject query result.
+ :return: A `ResultSet` of `BranchSubscription`s.
"""
def getBranchRevision(sequence=None, revision=None, revision_id=None):
diff --git a/lib/lp/code/mail/tests/test_codehandler.py b/lib/lp/code/mail/tests/test_codehandler.py
index 82d770d..eeb65e1 100644
--- a/lib/lp/code/mail/tests/test_codehandler.py
+++ b/lib/lp/code/mail/tests/test_codehandler.py
@@ -149,7 +149,7 @@ class TestCodeHandler(TestCaseWithFactory):
self.code_handler.process(mail, email_addr, None),
"Succeeded, but didn't return True",
)
- # if the message has not been created, this raises SQLObjectNotFound
+ # if the message has not been created, this raises NotFoundError.
MessageSet().get("<my-id>")
def test_process_packagebranch(self):
diff --git a/lib/lp/code/model/cibuild.py b/lib/lp/code/model/cibuild.py
index 8c32605..f6faff6 100644
--- a/lib/lp/code/model/cibuild.py
+++ b/lib/lp/code/model/cibuild.py
@@ -901,7 +901,7 @@ class CIBuildSet(SpecificBuildFarmJobSourceMixin):
def preloadBuildsData(self, builds):
lfas = load_related(LibraryFileAlias, builds, ["log_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
distroarchseries = load_related(
DistroArchSeries, builds, ["distro_arch_series_id"]
)
diff --git a/lib/lp/code/model/sourcepackagerecipe.py b/lib/lp/code/model/sourcepackagerecipe.py
index 937024a..4cd3891 100644
--- a/lib/lp/code/model/sourcepackagerecipe.py
+++ b/lib/lp/code/model/sourcepackagerecipe.py
@@ -15,6 +15,7 @@ from lazr.delegates import delegate_to
from storm.expr import And, LeftJoin
from storm.locals import (
Bool,
+ DateTime,
Desc,
Int,
Reference,
@@ -44,7 +45,6 @@ from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.registry.model.distroseries import DistroSeries
from lp.services.database.bulk import load_referencing
from lp.services.database.constants import DEFAULT, UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.interfaces import IPrimaryStore, IStore
from lp.services.database.stormbase import StormBase
from lp.services.database.stormexpr import Greatest, NullsLast
@@ -95,8 +95,8 @@ class SourcePackageRecipe(StormBase):
daily_build_archive_id = Int(name="daily_build_archive", allow_none=True)
daily_build_archive = Reference(daily_build_archive_id, "Archive.id")
- date_created = UtcDateTimeCol(notNull=True)
- date_last_modified = UtcDateTimeCol(notNull=True)
+ date_created = DateTime(allow_none=False, tzinfo=timezone.utc)
+ date_last_modified = DateTime(allow_none=False, tzinfo=timezone.utc)
owner_id = Int(name="owner", allow_none=True)
owner = Reference(owner_id, "Person.id")
diff --git a/lib/lp/code/model/sourcepackagerecipebuild.py b/lib/lp/code/model/sourcepackagerecipebuild.py
index 1ec64c9..00d6ddb 100644
--- a/lib/lp/code/model/sourcepackagerecipebuild.py
+++ b/lib/lp/code/model/sourcepackagerecipebuild.py
@@ -328,7 +328,7 @@ class SourcePackageRecipeBuild(
load_related(LibraryFileAlias, builds, ["log_id"])
archives = load_related(Archive, builds, ["archive_id"])
- load_related(Person, archives, ["ownerID"])
+ load_related(Person, archives, ["owner_id"])
distroseries = load_related(DistroSeries, builds, ["distroseries_id"])
load_related(Distribution, distroseries, ["distribution_id"])
sprs = load_related(SourcePackageRecipe, builds, ["recipe_id"])
diff --git a/lib/lp/code/model/tests/test_codereviewkarma.py b/lib/lp/code/model/tests/test_codereviewkarma.py
index fc4cdce..4278a3c 100644
--- a/lib/lp/code/model/tests/test_codereviewkarma.py
+++ b/lib/lp/code/model/tests/test_codereviewkarma.py
@@ -56,7 +56,7 @@ class TestCodeReviewKarma(TestCaseWithFactory):
# target as there would be other karma events for the branch
# creations.
self.karma_events = []
- # The normal SQLObject events use the logged in person.
+ # The normal Storm events use the logged in person.
login_person(registrant)
source_branch.addLandingTarget(registrant, target_branch)
self.assertOneKarmaEvent(registrant, "branchmergeproposed")
diff --git a/lib/lp/code/model/tests/test_revisionauthor.py b/lib/lp/code/model/tests/test_revisionauthor.py
index a256b90..37401f6 100644
--- a/lib/lp/code/model/tests/test_revisionauthor.py
+++ b/lib/lp/code/model/tests/test_revisionauthor.py
@@ -100,8 +100,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase):
# Check a VALIDATED email address is used to link.
harry = self._makeHarry(EmailAddressStatus.VALIDATED)
author = self._createRevisionAuthor()
- # Reget harry as the SQLObject cache has been flushed on
- # transaction boundary.
+ # Reget harry as the Storm cache has been flushed on transaction
+ # boundary.
harry = getUtility(IPersonSet).getByName("harry")
self.assertEqual("harry@xxxxxxxxxxxxx", author.email)
self.assertEqual(harry, author.person)
@@ -110,8 +110,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase):
# Check a OLD email address is used to link.
harry = self._makeHarry(EmailAddressStatus.OLD)
author = self._createRevisionAuthor()
- # Reget harry as the SQLObject cache has been flushed on
- # transaction boundary.
+ # Reget harry as the Storm cache has been flushed on transaction
+ # boundary.
harry = getUtility(IPersonSet).getByName("harry")
self.assertEqual("harry@xxxxxxxxxxxxx", author.email)
self.assertEqual(harry, author.person)
@@ -120,8 +120,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase):
# Check a PREFERRED email address is used to link.
harry = self._makeHarry(EmailAddressStatus.PREFERRED)
author = self._createRevisionAuthor()
- # Reget harry as the SQLObject cache has been flushed on
- # transaction boundary.
+ # Reget harry as the Storm cache has been flushed on transaction
+ # boundary.
harry = getUtility(IPersonSet).getByName("harry")
self.assertEqual("harry@xxxxxxxxxxxxx", author.email)
self.assertEqual(harry, author.person)
diff --git a/lib/lp/code/xmlrpc/tests/test_git.py b/lib/lp/code/xmlrpc/tests/test_git.py
index 0467947..15e4f09 100644
--- a/lib/lp/code/xmlrpc/tests/test_git.py
+++ b/lib/lp/code/xmlrpc/tests/test_git.py
@@ -1161,6 +1161,15 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
layer = LaunchpadFunctionalLayer
+ def _makeGitRepositoryWithRefs(self, **kwargs):
+ """Helper method to create a git repository with a default branch"""
+ repository = self.factory.makeGitRepository(**kwargs)
+ self.factory.makeGitRefs(
+ repository=repository, paths=["refs/heads/main"]
+ )
+ removeSecurityProxy(repository).default_branch = "refs/heads/main"
+ return repository
+
def test_confirm_git_repository_creation(self):
owner = self.factory.makePerson()
repo = removeSecurityProxy(self.factory.makeGitRepository(owner=owner))
@@ -2821,11 +2830,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
# pushed by a user that has their ordinary privileges on the
# corresponding repository.
requester_owner = self.factory.makePerson()
- repository = self.factory.makeGitRepository(owner=requester_owner)
- self.factory.makeGitRefs(
- repository=repository, paths=["refs/heads/master"]
- )
- removeSecurityProxy(repository).default_branch = "refs/heads/master"
+ repository = self._makeGitRepositoryWithRefs(owner=requester_owner)
pushed_branch = "branch1"
self.assertHasMergeProposalURL(
repository, pushed_branch, {"uid": requester_owner.id}
@@ -2857,12 +2862,8 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
self.pushConfig("codehosting", git_macaroon_secret_key="some-secret")
requester = self.factory.makePerson()
- repository = self.factory.makeGitRepository(owner=requester)
+ repository = self._makeGitRepositoryWithRefs(owner=requester)
issuer = getUtility(IMacaroonIssuer, "git-repository")
- self.factory.makeGitRefs(
- repository=repository, paths=["refs/heads/master"]
- )
- removeSecurityProxy(repository).default_branch = "refs/heads/master"
pushed_branch = "branch1"
with person_logged_in(requester):
@@ -2890,11 +2891,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
)
requesters = [self.factory.makePerson() for _ in range(2)]
owner = self.factory.makeTeam(members=requesters)
- repository = self.factory.makeGitRepository(owner=owner)
- self.factory.makeGitRefs(
- repository=repository, paths=["refs/heads/master"]
- )
- removeSecurityProxy(repository).default_branch = "refs/heads/master"
+ repository = self._makeGitRepositoryWithRefs(owner=owner)
pushed_branch = "branch1"
macaroon = issuer.issueMacaroon(repository)
@@ -2935,11 +2932,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
# pushed by a user with a suitable access token that has their
# ordinary privileges on the corresponding repository.
requester = self.factory.makePerson()
- repository = self.factory.makeGitRepository(owner=requester)
- self.factory.makeGitRefs(
- repository=repository, paths=["refs/heads/main"]
- )
- removeSecurityProxy(repository).default_branch = "refs/heads/main"
+ repository = self._makeGitRepositoryWithRefs(owner=requester)
_, token = self.factory.makeAccessToken(
owner=requester,
target=repository,
@@ -2954,11 +2947,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory):
# getMergeProposalURL refuses access tokens for a different
# repository.
requester = self.factory.makePerson()
- repository = self.factory.makeGitRepository(owner=requester)
- self.factory.makeGitRefs(
- repository=repository, paths=["refs/heads/main"]
- )
- removeSecurityProxy(repository).default_branch = "refs/heads/main"
+ repository = self._makeGitRepositoryWithRefs(owner=requester)
_, token = self.factory.makeAccessToken(
owner=requester, scopes=[AccessTokenScope.REPOSITORY_PUSH]
)
diff --git a/lib/lp/codehosting/tests/test_acceptance.py b/lib/lp/codehosting/tests/test_acceptance.py
index ece2d62..a03af35 100644
--- a/lib/lp/codehosting/tests/test_acceptance.py
+++ b/lib/lp/codehosting/tests/test_acceptance.py
@@ -207,7 +207,7 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory):
def getDatabaseBranch(self, personName, productName, branchName):
"""Look up and return the specified branch from the database."""
- owner = Person.byName(personName)
+ owner = IStore(Person).find(Person, name=personName).one()
if productName is None:
product = None
else:
@@ -335,7 +335,7 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
branch_type=BranchType.HOSTED,
):
"""Create a new branch in the database."""
- owner = Person.selectOneBy(name=owner_name)
+ owner = IStore(Person).find(Person, name=owner_name).one()
if product_name == "+junk":
product = None
else:
@@ -508,8 +508,10 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
# the branch doesn't exist.
# 'salgado' is a member of landscape-developers.
- salgado = Person.selectOneBy(name="salgado")
- landscape_dev = Person.selectOneBy(name="landscape-developers")
+ salgado = IStore(Person).find(Person, name="salgado").one()
+ landscape_dev = (
+ IStore(Person).find(Person, name="landscape-developers").one()
+ )
self.assertTrue(
salgado.inTeam(landscape_dev),
"salgado should be a member of landscape-developers, but isn't.",
@@ -547,7 +549,7 @@ class AcceptanceTests(WithScenarios, SSHTestCase):
# Hack 'firefox' so we have permission to do this.
ZopelessAppServerLayer.txn.begin()
firefox = IStore(Product).find(Product, name="firefox").one()
- testuser = Person.selectOneBy(name="testuser")
+ testuser = IStore(Person).find(Person, name="testuser").one()
firefox.development_focus.owner = testuser
ZopelessAppServerLayer.txn.commit()
remote_url = self.getTransportURL("+branch/firefox")
diff --git a/lib/lp/oci/model/ocirecipebuild.py b/lib/lp/oci/model/ocirecipebuild.py
index b1548a5..fbc5b61 100644
--- a/lib/lp/oci/model/ocirecipebuild.py
+++ b/lib/lp/oci/model/ocirecipebuild.py
@@ -282,7 +282,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase):
(OCIFile, LibraryFileAlias, LibraryFileContent),
OCIFile.build == self.id,
LibraryFileAlias.id == OCIFile.library_file_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
return result.order_by([LibraryFileAlias.filename, OCIFile.id])
@@ -421,7 +421,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase):
(OCIFile, LibraryFileAlias, LibraryFileContent),
OCIFile.build == self.id,
LibraryFileAlias.id == OCIFile.library_file_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
OCIFile.layer_file_digest == layer_file_digest,
)
.one()
@@ -593,7 +593,7 @@ class OCIRecipeBuildSet(SpecificBuildFarmJobSourceMixin):
load_related(Person, builds, ["requester_id"])
lfas = load_related(LibraryFileAlias, builds, ["log_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
recipes = load_related(OCIRecipe, builds, ["recipe_id"])
getUtility(IOCIRecipeSet).preloadDataForOCIRecipes(recipes)
# XXX twom 2019-12-05 This needs to be extended to include
diff --git a/lib/lp/registry/browser/person.py b/lib/lp/registry/browser/person.py
index 5ca39fe..c359df7 100644
--- a/lib/lp/registry/browser/person.py
+++ b/lib/lp/registry/browser/person.py
@@ -248,8 +248,8 @@ class RestrictedMembershipsPersonView(LaunchpadView):
Private teams are filtered out if the user is not a member of them.
"""
# This method returns a list as opposed to the database object's
- # getLatestApprovedMembershipsForPerson which returns a sqlobject
- # result set.
+ # getLatestApprovedMembershipsForPerson which returns a Storm
+ # ResultSet.
membership_list = self.context.getLatestApprovedMembershipsForPerson()
return [
membership
@@ -265,8 +265,7 @@ class RestrictedMembershipsPersonView(LaunchpadView):
Private teams are filtered out if the user is not a member of them.
"""
# This method returns a list as opposed to the database object's
- # teams_with_icons which returns a sqlobject
- # result set.
+ # teams_with_icons which returns a Storm ResultSet.
return [
team
for team in self.context.teams_with_icons
@@ -576,7 +575,7 @@ class PersonNavigation(BranchTraversalMixin, Navigation):
if not archive_id.isdigit():
return None
return traverse_archive_subscription_for_subscriber(
- self.context, archive_id
+ self.context, int(archive_id)
)
else:
# Otherwise we return the normal view for a person's
@@ -2099,7 +2098,7 @@ class PersonParticipationView(LaunchpadView):
# The member is a direct member; use the membership data.
datejoined = membership.datejoined
dateexpires = membership.dateexpires
- if membership.person_id == team.teamownerID:
+ if membership.person_id == team.teamowner_id:
role = "Owner"
elif membership.status == TeamMembershipStatus.ADMIN:
role = "Admin"
diff --git a/lib/lp/registry/browser/tests/test_person_webservice.py b/lib/lp/registry/browser/tests/test_person_webservice.py
index e0dfcb9..0081dc8 100644
--- a/lib/lp/registry/browser/tests/test_person_webservice.py
+++ b/lib/lp/registry/browser/tests/test_person_webservice.py
@@ -179,6 +179,22 @@ class TestPersonExportedID(TestCaseWithFactory):
)
self.assertEqual(person_id, body["id"])
+ def test_commercial_admin_can_see_id(self):
+ # A member of ~commercial-admins can read the `id` field.
+ person = self.factory.makePerson()
+ person_id = person.id
+ person_url = api_url(person)
+
+ body = (
+ webservice_for_person(
+ self.factory.makeCommercialAdmin(),
+ permission=OAuthPermission.WRITE_PRIVATE,
+ )
+ .get(person_url, api_version="devel")
+ .jsonBody()
+ )
+ self.assertEqual(person_id, body["id"])
+
class TestPersonRepresentation(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
diff --git a/lib/lp/registry/doc/person-account.rst b/lib/lp/registry/doc/person-account.rst
index 21bb628..6495c01 100644
--- a/lib/lp/registry/doc/person-account.rst
+++ b/lib/lp/registry/doc/person-account.rst
@@ -130,7 +130,7 @@ will cause this spec to be reassigned.
>>> len(foobar_pillars) > 0
True
- >>> foobar_teams = list(Person.selectBy(teamowner=foobar))
+ >>> foobar_teams = list(IStore(Person).find(Person, teamowner=foobar))
>>> len(foobar_teams) > 0
True
@@ -202,7 +202,7 @@ adds a '-deactivatedaccount' suffix to the person's name...
...no owned teams...
- >>> Person.selectBy(teamowner=foobar).is_empty()
+ >>> IStore(Person).find(Person, teamowner=foobar).is_empty()
True
...no owned or driven pillars...
@@ -226,7 +226,9 @@ Bar are now owned/driven by the registry admins team.
>>> registry_pillars.issuperset(foobar_pillars)
True
- >>> registry_teams = set(Person.selectBy(teamowner=registry_experts))
+ >>> registry_teams = set(
+ ... IStore(Person).find(Person, teamowner=registry_experts)
+ ... )
>>> registry_teams.issuperset(foobar_teams)
True
diff --git a/lib/lp/registry/doc/person-merge.rst b/lib/lp/registry/doc/person-merge.rst
index f50b89c..65d98e4 100644
--- a/lib/lp/registry/doc/person-merge.rst
+++ b/lib/lp/registry/doc/person-merge.rst
@@ -278,6 +278,7 @@ create, and then delete, the needed two people.
>>> from lp.registry.model.person import PersonSet, Person
>>> from lp.registry.interfaces.person import PersonCreationRationale
+ >>> from lp.services.database.interfaces import IStore
>>> personset = PersonSet()
>>> skip = []
@@ -312,11 +313,14 @@ create, and then delete, the needed two people.
... display_name="Merge Winner",
... creation_rationale=lp,
... )
+ ... IStore(Person).add(winner)
... loser = Person(
... name=name + ".loser",
... display_name="Merge Loser",
... creation_rationale=lp,
... )
+ ... IStore(Person).add(loser)
+ ... IStore(Person).flush()
... yield winner, loser
...
>>> endless_supply_of_players = new_players()
diff --git a/lib/lp/registry/doc/pillar.rst b/lib/lp/registry/doc/pillar.rst
index e961f18..72aba17 100644
--- a/lib/lp/registry/doc/pillar.rst
+++ b/lib/lp/registry/doc/pillar.rst
@@ -269,13 +269,17 @@ by that pillar name
>>> from lp.registry.interfaces.distribution import IDistributionSet
>>> from lp.registry.interfaces.projectgroup import IProjectGroupSet
>>> from lp.registry.model.pillar import PillarName
+ >>> from lp.services.database.interfaces import IStore
>>> ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
>>> gnome = getUtility(IProjectGroupSet).getByName("gnome")
- >>> ubuntu_pillarname = PillarName.selectOneBy(name="ubuntu")
+ >>> ubuntu_pillarname = (
+ ... IStore(PillarName).find(PillarName, name="ubuntu").one()
+ ... )
>>> ubuntu_pillarname.pillar == ubuntu
True
- >>> gnome_pillarname = PillarName.selectOneBy(name="gnome")
+ >>> gnome_pillarname = (
+ ... IStore(PillarName).find(PillarName, name="gnome").one()
+ ... )
>>> gnome_pillarname.pillar == gnome
True
-
diff --git a/lib/lp/registry/doc/vocabularies.rst b/lib/lp/registry/doc/vocabularies.rst
index ad6bf4b..ab7f5c5 100644
--- a/lib/lp/registry/doc/vocabularies.rst
+++ b/lib/lp/registry/doc/vocabularies.rst
@@ -611,7 +611,6 @@ Any person that's already merged is not part of this vocabulary:
>>> naked_cprov = removeSecurityProxy(cprov)
>>> naked_cprov.merged = 1
- >>> naked_cprov.syncUpdate()
>>> cprov in vocab
False
diff --git a/lib/lp/registry/interfaces/person.py b/lib/lp/registry/interfaces/person.py
index ec14854..4e66cfd 100644
--- a/lib/lp/registry/interfaces/person.py
+++ b/lib/lp/registry/interfaces/person.py
@@ -128,6 +128,7 @@ from lp.registry.interfaces.teammembership import (
TeamMembershipStatus,
)
from lp.registry.interfaces.wikiname import IWikiName
+from lp.services.database.interfaces import IStore
from lp.services.database.sqlbase import block_implicit_flushes
from lp.services.fields import (
BlocklistableContentNameField,
@@ -172,7 +173,7 @@ def validate_person_common(
# Importing here to avoid a cyclic import.
from lp.registry.model.person import Person
- person = Person.get(value)
+ person = IStore(Person).get(Person, value)
if not validate_func(person):
raise error_class(
"Cannot link person (name=%s, visibility=%s) to %s (name=%s)"
@@ -219,7 +220,7 @@ def validate_membership_policy(obj, attr, value):
return None
# If we are just creating a new team, it can have any membership policy.
- if getattr(obj, "_SO_creating", True):
+ if getattr(obj, "_creating", True):
return value
team = obj
@@ -791,7 +792,7 @@ class IPersonLimitedView(IHasIcon, IHasLogo):
"in listings of bugs or on a person's membership table."
),
)
- iconID = Int(title=_("Icon ID"), required=True, readonly=True)
+ icon_id = Int(title=_("Icon ID"), required=True, readonly=True)
logo = exported(
LogoImageUpload(
title=_("Logo"),
@@ -805,7 +806,7 @@ class IPersonLimitedView(IHasIcon, IHasLogo):
),
)
)
- logoID = Int(title=_("Logo ID"), required=True, readonly=True)
+ logo_id = Int(title=_("Logo ID"), required=True, readonly=True)
# title is required for the Launchpad Page Layout main template
title = Attribute("Person Page Title")
is_probationary = exported(
@@ -889,7 +890,7 @@ class IPersonViewRestricted(
),
)
)
- mugshotID = Int(title=_("Mugshot ID"), required=True, readonly=True)
+ mugshot_id = Int(title=_("Mugshot ID"), required=True, readonly=True)
languages = exported(
CollectionField(
@@ -1110,7 +1111,7 @@ class IPersonViewRestricted(
),
exported_as="team_owner",
)
- teamownerID = Int(
+ teamowner_id = Int(
title=_("The Team Owner's ID or None"), required=False, readonly=True
)
preferredemail = exported(
diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py
index 8f4bb03..924c672 100644
--- a/lib/lp/registry/model/distribution.py
+++ b/lib/lp/registry/model/distribution.py
@@ -158,7 +158,6 @@ from lp.services.database.constants import UTC_NOW
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import sqlvalues
from lp.services.database.stormbase import StormBase
from lp.services.database.stormexpr import (
ArrayAgg,
@@ -195,6 +194,7 @@ from lp.soyuz.model.publishing import (
SourcePackagePublishingHistory,
get_current_source_releases,
)
+from lp.soyuz.model.queue import PackageUpload
from lp.translations.enums import TranslationPermission
from lp.translations.model.hastranslationimports import (
HasTranslationImportsMixin,
@@ -1877,20 +1877,17 @@ class Distribution(
def getPendingAcceptancePPAs(self):
"""See `IDistribution`."""
- query = """
- Archive.purpose = %s AND
- Archive.distribution = %s AND
- PackageUpload.archive = Archive.id AND
- PackageUpload.status = %s
- """ % sqlvalues(
- ArchivePurpose.PPA, self.id, PackageUploadStatus.ACCEPTED
- )
-
- return Archive.select(
- query,
- clauseTables=["PackageUpload"],
- orderBy=["archive.id"],
- distinct=True,
+ return (
+ IStore(Archive)
+ .find(
+ Archive,
+ Archive.purpose == ArchivePurpose.PPA,
+ Archive.distribution == self,
+ PackageUpload.archive == Archive.id,
+ PackageUpload.status == PackageUploadStatus.ACCEPTED,
+ )
+ .order_by(Archive.id)
+ .config(distinct=True)
)
def getPendingPublicationPPAs(self):
diff --git a/lib/lp/registry/model/distributionsourcepackage.py b/lib/lp/registry/model/distributionsourcepackage.py
index 67312ee..bc80a4f 100644
--- a/lib/lp/registry/model/distributionsourcepackage.py
+++ b/lib/lp/registry/model/distributionsourcepackage.py
@@ -91,8 +91,8 @@ class DistributionSourcePackage(
HasDriversMixin,
WebhookTargetMixin,
):
- """This is a "Magic Distribution Source Package". It is not an
- SQLObject, but instead it represents a source package with a particular
+ """This is a "Magic Distribution Source Package". It is not a
+ Storm model, but instead it represents a source package with a particular
name in a particular distribution. You can then ask it all sorts of
things about the releases that are published under its name, the latest
or current release, etc.
diff --git a/lib/lp/registry/model/distroseries.py b/lib/lp/registry/model/distroseries.py
index 22d2895..2e19dfb 100644
--- a/lib/lp/registry/model/distroseries.py
+++ b/lib/lp/registry/model/distroseries.py
@@ -1580,7 +1580,7 @@ class DistroSeries(
POTemplate.distroseries == self,
POTemplate.iscurrent == True,
)
- contributors = contributors.order_by(*Person._storm_sortingColumns)
+ contributors = contributors.order_by(Person._separated_sortingColumns)
contributors = contributors.config(distinct=True)
return contributors
diff --git a/lib/lp/registry/model/distroseriesdifference.py b/lib/lp/registry/model/distroseriesdifference.py
index e82e975..e367a69 100644
--- a/lib/lp/registry/model/distroseriesdifference.py
+++ b/lib/lp/registry/model/distroseriesdifference.py
@@ -15,7 +15,7 @@ import apt_pkg
from debian.changelog import Changelog, Version
from lazr.enum import DBItem
from storm.expr import And, Cast, Column, Desc, Or, Select, Table
-from storm.locals import Int, Reference
+from storm.locals import Int, Reference, Unicode
from storm.zope.interfaces import IResultSet
from zope.component import getUtility
from zope.interface import implementer, provider
@@ -48,7 +48,6 @@ from lp.services.database import bulk
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IPrimaryStore, IStore
-from lp.services.database.sqlobject import StringCol
from lp.services.database.stormbase import StormBase
from lp.services.messages.model.message import Message, MessageChunk
from lp.services.propertycache import (
@@ -389,11 +388,11 @@ class DistroSeriesDifference(StormBase):
allow_none=False,
enum=DistroSeriesDifferenceType,
)
- source_version = StringCol(dbName="source_version", notNull=False)
- parent_source_version = StringCol(
- dbName="parent_source_version", notNull=False
+ source_version = Unicode(name="source_version", allow_none=True)
+ parent_source_version = Unicode(
+ name="parent_source_version", allow_none=True
)
- base_version = StringCol(dbName="base_version", notNull=False)
+ base_version = Unicode(name="base_version", allow_none=True)
@staticmethod
def new(derived_series, source_package_name, parent_series):
diff --git a/lib/lp/registry/model/mailinglist.py b/lib/lp/registry/model/mailinglist.py
index fd26b09..f3fcaf5 100644
--- a/lib/lp/registry/model/mailinglist.py
+++ b/lib/lp/registry/model/mailinglist.py
@@ -336,8 +336,7 @@ class MailingList(StormBase):
), "Email already associated with another team."
def _setAndNotifyDateActivated(self):
- """Set the date_activated field and fire a
- SQLObjectModified event.
+ """Set the date_activated field and fire an ObjectModifiedEvent.
The date_activated field is only set once - repeated calls
will not change the field's value.
diff --git a/lib/lp/registry/model/person.py b/lib/lp/registry/model/person.py
index 8aef249..788333b 100644
--- a/lib/lp/registry/model/person.py
+++ b/lib/lp/registry/model/person.py
@@ -62,7 +62,7 @@ from storm.expr import (
With,
)
from storm.info import ClassAlias
-from storm.locals import Int, Reference, ReferenceSet, Unicode
+from storm.locals import Bool, DateTime, Int, Reference, ReferenceSet, Unicode
from storm.store import EmptyResultSet, Store
from twisted.conch.ssh.common import getNS
from twisted.conch.ssh.keys import Key
@@ -193,25 +193,16 @@ from lp.registry.model.teammembership import (
)
from lp.services.config import config
from lp.services.database import bulk, postgresql
-from lp.services.database.constants import UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
+from lp.services.database.constants import DEFAULT, UTC_NOW
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IStore
from lp.services.database.policy import PrimaryDatabasePolicy
from lp.services.database.sqlbase import (
- SQLBase,
convert_storm_clause_to_string,
cursor,
sqlvalues,
)
-from lp.services.database.sqlobject import (
- BoolCol,
- ForeignKey,
- IntCol,
- SQLObjectNotFound,
- StringCol,
-)
from lp.services.database.stormbase import StormBase
from lp.services.database.stormexpr import WithMaterialized, fti_search
from lp.services.helpers import backslashreplace, shortlist
@@ -234,7 +225,7 @@ from lp.services.identity.interfaces.emailaddress import (
)
from lp.services.identity.model.account import Account
from lp.services.identity.model.emailaddress import EmailAddress, HasOwnerMixin
-from lp.services.librarian.model import LibraryFileAlias
+from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
from lp.services.mail.helpers import (
get_contact_email_addresses,
get_email_template,
@@ -283,7 +274,7 @@ class TeamInvitationEvent:
self.team = team
-class ValidPersonCache(SQLBase):
+class ValidPersonCache(StormBase):
"""Flags if a Person is active and usable in Launchpad.
This is readonly, as this is a view in the database.
@@ -295,6 +286,10 @@ class ValidPersonCache(SQLBase):
corroborating information.
"""
+ __storm_table__ = "ValidPersonCache"
+
+ id = Int(primary=True)
+
def validate_person_visibility(person, attr, value):
"""Validate changes in visibility.
@@ -356,14 +351,14 @@ class PersonSettings(StormBase):
__storm_table__ = "PersonSettings"
- personID = Int("person", default=None, primary=True)
- person = Reference(personID, "Person.id")
+ person_id = Int("person", default=None, primary=True)
+ person = Reference(person_id, "Person.id")
- selfgenerated_bugnotifications = BoolCol(notNull=True, default=False)
+ selfgenerated_bugnotifications = Bool(allow_none=False, default=False)
- expanded_notification_footers = BoolCol(notNull=False, default=False)
+ expanded_notification_footers = Bool(allow_none=True, default=False)
- require_strong_email_authentication = BoolCol(notNull=False, default=False)
+ require_strong_email_authentication = Bool(allow_none=True, default=False)
def readonly_settings(message, interface):
@@ -421,7 +416,7 @@ _readonly_person_settings = readonly_settings(
@implementer(IPerson)
@delegate_to(IPersonSettings, context="_person_settings")
class Person(
- SQLBase,
+ StormBase,
HasBugsBase,
HasSpecificationsMixin,
HasTranslationImportsMixin,
@@ -432,14 +427,54 @@ class Person(
):
"""A Person."""
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- # Initialize our PersonSettings object/record.
+ __storm_table__ = "Person"
+
+ id = Int(primary=True)
+
+ _creating = False
+
+ def __init__(
+ self,
+ name,
+ display_name,
+ account=None,
+ teamowner=None,
+ description=None,
+ membership_policy=DEFAULT,
+ defaultrenewalperiod=None,
+ defaultmembershipperiod=None,
+ creation_rationale=None,
+ creation_comment=None,
+ registrant=None,
+ hide_email_addresses=False,
+ ):
+ super().__init__()
+ self._creating = True
+ self.name = name
+ self.display_name = display_name
+ self.account = account
+ self.teamowner = teamowner
+ self.description = description
+ self.membership_policy = membership_policy
+ self.defaultrenewalperiod = defaultrenewalperiod
+ self.defaultmembershipperiod = defaultmembershipperiod
+ self.creation_rationale = creation_rationale
+ self.creation_comment = creation_comment
+ self.registrant = registrant
+ self.hide_email_addresses = hide_email_addresses
if not self.is_team:
- # This is a Person, not a team. Teams may want a TeamSettings
- # in the future.
+ # Initialize our PersonSettings object/record. This is a
+ # Person, not a team. Teams may want a TeamSettings in the
+ # future.
settings = PersonSettings()
settings.person = self
+ self.__storm_loaded__()
+ del self._creating
+
+ def __storm_loaded__(self):
+ """Mark the person as a team when created or fetched from database."""
+ if self.is_team:
+ alsoProvides(self, ITeam)
@cachedproperty
def _person_settings(self):
@@ -463,13 +498,16 @@ class Person(
return self.id
sortingColumns = SQL("person_sort_key(Person.displayname, Person.name)")
- # Redefine the default ordering into Storm syntax.
- _storm_sortingColumns = ("Person.displayname", "Person.name")
+ # If we're using SELECT DISTINCT, then we can't use sortingColumns
+ # unless `person_sort_key(Person.displayname, Person.name)` is also in
+ # the select list, which usually isn't convenient. Provide a separated
+ # version instead.
+ _separated_sortingColumns = ("Person.displayname", "Person.name")
# When doing any sort of set operations (union, intersect, except_) with
- # SQLObject we can't use sortingColumns because the table name Person is
- # not available in that context, so we use this one.
+ # Storm we can't use sortingColumns because the table name Person is not
+ # available in that context, so we use this one.
_sortingColumnsForSetOperations = SQL("person_sort_key(displayname, name)")
- _defaultOrder = sortingColumns
+ __storm_order__ = sortingColumns
_visibility_warning_cache_key = None
_visibility_warning_cache = None
@@ -482,53 +520,44 @@ class Person(
# mailing list. This is because renaming a mailing list is not
# trivial in Mailman 2.1 (see Mailman FAQ item 4.70). We prohibit
# such renames in the team edit details view, but just to be safe, we
- # also assert that such an attempt is not being made here. To do
- # this, we must override the SQLObject method for setting the 'name'
- # database column. Watch out for when SQLObject is creating this row,
- # because in that case self.name isn't yet available.
+ # also assert that such an attempt is not being made here. Watch
+ # out for when Storm is creating this row, because in that case
+ # self.name isn't yet available.
if self.name is None:
mailing_list = None
else:
mailing_list = getUtility(IMailingListSet).get(self.name)
can_rename = (
- self._SO_creating
+ self._creating
or not self.is_team
or mailing_list is None
or mailing_list.status == MailingListStatus.PURGED
)
assert can_rename, "Cannot rename teams with mailing lists"
- # Everything's okay, so let SQLObject do the normal thing.
+ # Everything's okay, so let Storm do the normal thing.
return value
- name = StringCol(
- dbName="name",
- alternateID=True,
- notNull=True,
- storm_validator=_validate_name,
- )
+ name = Unicode(name="name", allow_none=False, validator=_validate_name)
def __repr__(self):
displayname = backslashreplace(self.displayname)
return "<Person %s (%s)>" % (self.name, displayname)
- display_name = StringCol(dbName="displayname", notNull=True)
+ display_name = Unicode(name="displayname", allow_none=False)
@property
def displayname(self):
return self.display_name
- teamdescription = StringCol(dbName="teamdescription", default=None)
- homepage_content = StringCol(default=None)
- _description = StringCol(dbName="description", default=None)
- icon = ForeignKey(
- dbName="icon", foreignKey="LibraryFileAlias", default=None
- )
- logo = ForeignKey(
- dbName="logo", foreignKey="LibraryFileAlias", default=None
- )
- mugshot = ForeignKey(
- dbName="mugshot", foreignKey="LibraryFileAlias", default=None
- )
+ teamdescription = Unicode(name="teamdescription", default=None)
+ homepage_content = Unicode(default=None)
+ _description = Unicode(name="description", default=None)
+ icon_id = Int(name="icon", allow_none=True, default=None)
+ icon = Reference(icon_id, "LibraryFileAlias.id")
+ logo_id = Int(name="logo", allow_none=True, default=None)
+ logo = Reference(logo_id, "LibraryFileAlias.id")
+ mugshot_id = Int(name="mugshot", allow_none=True, default=None)
+ mugshot = Reference(mugshot_id, "LibraryFileAlias.id")
@property
def account_status(self):
@@ -547,12 +576,13 @@ class Person(
raise NoAccountError()
self.account.setStatus(status, user, comment)
- teamowner = ForeignKey(
- dbName="teamowner",
- foreignKey="Person",
+ teamowner_id = Int(
+ name="teamowner",
+ validator=validate_public_person,
+ allow_none=True,
default=None,
- storm_validator=validate_public_person,
)
+ teamowner = Reference(teamowner_id, "Person.id")
sshkeys = ReferenceSet("id", "SSHKey.person_id")
@@ -566,30 +596,32 @@ class Person(
default=TeamMembershipPolicy.RESTRICTED,
validator=validate_membership_policy,
)
- defaultrenewalperiod = IntCol(dbName="defaultrenewalperiod", default=None)
- defaultmembershipperiod = IntCol(
- dbName="defaultmembershipperiod", default=None
- )
+ defaultrenewalperiod = Int(name="defaultrenewalperiod", default=None)
+ defaultmembershipperiod = Int(name="defaultmembershipperiod", default=None)
mailing_list_auto_subscribe_policy = DBEnum(
enum=MailingListAutoSubscribePolicy,
default=MailingListAutoSubscribePolicy.ON_REGISTRATION,
)
- merged = ForeignKey(dbName="merged", foreignKey="Person", default=None)
+ merged_id = Int(name="merged", allow_none=True, default=None)
+ merged = Reference(merged_id, "Person.id")
- datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
+ datecreated = DateTime(
+ allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
+ )
creation_rationale = DBEnum(enum=PersonCreationRationale, default=None)
- creation_comment = StringCol(default=None)
- registrant = ForeignKey(
- dbName="registrant",
- foreignKey="Person",
+ creation_comment = Unicode(default=None)
+ registrant_id = Int(
+ name="registrant",
+ validator=validate_public_person,
+ allow_none=True,
default=None,
- storm_validator=validate_public_person,
)
- hide_email_addresses = BoolCol(notNull=True, default=False)
- verbose_bugnotifications = BoolCol(notNull=True, default=True)
+ registrant = Reference(registrant_id, "Person.id")
+ hide_email_addresses = Bool(allow_none=False, default=False)
+ verbose_bugnotifications = Bool(allow_none=False, default=True)
- signedcocs = ReferenceSet("<primary key>", "SignedCodeOfConduct.owner_id")
+ signedcocs = ReferenceSet("id", "SignedCodeOfConduct.owner_id")
_ircnicknames = ReferenceSet("id", "IrcID.person_id")
jabberids = ReferenceSet("id", "JabberID.person_id")
@@ -605,7 +637,7 @@ class Person(
allow_none=False,
)
- personal_standing_reason = StringCol(default=None)
+ personal_standing_reason = Unicode(default=None)
@property
def description(self):
@@ -704,12 +736,6 @@ class Person(
person_language.delete()
self.deleteLanguagesCache()
- def _init(self, *args, **kw):
- """Mark the person as a team when created or fetched from database."""
- SQLBase._init(self, *args, **kw)
- if self.teamownerID is not None:
- alsoProvides(self, ITeam)
-
def convertToTeam(self, team_owner):
"""See `IPerson`."""
if self.is_team:
@@ -1010,7 +1036,7 @@ class Person(
@property
def is_team(self):
"""See `IPerson`."""
- return self.teamownerID is not None
+ return self.teamowner_id is not None
@property
def mailing_list(self):
@@ -1118,7 +1144,7 @@ class Person(
OR product.bug_supervisor = %(person)s
)
""" % sqlvalues(
- person=self
+ person=self.id
)
return "%s AND (%s)" % (
@@ -1158,7 +1184,7 @@ class Person(
) _pillar
ON PillarName.name = _pillar.name
"""
- % sqlvalues(person=self)
+ % sqlvalues(person=self.id)
)
results = IStore(self).using(SQL(origin)).find(find_spec)
@@ -1261,7 +1287,6 @@ class Person(
CommercialSubscription,
)
from lp.registry.model.distribution import Distribution
- from lp.registry.model.person import Person
from lp.registry.model.product import Product
from lp.registry.model.teammembership import TeamParticipation
@@ -1370,11 +1395,9 @@ class Person(
# This is prepopulated by various queries in and out of person.py.
if self.is_team:
return False
- try:
- ValidPersonCache.get(self.id)
- return True
- except SQLObjectNotFound:
- return False
+ return (
+ IStore(ValidPersonCache).get(ValidPersonCache, self.id) is not None
+ )
@property
def is_probationary(self):
@@ -1602,7 +1625,6 @@ class Person(
def getAssignedSpecificationWorkItemsDueBefore(self, date, user):
"""See `IPerson`."""
from lp.registry.model.distribution import Distribution
- from lp.registry.model.person import Person
from lp.registry.model.product import Product
store = Store.of(self)
@@ -1814,7 +1836,7 @@ class Person(
And(
TeamParticipation.team_id == self.id,
TeamParticipation.person_id != self.id,
- Person.teamownerID != None,
+ IsNot(Person.teamowner_id, None),
),
need_api=True,
)
@@ -2064,7 +2086,7 @@ class Person(
Select(
Person.id,
tables=[Person],
- where=Person.teamownerID.is_in(team_select),
+ where=Person.teamowner_id.is_in(team_select),
),
Select(
TeamMembership.team_id,
@@ -2607,7 +2629,7 @@ class Person(
spec.assignee = None
registry_experts = getUtility(ILaunchpadCelebrities).registry_experts
- for team in Person.selectBy(teamowner=self):
+ for team in IStore(Person).find(Person, teamowner=self):
team.teamowner = registry_experts
for pillar_name in self.getAffiliatedPillars(self):
pillar = pillar_name.pillar
@@ -2664,7 +2686,7 @@ class Person(
"""Return a unique name."""
new_name = base_new_name
count = 1
- while Person.selectOneBy(name=new_name) is not None:
+ while not IStore(Person).find(Person, name=new_name).is_empty():
new_name = base_new_name + str(count)
count += 1
return new_name
@@ -2945,7 +2967,7 @@ class Person(
Person,
Person.id == TeamParticipation.team_id,
TeamParticipation.person == self,
- IsNot(Person.teamownerID, None),
+ IsNot(Person.teamowner_id, None),
)
.order_by(Person.sortingColumns)
)
@@ -2953,11 +2975,10 @@ class Person(
@property
def teams_indirectly_participated_in(self):
"""See `IPerson`."""
- Team = ClassAlias(Person, "Team")
store = Store.of(self)
origin = [
- Team,
- Join(TeamParticipation, Team.id == TeamParticipation.team_id),
+ Person,
+ Join(TeamParticipation, Person.id == TeamParticipation.team_id),
LeftJoin(
TeamMembership,
And(
@@ -2972,9 +2993,8 @@ class Person(
),
),
]
- find_objects = Team
return store.using(*origin).find(
- find_objects,
+ Person,
And(
TeamParticipation.person == self.id,
TeamParticipation.person != TeamParticipation.team_id,
@@ -2991,8 +3011,8 @@ class Person(
Person,
Person.id == TeamParticipation.team_id,
TeamParticipation.person == self,
- IsNot(Person.teamownerID, None),
- IsNot(Person.iconID, None),
+ IsNot(Person.teamowner_id, None),
+ IsNot(Person.icon_id, None),
TeamParticipation.team != self,
)
.order_by(Person.sortingColumns)
@@ -3576,8 +3596,10 @@ class Person(
@property
def ppas(self):
"""See `IPerson`."""
- return Archive.selectBy(
- owner=self, purpose=ArchivePurpose.PPA, orderBy="name"
+ return (
+ IStore(Archive)
+ .find(Archive, owner=self, purpose=ArchivePurpose.PPA)
+ .order_by(Archive.name)
)
def getVisiblePPAs(self, user):
@@ -3834,17 +3856,21 @@ class PersonSet:
"""See `IPersonSet`."""
# The odd ordering here is to ensure we hit the PostgreSQL
# indexes. It will not make any real difference outside of tests.
- query = (
- """
- id IN (
- SELECT person FROM KarmaTotalCache
- ORDER BY karma_total DESC, person DESC
- LIMIT %s
- )
- """
- % limit
+ top_people = shortlist(
+ IStore(Person).find(
+ Person,
+ Person.id.is_in(
+ Select(
+ KarmaTotalCache.person_id,
+ order_by=(
+ Desc(KarmaTotalCache.karma_total),
+ Desc(KarmaTotalCache.person_id),
+ ),
+ limit=limit,
+ )
+ ),
+ )
)
- top_people = shortlist(Person.select(query))
return sorted(
top_people,
key=lambda obj: (obj.karma, obj.displayname, obj.id),
@@ -4152,6 +4178,9 @@ class PersonSet:
defaultrenewalperiod=defaultrenewalperiod,
membership_policy=membership_policy,
)
+ store = IStore(Person)
+ store.add(team)
+ store.flush()
notify(ObjectCreatedEvent(team))
# Here we add the owner as a team admin manually because we know what
# we're doing (so we don't need to do any sanity checks) and we don't
@@ -4264,19 +4293,18 @@ class PersonSet:
if not displayname:
displayname = name.capitalize()
- if account is None:
- account_id = None
- else:
- account_id = account.id
person = Person(
name=name,
display_name=displayname,
- account_id=account_id,
+ account=account,
creation_rationale=rationale,
creation_comment=comment,
hide_email_addresses=hide_email_addresses,
registrant=registrant,
)
+ store = IStore(Person)
+ store.add(person)
+ store.flush()
return person
def ensurePerson(
@@ -4304,10 +4332,10 @@ class PersonSet:
def getByName(self, name, ignore_merged=True):
"""See `IPersonSet`."""
- query = Person.name == name
+ clauses = [Person.name == name]
if ignore_merged:
- query = And(query, Person.mergedID == None)
- return Person.selectOne(query)
+ clauses.append(Is(Person.merged_id, None))
+ return IStore(Person).find(Person, *clauses).one()
def getByAccount(self, account):
"""See `IPersonSet`."""
@@ -4316,14 +4344,26 @@ class PersonSet:
def updateStatistics(self):
"""See `IPersonSet`."""
stats = getUtility(ILaunchpadStatisticSet)
- people_count = Person.select(
- And(Person.teamownerID == None, Person.mergedID == None)
- ).count()
+ people_count = (
+ IStore(Person)
+ .find(
+ Person,
+ Is(Person.teamowner_id, None),
+ Is(Person.merged_id, None),
+ )
+ .count()
+ )
stats.update("people_count", people_count)
transaction.commit()
- teams_count = Person.select(
- And(Person.q.teamownerID != None, Person.q.mergedID == None)
- ).count()
+ teams_count = (
+ IStore(Person)
+ .find(
+ Person,
+ IsNot(Person.teamowner_id, None),
+ Is(Person.merged_id, None),
+ )
+ .count()
+ )
stats.update("teams_count", teams_count)
transaction.commit()
@@ -4483,10 +4523,7 @@ class PersonSet:
def get(self, personid):
"""See `IPersonSet`."""
- try:
- return Person.get(personid)
- except SQLObjectNotFound:
- return None
+ return IStore(Person).get(Person, personid)
def getByEmail(self, email, filter_status=True):
"""See `IPersonSet`."""
@@ -4543,8 +4580,8 @@ class PersonSet:
# not hit the DB.
valid_person_ids = {
person_id.id
- for person_id in ValidPersonCache.select(
- "id IN %s" % sqlvalues(person_ids)
+ for person_id in IStore(ValidPersonCache).find(
+ ValidPersonCache, ValidPersonCache.id.is_in(person_ids)
)
}
return [person for person in persons if person.id in valid_person_ids]
@@ -4589,23 +4626,24 @@ class PersonSet:
"""See `IPersonSet`."""
aliases = []
aliases.extend(
- person.iconID for person in people if person.iconID is not None
+ person.icon_id for person in people if person.icon_id is not None
)
aliases.extend(
- person.logoID for person in people if person.logoID is not None
+ person.logo_id for person in people if person.logo_id is not None
)
aliases.extend(
- person.mugshotID
+ person.mugshot_id
for person in people
- if person.mugshotID is not None
+ if person.mugshot_id is not None
)
if not aliases:
return
# Listify, since this is a pure cache.
list(
- LibraryFileAlias.select(
- "LibraryFileAlias.id IN %s" % sqlvalues(aliases),
- prejoins=["content"],
+ IStore(LibraryFileAlias).find(
+ (LibraryFileAlias, LibraryFileContent),
+ LibraryFileAlias.id.is_in(aliases),
+ LibraryFileAlias.content == LibraryFileContent.id,
)
)
@@ -4792,7 +4830,7 @@ class PersonSet:
def preload_for_people(rows):
if need_teamowner or need_api:
- bulk.load(Person, [row[0].teamownerID for row in rows])
+ bulk.load(Person, [row[0].teamowner_id for row in rows])
def prepopulate_person(row):
result = row[0]
@@ -5546,7 +5584,7 @@ def _get_recipients_for_team(team):
EmailAddress.person != None,
Account.status == AccountStatus.ACTIVE,
),
- Person.teamownerID != None,
+ IsNot(Person.teamowner_id, None),
),
).config(distinct=True)
next_ids = []
diff --git a/lib/lp/registry/model/pillar.py b/lib/lp/registry/model/pillar.py
index ad9af3a..14d4fc8 100644
--- a/lib/lp/registry/model/pillar.py
+++ b/lib/lp/registry/model/pillar.py
@@ -13,7 +13,7 @@ import six
from storm.databases.postgres import Case
from storm.expr import And, Coalesce, Desc, LeftJoin, Lower, Or
from storm.info import ClassAlias
-from storm.locals import Int, Reference
+from storm.locals import Bool, Int, Reference, Unicode
from storm.store import Store
from zope.component import getUtility
from zope.interface import implementer, provider
@@ -33,8 +33,7 @@ from lp.services.config import config
from lp.services.database.bulk import load_related
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import SQLBase
-from lp.services.database.sqlobject import BoolCol, ForeignKey, StringCol
+from lp.services.database.stormbase import StormBase
from lp.services.database.stormexpr import fti_search, rank_by_fti
from lp.services.librarian.model import LibraryFileAlias
@@ -101,7 +100,7 @@ class PillarNameSet:
# We could attempt to do this in a single database query, but I
# expect that doing two queries will be faster that OUTER JOINing
# the Project, Product and Distribution tables (and this approach
- # works better with SQLObject too.
+ # is easier with Storm too).
# Retrieve information out of the PillarName table.
query = """
@@ -326,23 +325,26 @@ class PillarNameSet:
@implementer(IPillarName)
-class PillarName(SQLBase):
- _table = "PillarName"
- _defaultOrder = "name"
+class PillarName(StormBase):
+ __storm_table__ = "PillarName"
+ __storm_order__ = "name"
- name = StringCol(
- dbName="name", notNull=True, unique=True, alternateID=True
- )
+ id = Int(primary=True)
+ name = Unicode(name="name", allow_none=False)
product_id = Int(name="product", allow_none=True)
product = Reference(product_id, "Product.id")
projectgroup_id = Int(name="project", allow_none=True)
projectgroup = Reference(projectgroup_id, "ProjectGroup.id")
distribution_id = Int(name="distribution", allow_none=True)
distribution = Reference(distribution_id, "Distribution.id")
- active = BoolCol(dbName="active", notNull=True, default=True)
- alias_for = ForeignKey(
- foreignKey="PillarName", dbName="alias_for", default=None
- )
+ active = Bool(name="active", allow_none=False, default=True)
+ alias_for_id = Int(name="alias_for", allow_none=True, default=None)
+ alias_for = Reference(alias_for_id, "PillarName.id")
+
+ def __init__(self, name, alias_for=None):
+ super().__init__()
+ self.name = name
+ self.alias_for = alias_for
@property
def pillar(self):
@@ -366,7 +368,10 @@ class HasAliasMixin:
@property
def aliases(self):
"""See `IHasAlias`."""
- aliases = PillarName.selectBy(alias_for=PillarName.byName(self.name))
+ store = IStore(PillarName)
+ aliases = store.find(
+ PillarName, alias_for=store.find(PillarName, name=self.name).one()
+ )
return [alias.name for alias in aliases]
def setAliases(self, names):
diff --git a/lib/lp/registry/model/productrelease.py b/lib/lp/registry/model/productrelease.py
index 8a87785..a2dd584 100644
--- a/lib/lp/registry/model/productrelease.py
+++ b/lib/lp/registry/model/productrelease.py
@@ -365,7 +365,7 @@ class ProductReleaseSet:
),
LeftJoin(
LibraryFileContent,
- LibraryFileAlias.contentID == LibraryFileContent.id,
+ LibraryFileAlias.content == LibraryFileContent.id,
),
Join(
ProductRelease,
diff --git a/lib/lp/registry/model/sharingjob.py b/lib/lp/registry/model/sharingjob.py
index bf2bb78..3bb50f1 100644
--- a/lib/lp/registry/model/sharingjob.py
+++ b/lib/lp/registry/model/sharingjob.py
@@ -20,6 +20,7 @@ from zope.component import getUtility
from zope.interface import implementer, provider
from lp.app.enums import InformationType
+from lp.app.errors import NotFoundError
from lp.blueprints.interfaces.specification import ISpecification
from lp.blueprints.model.specification import Specification
from lp.blueprints.model.specificationsearch import (
@@ -61,7 +62,6 @@ from lp.registry.model.teammembership import TeamParticipation
from lp.services.config import config
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.database.stormbase import StormBase
from lp.services.job.model.job import EnumeratedSubclass, Job
from lp.services.job.runner import BaseRunnableJob
@@ -193,12 +193,12 @@ class SharingJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass):
:return: the SharingJob with the specified id, as the
 current SharingJobDerived subclass.
- :raises: SQLObjectNotFound if there is no job with the specified id,
+ :raises: NotFoundError if there is no job with the specified id,
or its job_type does not match the desired subclass.
"""
job = SharingJob.get(job_id)
if job.job_type != cls.class_job_type:
- raise SQLObjectNotFound(
+ raise NotFoundError(
"No object found with id %d and type %s"
% (job_id, cls.class_job_type.title)
)
diff --git a/lib/lp/registry/model/teammembership.py b/lib/lp/registry/model/teammembership.py
index 0532e4a..0d5396b 100644
--- a/lib/lp/registry/model/teammembership.py
+++ b/lib/lp/registry/model/teammembership.py
@@ -499,12 +499,25 @@ def _cleanTeamParticipation(child, parent):
* onto the parent.team, since we want the top and
* bottom of the hierarchy to calculate the
* TeamParticipation. The query above makes sure
- * that we do this for all the ancestors.
+ * that we do this for all the ancestors. We exclude
+ * direct members that weren't already ancestors or
+ * descendants of the child from the TeamParticipation
+ * table, since they can't help us to establish entries
+ * that we need to keep.
*/
SELECT child.person, parent.team
FROM TeamMembership child
JOIN parent ON child.team = parent.person
WHERE child.status IN %(active_states)s
+ AND child.person IN (
+ SELECT team
+ FROM TeamParticipation
+ WHERE person = %(child)s
+ UNION
+ SELECT person
+ FROM TeamParticipation
+ WHERE team = %(child)s
+ )
)
SELECT person, team
FROM parent
diff --git a/lib/lp/registry/personmerge.py b/lib/lp/registry/personmerge.py
index cec406c..2787bc2 100644
--- a/lib/lp/registry/personmerge.py
+++ b/lib/lp/registry/personmerge.py
@@ -1230,8 +1230,7 @@ def merge_people(from_person, to_person, reviewer, delete=False):
cur.execute("SELECT id FROM Person WHERE name = %s" % sqlvalues(name))
i += 1
cur.execute(
- "UPDATE Person SET name = %s WHERE id = %s"
- % sqlvalues(name, from_person)
+ "UPDATE Person SET name = %s WHERE id = %s" % sqlvalues(name, from_id)
)
# Since we've updated the database behind Storm's back,
diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py
index c7d4c55..7df78d0 100644
--- a/lib/lp/registry/scripts/closeaccount.py
+++ b/lib/lp/registry/scripts/closeaccount.py
@@ -236,7 +236,7 @@ def close_account(username, log):
# Keep the corresponding PersonSettings row, but reset everything to the
# defaults.
table_notification("PersonSettings")
- store.find(PersonSettings, PersonSettings.personID == person.id).set(
+ store.find(PersonSettings, PersonSettings.person == person).set(
selfgenerated_bugnotifications=DEFAULT,
# XXX cjwatson 2018-11-29: These two columns have NULL defaults, but
# perhaps shouldn't?
diff --git a/lib/lp/registry/scripts/populate_distroseriesdiff.py b/lib/lp/registry/scripts/populate_distroseriesdiff.py
index 7847ee6..97adebd 100644
--- a/lib/lp/registry/scripts/populate_distroseriesdiff.py
+++ b/lib/lp/registry/scripts/populate_distroseriesdiff.py
@@ -56,7 +56,7 @@ def compose_sql_find_latest_source_package_releases(distroseries):
parameters = {
"active_status": quote(active_publishing_status),
"distroseries": quote(distroseries.id),
- "main_archive": quote(distroseries.distribution.main_archive),
+ "main_archive": quote(distroseries.distribution.main_archive.id),
"release_pocket": quote(PackagePublishingPocket.RELEASE),
}
return (
diff --git a/lib/lp/registry/security.py b/lib/lp/registry/security.py
index 03692fe..259455c 100644
--- a/lib/lp/registry/security.py
+++ b/lib/lp/registry/security.py
@@ -126,10 +126,28 @@ class ModerateProjectGroupSet(ModerateByRegistryExpertsOrAdmins):
usedfor = IProjectGroupSet
-class ModeratePerson(ModerateByRegistryExpertsOrAdmins):
+class ModeratePerson(AuthorizationBase):
permission = "launchpad.Moderate"
usedfor = IPerson
+ def checkAuthenticated(self, user):
+ """Allow admins, commercial admins, and registry experts.
+
+ Allowing commercial admins here is a bit of a cheat, but it allows
+ IS automation to see Person.id
+ (https://portal.admin.canonical.com/C158967) without needing to use
+ an account that's a fully-fledged member of ~admins. The only extra
+ exposure here is that commercial admins gain the ability to set the
+ status of other people's accounts, which isn't completely ideal, but
+ in practice people in the commercial admins team are always
+ highly-privileged anyway.
+ """
+ return (
+ user.in_admin
+ or user.in_commercial_admin
+ or user.in_registry_experts
+ )
+
class ViewPillar(AuthorizationBase):
usedfor = IPillar
diff --git a/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst b/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst
index 9db9db6..89ff351 100644
--- a/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst
+++ b/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst
@@ -52,7 +52,10 @@ We also try to use the name of an unvalidated account, which can't be used as
the owner of something.
>>> from lp.registry.model.person import Person
- >>> Person.byName("matsubara").is_valid_person_or_team
+ >>> from lp.services.database.interfaces import IStore
+ >>> IStore(Person).find(
+ ... Person, name="matsubara"
+ ... ).one().is_valid_person_or_team
False
>>> browser.getControl(name="field.owner").value = "matsubara"
>>> browser.getControl("Change").click()
@@ -80,7 +83,7 @@ Now we try to create a team using a name that is already taken.
Okay, let's do it properly now and reassign it to an existing (and validated)
account.
- >>> salgado = Person.byName("salgado")
+ >>> salgado = IStore(Person).find(Person, name="salgado").one()
>>> salgado.is_valid_person_or_team
True
diff --git a/lib/lp/registry/stories/person/xx-approve-members.rst b/lib/lp/registry/stories/person/xx-approve-members.rst
index 6428641..bb6c369 100644
--- a/lib/lp/registry/stories/person/xx-approve-members.rst
+++ b/lib/lp/registry/stories/person/xx-approve-members.rst
@@ -66,8 +66,10 @@ as an inactive one.
# listed anywhere.
>>> from lp.registry.model.person import Person
>>> from lp.registry.model.teammembership import TeamMembershipSet
+ >>> from lp.services.database.interfaces import IStore
>>> membership = TeamMembershipSet().getByPersonAndTeam(
- ... Person.byName("name12"), Person.byName("ubuntu-team")
+ ... IStore(Person).find(Person, name="name12").one(),
+ ... IStore(Person).find(Person, name="ubuntu-team").one(),
... )
>>> membership.status.title
'Declined'
diff --git a/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst b/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst
index f25171d..036b0e7 100644
--- a/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst
+++ b/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst
@@ -169,7 +169,7 @@ Celso is a member of ubuntu-team, so he can edit this release too:
And if no-priv drives the series...
- >>> no_priv = Person.selectOneBy(name="no-priv")
+ >>> no_priv = IStore(Person).find(Person, name="no-priv").one()
>>> tomcat.getSeries("trunk").driver = no_priv
... they can edit existing releases as well, even if they are owned by
diff --git a/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst b/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst
index 867b8aa..b411b02 100644
--- a/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst
+++ b/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst
@@ -41,9 +41,12 @@ downloaded and the date of the last download on that table as well.
# Manually update the download counter for that file above so that we can
# test it.
>>> from datetime import date, datetime, timezone
+ >>> from lp.services.database.interfaces import IStore
>>> from lp.services.librarian.model import LibraryFileAlias
- >>> lfa = LibraryFileAlias.selectOne(
- ... LibraryFileAlias.q.filename == "firefox_0.9.2.orig.tar.gz"
+ >>> lfa = (
+ ... IStore(LibraryFileAlias)
+ ... .find(LibraryFileAlias, filename="firefox_0.9.2.orig.tar.gz")
+ ... .one()
... )
>>> lfa.updateDownloadCount(date(2006, 5, 4), None, 1)
diff --git a/lib/lp/registry/stories/teammembership/xx-add-member.rst b/lib/lp/registry/stories/teammembership/xx-add-member.rst
index b80bb89..c8ea2b2 100644
--- a/lib/lp/registry/stories/teammembership/xx-add-member.rst
+++ b/lib/lp/registry/stories/teammembership/xx-add-member.rst
@@ -23,8 +23,10 @@ Let's make sure that 'cprov' is now an Approved member of
>>> from lp.registry.model.person import Person
>>> from lp.registry.model.teammembership import TeamMembership
>>> from lp.services.database.interfaces import IStore
- >>> cprov = Person.byName("cprov")
- >>> landscape_team = Person.byName("landscape-developers")
+ >>> cprov = IStore(Person).find(Person, name="cprov").one()
+ >>> landscape_team = (
+ ... IStore(Person).find(Person, name="landscape-developers").one()
+ ... )
>>> cprov_landscape_membership = (
... IStore(TeamMembership)
... .find(TeamMembership, person=cprov, team=landscape_team)
@@ -56,7 +58,7 @@ become a member.
As we can see, the launchpad team will not be one of the team's active
members.
- >>> launchpad = Person.byName("launchpad")
+ >>> launchpad = IStore(Person).find(Person, name="launchpad").one()
>>> launchpad in landscape_team.activemembers
False
>>> membership = (
diff --git a/lib/lp/registry/stories/teammembership/xx-teammembership.rst b/lib/lp/registry/stories/teammembership/xx-teammembership.rst
index a7b5a3b..9d1e8a7 100644
--- a/lib/lp/registry/stories/teammembership/xx-teammembership.rst
+++ b/lib/lp/registry/stories/teammembership/xx-teammembership.rst
@@ -28,9 +28,11 @@ Regular users can create teams.
The owner of a team is always added as an administrator of their team.
>>> from lp.registry.model.person import Person
- >>> for a in Person.byName("myemail").adminmembers:
+ >>> from lp.services.database.interfaces import IStore
+ >>> for a in (
+ ... IStore(Person).find(Person, name="myemail").one().adminmembers
+ ... ):
... print(a.name)
- ...
name12
@@ -90,8 +92,7 @@ approved, though.
>>> from storm.locals import Store
>>> from lp.registry.interfaces.person import TeamMembershipPolicy
- >>> from lp.registry.model.person import Person
- >>> myemail = Person.selectOneBy(name="myemail")
+ >>> myemail = IStore(Person).find(Person, name="myemail").one()
>>> myemail.membership_policy = TeamMembershipPolicy.MODERATED
>>> Store.of(myemail).flush()
diff --git a/lib/lp/registry/tests/test_person.py b/lib/lp/registry/tests/test_person.py
index 131ceff..cb3a5a7 100644
--- a/lib/lp/registry/tests/test_person.py
+++ b/lib/lp/registry/tests/test_person.py
@@ -946,12 +946,12 @@ class TestPersonStates(TestCaseWithFactory):
is already in use. If this happens, we'll simply append an integer to
that name until we can find one that is free.
"""
- sample_person = Person.byName("name12")
+ sample_person = IStore(Person).find(Person, name="name12").one()
login(sample_person.preferredemail.email)
sample_person.deactivate(comment="blah!")
self.assertEqual(sample_person.name, "name12-deactivatedaccount")
# Now that name12 is free Foo Bar can use it.
- foo_bar = Person.byName("name16")
+ foo_bar = IStore(Person).find(Person, name="name16").one()
foo_bar.name = "name12"
# If Foo Bar deactivates their account, though, we'll have to use a
# name other than name12-deactivatedaccount because that is already
@@ -980,9 +980,9 @@ class TestPersonStates(TestCaseWithFactory):
self.assertIs(None, product.bug_supervisor)
def test_getDirectMemberIParticipateIn(self):
- sample_person = Person.byName("name12")
- warty_team = Person.byName("name20")
- ubuntu_team = Person.byName("ubuntu-team")
+ sample_person = IStore(Person).find(Person, name="name12").one()
+ warty_team = IStore(Person).find(Person, name="name20").one()
+ ubuntu_team = IStore(Person).find(Person, name="ubuntu-team").one()
# Sample Person is an active member of Warty Security Team which in
# turn is a proposed member of Ubuntu Team. That means
# sample_person._getDirectMemberIParticipateIn(ubuntu_team) will fail
@@ -1061,7 +1061,7 @@ class TestPersonStates(TestCaseWithFactory):
def test_visibility_validator_team_ss_prod_pub_to_private(self):
# A PUBLIC team with a structural subscription to a product can
# convert to a PRIVATE team.
- foo_bar = Person.byName("name16")
+ foo_bar = IStore(Person).find(Person, name="name16").one()
self.bzr.addSubscription(self.otherteam, foo_bar)
self.otherteam.visibility = PersonVisibility.PRIVATE
diff --git a/lib/lp/registry/tests/test_teammembership.py b/lib/lp/registry/tests/test_teammembership.py
index ec7e3c9..5a5fb5b 100644
--- a/lib/lp/registry/tests/test_teammembership.py
+++ b/lib/lp/registry/tests/test_teammembership.py
@@ -821,7 +821,7 @@ class TestTeamMembership(TestCaseWithFactory):
TeamMembershipStatus.DEACTIVATED,
getUtility(IPersonSet).getByName("name16"),
)
- # Bypass SQLObject to make sure the update was really flushed to the
+ # Bypass Storm to make sure the update was really flushed to the
# database.
cur = cursor()
cur.execute("SELECT status FROM teammembership WHERE id = %d" % tm.id)
diff --git a/lib/lp/registry/vocabularies.py b/lib/lp/registry/vocabularies.py
index 3ef19fe..79e938c 100644
--- a/lib/lp/registry/vocabularies.py
+++ b/lib/lp/registry/vocabularies.py
@@ -71,6 +71,8 @@ from storm.expr import (
And,
Column,
Desc,
+ Is,
+ IsNot,
Join,
LeftJoin,
Not,
@@ -176,7 +178,6 @@ from lp.services.webapp.vocabulary import (
IHugeVocabulary,
NamedStormHugeVocabulary,
NamedStormVocabulary,
- SQLObjectVocabularyBase,
StormVocabularyBase,
VocabularyFilter,
)
@@ -208,7 +209,6 @@ class BasePersonVocabulary:
If the token contains an '@', treat it like an email. Otherwise,
treat it like a name.
"""
- token = six.ensure_text(token)
if "@" in token:
# This looks like an email token, so let's do an object
# lookup based on that.
@@ -272,7 +272,7 @@ class ProductVocabulary(StormVocabularyBase):
return self.toTerm(product)
def search(self, query, vocab_filter=None):
- """See `SQLObjectVocabularyBase`.
+ """See `StormVocabularyBase`.
Returns products where the product name, displayname, title,
summary, or description contain the given query. Returns an empty list
@@ -338,7 +338,7 @@ class ProjectGroupVocabulary(StormVocabularyBase):
return self.toTerm(project)
def search(self, query, vocab_filter=None):
- """See `SQLObjectVocabularyBase`.
+ """See `StormVocabularyBase`.
Returns projects where the project name, displayname, title,
summary, or description contain the given query. Returns an empty list
@@ -369,11 +369,11 @@ def project_products_vocabulary_factory(context):
)
-class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase):
+class UserTeamsParticipationVocabulary(StormVocabularyBase):
"""Describes the public teams in which the current user participates."""
_table = Person
- _orderBy = "display_name"
+ _order_by = "display_name"
INCLUDE_PRIVATE_TEAM = False
@@ -401,7 +401,7 @@ class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase):
teams = list(
IStore(Person)
.find(Person, *clauses)
- .order_by(Person._storm_sortingColumns)
+ .order_by(Person.sortingColumns)
)
# Users can view all the teams they belong to.
precache_permission_for_objects(
@@ -428,7 +428,7 @@ class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase):
@implementer(IHugeVocabulary)
class NonMergedPeopleAndTeamsVocabulary(
- BasePersonVocabulary, SQLObjectVocabularyBase
+ BasePersonVocabulary, StormVocabularyBase
):
"""The set of all non-merged people and teams.
@@ -437,7 +437,7 @@ class NonMergedPeopleAndTeamsVocabulary(
a preferred email address, that is, unvalidated person profiles.
"""
- _orderBy = ["display_name"]
+ _order_by = ["display_name"]
displayname = "Select a Person or Team"
step_title = "Search"
@@ -449,7 +449,7 @@ class NonMergedPeopleAndTeamsVocabulary(
return getUtility(IPersonSet).find(text)
def search(self, text, vocab_filter=None):
- """See `SQLObjectVocabularyBase`.
+ """See `StormVocabularyBase`.
Return people/teams whose fti or email address match :text.
"""
@@ -461,7 +461,7 @@ class NonMergedPeopleAndTeamsVocabulary(
@implementer(IHugeVocabulary)
class PersonAccountToMergeVocabulary(
- BasePersonVocabulary, SQLObjectVocabularyBase
+ BasePersonVocabulary, StormVocabularyBase
):
"""The set of all non-merged people with at least one email address.
@@ -469,7 +469,7 @@ class PersonAccountToMergeVocabulary(
accounts to merge. You *don't* want to use it.
"""
- _orderBy = ["display_name"]
+ _order_by = ["display_name"]
displayname = "Select a Person to Merge"
step_title = "Search"
must_have_email = True
@@ -486,7 +486,7 @@ class PersonAccountToMergeVocabulary(
)
def search(self, text, vocab_filter=None):
- """See `SQLObjectVocabularyBase`.
+ """See `StormVocabularyBase`.
Return people whose fti or email address match :text.
"""
@@ -516,7 +516,7 @@ class VocabularyFilterPerson(VocabularyFilter):
@property
def filter_terms(self):
- return [Person.teamownerID == None]
+ return [Is(Person.teamowner_id, None)]
class VocabularyFilterTeam(VocabularyFilter):
@@ -529,13 +529,11 @@ class VocabularyFilterTeam(VocabularyFilter):
@property
def filter_terms(self):
- return [Person.teamownerID != None]
+ return [IsNot(Person.teamowner_id, None)]
@implementer(IHugeVocabulary)
-class ValidPersonOrTeamVocabulary(
- BasePersonVocabulary, SQLObjectVocabularyBase
-):
+class ValidPersonOrTeamVocabulary(BasePersonVocabulary, StormVocabularyBase):
"""The set of valid, viewable Persons/Teams in Launchpad.
A Person is considered valid if they have a preferred email address, and
@@ -1572,7 +1570,7 @@ class CommercialProjectsVocabulary(NamedStormVocabulary):
raise LookupError(token)
def searchForTerms(self, query=None, vocab_filter=None):
- """See `SQLObjectVocabularyBase`."""
+ """See `StormVocabularyBase`."""
results = self._doSearch(query)
num = results.count()
return CountableIterator(num, results, self.toTerm)
@@ -1944,7 +1942,7 @@ class PillarVocabularyBase(NamedStormHugeVocabulary):
def toTerm(self, obj):
"""See `IVocabulary`."""
if type(obj) == int:
- return self.toTerm(PillarName.get(obj))
+ return self.toTerm(IStore(PillarName).get(PillarName, obj))
if IPillarName.providedBy(obj):
assert obj.active, "Inactive object %s %d" % (
obj.__class__.__name__,
diff --git a/lib/lp/scripts/garbo.py b/lib/lp/scripts/garbo.py
index eab11db..06da799 100644
--- a/lib/lp/scripts/garbo.py
+++ b/lib/lp/scripts/garbo.py
@@ -1994,7 +1994,7 @@ class ArchiveAuthTokenDeactivator(BulkPruner):
)
)
affected_ppas = load_related(Archive, tokens, ["archive_id"])
- load_related(Person, affected_ppas, ["ownerID"])
+ load_related(Person, affected_ppas, ["owner_id"])
getUtility(IPersonSet).getPrecachedPersonsFromIDs(
[token.person_id for token in tokens], need_preferred_email=True
)
diff --git a/lib/lp/scripts/harness.py b/lib/lp/scripts/harness.py
index 1949e57..4d65d31 100644
--- a/lib/lp/scripts/harness.py
+++ b/lib/lp/scripts/harness.py
@@ -73,7 +73,7 @@ def _get_locals():
# Create a few variables "in case they come in handy."
# Do we really use these? Are they worth carrying around?
d = store.get(Distribution, 1)
- p = Person.get(1)
+ p = store.get(Person, 1)
ds = store.get(DistroSeries, 1)
prod = store.get(Product, 1)
proj = store.get(ProjectGroup, 1)
diff --git a/lib/lp/services/apachelogparser/model/parsedapachelog.py b/lib/lp/services/apachelogparser/model/parsedapachelog.py
index 66a4b35..f8541da 100644
--- a/lib/lp/services/apachelogparser/model/parsedapachelog.py
+++ b/lib/lp/services/apachelogparser/model/parsedapachelog.py
@@ -3,15 +3,16 @@
__all__ = ["ParsedApacheLog"]
+from datetime import timezone
+
import six
-from storm.locals import Int, Unicode
+from storm.locals import DateTime, Int, Unicode
from zope.interface import implementer
from lp.services.apachelogparser.interfaces.parsedapachelog import (
IParsedApacheLog,
)
from lp.services.database.constants import UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.interfaces import IStore
from lp.services.database.stormbase import StormBase
@@ -25,7 +26,9 @@ class ParsedApacheLog(StormBase):
id = Int(primary=True)
first_line = Unicode(allow_none=False)
bytes_read = Int(allow_none=False)
- date_last_parsed = UtcDateTimeCol(notNull=True, default=UTC_NOW)
+ date_last_parsed = DateTime(
+ allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
+ )
def __init__(self, first_line, bytes_read):
super().__init__()
diff --git a/lib/lp/services/auth/tests/test_model.py b/lib/lp/services/auth/tests/test_model.py
index b33ccf7..ca33eec 100644
--- a/lib/lp/services/auth/tests/test_model.py
+++ b/lib/lp/services/auth/tests/test_model.py
@@ -46,31 +46,36 @@ from lp.testing.matchers import HasQueryCount
from lp.testing.pages import webservice_for_person
-class TestAccessToken(TestCaseWithFactory):
+class TestAccessTokenBase:
layer = DatabaseFunctionalLayer
def test_owner_can_edit(self):
owner = self.factory.makePerson()
- _, token = self.factory.makeAccessToken(owner=owner)
+ _, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
login_person(owner)
self.assertTrue(check_permission("launchpad.Edit", token))
def test_target_owner_can_edit(self):
target_owner = self.factory.makePerson()
- repository = self.factory.makeGitRepository(owner=target_owner)
- _, token = self.factory.makeAccessToken(target=repository)
+ _, token = self.factory.makeAccessToken(
+ target=self.makeTarget(target_owner)
+ )
login_person(target_owner)
self.assertTrue(check_permission("launchpad.Edit", token))
def test_other_user_cannot_edit(self):
- _, token = self.factory.makeAccessToken()
+ _, token = self.factory.makeAccessToken(target=self.makeTarget())
login_person(self.factory.makePerson())
self.assertFalse(check_permission("launchpad.Edit", token))
def test_updateLastUsed_never_used(self):
# If the token has never been used, we update its last-used date.
owner = self.factory.makePerson()
- _, token = self.factory.makeAccessToken(owner=owner)
+ _, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
login_person(owner)
self.assertIsNone(token.date_last_used)
transaction.commit()
@@ -82,7 +87,9 @@ class TestAccessToken(TestCaseWithFactory):
# If the token's last-used date was updated recently, we leave it
# alone.
owner = self.factory.makePerson()
- _, token = self.factory.makeAccessToken(owner=owner)
+ _, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
login_person(owner)
recent = datetime.now(timezone.utc) - timedelta(minutes=1)
removeSecurityProxy(token).date_last_used = recent
@@ -94,7 +101,9 @@ class TestAccessToken(TestCaseWithFactory):
# If the token's last-used date is outside our update resolution, we
# update it.
owner = self.factory.makePerson()
- _, token = self.factory.makeAccessToken(owner=owner)
+ _, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
login_person(owner)
recent = datetime.now(timezone.utc) - timedelta(hours=1)
removeSecurityProxy(token).date_last_used = recent
@@ -107,7 +116,9 @@ class TestAccessToken(TestCaseWithFactory):
# If the token is locked by another transaction, we leave it alone.
owner = self.factory.makePerson()
owner_email = removeSecurityProxy(owner.preferredemail).email
- secret, token = self.factory.makeAccessToken(owner=owner)
+ secret, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
login_person(owner)
self.assertIsNone(token.date_last_used)
transaction.commit()
@@ -150,7 +161,9 @@ class TestAccessToken(TestCaseWithFactory):
def test_is_expired(self):
owner = self.factory.makePerson()
login_person(owner)
- _, current_token = self.factory.makeAccessToken(owner=owner)
+ _, current_token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget()
+ )
_, expired_token = self.factory.makeAccessToken(
owner=owner,
date_expires=datetime.now(timezone.utc) - timedelta(minutes=1),
@@ -161,7 +174,9 @@ class TestAccessToken(TestCaseWithFactory):
def test_revoke(self):
owner = self.factory.makePerson()
_, token = self.factory.makeAccessToken(
- owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS]
+ owner=owner,
+ scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS],
+ target=self.makeTarget(),
)
login_person(owner)
self.assertThat(
@@ -177,7 +192,12 @@ class TestAccessToken(TestCaseWithFactory):
)
-class TestAccessTokenSet(TestCaseWithFactory):
+class TestAccessTokenGitRepository(TestAccessTokenBase, TestCaseWithFactory):
+ def makeTarget(self, owner=None):
+ return self.factory.makeGitRepository(owner=owner)
+
+
+class TestAccessTokenSetBase:
layer = DatabaseFunctionalLayer
def test_new(self):
@@ -185,7 +205,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
self.assertEqual(64, len(secret))
owner = self.factory.makePerson()
description = "Test token"
- target = self.factory.makeGitRepository()
+ target = self.makeTarget()
scopes = [AccessTokenScope.REPOSITORY_BUILD_STATUS]
_, token = self.factory.makeAccessToken(
secret=secret,
@@ -206,13 +226,13 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_getByID(self):
- secret, token = self.factory.makeAccessToken()
+ secret, token = self.factory.makeAccessToken(target=self.makeTarget())
token_id = removeSecurityProxy(token).id
self.assertEqual(token, getUtility(IAccessTokenSet).getByID(token_id))
self.assertIsNone(getUtility(IAccessTokenSet).getByID(token_id + 1))
def test_getBySecret(self):
- secret, token = self.factory.makeAccessToken()
+ secret, token = self.factory.makeAccessToken(target=self.makeTarget())
self.assertEqual(
token, getUtility(IAccessTokenSet).getBySecret(secret)
)
@@ -225,9 +245,15 @@ class TestAccessTokenSet(TestCaseWithFactory):
def test_findByOwner(self):
owners = [self.factory.makePerson() for _ in range(3)]
tokens = [
- self.factory.makeAccessToken(owner=owners[0])[1],
- self.factory.makeAccessToken(owner=owners[0])[1],
- self.factory.makeAccessToken(owner=owners[1])[1],
+ self.factory.makeAccessToken(
+ owner=owners[0], target=self.makeTarget()
+ )[1],
+ self.factory.makeAccessToken(
+ owner=owners[0], target=self.makeTarget()
+ )[1],
+ self.factory.makeAccessToken(
+ owner=owners[1], target=self.makeTarget()
+ )[1],
]
self.assertContentEqual(
tokens[:2], getUtility(IAccessTokenSet).findByOwner(owners[0])
@@ -240,7 +266,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_findByTarget(self):
- targets = [self.factory.makeGitRepository() for _ in range(3)]
+ targets = [self.makeTarget() for _ in range(3)]
tokens = [
self.factory.makeAccessToken(target=targets[0])[1],
self.factory.makeAccessToken(target=targets[0])[1],
@@ -257,7 +283,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_findByTarget_visible_by_user(self):
- targets = [self.factory.makeGitRepository() for _ in range(3)]
+ targets = [self.makeTarget() for _ in range(3)]
owners = [self.factory.makePerson() for _ in range(3)]
tokens = [
self.factory.makeAccessToken(
@@ -290,7 +316,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_findByTarget_excludes_expired(self):
- target = self.factory.makeGitRepository()
+ target = self.makeTarget()
_, current_token = self.factory.makeAccessToken(target=target)
_, expires_soon_token = self.factory.makeAccessToken(
target=target,
@@ -312,7 +338,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_getByTargetAndID(self):
- targets = [self.factory.makeGitRepository() for _ in range(3)]
+ targets = [self.makeTarget() for _ in range(3)]
tokens = [
self.factory.makeAccessToken(target=targets[0])[1],
self.factory.makeAccessToken(target=targets[0])[1],
@@ -337,7 +363,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
def test_getByTargetAndID_visible_by_user(self):
- targets = [self.factory.makeGitRepository() for _ in range(3)]
+ targets = [self.makeTarget() for _ in range(3)]
owners = [self.factory.makePerson() for _ in range(3)]
tokens = [
self.factory.makeAccessToken(
@@ -374,7 +400,7 @@ class TestAccessTokenSet(TestCaseWithFactory):
self.assertIsNone(fetched_token)
def test_getByTargetAndID_excludes_expired(self):
- target = self.factory.makeGitRepository()
+ target = self.makeTarget()
_, current_token = self.factory.makeAccessToken(target=target)
_, expires_soon_token = self.factory.makeAccessToken(
target=target,
@@ -403,6 +429,13 @@ class TestAccessTokenSet(TestCaseWithFactory):
)
+class TestGitRepositoryAccessTokenSet(
+ TestAccessTokenSetBase, TestCaseWithFactory
+):
+ def makeTarget(self):
+ return self.factory.makeGitRepository()
+
+
class TestAccessTokenTargetBase:
layer = DatabaseFunctionalLayer
diff --git a/lib/lp/services/authserver/tests/test_authserver.py b/lib/lp/services/authserver/tests/test_authserver.py
index 2f29fd1..3961957 100644
--- a/lib/lp/services/authserver/tests/test_authserver.py
+++ b/lib/lp/services/authserver/tests/test_authserver.py
@@ -6,12 +6,12 @@
import xmlrpc.client
from pymacaroons import Macaroon
-from storm.sqlobject import SQLObjectNotFound
from testtools.matchers import Equals, Is, MatchesListwise, MatchesStructure
from zope.component import getUtility
from zope.interface import implementer
from zope.publisher.xmlrpc import TestRequest
+from lp.app.errors import NotFoundError
from lp.services.authserver.interfaces import (
IAuthServer,
IAuthServerApplication,
@@ -266,7 +266,7 @@ class MacaroonTests(TestCaseWithFactory):
# Pick a large ID that doesn't exist in sampledata.
lfa_id = 1000000
self.assertRaises(
- SQLObjectNotFound,
+ NotFoundError,
getUtility(ILibraryFileAliasSet).__getitem__,
lfa_id,
)
diff --git a/lib/lp/services/authserver/xmlrpc.py b/lib/lp/services/authserver/xmlrpc.py
index 54312d7..74758fe 100644
--- a/lib/lp/services/authserver/xmlrpc.py
+++ b/lib/lp/services/authserver/xmlrpc.py
@@ -9,12 +9,12 @@ __all__ = [
]
from pymacaroons import Macaroon
-from storm.sqlobject import SQLObjectNotFound
from zope.component import getUtility
from zope.interface import implementer
from zope.interface.interfaces import ComponentLookupError
from zope.security.proxy import removeSecurityProxy
+from lp.app.errors import NotFoundError
from lp.code.interfaces.cibuild import ICIBuildSet
from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuildSet
from lp.registry.interfaces.person import IPersonSet
@@ -69,7 +69,7 @@ class AuthServerAPIView(LaunchpadXMLRPCView):
# The context is a `LibraryFileAlias` ID.
try:
return getUtility(ILibraryFileAliasSet)[context]
- except SQLObjectNotFound:
+ except NotFoundError:
return None
elif context_type == "BinaryPackageBuild":
# The context is a `BinaryPackageBuild` ID.
diff --git a/lib/lp/services/database/datetimecol.py b/lib/lp/services/database/datetimecol.py
deleted file mode 100644
index b23a381..0000000
--- a/lib/lp/services/database/datetimecol.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""UtcDateTimeCol for SQLObject"""
-
-__all__ = ["UtcDateTimeCol"]
-
-from datetime import timezone
-
-import storm.sqlobject
-
-
-class UtcDateTimeCol(storm.sqlobject.UtcDateTimeCol):
- _kwargs = {"tzinfo": timezone.utc}
diff --git a/lib/lp/services/database/doc/security-proxies.rst b/lib/lp/services/database/doc/security-proxies.rst
index 3675907..ac20f99 100644
--- a/lib/lp/services/database/doc/security-proxies.rst
+++ b/lib/lp/services/database/doc/security-proxies.rst
@@ -1,7 +1,7 @@
Security proxies
----------------
-SQLObjects that are security proxied should still behave normally, this
+Storm objects that are security proxied should still behave normally; this
includes being comparable with non-security proxied objects.
First, some imports and set up::
@@ -9,11 +9,12 @@ First, some imports and set up::
>>> from zope.component import getUtility
>>> from lp.registry.interfaces.person import IPersonSet
>>> from lp.registry.model.person import Person
+ >>> from lp.services.database.interfaces import IStore
Get a proxied and unproxied person object for the same person, and demonstrate
working comparisons::
- >>> mark = Person.get(1)
+ >>> mark = IStore(Person).get(Person, 1)
>>> mark_proxied = getUtility(IPersonSet).get(1)
>>> mark is mark_proxied
False
@@ -26,8 +27,7 @@ working comparisons::
>>> mark_proxied == mark_proxied
True
-A dbschema Item can also be given to sqlobject's select() method, or any
-of its variants.
+A ``lazr.enum.DBItem`` can also be given to Storm's find() method.
>>> proxied_policy = mark_proxied.membership_policy
>>> type(proxied_policy)
@@ -35,19 +35,21 @@ of its variants.
# We don't want this test to fail when we add new person entries, so we
# compare it against a base number.
- >>> Person.select(
- ... Person.q.membership_policy == proxied_policy
+ >>> IStore(Person).find(
+ ... Person, membership_policy=proxied_policy
... ).count() > 60
True
- >>> person = Person.select(Person.q.membership_policy == proxied_policy)[
- ... 0
- ... ]
+ >>> person = (
+ ... IStore(Person)
+ ... .find(Person, membership_policy=proxied_policy)
+ ... .first()
+ ... )
>>> person.membership_policy.name
'MODERATED'
XXX: stevea: 20051018: Rewrite this test to use security proxies directly
XXX: bug 3315
-DB schema objects should be comparable correctly when proxied...
+``lazr.enum.DBItem`` objects compare correctly when proxied.
>>> from lp.registry.interfaces.distroseries import IDistroSeriesSet
>>> from lp.registry.interfaces.series import SeriesStatus
diff --git a/lib/lp/services/database/doc/storm-security-proxies.rst b/lib/lp/services/database/doc/storm-security-proxies.rst
index e95fb66..600dcc2 100644
--- a/lib/lp/services/database/doc/storm-security-proxies.rst
+++ b/lib/lp/services/database/doc/storm-security-proxies.rst
@@ -1,5 +1,5 @@
-Demonstrate that SQLObject works with security proxies
-------------------------------------------------------
+Demonstrate that Storm works with security proxies
+--------------------------------------------------
Do some imports.
diff --git a/lib/lp/services/database/interfaces.py b/lib/lp/services/database/interfaces.py
index 808e25b..f619539 100644
--- a/lib/lp/services/database/interfaces.py
+++ b/lib/lp/services/database/interfaces.py
@@ -9,7 +9,6 @@ __all__ = [
"IPrimaryObject",
"IPrimaryStore",
"IRequestExpired",
- "ISQLBase",
"IStandbyStore",
"IStore",
"IStoreSelector",
@@ -21,7 +20,6 @@ __all__ = [
from zope.interface import Interface
from zope.interface.common.interfaces import IRuntimeError
-from zope.schema import Int
class IRequestExpired(IRuntimeError):
@@ -30,15 +28,6 @@ class IRequestExpired(IRuntimeError):
"""
-# XXX 2007-02-09 jamesh:
-# This derived from sqlos.interfaces.ISQLObject before hand. I don't
-# think it is ever used though ...
-class ISQLBase(Interface):
- """An extension of ISQLObject that provides an ID."""
-
- id = Int(title="The integer ID for the instance")
-
-
#
# Database policies
#
diff --git a/lib/lp/services/database/multitablecopy.py b/lib/lp/services/database/multitablecopy.py
index 1865206..b34e1dd 100644
--- a/lib/lp/services/database/multitablecopy.py
+++ b/lib/lp/services/database/multitablecopy.py
@@ -10,7 +10,7 @@ import time
from zope.interface import implementer
from lp.services.database import postgresql
-from lp.services.database.sqlbase import cursor, quote, quoteIdentifier
+from lp.services.database.sqlbase import cursor, quote, quote_identifier
from lp.services.looptuner import DBLoopTuner, ITunableLoop
@@ -295,7 +295,7 @@ class MultiTableCopy:
Return value is properly quoted for use as an SQL identifier.
"""
raw_name = self.getRawHoldingTableName(tablename, suffix)
- return quoteIdentifier(raw_name)
+ return quote_identifier(raw_name)
def _pointsToTable(self, source_table, foreign_key):
"""Name of table that source_table.foreign_key refers to.
@@ -353,9 +353,9 @@ class MultiTableCopy:
extracted. The WHERE clause may refer to rows from table being
extracted as "source."
:param id_sequence: SQL sequence that should assign new identifiers
- for the extracted rows. Defaults to `source_table` with "_seq_id"
- appended, which by SQLObject/Launchpad convention is the sequence
- that provides `source_table`'s primary key values. Used verbatim,
+ for the extracted rows. Defaults to `source_table` with "_id_seq"
+ appended, which by Launchpad convention is the sequence that
+ provides `source_table`'s primary key values. Used verbatim,
without quoting.
:param inert_where: Boolean SQL expression characterizing rows that
are extracted, but should not poured back into `source_table`
diff --git a/lib/lp/services/database/postgresql.py b/lib/lp/services/database/postgresql.py
index 8ba5252..afbfcb0 100644
--- a/lib/lp/services/database/postgresql.py
+++ b/lib/lp/services/database/postgresql.py
@@ -8,7 +8,7 @@ and table manipulation
import re
-from lp.services.database.sqlbase import quote, quoteIdentifier, sqlvalues
+from lp.services.database.sqlbase import quote, quote_identifier, sqlvalues
def listReferences(cur, table, column, indirect=True, _state=None):
@@ -308,8 +308,8 @@ def generateResetSequencesSQL(cur):
if table is None or column is None:
continue
sql = "SELECT max(%s) FROM %s" % (
- quoteIdentifier(column),
- quoteIdentifier(table),
+ quote_identifier(column),
+ quote_identifier(table),
)
cur.execute(sql)
last_value = cur.fetchone()[0]
diff --git a/lib/lp/services/database/sqlbase.py b/lib/lp/services/database/sqlbase.py
index ebbc1d9..f604f4c 100644
--- a/lib/lp/services/database/sqlbase.py
+++ b/lib/lp/services/database/sqlbase.py
@@ -16,20 +16,16 @@ __all__ = [
"ISOLATION_LEVEL_REPEATABLE_READ",
"ISOLATION_LEVEL_SERIALIZABLE",
"quote",
- "quoteIdentifier",
"quote_identifier",
"reset_store",
"session_store",
- "SQLBase",
"sqlvalues",
"StupidCache",
]
-
from datetime import datetime, timezone
import psycopg2
-import storm
import transaction
from psycopg2.extensions import (
ISOLATION_LEVEL_AUTOCOMMIT,
@@ -42,27 +38,18 @@ from psycopg2.extensions import (
from storm.databases.postgres import compile as postgres_compile
from storm.expr import State
from storm.expr import compile as storm_compile
-from storm.locals import Storm # noqa: B1
-from storm.locals import Store
from storm.zope.interfaces import IZStorm
from twisted.python.util import mergeFunctionMetadata
from zope.component import getUtility
-from zope.interface import implementer
-from zope.security.proxy import removeSecurityProxy
from lp.services.config import dbconfig
from lp.services.database.interfaces import (
DEFAULT_FLAVOR,
MAIN_STORE,
DisallowedStore,
- IPrimaryObject,
- IPrimaryStore,
- ISQLBase,
- IStore,
IStoreSelector,
)
from lp.services.database.sqlobject import sqlrepr
-from lp.services.propertycache import clear_property_cache
# Default we want for scripts, and the PostgreSQL default. Note psycopg1 will
# use SERIALIZABLE unless we override, but psycopg2 will not.
@@ -84,9 +71,6 @@ class StupidCache:
This class is basically equivalent to Storm's standard Cache class
with a very large size but without the overhead of maintaining the
LRU list.
-
- This provides caching behaviour equivalent to what we were using
- under SQLObject.
"""
def __init__(self, size):
@@ -112,168 +96,15 @@ class StupidCache:
return self._cache.keys()
-def _get_sqlobject_store():
- """Return the store used by the SQLObject compatibility layer."""
- return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
-
-
-class LaunchpadStyle(storm.sqlobject.SQLObjectStyle):
- """A SQLObject style for launchpad.
+def _get_main_default_store():
+ """Return the main store using the default flavor.
- Python attributes and database columns are lowercase.
- Class names and database tables are MixedCase. Using this style should
- simplify SQLBase class definitions since more defaults will be correct.
+ For web requests, the default flavor uses a primary or standby database
+ depending on the type of request (see
+ `lp.services.database.policy.LaunchpadDatabasePolicy`); in all other
+ situations, it uses the primary database.
"""
-
- def pythonAttrToDBColumn(self, attr):
- return attr
-
- def dbColumnToPythonAttr(self, col):
- return col
-
- def pythonClassToDBTable(self, className):
- return className
-
- def dbTableToPythonClass(self, table):
- return table
-
- def idForTable(self, table):
- return "id"
-
- def pythonClassToAttr(self, className):
- return className.lower()
-
- # dsilvers: 20050322: If you take this method out; then RelativeJoin
- # instances in our SQLObject classes cause the following error:
- # AttributeError: 'LaunchpadStyle' object has no attribute
- # 'tableReference'
- def tableReference(self, table):
- """Return the tablename mapped for use in RelativeJoin statements."""
- return table.__str__()
-
-
-@implementer(ISQLBase)
-class SQLBase(storm.sqlobject.SQLObjectBase):
- """Base class emulating SQLObject for legacy database classes."""
-
- _style = LaunchpadStyle()
-
- # Silence warnings in linter script, which complains about all
- # SQLBase-derived objects missing an id.
- id = None
-
- def __init__(self, *args, **kwargs):
- """Extended version of the SQLObjectBase constructor.
-
- We force use of the primary Store.
-
- We refetch any parameters from different stores from the
- correct primary Store.
- """
- # Make it simple to write dumb-invalidators - initialized
- # _cached_properties to a valid list rather than just-in-time
- # creation.
- self._cached_properties = []
- store = IPrimaryStore(self.__class__)
-
- # The constructor will fail if objects from a different Store
- # are passed in. We need to refetch these objects from the correct
- # primary Store if necessary so the foreign key references can be
- # constructed.
- # XXX StuartBishop 2009-03-02 bug=336867: We probably want to remove
- # this code - there are enough other places developers have to be
- # aware of the replication # set boundaries. Why should
- # Person(..., account=an_account) work but
- # some_person.account = an_account fail?
- for key, argument in kwargs.items():
- argument = removeSecurityProxy(argument)
- if not isinstance(argument, Storm): # noqa: B1
- continue
- argument_store = Store.of(argument)
- if argument_store is not store:
- new_argument = store.find(
- argument.__class__, id=argument.id
- ).one()
- assert (
- new_argument is not None
- ), "%s not yet synced to this store" % repr(argument)
- kwargs[key] = new_argument
-
- store.add(self)
- try:
- self._create(None, **kwargs)
- except Exception:
- store.remove(self)
- raise
-
- @classmethod
- def _get_store(cls):
- return IStore(cls)
-
- def __repr__(self):
- return "<%s object>" % (self.__class__.__name__)
-
- def destroySelf(self):
- my_primary = IPrimaryObject(self)
- if self is my_primary:
- super().destroySelf()
- else:
- my_primary.destroySelf()
-
- def __eq__(self, other):
- """Equality operator.
-
- Objects compare equal if they have the same class and id, and the id
- is not None.
-
- This rule allows objects retrieved from different stores to compare
- equal. Newly-created objects may not yet have an id; in such cases
- we flush the store so that we can find out their id.
- """
- naked_self = removeSecurityProxy(self)
- naked_other = removeSecurityProxy(other)
- if naked_self.__class__ != naked_other.__class__:
- return False
- try:
- self_id = naked_self.id
- except KeyError:
- self.syncUpdate()
- self_id = naked_self.id
- if self_id is None:
- return False
- try:
- other_id = naked_other.id
- except KeyError:
- other.syncUpdate()
- other_id = naked_other.id
- return self_id == other_id
-
- def __ne__(self, other):
- """Inverse of __eq__."""
- return not (self == other)
-
- def __hash__(self):
- """Hash operator.
-
- We must define __hash__ since we define __eq__ (Python 3 requires
- this), but we need to take care to preserve the invariant that
- objects that compare equal have the same hash value. Newly-created
- objects may not yet have an id; in such cases we flush the store so
- that we can find out their id.
- """
- try:
- id = self.id
- except KeyError:
- self.syncUpdate()
- id = self.id
- return hash((self.__class__, id))
-
- def __storm_invalidated__(self):
- """Flush cached properties."""
- # XXX: RobertCollins 2010-08-16 bug=622648: Note this is not directly
- # tested, but the entire test suite blows up awesomely if it's broken.
- # It's entirely unclear where tests for this should be.
- clear_property_cache(self)
+ return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
def get_transaction_timestamp(store):
@@ -319,21 +150,14 @@ def quote(x):
>>> from datetime import datetime, date, time
>>> quote(datetime(2003, 12, 4, 13, 45, 50))
"'2003-12-04 13:45:50'"
+ >>> quote(datetime(2003, 12, 4, 13, 45, 50, 123456))
+ "'2003-12-04 13:45:50.123456'"
>>> quote(date(2003, 12, 4))
"'2003-12-04'"
>>> quote(time(13, 45, 50))
"'13:45:50'"
- This function special cases datetime objects, due to a bug that has
- since been fixed in SQLOS (it installed an SQLObject converter that
- stripped the time component from the value). By itself, the sqlrepr
- function has the following output:
-
- >>> sqlrepr(datetime(2003, 12, 4, 13, 45, 50), "postgres")
- "'2003-12-04T13:45:50'"
-
- This function also special cases set objects, which SQLObject's
- sqlrepr() doesn't know how to handle.
+ sqlrepr() also special-cases set objects.
>>> quote(set([1, 2, 3]))
'(1, 2, 3)'
@@ -343,12 +167,6 @@ def quote(x):
"""
if isinstance(x, datetime):
return "'%s'" % x
- elif ISQLBase(x, None) is not None:
- return str(x.id)
- elif isinstance(x, (set, frozenset)):
- # SQLObject can't cope with sets, so convert to a list, which it
- # /does/ know how to handle.
- x = list(x)
return sqlrepr(x, "postgres")
@@ -407,23 +225,20 @@ def quote_identifier(identifier):
In SQL, identifiers are quoted using " rather than ' which is reserved
for strings.
- >>> print(quoteIdentifier("hello"))
+ >>> print(quote_identifier("hello"))
"hello"
- >>> print(quoteIdentifier("'"))
+ >>> print(quote_identifier("'"))
"'"
- >>> print(quoteIdentifier('"'))
+ >>> print(quote_identifier('"'))
""""
- >>> print(quoteIdentifier("\\"))
+ >>> print(quote_identifier("\\"))
"\"
- >>> print(quoteIdentifier('\\"'))
+ >>> print(quote_identifier('\\"'))
"\"""
'''
return '"%s"' % identifier.replace('"', '""')
-quoteIdentifier = quote_identifier # Backwards compatibility for now.
-
-
def convert_storm_clause_to_string(storm_clause):
"""Convert a Storm expression into a plain string.
@@ -489,25 +304,28 @@ def convert_storm_clause_to_string(storm_clause):
def flush_database_updates():
"""Flushes all pending database updates.
- When SQLObject's _lazyUpdate flag is set, then it's possible to have
- changes written to objects that aren't flushed to the database, leading to
- inconsistencies when doing e.g.::
+ Storm normally flushes changes to objects before it needs to issue the
+ next query, but there are situations where it doesn't realize that it
+ needs to do so. One common case is when creating an object and
+ immediately fetching its ID, which is typically assigned by the database
+ based on a sequence when the row is inserted::
- # Assuming the Beer table already has a 'Victoria Bitter' row...
- assert Beer.select("name LIKE 'Vic%'").count() == 1 # This will pass
- beer = Beer.byName('Victoria Bitter')
- beer.name = 'VB'
- assert Beer.select("name LIKE 'Vic%'").count() == 0 # This will fail
+ store = IStore(Beer)
+ beer = Beer(name="Victoria Bitter")
+ store.add(beer)
+ assert beer.id is not None # This will fail
To avoid this problem, use this function::
- # Assuming the Beer table already has a 'Victoria Bitter' row...
- assert Beer.select("name LIKE 'Vic%'").count() == 1 # This will pass
- beer = Beer.byName('Victoria Bitter')
- beer.name = 'VB'
+ store = IStore(Beer)
+ beer = Beer(name="Victoria Bitter")
+ store.add(beer)
flush_database_updates()
- assert Beer.select("name LIKE 'Vic%'").count() == 0 # This will pass
+ assert beer.id is not None # This will pass
+ (You can also flush individual stores using `store.flush()`, which is
+ normally sufficient, but sometimes this function is a convenient
+ shorthand if you don't already have a store object handy.)
"""
zstorm = getUtility(IZStorm)
for name, store in zstorm.iterstores():
@@ -517,14 +335,13 @@ def flush_database_updates():
def flush_database_caches():
"""Flush all database caches.
- SQLObject caches field values from the database in SQLObject
- instances. If SQL statements are issued that change the state of
- the database behind SQLObject's back, these cached values will be
- invalid.
+ Storm caches field values from the database in Storm instances. If SQL
+ statements are issued that change the state of the database behind
+ Storm's back, these cached values will be invalid.
- This function iterates through all the objects in the SQLObject
- connection's cache, and synchronises them with the database. This
- ensures that they all reflect the values in the database.
+ This function iterates through all the objects in the Storm connection's
+ cache, and synchronises them with the database. This ensures that they
+ all reflect the values in the database.
"""
zstorm = getUtility(IZStorm)
for name, store in zstorm.iterstores():
@@ -537,7 +354,7 @@ def block_implicit_flushes(func):
def block_implicit_flushes_decorator(*args, **kwargs):
try:
- store = _get_sqlobject_store()
+ store = _get_main_default_store()
except DisallowedStore:
return func(*args, **kwargs)
store.block_implicit_flushes()
@@ -556,7 +373,7 @@ def reset_store(func):
try:
return func(*args, **kwargs)
finally:
- _get_sqlobject_store().reset()
+ _get_main_default_store().reset()
return mergeFunctionMetadata(func, reset_store_decorator)
@@ -608,7 +425,7 @@ class cursor:
"""
def __init__(self):
- self._connection = _get_sqlobject_store()._connection
+ self._connection = _get_main_default_store()._connection
self._result = None
def execute(self, query, params=None):
diff --git a/lib/lp/services/database/sqlobject/__init__.py b/lib/lp/services/database/sqlobject/__init__.py
index 4013d35..bd5e645 100644
--- a/lib/lp/services/database/sqlobject/__init__.py
+++ b/lib/lp/services/database/sqlobject/__init__.py
@@ -7,31 +7,6 @@
import datetime
from storm.expr import SQL
-from storm.sqlobject import ( # noqa: F401
- AND,
- CONTAINSSTRING,
- DESC,
- IN,
- LIKE,
- NOT,
- OR,
- BoolCol,
- DateCol,
- FloatCol,
- ForeignKey,
- IntCol,
- IntervalCol,
- SingleJoin,
- SQLConstant,
- SQLMultipleJoin,
- SQLObjectBase,
- SQLObjectMoreThanOneResultError,
- SQLObjectNotFound,
- SQLObjectResultSet,
- SQLRelatedJoin,
- StringCol,
- UtcDateTimeCol,
-)
_sqlStringReplace = [
("\\", "\\\\"),
@@ -70,7 +45,7 @@ def sqlrepr(value, dbname=None):
return repr(value)
elif value is None:
return "NULL"
- elif isinstance(value, (list, set, tuple)):
+ elif isinstance(value, (frozenset, list, set, tuple)):
return "(%s)" % ", ".join(sqlrepr(v, dbname) for v in value)
elif isinstance(value, datetime.datetime):
return value.strftime("'%Y-%m-%dT%H:%M:%S'")
diff --git a/lib/lp/services/database/tests/test_transaction_decorators.py b/lib/lp/services/database/tests/test_transaction_decorators.py
index ef79e98..c5d4776 100644
--- a/lib/lp/services/database/tests/test_transaction_decorators.py
+++ b/lib/lp/services/database/tests/test_transaction_decorators.py
@@ -21,7 +21,9 @@ class TestTransactionDecorators(unittest.TestCase):
def setUp(self):
switch_dbuser("librarian")
self.store = IStore(LibraryFileContent)
- self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba")
+ self.content_id = (
+ db.Library().add("deadbeef", 1234, "abababab", "ba").id
+ )
self.file_content = self._getTestFileContent()
transaction.commit()
diff --git a/lib/lp/services/features/model.py b/lib/lp/services/features/model.py
index 29631b0..1d64a9a 100644
--- a/lib/lp/services/features/model.py
+++ b/lib/lp/services/features/model.py
@@ -13,7 +13,6 @@ import six
from storm.locals import DateTime, Int, Reference, Unicode
from zope.interface import implementer
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.interfaces import IStore
from lp.services.database.stormbase import StormBase
from lp.services.features.interfaces import IFeatureRules
@@ -54,7 +53,7 @@ class FeatureFlagChangelogEntry(StormBase):
__storm_table__ = "FeatureFlagChangelogEntry"
id = Int(primary=True)
- date_changed = UtcDateTimeCol(notNull=True)
+ date_changed = DateTime(allow_none=False, tzinfo=timezone.utc)
diff = Unicode(allow_none=False)
comment = Unicode(allow_none=False)
person_id = Int(name="person", allow_none=False)
diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py
index c8a803e..71912a9 100644
--- a/lib/lp/services/librarian/client.py
+++ b/lib/lp/services/librarian/client.py
@@ -24,13 +24,11 @@ from urllib.request import urlopen
import six
from lazr.restful.utils import get_current_browser_request
-from storm.store import Store
from zope.interface import implementer
from lp.services.config import config, dbconfig
-from lp.services.database.interfaces import IPrimaryStore
+from lp.services.database.interfaces import IPrimaryStore, IStore
from lp.services.database.postgresql import ConnectionString
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.librarian.interfaces.client import (
LIBRARIAN_SERVER_DEFAULT_TIMEOUT,
DownloadFailed,
@@ -250,6 +248,7 @@ class FileUploadClient:
sha1=sha1_digester.hexdigest(),
md5=md5_digester.hexdigest(),
)
+ store.add(content)
LibraryFileAlias(
id=aliasID,
content=content,
@@ -259,7 +258,7 @@ class FileUploadClient:
restricted=self.restricted,
)
- Store.of(content).flush()
+ store.flush()
assert isinstance(aliasID, int), "aliasID %r not an integer" % (
aliasID,
@@ -410,10 +409,7 @@ class FileDownloadClient:
"""
from lp.services.librarian.model import LibraryFileAlias
- try:
- lfa = LibraryFileAlias.get(aliasID)
- except SQLObjectNotFound:
- lfa = None
+ lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, aliasID)
if lfa is None:
raise DownloadFailed("Alias %d not found" % aliasID)
diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py
index 83ca952..25692c8 100644
--- a/lib/lp/services/librarian/model.py
+++ b/lib/lp/services/librarian/model.py
@@ -15,23 +15,25 @@ from datetime import datetime, timezone
from urllib.parse import urlparse
from lazr.delegates import delegate_to
-from storm.locals import Date, Desc, Int, Reference, ReferenceSet, Store
+from storm.locals import (
+ Bool,
+ Date,
+ DateTime,
+ Desc,
+ Int,
+ Reference,
+ Store,
+ Unicode,
+)
from zope.component import adapter, getUtility
from zope.interface import Interface, implementer
+from lp.app.errors import NotFoundError
from lp.registry.errors import InvalidFilename
from lp.services.config import config
from lp.services.database.constants import DEFAULT, UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.interfaces import IPrimaryStore, IStore
-from lp.services.database.sqlbase import SQLBase, session_store
-from lp.services.database.sqlobject import (
- BoolCol,
- ForeignKey,
- IntCol,
- SQLRelatedJoin,
- StringCol,
-)
+from lp.services.database.sqlbase import session_store
from lp.services.database.stormbase import StormBase
from lp.services.librarian.interfaces import (
ILibraryFileAlias,
@@ -51,48 +53,65 @@ from lp.services.tokens import create_token
@implementer(ILibraryFileContent)
-class LibraryFileContent(SQLBase):
+class LibraryFileContent(StormBase):
"""A pointer to file content in the librarian."""
- _table = "LibraryFileContent"
+ __storm_table__ = "LibraryFileContent"
+
+ id = Int(primary=True)
+ datecreated = DateTime(
+ allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
+ )
+ filesize = Int(allow_none=False)
+ sha256 = Unicode()
+ sha1 = Unicode(allow_none=False)
+ md5 = Unicode(allow_none=False)
- datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW)
- filesize = IntCol(notNull=True)
- sha256 = StringCol()
- sha1 = StringCol(notNull=True)
- md5 = StringCol(notNull=True)
+ def __init__(self, filesize, md5, sha1, sha256, id=None):
+ super().__init__()
+ if id is not None:
+ self.id = id
+ self.filesize = filesize
+ self.md5 = md5
+ self.sha1 = sha1
+ self.sha256 = sha256
@implementer(ILibraryFileAlias)
-class LibraryFileAlias(SQLBase):
+class LibraryFileAlias(StormBase):
"""A filename and mimetype that we can serve some given content with."""
- _table = "LibraryFileAlias"
- date_created = UtcDateTimeCol(notNull=False, default=DEFAULT)
- content = ForeignKey(
- foreignKey="LibraryFileContent",
- dbName="content",
- notNull=False,
- )
- filename = StringCol(notNull=True)
- mimetype = StringCol(notNull=True)
- expires = UtcDateTimeCol(notNull=False, default=None)
- restricted = BoolCol(notNull=True, default=False)
- hits = IntCol(notNull=True, default=0)
-
- products = SQLRelatedJoin(
- "ProductRelease",
- joinColumn="libraryfile",
- otherColumn="productrelease",
- intermediateTable="ProductReleaseFile",
- )
+ __storm_table__ = "LibraryFileAlias"
- sourcepackages = ReferenceSet(
- "id",
- "SourcePackageReleaseFile.libraryfile_id",
- "SourcePackageReleaseFile.sourcepackagerelease_id",
- "SourcePackageRelease.id",
+ id = Int(primary=True)
+ date_created = DateTime(
+ allow_none=True, default=DEFAULT, tzinfo=timezone.utc
)
+ content_id = Int(name="content", allow_none=True)
+ content = Reference(content_id, "LibraryFileContent.id")
+ filename = Unicode(allow_none=False)
+ mimetype = Unicode(allow_none=False)
+ expires = DateTime(allow_none=True, default=None, tzinfo=timezone.utc)
+ restricted = Bool(allow_none=False, default=False)
+ hits = Int(allow_none=False, default=0)
+
+ def __init__(
+ self,
+ content,
+ filename,
+ mimetype,
+ id=None,
+ expires=None,
+ restricted=False,
+ ):
+ super().__init__()
+ if id is not None:
+ self.id = id
+ self.content = content
+ self.filename = filename
+ self.mimetype = mimetype
+ self.expires = expires
+ self.restricted = restricted
@property
def client(self):
@@ -198,23 +217,9 @@ class LibraryFileAlias(SQLBase):
entry.count += count
self.hits += count
- products = SQLRelatedJoin(
- "ProductRelease",
- joinColumn="libraryfile",
- otherColumn="productrelease",
- intermediateTable="ProductReleaseFile",
- )
-
- sourcepackages = ReferenceSet(
- "id",
- "SourcePackageReleaseFile.libraryfile_id",
- "SourcePackageReleaseFile.sourcepackagerelease_id",
- "SourcePackageRelease.id",
- )
-
@property
def deleted(self):
- return self.contentID is None
+ return self.content_id is None
def __storm_invalidated__(self):
"""Make sure that the file is closed across transaction boundary."""
@@ -278,17 +283,17 @@ class LibraryFileAliasSet:
def __getitem__(self, key):
"""See ILibraryFileAliasSet.__getitem__"""
- return LibraryFileAlias.get(key)
+ lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, key)
+ if lfa is None:
+ raise NotFoundError(key)
+ return lfa
def findBySHA256(self, sha256):
"""See ILibraryFileAliasSet."""
- return LibraryFileAlias.select(
- """
- content = LibraryFileContent.id
- AND LibraryFileContent.sha256 = '%s'
- """
- % sha256,
- clauseTables=["LibraryFileContent"],
+ return IStore(LibraryFileAlias).find(
+ LibraryFileAlias,
+ LibraryFileAlias.content == LibraryFileContent.id,
+ LibraryFileContent.sha256 == sha256,
)
def preloadLastDownloaded(self, lfas):
@@ -326,7 +331,7 @@ class LibraryFileAliasSet:
@implementer(ILibraryFileDownloadCount)
-class LibraryFileDownloadCount(SQLBase):
+class LibraryFileDownloadCount(StormBase):
"""See `ILibraryFileDownloadCount`"""
__storm_table__ = "LibraryFileDownloadCount"
@@ -339,16 +344,23 @@ class LibraryFileDownloadCount(SQLBase):
country_id = Int(name="country", allow_none=True)
country = Reference(country_id, "Country.id")
+ def __init__(self, libraryfilealias, day, count, country=None):
+ super().__init__()
+ self.libraryfilealias = libraryfilealias
+ self.day = day
+ self.count = count
+ self.country = country
+
class TimeLimitedToken(StormBase):
"""A time limited access token for accessing a private file."""
__storm_table__ = "TimeLimitedToken"
- created = UtcDateTimeCol(notNull=True, default=UTC_NOW)
- path = StringCol(notNull=True)
+ created = DateTime(allow_none=False, default=UTC_NOW, tzinfo=timezone.utc)
+ path = Unicode(allow_none=False)
# The hex SHA-256 hash of the token.
- token = StringCol(notNull=True)
+ token = Unicode(allow_none=False)
__storm_primary__ = ("path", "token")
diff --git a/lib/lp/services/librarian/tests/test_client.py b/lib/lp/services/librarian/tests/test_client.py
index 39f6ee8..012d1e1 100644
--- a/lib/lp/services/librarian/tests/test_client.py
+++ b/lib/lp/services/librarian/tests/test_client.py
@@ -20,7 +20,7 @@ from testtools.testcase import ExpectedException
from lp.services.config import config
from lp.services.daemons.tachandler import TacTestSetup
-from lp.services.database.interfaces import IStandbyStore
+from lp.services.database.interfaces import IStandbyStore, IStore
from lp.services.database.policy import StandbyDatabasePolicy
from lp.services.database.sqlbase import block_implicit_flushes
from lp.services.librarian import client as client_module
@@ -387,8 +387,9 @@ class LibrarianClientTestCase(TestCase):
sha256 = hashlib.sha256(data).hexdigest()
client = LibrarianClient()
- lfa = LibraryFileAlias.get(
- client.addFile("file", len(data), io.BytesIO(data), "text/plain")
+ lfa = IStore(LibraryFileAlias).get(
+ LibraryFileAlias,
+ client.addFile("file", len(data), io.BytesIO(data), "text/plain"),
)
self.assertEqual(md5, lfa.content.md5)
@@ -427,7 +428,7 @@ class LibrarianClientTestCase(TestCase):
"expected %s to start with %s" % (download_url, expected_host),
)
# If the alias has been deleted, _getURLForDownload returns None.
- lfa = LibraryFileAlias.get(alias_id)
+ lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, alias_id)
lfa.content = None
call = block_implicit_flushes( # Prevent a ProgrammingError
LibrarianClient._getURLForDownload
@@ -469,7 +470,7 @@ class LibrarianClientTestCase(TestCase):
"expected %s to start with %s" % (download_url, expected_host),
)
# If the alias has been deleted, _getURLForDownload returns None.
- lfa = LibraryFileAlias.get(alias_id)
+ lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, alias_id)
lfa.content = None
call = block_implicit_flushes( # Prevent a ProgrammingError
RestrictedLibrarianClient._getURLForDownload
diff --git a/lib/lp/services/librarianserver/db.py b/lib/lp/services/librarianserver/db.py
index b747c2c..21631b2 100644
--- a/lib/lp/services/librarianserver/db.py
+++ b/lib/lp/services/librarianserver/db.py
@@ -49,7 +49,12 @@ class Library:
# The following methods are read-only queries.
def lookupBySHA1(self, digest):
- return [fc.id for fc in LibraryFileContent.selectBy(sha1=digest)]
+ return [
+ fc.id
+ for fc in IStore(LibraryFileContent).find(
+ LibraryFileContent, sha1=digest
+ )
+ ]
@defer.inlineCallbacks
def _verifyMacaroon(self, macaroon, aliasid):
@@ -142,7 +147,7 @@ class Library:
def getAliases(self, fileid):
results = IStore(LibraryFileAlias).find(
LibraryFileAlias,
- LibraryFileAlias.contentID == LibraryFileContent.id,
+ LibraryFileAlias.content_id == LibraryFileContent.id,
LibraryFileAlias.restricted == self.restricted,
LibraryFileContent.id == fileid,
)
@@ -151,20 +156,22 @@ class Library:
# the following methods are used for adding to the library
def add(self, digest, size, md5_digest, sha256_digest):
+ store = IStore(LibraryFileContent)
lfc = LibraryFileContent(
filesize=size, sha1=digest, md5=md5_digest, sha256=sha256_digest
)
- return lfc.id
-
- def addAlias(self, fileid, filename, mimetype, expires=None):
- """Add an alias, and return its ID.
-
- If a matching alias already exists, it will return that ID instead.
- """
- return LibraryFileAlias(
- contentID=fileid,
+ store.add(lfc)
+ store.flush()
+ return lfc
+
+ def addAlias(self, content, filename, mimetype, expires=None):
+ """Add an alias and return it."""
+ lfa = LibraryFileAlias(
+ content=content,
filename=filename,
mimetype=mimetype,
expires=expires,
restricted=self.restricted,
- ).id
+ )
+ IStore(LibraryFileAlias).flush()
+ return lfa
diff --git a/lib/lp/services/librarianserver/librariangc.py b/lib/lp/services/librarianserver/librariangc.py
index 14423be..aafd7cb 100644
--- a/lib/lp/services/librarianserver/librariangc.py
+++ b/lib/lp/services/librarianserver/librariangc.py
@@ -20,7 +20,7 @@ from lp.services.config import config
from lp.services.database.postgresql import (
drop_tables,
listReferences,
- quoteIdentifier,
+ quote_identifier,
)
from lp.services.features import getFeatureFlag
from lp.services.librarianserver import swift
@@ -436,8 +436,8 @@ class UnreferencedLibraryFileAliasPruner:
WHERE LibraryFileAlias.id = %(table)s.%(column)s
"""
% {
- "table": quoteIdentifier(table),
- "column": quoteIdentifier(column),
+ "table": quote_identifier(table),
+ "column": quote_identifier(column),
}
)
log.debug(
diff --git a/lib/lp/services/librarianserver/storage.py b/lib/lp/services/librarianserver/storage.py
index 2c2437b..cabded8 100644
--- a/lib/lp/services/librarianserver/storage.py
+++ b/lib/lp/services/librarianserver/storage.py
@@ -270,15 +270,16 @@ class LibraryFileUpload:
# If we haven't got a contentID, we need to create one and return
# it to the client.
if self.contentID is None:
- contentID = self.storage.library.add(
+ content = self.storage.library.add(
dstDigest,
self.size,
self.md5_digester.hexdigest(),
self.sha256_digester.hexdigest(),
)
+ contentID = content.id
aliasID = self.storage.library.addAlias(
- contentID, self.filename, self.mimetype, self.expires
- )
+ content, self.filename, self.mimetype, self.expires
+ ).id
self.debugLog.append(
"created contentID: %r, aliasID: %r."
% (contentID, aliasID)
diff --git a/lib/lp/services/librarianserver/testing/fake.py b/lib/lp/services/librarianserver/testing/fake.py
index e265e6a..9898dc2 100644
--- a/lib/lp/services/librarianserver/testing/fake.py
+++ b/lib/lp/services/librarianserver/testing/fake.py
@@ -25,6 +25,7 @@ from transaction.interfaces import ISynchronizer
from zope.interface import implementer
from lp.services.config import config
+from lp.services.database.interfaces import IStore
from lp.services.librarian.client import get_libraryfilealias_download_path
from lp.services.librarian.interfaces import ILibraryFileAliasSet
from lp.services.librarian.interfaces.client import (
@@ -102,7 +103,7 @@ class FakeLibrarian(Fixture):
)
file_ref = self._makeLibraryFileContent(content)
- alias = self._makeAlias(file_ref.id, name, content, contentType)
+ alias = self._makeAlias(file_ref, name, content, contentType)
self.aliases[alias.id] = alias
return alias
@@ -142,12 +143,13 @@ class FakeLibrarian(Fixture):
for alias in self.aliases.values():
alias.file_committed = True
- def _makeAlias(self, file_id, name, content, content_type):
+ def _makeAlias(self, lfc, name, content, content_type):
"""Create a `LibraryFileAlias`."""
alias = InstrumentedLibraryFileAlias(
- contentID=file_id, filename=name, mimetype=content_type
+ content=lfc, filename=name, mimetype=content_type
)
alias.content_bytes = content
+ IStore(LibraryFileAlias).flush()
return alias
def _makeLibraryFileContent(self, content):
@@ -160,6 +162,7 @@ class FakeLibrarian(Fixture):
content_object = LibraryFileContent(
filesize=size, md5=md5, sha1=sha1, sha256=sha256
)
+ IStore(LibraryFileContent).add(content_object)
return content_object
def create(
diff --git a/lib/lp/services/librarianserver/testing/server.py b/lib/lp/services/librarianserver/testing/server.py
index c2ef75b..26a7914 100644
--- a/lib/lp/services/librarianserver/testing/server.py
+++ b/lib/lp/services/librarianserver/testing/server.py
@@ -19,6 +19,7 @@ from fixtures import Fixture, FunctionFixture
from lp.services.config import config
from lp.services.daemons.tachandler import TacException, TacTestSetup
+from lp.services.database.interfaces import IStore
from lp.services.librarian.model import LibraryFileContent
from lp.services.librarianserver.storage import _relFileLocation
from lp.services.osutils import get_pid_from_file
@@ -255,7 +256,7 @@ class LibrarianServerFixture(TacTestSetup):
def fillLibrarianFile(fileid, content=None):
"""Write contents in disk for a librarian sampledata."""
with dbuser("librariangc"):
- lfc = LibraryFileContent.get(fileid)
+ lfc = IStore(LibraryFileContent).get(LibraryFileContent, fileid)
if content is None:
content = b"x" * lfc.filesize
else:
diff --git a/lib/lp/services/librarianserver/tests/test_db.py b/lib/lp/services/librarianserver/tests/test_db.py
index f43d918..48d980b 100644
--- a/lib/lp/services/librarianserver/tests/test_db.py
+++ b/lib/lp/services/librarianserver/tests/test_db.py
@@ -42,21 +42,20 @@ class DBTestCase(TestCase):
self.assertEqual([], library.lookupBySHA1("deadbeef"))
# Add a file, check it is found by lookupBySHA1
- fileID = library.add("deadbeef", 1234, "abababab", "babababa")
- self.assertEqual([fileID], library.lookupBySHA1("deadbeef"))
+ content = library.add("deadbeef", 1234, "abababab", "babababa")
+ self.assertEqual([content.id], library.lookupBySHA1("deadbeef"))
# Add a new file with the same digest
- newFileID = library.add("deadbeef", 1234, "abababab", "babababa")
+ new_content = library.add("deadbeef", 1234, "abababab", "babababa")
# Check it gets a new ID anyway
- self.assertNotEqual(fileID, newFileID)
+ self.assertNotEqual(content.id, new_content.id)
# Check it is found by lookupBySHA1
self.assertEqual(
- sorted([fileID, newFileID]),
+ sorted([content.id, new_content.id]),
sorted(library.lookupBySHA1("deadbeef")),
)
- aliasID = library.addAlias(fileID, "file1", "text/unknown")
- alias = library.getAlias(aliasID, None, "/")
+ alias = library.addAlias(content, "file1", "text/unknown")
self.assertEqual("file1", alias.filename)
self.assertEqual("text/unknown", alias.mimetype)
@@ -97,7 +96,9 @@ class TestLibrarianStuff(TestCase):
super().setUp()
switch_dbuser("librarian")
self.store = IStore(LibraryFileContent)
- self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba")
+ self.content_id = (
+ db.Library().add("deadbeef", 1234, "abababab", "ba").id
+ )
self.file_content = self._getTestFileContent()
transaction.commit()
diff --git a/lib/lp/services/librarianserver/tests/test_gc.py b/lib/lp/services/librarianserver/tests/test_gc.py
index 41087cc..90f2039 100644
--- a/lib/lp/services/librarianserver/tests/test_gc.py
+++ b/lib/lp/services/librarianserver/tests/test_gc.py
@@ -22,13 +22,12 @@ from swiftclient import client as swiftclient
from testtools.matchers import AnyMatch, Equals, MatchesListwise, MatchesRegex
from lp.services.config import config
-from lp.services.database.interfaces import IPrimaryStore
+from lp.services.database.interfaces import IStore
from lp.services.database.sqlbase import (
ISOLATION_LEVEL_AUTOCOMMIT,
connect,
cursor,
)
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.features.testing import FeatureFixture
from lp.services.librarian.client import LibrarianClient
from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent
@@ -50,6 +49,7 @@ class TestLibrarianGarbageCollectionBase:
def setUp(self):
super().setUp()
+ self.store = IStore(LibraryFileContent)
self.client = LibrarianClient()
self.patch(librariangc, "log", BufferLogger())
@@ -74,8 +74,7 @@ class TestLibrarianGarbageCollectionBase:
# Make sure that every file the database knows about exists on disk.
# We manually remove them for tests that need to cope with missing
# library items.
- store = IPrimaryStore(LibraryFileContent)
- for content in store.find(LibraryFileContent):
+ for content in self.store.find(LibraryFileContent):
path = librariangc.get_file_path(content.id)
if not os.path.exists(path):
if not os.path.exists(os.path.dirname(path)):
@@ -121,18 +120,18 @@ class TestLibrarianGarbageCollectionBase:
io.BytesIO(content),
"text/plain",
)
- f1 = LibraryFileAlias.get(f1_id)
+ f1 = self.store.get(LibraryFileAlias, f1_id)
f2_id = self.client.addFile(
"foo.txt",
len(content),
io.BytesIO(content),
"text/plain",
)
- f2 = LibraryFileAlias.get(f2_id)
+ f2 = self.store.get(LibraryFileAlias, f2_id)
# Make sure the duplicates really are distinct
self.assertNotEqual(f1_id, f2_id)
- self.assertNotEqual(f1.contentID, f2.contentID)
+ self.assertNotEqual(f1.content_id, f2.content_id)
f1.date_created = self.ancient_past
f2.date_created = self.ancient_past
@@ -141,8 +140,8 @@ class TestLibrarianGarbageCollectionBase:
# Set the time on disk to match the database timestamp.
utime = calendar.timegm(self.ancient_past.utctimetuple())
- os.utime(librariangc.get_file_path(f1.contentID), (utime, utime))
- os.utime(librariangc.get_file_path(f2.contentID), (utime, utime))
+ os.utime(librariangc.get_file_path(f1.content_id), (utime, utime))
+ os.utime(librariangc.get_file_path(f2.content_id), (utime, utime))
del f1, f2
@@ -165,20 +164,20 @@ class TestLibrarianGarbageCollectionBase:
# Confirm that the duplicates have been merged
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
- f2 = LibraryFileAlias.get(self.f2_id)
- self.assertEqual(f1.contentID, f2.contentID)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
+ f2 = self.store.get(LibraryFileAlias, self.f2_id)
+ self.assertEqual(f1.content_id, f2.content_id)
def test_DeleteUnreferencedAliases(self):
self.ztm.begin()
# Confirm that our sample files are there.
- f1 = LibraryFileAlias.get(self.f1_id)
- f2 = LibraryFileAlias.get(self.f2_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
+ f2 = self.store.get(LibraryFileAlias, self.f2_id)
# Grab the content IDs related to these
# unreferenced LibraryFileAliases
- c1_id = f1.contentID
- c2_id = f2.contentID
+ c1_id = f1.content_id
+ c2_id = f2.content_id
del f1, f2
self.ztm.abort()
@@ -188,13 +187,13 @@ class TestLibrarianGarbageCollectionBase:
# This should have committed
self.ztm.begin()
- # Confirm that the LibaryFileContents are still there.
- LibraryFileContent.get(c1_id)
- LibraryFileContent.get(c2_id)
+ # Confirm that the LibraryFileContents are still there.
+ self.assertIsNotNone(self.store.get(LibraryFileContent, c1_id))
+ self.assertIsNotNone(self.store.get(LibraryFileContent, c2_id))
# But the LibraryFileAliases should be gone
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id)
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id)
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id))
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id))
def test_DeleteUnreferencedAliases2(self):
# Don't delete LibraryFileAliases accessed recently
@@ -205,8 +204,8 @@ class TestLibrarianGarbageCollectionBase:
# We now have two aliases sharing the same content.
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
- f2 = LibraryFileAlias.get(self.f2_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
+ f2 = self.store.get(LibraryFileAlias, self.f2_id)
self.assertEqual(f1.content, f2.content)
# Flag one of our LibraryFileAliases as being recently created
@@ -222,8 +221,8 @@ class TestLibrarianGarbageCollectionBase:
librariangc.delete_unreferenced_aliases(self.con)
self.ztm.begin()
- LibraryFileAlias.get(self.f1_id)
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id)
+ self.assertIsNotNone(self.store.get(LibraryFileAlias, self.f1_id))
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id))
def test_DeleteUnreferencedAndWellExpiredAliases(self):
# LibraryFileAliases can be removed after they have expired
@@ -234,7 +233,7 @@ class TestLibrarianGarbageCollectionBase:
# Flag one of our LibraryFileAliases with an expiry date in the past
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
f1.expires = self.ancient_past
del f1
self.ztm.commit()
@@ -246,8 +245,8 @@ class TestLibrarianGarbageCollectionBase:
# Make sure both our example files are gone
self.ztm.begin()
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id)
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id)
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id))
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id))
def test_DoneDeleteUnreferencedButNotExpiredAliases(self):
# LibraryFileAliases can be removed only after they have expired.
@@ -261,7 +260,7 @@ class TestLibrarianGarbageCollectionBase:
# Flag one of our LibraryFileAliases with an expiry date in the
# recent past.
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
f1.expires = self.recent_past
del f1
self.ztm.commit()
@@ -274,7 +273,7 @@ class TestLibrarianGarbageCollectionBase:
# Make sure both our example files are still there
self.ztm.begin()
# Our recently expired LibraryFileAlias is still available.
- LibraryFileAlias.get(self.f1_id)
+ self.assertIsNotNone(self.store.get(LibraryFileAlias, self.f1_id))
def test_deleteWellExpiredAliases(self):
# LibraryFileAlias records that are expired are unlinked from their
@@ -282,7 +281,7 @@ class TestLibrarianGarbageCollectionBase:
# Flag one of our LibraryFileAliases with an expiry date in the past
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
f1.expires = self.ancient_past
del f1
self.ztm.commit()
@@ -292,10 +291,10 @@ class TestLibrarianGarbageCollectionBase:
self.ztm.begin()
# Make sure the well expired f1 is still there, but has no content.
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
self.assertIsNone(f1.content)
# f2 should still have content, as it isn't flagged for expiry.
- f2 = LibraryFileAlias.get(self.f2_id)
+ f2 = self.store.get(LibraryFileAlias, self.f2_id)
self.assertIsNotNone(f2.content)
def test_ignoreRecentlyExpiredAliases(self):
@@ -305,7 +304,7 @@ class TestLibrarianGarbageCollectionBase:
# Flag one of our LibraryFileAliases with an expiry date in the
# recent past.
self.ztm.begin()
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
f1.expires = self.recent_past # Within stay of execution.
del f1
self.ztm.commit()
@@ -316,10 +315,10 @@ class TestLibrarianGarbageCollectionBase:
self.ztm.begin()
# Make sure f1 is still there and has content. This ensures that
# our stay of execution is still working.
- f1 = LibraryFileAlias.get(self.f1_id)
+ f1 = self.store.get(LibraryFileAlias, self.f1_id)
self.assertIsNotNone(f1.content)
# f2 should still have content, as it isn't flagged for expiry.
- f2 = LibraryFileAlias.get(self.f2_id)
+ f2 = self.store.get(LibraryFileAlias, self.f2_id)
self.assertIsNotNone(f2.content)
def test_DeleteUnreferencedContent(self):
@@ -583,11 +582,11 @@ class TestLibrarianGarbageCollectionBase:
# Make sure that our example files have been garbage collected
self.ztm.begin()
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id)
- self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id)
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id))
+ self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id))
# And make sure stuff that *is* referenced remains
- LibraryFileAlias.get(2)
+ self.assertIsNotNone(self.store.get(LibraryFileAlias, 2))
cur = cursor()
cur.execute("SELECT count(*) FROM LibraryFileAlias")
count = cur.fetchone()[0]
@@ -625,22 +624,24 @@ class TestDiskLibrarianGarbageCollection(
# original file, ignoring the extension.
switch_dbuser("testadmin")
content = b"foo"
- lfa = LibraryFileAlias.get(
+ lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- id_aborted = lfa.contentID
+ id_aborted = lfa.content_id
# Roll back the database changes, leaving the file on disk.
transaction.abort()
- lfa = LibraryFileAlias.get(
+ lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"bar.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
transaction.commit()
- id_committed = lfa.contentID
+ id_committed = lfa.content_id
switch_dbuser(config.librarian_gc.dbuser)
@@ -811,19 +812,21 @@ class TestSwiftLibrarianGarbageCollection(
# by a manifest. GC treats the segments like the original file.
switch_dbuser("testadmin")
content = b"uploading to swift bigly"
- big1_lfa = LibraryFileAlias.get(
+ big1_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- big1_id = big1_lfa.contentID
+ big1_id = big1_lfa.content_id
- big2_lfa = LibraryFileAlias.get(
+ big2_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"bar.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- big2_id = big2_lfa.contentID
+ big2_id = big2_lfa.content_id
transaction.commit()
for lfc_id in (big1_id, big2_id):
@@ -872,19 +875,21 @@ class TestSwiftLibrarianGarbageCollection(
# suggest that it might happen.
switch_dbuser("testadmin")
content = b"uploading to swift"
- f1_lfa = LibraryFileAlias.get(
+ f1_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- f1_id = f1_lfa.contentID
+ f1_id = f1_lfa.content_id
- f2_lfa = LibraryFileAlias.get(
+ f2_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"bar.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- f2_id = f2_lfa.contentID
+ f2_id = f2_lfa.content_id
transaction.commit()
for lfc_id in (f1_id, f2_id):
@@ -937,19 +942,21 @@ class TestSwiftLibrarianGarbageCollection(
# to delete it. It's not clear why this happens in practice.
switch_dbuser("testadmin")
content = b"uploading to swift"
- f1_lfa = LibraryFileAlias.get(
+ f1_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- f1_id = f1_lfa.contentID
+ f1_id = f1_lfa.content_id
- f2_lfa = LibraryFileAlias.get(
+ f2_lfa = self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"bar.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
- f2_id = f2_lfa.contentID
+ f2_id = f2_lfa.content_id
transaction.commit()
for lfc_id in (f1_id, f2_id):
@@ -1017,14 +1024,15 @@ class TestTwoSwiftsLibrarianGarbageCollection(
switch_dbuser("testadmin")
content = b"foo"
lfas = [
- LibraryFileAlias.get(
+ self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
for _ in range(12)
]
- lfc_ids = [lfa.contentID for lfa in lfas]
+ lfc_ids = [lfa.content_id for lfa in lfas]
transaction.commit()
# Simulate a migration in progress. Some files are only in the old
@@ -1103,14 +1111,15 @@ class TestTwoSwiftsLibrarianGarbageCollection(
switch_dbuser("testadmin")
content = b"foo"
lfas = [
- LibraryFileAlias.get(
+ self.store.get(
+ LibraryFileAlias,
self.client.addFile(
"foo.txt", len(content), io.BytesIO(content), "text/plain"
- )
+ ),
)
for _ in range(12)
]
- lfc_ids = [lfa.contentID for lfa in lfas]
+ lfc_ids = [lfa.content_id for lfa in lfas]
transaction.commit()
for lfc_id in lfc_ids:
diff --git a/lib/lp/services/librarianserver/tests/test_storage.py b/lib/lp/services/librarianserver/tests/test_storage.py
index c95d4f8..4f452f5 100644
--- a/lib/lp/services/librarianserver/tests/test_storage.py
+++ b/lib/lp/services/librarianserver/tests/test_storage.py
@@ -105,23 +105,28 @@ class LibrarianStorageTestCase(unittest.TestCase):
newfile = self.storage.startAddFile("file", len(data))
newfile.append(data)
lfc_id, lfa_id = newfile.store()
- lfc = LibraryFileContent.get(lfc_id)
+ lfc = self.store.get(LibraryFileContent, lfc_id)
self.assertEqual(md5, lfc.md5)
self.assertEqual(sha1, lfc.sha1)
self.assertEqual(sha256, lfc.sha256)
+class StubLibraryFileContent:
+ def __init__(self, id):
+ self.id = id
+
+
class StubLibrary:
# Used by test_multipleFilesInOnePrefixedDirectory
def lookupBySHA1(self, digest):
return []
- def addAlias(self, fileid, filename, mimetype):
+ def addAlias(self, content, filename, mimetype):
pass
id = 0x11111110
def add(self, digest, size):
self.id += 1
- return self.id
+ return StubLibraryFileContent(self.id)
diff --git a/lib/lp/services/librarianserver/tests/test_storage_db.py b/lib/lp/services/librarianserver/tests/test_storage_db.py
index b2d0b1b..05723f1 100644
--- a/lib/lp/services/librarianserver/tests/test_storage_db.py
+++ b/lib/lp/services/librarianserver/tests/test_storage_db.py
@@ -10,6 +10,7 @@ from testtools.testcase import ExpectedException
from testtools.twistedsupport import AsynchronousDeferredRunTest
from twisted.internet import defer
+from lp.services.database.interfaces import IStore
from lp.services.database.sqlbase import flush_database_updates
from lp.services.features.testing import FeatureFixture
from lp.services.librarian.model import LibraryFileContent
@@ -129,11 +130,16 @@ class LibrarianStorageDBTests(TestCase):
fileid2, aliasid2 = newfile2.store()
# Create rows in the database for these files.
- LibraryFileContent(
- filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661
+ store = IStore(LibraryFileContent)
+ store.add(
+ LibraryFileContent(
+ filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661
+ )
)
- LibraryFileContent(
- filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662
+ store.add(
+ LibraryFileContent(
+ filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662
+ )
)
flush_database_updates()
diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py
index a84b74d..19ebe56 100644
--- a/lib/lp/services/librarianserver/tests/test_web.py
+++ b/lib/lp/services/librarianserver/tests/test_web.py
@@ -342,7 +342,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory):
# Delete the on-disk file.
storage = LibrarianStorage(config.librarian_server.root, None)
- os.remove(storage._fileLocation(file_alias.contentID))
+ os.remove(storage._fileLocation(file_alias.content_id))
# The URL now 500s, since the DB says it should exist.
response = requests.get(url)
diff --git a/lib/lp/services/librarianserver/web.py b/lib/lp/services/librarianserver/web.py
index ef9b6eb..efa7f55 100644
--- a/lib/lp/services/librarianserver/web.py
+++ b/lib/lp/services/librarianserver/web.py
@@ -130,7 +130,7 @@ class LibraryFileAliasResource(resource.Resource):
try:
alias = self.storage.getFileAlias(aliasID, token, path)
return (
- alias.contentID,
+ alias.content_id,
alias.filename,
alias.mimetype,
alias.date_created,
diff --git a/lib/lp/services/messages/interfaces/message.py b/lib/lp/services/messages/interfaces/message.py
index 354a84e..31525df 100644
--- a/lib/lp/services/messages/interfaces/message.py
+++ b/lib/lp/services/messages/interfaces/message.py
@@ -161,8 +161,14 @@ class IMessageView(IMessageCommon):
title = TextLine(
title=_("The message title, usually just the subject."), readonly=True
)
- visible = Bool(
- title="This message is visible or not.", required=False, default=True
+ visible = exported(
+ Bool(
+ title=_("Message visibility."),
+ description=_("Whether or not the message is visible."),
+ readonly=True,
+ default=True,
+ ),
+ as_of="devel",
)
bugattachments = exported(
diff --git a/lib/lp/services/messages/model/message.py b/lib/lp/services/messages/model/message.py
index ade6a5d..9f84932 100644
--- a/lib/lp/services/messages/model/message.py
+++ b/lib/lp/services/messages/model/message.py
@@ -720,8 +720,7 @@ class UserToUserEmail(StormBase):
# On the other hand, we really don't need a UserToUserEmailSet for any
# other purpose. There isn't any other relationship that can be
# inferred, so in this case I think it makes fine sense for the
- # constructor to add self to the store. Also, this closely mimics
- # what the SQLObject compatibility layer does.
+ # constructor to add self to the store.
Store.of(sender).add(self)
diff --git a/lib/lp/services/openid/model/openididentifier.py b/lib/lp/services/openid/model/openididentifier.py
index 18ef165..923c7e5 100644
--- a/lib/lp/services/openid/model/openididentifier.py
+++ b/lib/lp/services/openid/model/openididentifier.py
@@ -5,10 +5,11 @@
__all__ = ["OpenIdIdentifier"]
-from storm.locals import Int, Reference, Unicode
+from datetime import timezone
+
+from storm.locals import DateTime, Int, Reference, Unicode
from lp.services.database.constants import UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.stormbase import StormBase
@@ -19,4 +20,6 @@ class OpenIdIdentifier(StormBase):
identifier = Unicode(primary=True)
account_id = Int("account")
account = Reference(account_id, "Account.id")
- date_created = UtcDateTimeCol(notNull=True, default=UTC_NOW)
+ date_created = DateTime(
+ allow_none=False, default=UTC_NOW, tzinfo=timezone.utc
+ )
diff --git a/lib/lp/services/session/model.py b/lib/lp/services/session/model.py
index b54d290..8ab900c 100644
--- a/lib/lp/services/session/model.py
+++ b/lib/lp/services/session/model.py
@@ -5,10 +5,11 @@
__all__ = ["SessionData", "SessionPkgData"]
-from storm.locals import Pickle, Unicode
+from datetime import timezone
+
+from storm.locals import DateTime, Pickle, Unicode
from zope.interface import implementer, provider
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.stormbase import StormBase
from lp.services.session.interfaces import IUseSessionStore
@@ -20,8 +21,8 @@ class SessionData(StormBase):
__storm_table__ = "SessionData"
client_id = Unicode(primary=True)
- created = UtcDateTimeCol()
- last_accessed = UtcDateTimeCol()
+ created = DateTime(tzinfo=timezone.utc)
+ last_accessed = DateTime(tzinfo=timezone.utc)
@implementer(IUseSessionStore)
diff --git a/lib/lp/services/statistics/tests/test_update_stats.py b/lib/lp/services/statistics/tests/test_update_stats.py
index 577ff58..2b25e58 100644
--- a/lib/lp/services/statistics/tests/test_update_stats.py
+++ b/lib/lp/services/statistics/tests/test_update_stats.py
@@ -281,7 +281,7 @@ class UpdateTranslationStatsTest(unittest.TestCase):
flush_database_caches()
- # The transaction changed, we need to refetch SQLObjects.
+ # The transaction changed, we need to refetch Storm instances.
ubuntu = self.distribution["ubuntu"]
hoary = self.distroseriesset.queryByName(ubuntu, "hoary")
spanish = self.languageset["es"]
diff --git a/lib/lp/services/tarfile_helpers.py b/lib/lp/services/tarfile_helpers.py
index ef10c54..09dc721 100644
--- a/lib/lp/services/tarfile_helpers.py
+++ b/lib/lp/services/tarfile_helpers.py
@@ -13,16 +13,6 @@ import tarfile
import tempfile
import time
-# A note about tarballs, BytesIO and unicode. SQLObject returns unicode
-# values for columns which are declared as StringCol. We have to be careful
-# not to pass unicode instances to the tarfile module, because when the
-# tarfile's filehandle is a BytesIO object, the BytesIO object gets upset
-# later when we ask it for its value and it tries to join together its
-# buffers. This is why the tarball code is sprinkled with ".encode('ascii')".
-# If we get separate StringCol and UnicodeCol column types, we won't need this
-# any longer.
-# -- Dafydd Harries, 2005-04-07.
-
class LaunchpadWriteTarFile:
"""Convenience wrapper around the tarfile module.
diff --git a/lib/lp/services/verification/doc/logintoken.rst b/lib/lp/services/verification/doc/logintoken.rst
index 2cd0377..95c25e3 100644
--- a/lib/lp/services/verification/doc/logintoken.rst
+++ b/lib/lp/services/verification/doc/logintoken.rst
@@ -19,17 +19,18 @@ follows:
4) The token is now marked as consumed, together with any other
tokens of the same type and for the same email address.
+ >>> import transaction
+ >>> from lp.registry.model.person import Person
+ >>> from lp.services.database.interfaces import IStore
+ >>> from lp.services.database.sqlbase import flush_database_updates
+ >>> from lp.services.mail import stub
>>> from lp.services.verification.interfaces.authtoken import (
... LoginTokenType,
... )
- >>> from lp.registry.model.person import Person
>>> from lp.services.verification.interfaces.logintoken import (
... ILoginTokenSet,
... )
- >>> from lp.services.database.sqlbase import flush_database_updates
- >>> from lp.services.mail import stub
- >>> import transaction
- >>> foobar = Person.byName("name16")
+ >>> foobar = IStore(Person).find(Person, name="name16").one()
Let's create a new LoginToken to confirm an email address for foobar.
diff --git a/lib/lp/services/webapp/configure.zcml b/lib/lp/services/webapp/configure.zcml
index ad25c53..5b61d74 100644
--- a/lib/lp/services/webapp/configure.zcml
+++ b/lib/lp/services/webapp/configure.zcml
@@ -50,10 +50,6 @@
factory='.batching.FiniteSequenceAdapter' />
<adapter
- factory='.batching.FiniteSequenceAdapter'
- for='storm.zope.interfaces.ISQLObjectResultSet' />
-
- <adapter
factory='.batching.BoundReferenceSetAdapter'
for='storm.references.BoundReferenceSet' />
@@ -246,10 +242,6 @@
<adapter
factory="lp.services.webapp.snapshot.snapshot_sql_result" />
- <!-- It also works for the legacy SQLObject interface. -->
- <adapter
- factory="lp.services.webapp.snapshot.snapshot_sql_result"
- for="storm.zope.interfaces.ISQLObjectResultSet" />
<class class="lp.services.webapp.publisher.RenamedView">
<allow interface="zope.publisher.interfaces.browser.IBrowserPublisher"
diff --git a/lib/lp/services/webapp/database.zcml b/lib/lp/services/webapp/database.zcml
index b7130a2..7a5ae12 100644
--- a/lib/lp/services/webapp/database.zcml
+++ b/lib/lp/services/webapp/database.zcml
@@ -60,9 +60,6 @@
<implements interface="lp.services.database.interfaces.IStore" />
<allow attributes="get" />
</class>
- <class class="lp.services.database.sqlbase.SQLBase">
- <implements interface="lp.services.database.interfaces.IDBObject" />
- </class>
<class class="lp.services.database.stormbase.StormBase">
<implements interface="lp.services.database.interfaces.IDBObject" />
</class>
diff --git a/lib/lp/services/webapp/marshallers.py b/lib/lp/services/webapp/marshallers.py
index 7f13a36..48ab7ea 100644
--- a/lib/lp/services/webapp/marshallers.py
+++ b/lib/lp/services/webapp/marshallers.py
@@ -6,14 +6,13 @@ __all__ = ["choiceMarshallerError"]
def choiceMarshallerError(field, request, vocabulary=None):
# We don't support marshalling a normal Choice field with a
- # SQLObjectVocabularyBase/StormVocabularyBase-based vocabulary.
+ # StormVocabularyBase-based vocabulary.
# Normally for this kind of use case, one returns None and
# lets the Zope machinery alert the user that the lookup has gone wrong.
# However, we want to be more helpful, so we make an assertion,
# with a comment on how to make things better.
raise AssertionError(
- "You exported %s as an IChoice based on an "
- "SQLObjectVocabularyBase/StormVocabularyBase; you "
+ "You exported %s as an IChoice based on a StormVocabularyBase; you "
"should use lazr.restful.fields.ReferenceChoice "
"instead." % field.__name__
)
diff --git a/lib/lp/services/webapp/snapshot.py b/lib/lp/services/webapp/snapshot.py
index 2d46805..fd6aaa9 100644
--- a/lib/lp/services/webapp/snapshot.py
+++ b/lib/lp/services/webapp/snapshot.py
@@ -19,13 +19,12 @@ HARD_LIMIT_FOR_SNAPSHOT = 1000
@implementer(ISnapshotValueFactory)
-@adapter(IResultSet) # And ISQLObjectResultSet.
+@adapter(IResultSet)
def snapshot_sql_result(value):
"""Snapshot adapter for the Storm result set."""
- # SQLMultipleJoin and SQLRelatedJoin return
- # SelectResults, which doesn't really help the Snapshot
- # object. We therefore list()ify the values; this isn't
- # perfect but allows deltas to be generated reliably.
+ # ReferenceSet returns ResultSets, which doesn't really help the
+ # Snapshot object. We therefore list()ify the values; this isn't perfect
+ # but allows deltas to be generated reliably.
return shortlist(
value, longest_expected=100, hardlimit=HARD_LIMIT_FOR_SNAPSHOT
)
diff --git a/lib/lp/services/webapp/tests/test_servers.py b/lib/lp/services/webapp/tests/test_servers.py
index f865502..e777ac2 100644
--- a/lib/lp/services/webapp/tests/test_servers.py
+++ b/lib/lp/services/webapp/tests/test_servers.py
@@ -778,7 +778,7 @@ class LoggingTransaction:
self.log.append("ABORT")
-class TestWebServiceAccessTokens(TestCaseWithFactory):
+class TestWebServiceAccessTokensBase:
"""Test personal access tokens for the webservice.
These are bearer tokens with an owner, a context, and some scopes. We
@@ -791,7 +791,9 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
def test_valid(self):
owner = self.factory.makePerson()
secret, token = self.factory.makeAccessToken(
- owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS]
+ owner=owner,
+ target=self.makeTarget(owner=owner),
+ scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS],
)
self.assertIsNone(removeSecurityProxy(token).date_last_used)
transaction.commit()
@@ -828,6 +830,7 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
owner = self.factory.makePerson()
secret, token = self.factory.makeAccessToken(
owner=owner,
+ target=self.makeTarget(owner=owner),
date_expires=datetime.now(timezone.utc) - timedelta(days=1),
)
transaction.commit()
@@ -859,7 +862,9 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
def test_inactive_account(self):
owner = self.factory.makePerson(account_status=AccountStatus.SUSPENDED)
- secret, token = self.factory.makeAccessToken(owner=owner)
+ secret, token = self.factory.makeAccessToken(
+ owner=owner, target=self.makeTarget(owner=owner)
+ )
transaction.commit()
request, publication = get_request_and_publication(
@@ -889,13 +894,13 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
request.setPrincipal(principal)
def test_checkRequest_valid(self):
- repository = self.factory.makeGitRepository()
+ target = self.makeTarget()
self._makeAccessTokenVerifiedRequest(
- target=repository,
+ target=target,
scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS],
)
getUtility(IWebServiceConfiguration).checkRequest(
- repository, ["repository:build_status", "repository:another_scope"]
+ target, ["repository:build_status", "repository:another_scope"]
)
def test_checkRequest_contains_context(self):
@@ -909,9 +914,9 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
)
def test_checkRequest_bad_context(self):
- repository = self.factory.makeGitRepository()
+ target = self.makeTarget()
self._makeAccessTokenVerifiedRequest(
- target=repository,
+ target=target,
scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS],
)
self.assertRaisesWithContent(
@@ -923,23 +928,23 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
)
def test_checkRequest_unscoped_method(self):
- repository = self.factory.makeGitRepository()
+ target = self.makeTarget()
self._makeAccessTokenVerifiedRequest(
- target=repository,
+ target=target,
scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS],
)
self.assertRaisesWithContent(
Unauthorized,
"Current authentication only allows calling scoped methods.",
getUtility(IWebServiceConfiguration).checkRequest,
- repository,
+ target,
None,
)
def test_checkRequest_wrong_scope(self):
- repository = self.factory.makeGitRepository()
+ target = self.makeTarget()
self._makeAccessTokenVerifiedRequest(
- target=repository,
+ target=target,
scopes=[
AccessTokenScope.REPOSITORY_BUILD_STATUS,
AccessTokenScope.REPOSITORY_PUSH,
@@ -951,11 +956,18 @@ class TestWebServiceAccessTokens(TestCaseWithFactory):
"(one of these scopes is required: "
"'repository:scope_1', 'repository:scope_2').",
getUtility(IWebServiceConfiguration).checkRequest,
- repository,
+ target,
["repository:scope_1", "repository:scope_2"],
)
+class TestWebServiceAccessTokensGitRepository(
+ TestWebServiceAccessTokensBase, TestCaseWithFactory
+):
+ def makeTarget(self, owner=None):
+ return self.factory.makeGitRepository(owner=owner)
+
+
def test_suite():
suite = unittest.TestSuite()
suite.addTest(
diff --git a/lib/lp/services/webapp/vocabulary.py b/lib/lp/services/webapp/vocabulary.py
index 535588c..b13b797 100644
--- a/lib/lp/services/webapp/vocabulary.py
+++ b/lib/lp/services/webapp/vocabulary.py
@@ -13,20 +13,17 @@ __all__ = [
"FilteredVocabularyBase",
"ForgivingSimpleVocabulary",
"IHugeVocabulary",
- "NamedSQLObjectVocabulary",
"NamedStormHugeVocabulary",
"NamedStormVocabulary",
- "SQLObjectVocabularyBase",
"StormVocabularyBase",
"VocabularyFilter",
]
from collections import namedtuple
-from typing import Optional, Union
+from typing import Optional
import six
from storm.base import Storm # noqa: B1
-from storm.expr import Expr
from storm.store import EmptyResultSet
from zope.interface import Attribute, Interface, implementer
from zope.schema.interfaces import IVocabularyTokenized
@@ -34,8 +31,6 @@ from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
from zope.security.proxy import isinstance as zisinstance
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import SQLBase
-from lp.services.database.sqlobject import AND, CONTAINSSTRING
class ForgivingSimpleVocabulary(SimpleVocabulary):
@@ -266,164 +261,6 @@ class FilteredVocabularyBase:
@implementer(IVocabularyTokenized)
-class SQLObjectVocabularyBase(FilteredVocabularyBase):
- """A base class for widgets that are rendered to collect values
- for attributes that are SQLObjects, e.g. ForeignKey.
-
- So if a content class behind some form looks like:
-
- class Foo(SQLObject):
- id = IntCol(...)
- bar = ForeignKey(...)
- ...
-
- Then the vocabulary for the widget that captures a value for bar
- should derive from SQLObjectVocabularyBase.
- """
-
- _orderBy = None # type: Optional[str]
- _filter = None # type: Optional[Union[Expr, bool]]
- _clauseTables = None
-
- def __init__(self, context=None):
- self.context = context
-
- # XXX kiko 2007-01-16: note that the method searchForTerms is part of
- # IHugeVocabulary, and so should not necessarily need to be
- # implemented here; however, many of our vocabularies depend on
- # searchForTerms for popup functionality so I have chosen to just do
- # that. It is possible that a better solution would be to have the
- # search functionality produce a new vocabulary restricted to the
- # desired subset.
- def searchForTerms(self, query=None, vocab_filter=None):
- results = self.search(query, vocab_filter)
- return CountableIterator(results.count(), results, self.toTerm)
-
- def search(self, query, vocab_filter=None):
- # This default implementation of searchForTerms glues together
- # the legacy API of search() with the toTerm method. If you
- # don't reimplement searchForTerms you will need to at least
- # provide your own search() method.
- raise NotImplementedError
-
- def toTerm(self, obj):
- # This default implementation assumes that your object has a
- # title attribute. If it does not you will need to reimplement
- # toTerm, or reimplement the whole searchForTerms.
- return SimpleTerm(obj, obj.id, obj.title)
-
- def __iter__(self):
- """Return an iterator which provides the terms from the vocabulary."""
- params = {}
- if self._orderBy is not None:
- params["orderBy"] = self._orderBy
- if self._clauseTables is not None:
- params["clauseTables"] = self._clauseTables
- for obj in self._table.select(self._filter, **params):
- yield self.toTerm(obj)
-
- def __len__(self):
- return len(list(iter(self)))
-
- def __contains__(self, obj):
- # Sometimes this method is called with an SQLBase instance, but
- # z3 form machinery sends through integer ids. This might be due
- # to a bug somewhere.
- if zisinstance(obj, (SQLBase, Storm)): # noqa: B1
- clause = self._table.id == obj.id
- if self._filter:
- # XXX kiko 2007-01-16: this code is untested.
- clause = AND(clause, self._filter)
- found_obj = IStore(self._table).find(self._table, clause).one()
- return found_obj is not None and found_obj == obj
- else:
- clause = self._table.id == int(obj)
- if self._filter:
- # XXX kiko 2007-01-16: this code is untested.
- clause = AND(clause, self._filter)
- found_obj = IStore(self._table).find(self._table, clause).one()
- return found_obj is not None
-
- def getTerm(self, value):
- # Short circuit. There is probably a design problem here since
- # we sometimes get the id and sometimes an SQLBase instance.
- if zisinstance(value, SQLBase):
- return self.toTerm(value)
-
- try:
- value = int(value)
- except ValueError:
- raise LookupError(value)
-
- clause = self._table.q.id == value
- if self._filter:
- clause = AND(clause, self._filter)
- try:
- obj = self._table.selectOne(clause)
- except ValueError:
- raise LookupError(value)
-
- if obj is None:
- raise LookupError(value)
-
- return self.toTerm(obj)
-
- def getTermByToken(self, token):
- return self.getTerm(token)
-
- def emptySelectResults(self):
- """Return a SelectResults object without any elements.
-
- This is to be used when no search string is given to the search()
- method of subclasses, in order to be consistent and always return
- a SelectResults object.
- """
- return self._table.select("1 = 2")
-
-
-class NamedSQLObjectVocabulary(SQLObjectVocabularyBase):
- """A SQLObjectVocabulary base for database tables that have a unique
- *and* ASCII name column.
-
- Provides all methods required by IHugeVocabulary, although it
- doesn't actually specify this interface since it may not actually
- be huge and require the custom widgets.
- """
-
- _orderBy = "name"
-
- def toTerm(self, obj):
- """See SQLObjectVocabularyBase.
-
- This implementation uses name as a token instead of the object's
- ID, and tries to be smart about deciding to present an object's
- title if it has one.
- """
- if getattr(obj, "title", None) is None:
- return SimpleTerm(obj, obj.name, obj.name)
- else:
- return SimpleTerm(obj, obj.name, obj.title)
-
- def getTermByToken(self, token):
- clause = self._table.q.name == token
- if self._filter:
- clause = AND(clause, self._filter)
- objs = list(self._table.select(clause))
- if not objs:
- raise LookupError(token)
- return self.toTerm(objs[0])
-
- def search(self, query, vocab_filter=None):
- """Return terms where query is a substring of the name."""
- if query:
- clause = CONTAINSSTRING(self._table.q.name, six.ensure_text(query))
- if self._filter:
- clause = AND(clause, self._filter)
- return self._table.select(clause, orderBy=self._orderBy)
- return self.emptySelectResults()
-
-
-@implementer(IVocabularyTokenized)
class StormVocabularyBase(FilteredVocabularyBase):
"""A base class for widgets that are rendered to collect values
for attributes that are Storm references.
diff --git a/lib/lp/services/webservice/configure.zcml b/lib/lp/services/webservice/configure.zcml
index d3136b5..5e27c1f 100644
--- a/lib/lp/services/webservice/configure.zcml
+++ b/lib/lp/services/webservice/configure.zcml
@@ -60,13 +60,6 @@
<adapter
for="zope.schema.interfaces.IChoice
zope.publisher.interfaces.http.IHTTPRequest
- lp.services.webapp.vocabulary.SQLObjectVocabularyBase"
- provides="lazr.restful.interfaces.IFieldMarshaller"
- factory="lp.services.webapp.marshallers.choiceMarshallerError"
- />
- <adapter
- for="zope.schema.interfaces.IChoice
- zope.publisher.interfaces.http.IHTTPRequest
lp.services.webapp.vocabulary.StormVocabularyBase"
provides="lazr.restful.interfaces.IFieldMarshaller"
factory="lp.services.webapp.marshallers.choiceMarshallerError"
@@ -74,13 +67,6 @@
<adapter
for="lazr.restful.interfaces.IReferenceChoice
zope.publisher.interfaces.http.IHTTPRequest
- lp.services.webapp.vocabulary.SQLObjectVocabularyBase"
- provides="lazr.restful.interfaces.IFieldMarshaller"
- factory="lazr.restful.marshallers.ObjectLookupFieldMarshaller"
- />
- <adapter
- for="lazr.restful.interfaces.IReferenceChoice
- zope.publisher.interfaces.http.IHTTPRequest
lp.services.webapp.vocabulary.StormVocabularyBase"
provides="lazr.restful.interfaces.IFieldMarshaller"
factory="lazr.restful.marshallers.ObjectLookupFieldMarshaller"
diff --git a/lib/lp/services/webservice/doc/webservice-marshallers.rst b/lib/lp/services/webservice/doc/webservice-marshallers.rst
index 6dedcee..f49a033 100644
--- a/lib/lp/services/webservice/doc/webservice-marshallers.rst
+++ b/lib/lp/services/webservice/doc/webservice-marshallers.rst
@@ -25,10 +25,10 @@ application root.
>>> getUtility(IOpenLaunchBag).add(root)
-Choice of SQLObjectVocabularyBase
-.................................
+Choice of StormVocabularyBase
+.............................
-For vocabularies based on SQLObjectVocabularyBase, the values are
+For vocabularies based on StormVocabularyBase, the values are
interpreted as URLs referencing objects on the web service. If the given
string is a URL corresponding to a vocabulary item, the marshaller
returns that item. Otherwise it raises a ValueError.
@@ -94,7 +94,7 @@ resource and not a random string.
>>> print(marshaller.representation_name)
some_person_link
-If you export a Choice that uses an SQLObjectVocabularyBase then you
+If you export a Choice that uses a StormVocabularyBase then you
get an error, as you should be using a ReferenceChoice instead to
ensure that the resulting wadl matches lazr.restful conventions.
@@ -104,9 +104,9 @@ ensure that the resulting wadl matches lazr.restful conventions.
... # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
- AssertionError: You exported some_person as an IChoice based on an
- SQLObjectVocabularyBase/StormVocabularyBase; you should use
- lazr.restful.fields.ReferenceChoice instead.
+ AssertionError: You exported some_person as an IChoice based on a
+ StormVocabularyBase; you should use lazr.restful.fields.ReferenceChoice
+ instead.
Cleanup.
diff --git a/lib/lp/services/worlddata/interfaces/language.py b/lib/lp/services/worlddata/interfaces/language.py
index 7f77f9b..63a3ce5 100644
--- a/lib/lp/services/worlddata/interfaces/language.py
+++ b/lib/lp/services/worlddata/interfaces/language.py
@@ -120,16 +120,10 @@ class ILanguage(Interface):
)
def addCountry(country):
- """Add a country to a list of countries this language is spoken in.
-
- Provided by SQLObject.
- """
+ """Add a country to a list of countries this language is spoken in."""
def removeCountry(country):
- """Remove country from list of countries this language is spoken in.
-
- Provided by SQLObject.
- """
+ """Remove country from list of countries this language is spoken in."""
visible = exported(
Bool(
diff --git a/lib/lp/snappy/browser/snapbase.py b/lib/lp/snappy/browser/snapbase.py
index 719bb26..30facc8 100644
--- a/lib/lp/snappy/browser/snapbase.py
+++ b/lib/lp/snappy/browser/snapbase.py
@@ -10,7 +10,6 @@ __all__ = [
from zope.component import getUtility
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.webapp import GetitemNavigation, Navigation, stepthrough
from lp.snappy.interfaces.snapbase import ISnapBase, ISnapBaseSet
from lp.soyuz.interfaces.archive import IArchiveSet
@@ -35,9 +34,8 @@ class SnapBaseNavigation(Navigation):
# Not a number.
return None
- try:
- archive = getUtility(IArchiveSet).get(id)
- except SQLObjectNotFound:
+ archive = getUtility(IArchiveSet).get(id)
+ if archive is None:
return None
return self.context.getArchiveDependency(archive)
diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py
index c742fc2..a9a0128 100644
--- a/lib/lp/snappy/model/snap.py
+++ b/lib/lp/snappy/model/snap.py
@@ -1143,7 +1143,7 @@ class Snap(StormBase, WebhookTargetMixin):
# Prefetch data to keep DB query count constant
lfas = load_related(LibraryFileAlias, builds, ["log_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
for build in builds:
if build.date is not None:
diff --git a/lib/lp/snappy/model/snapbuild.py b/lib/lp/snappy/model/snapbuild.py
index afc3275..df2e2c2 100644
--- a/lib/lp/snappy/model/snapbuild.py
+++ b/lib/lp/snappy/model/snapbuild.py
@@ -334,7 +334,7 @@ class SnapBuild(PackageBuildMixin, StormBase):
(SnapFile, LibraryFileAlias, LibraryFileContent),
SnapFile.snapbuild == self.id,
LibraryFileAlias.id == SnapFile.libraryfile_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
return result.order_by([LibraryFileAlias.filename, SnapFile.id])
@@ -602,9 +602,9 @@ class SnapBuildSet(SpecificBuildFarmJobSourceMixin):
load_related(Person, builds, ["requester_id"])
lfas = load_related(LibraryFileAlias, builds, ["log_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
archives = load_related(Archive, builds, ["archive_id"])
- load_related(Person, archives, ["ownerID"])
+ load_related(Person, archives, ["owner_id"])
distroarchseries = load_related(
DistroArchSeries, builds, ["distro_arch_series_id"]
)
diff --git a/lib/lp/snappy/templates/snap-new.pt b/lib/lp/snappy/templates/snap-new.pt
index 29e8f64..d08e1d7 100644
--- a/lib/lp/snappy/templates/snap-new.pt
+++ b/lib/lp/snappy/templates/snap-new.pt
@@ -10,10 +10,10 @@
<div metal:fill-slot="main">
<div>
<p>
- A snap package is a self-contained application that can be installed
- on <a href="https://developer.ubuntu.com/en/snappy/">snappy Ubuntu
- Core</a>. Launchpad can build snap packages using <a
- href="https://developer.ubuntu.com/en/snappy/snapcraft/">snapcraft</a>,
+ Snaps are Linux app packages for desktop, cloud and IoT that are
+ simple to install, secure, cross-platform, and dependency-free.
+ Launchpad can build snap packages using <a
+ href="https://snapcraft.io/docs/snapcraft">snapcraft</a>,
from any Git or Bazaar branch on Launchpad that has a
<tt>snap/snapcraft.yaml</tt>, <tt>build-aux/snap/snapcraft.yaml</tt>,
<tt>snapcraft.yaml</tt>, or <tt>.snapcraft.yaml</tt> file at its top
diff --git a/lib/lp/soyuz/browser/archive.py b/lib/lp/soyuz/browser/archive.py
index f46da5b..61186a3 100644
--- a/lib/lp/soyuz/browser/archive.py
+++ b/lib/lp/soyuz/browser/archive.py
@@ -75,7 +75,6 @@ from lp.services.browser_helpers import (
get_user_agent_distroseries,
)
from lp.services.database.bulk import load_related
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.services.helpers import english_list
from lp.services.job.model.job import Job
from lp.services.librarian.browser import (
@@ -433,9 +432,8 @@ class ArchiveNavigation(Navigation, FileNavigationMixin):
# Not a number.
return None
- try:
- archive = getUtility(IArchiveSet).get(id)
- except SQLObjectNotFound:
+ archive = getUtility(IArchiveSet).get(id)
+ if archive is None:
return None
return self.context.getArchiveDependency(archive)
diff --git a/lib/lp/soyuz/browser/queue.py b/lib/lp/soyuz/browser/queue.py
index bcae209..ff942aa 100644
--- a/lib/lp/soyuz/browser/queue.py
+++ b/lib/lp/soyuz/browser/queue.py
@@ -190,7 +190,7 @@ class QueueItemsView(LaunchpadView):
if upload.package_copy_job_id is not None
}
archives = {pcj.source_archive for pcj in package_copy_jobs}
- person_ids = [archive.ownerID for archive in archives]
+ person_ids = [archive.owner_id for archive in archives]
jobs = load_related(Job, package_copy_jobs, ["job_id"])
person_ids.extend(job.requester_id for job in jobs)
list(
@@ -233,7 +233,7 @@ class QueueItemsView(LaunchpadView):
file_lfas = load_related(
LibraryFileAlias, source_files + binary_files, ["libraryfile_id"]
)
- load_related(LibraryFileContent, file_lfas, ["contentID"])
+ load_related(LibraryFileContent, file_lfas, ["content_id"])
# Get a dictionary of lists of binary files keyed by upload ID.
package_upload_builds_dict = self.builds_dict(upload_ids, binary_files)
diff --git a/lib/lp/soyuz/browser/tests/distroseriesqueue-views.rst b/lib/lp/soyuz/browser/tests/distroseriesqueue-views.rst
index a9acb7b..753568a 100644
--- a/lib/lp/soyuz/browser/tests/distroseriesqueue-views.rst
+++ b/lib/lp/soyuz/browser/tests/distroseriesqueue-views.rst
@@ -7,10 +7,11 @@ for IDistroSeries context (IDistroSeriesView)
Let's instantiate the view for +queue for anonymous access:
>>> from zope.component import queryMultiAdapter
+ >>> from lp.services.database.interfaces import IStore
>>> from lp.services.librarian.model import LibraryFileAlias
>>> from lp.services.webapp.servers import LaunchpadTestRequest
>>> from lp.registry.interfaces.distribution import IDistributionSet
- >>> fake_chroot = LibraryFileAlias.get(1)
+ >>> fake_chroot = IStore(LibraryFileAlias).get(LibraryFileAlias, 1)
>>> ubuntu = getUtility(IDistributionSet)["ubuntu"]
>>> breezy_autotest = ubuntu["breezy-autotest"]
diff --git a/lib/lp/soyuz/doc/gina-multiple-arch.rst b/lib/lp/soyuz/doc/gina-multiple-arch.rst
index d36a2fe..a8ad558 100644
--- a/lib/lp/soyuz/doc/gina-multiple-arch.rst
+++ b/lib/lp/soyuz/doc/gina-multiple-arch.rst
@@ -21,7 +21,7 @@ Get the current counts of stuff in the database:
... IStore(SourcePackageRelease).find(SourcePackageRelease).count()
... )
>>> orig_sspph_count = IStore(SSPPH).find(SSPPH).count()
- >>> orig_person_count = Person.select().count()
+ >>> orig_person_count = IStore(Person).find(Person).count()
>>> orig_tp_count = (
... IStore(TeamParticipation).find(TeamParticipation).count()
... )
@@ -147,7 +147,7 @@ breezy:
Each source package has its own maintainer (in this case, fabbione and
porridge):
- >>> print(Person.select().count() - orig_person_count)
+ >>> print(IStore(Person).find(Person).count() - orig_person_count)
2
>>> print(
... IStore(TeamParticipation).find(TeamParticipation).count()
diff --git a/lib/lp/soyuz/doc/gina.rst b/lib/lp/soyuz/doc/gina.rst
index aac6267..49bbc15 100644
--- a/lib/lp/soyuz/doc/gina.rst
+++ b/lib/lp/soyuz/doc/gina.rst
@@ -28,7 +28,7 @@ Get the current counts of stuff in the database:
... IStore(SourcePackageRelease).find(SourcePackageRelease).count()
... )
>>> orig_sspph_count = IStore(SSPPH).find(SSPPH).count()
- >>> orig_person_count = Person.select().count()
+ >>> orig_person_count = IStore(Person).find(Person).count()
>>> orig_tp_count = (
... IStore(TeamParticipation).find(TeamParticipation).count()
... )
@@ -612,11 +612,14 @@ Ensure only one Kamion was created (he's an uploader on multiple packages),
and that we imported exactly 9 people (13 packages with 3 being uploaded by
Kamion, 2 being uploaded by mdz and 2 by doko).
- >>> from lp.services.database.sqlobject import LIKE
- >>> p = Person.selectOne(LIKE(Person.q.name, "cjwatson%"))
+ >>> p = (
+ ... IStore(Person)
+ ... .find(Person, Person.name.startswith("cjwatson"))
+ ... .one()
+ ... )
>>> print(p.name)
cjwatson
- >>> print(Person.select().count() - orig_person_count)
+ >>> print(IStore(Person).find(Person).count() - orig_person_count)
13
>>> print(
... IStore(TeamParticipation).find(TeamParticipation).count()
@@ -717,7 +720,7 @@ changed, etc.
... - orig_spr_count
... )
17
- >>> print(Person.select().count() - orig_person_count)
+ >>> print(IStore(Person).find(Person).count() - orig_person_count)
13
>>> print(
... IStore(TeamParticipation).find(TeamParticipation).count()
diff --git a/lib/lp/soyuz/doc/package-diff.rst b/lib/lp/soyuz/doc/package-diff.rst
index 9591382..ac90cfe 100644
--- a/lib/lp/soyuz/doc/package-diff.rst
+++ b/lib/lp/soyuz/doc/package-diff.rst
@@ -120,9 +120,10 @@ already requests a package diff against the immediate ancestry.
Before starting let's enable the universe component and add the i386
chroot in hoary in order to be able to accept the NEW packages.
- >>> from lp.soyuz.model.component import ComponentSelection
+ >>> from lp.services.database.interfaces import IStore
>>> from lp.services.librarian.model import LibraryFileAlias
>>> from lp.soyuz.interfaces.component import IComponentSet
+ >>> from lp.soyuz.model.component import ComponentSelection
>>> hoary = ubuntu.getSeries("hoary")
>>> breezy_autotest = ubuntu.getSeries("breezy-autotest")
@@ -130,7 +131,7 @@ chroot in hoary in order to be able to accept the NEW packages.
>>> universe = getUtility(IComponentSet)["universe"]
>>> selection = ComponentSelection(distroseries=hoary, component=universe)
- >>> fake_chroot = LibraryFileAlias.get(1)
+ >>> fake_chroot = IStore(LibraryFileAlias).get(LibraryFileAlias, 1)
>>> hoary_i386 = hoary["i386"]
>>> unused = hoary_i386.addOrUpdateChroot(fake_chroot)
>>> breezy_autotest_i386 = breezy_autotest["i386"]
diff --git a/lib/lp/soyuz/doc/soyuz-set-of-uploads.rst b/lib/lp/soyuz/doc/soyuz-set-of-uploads.rst
index bb8e464..cc0e36d 100644
--- a/lib/lp/soyuz/doc/soyuz-set-of-uploads.rst
+++ b/lib/lp/soyuz/doc/soyuz-set-of-uploads.rst
@@ -76,11 +76,11 @@ for the ubuntutest distribution.
>>> from lp.registry.model.distribution import Distribution
>>> from lp.services.database.interfaces import IStore
+ >>> from lp.services.librarian.model import LibraryFileAlias
>>> from lp.soyuz.enums import PackageUploadStatus
>>> from lp.soyuz.scripts.initialize_distroseries import (
... InitializeDistroSeries,
... )
- >>> from lp.services.librarian.model import LibraryFileAlias
>>> from lp.testing.factory import LaunchpadObjectFactory
>>> ubuntu = IStore(Distribution).find(Distribution, name="ubuntu").one()
>>> breezy_autotest = ubuntu["breezy-autotest"]
@@ -110,7 +110,7 @@ for the ubuntutest distribution.
INFO:...:Copying permissions from parents.
INFO:...:Creating DistroSeriesDifferences.
>>> breezy.changeslist = "breezy-changes@xxxxxxxxxx"
- >>> fake_chroot = LibraryFileAlias.get(1)
+ >>> fake_chroot = IStore(LibraryFileAlias).get(LibraryFileAlias, 1)
>>> unused = breezy["i386"].addOrUpdateChroot(fake_chroot)
Add disk content for file inherited from ubuntu/breezy-autotest:
diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py
index bae615f..c7749ba 100644
--- a/lib/lp/soyuz/model/archive.py
+++ b/lib/lp/soyuz/model/archive.py
@@ -14,18 +14,20 @@ __all__ = [
import logging
import re
import typing
-from datetime import datetime
+from datetime import datetime, timedelta, timezone
from operator import attrgetter
from pathlib import PurePath
import six
from lazr.lifecycle.event import ObjectCreatedEvent
from storm.expr import (
+ Alias,
And,
Cast,
Count,
Desc,
Exists,
+ Is,
Join,
Not,
Or,
@@ -33,7 +35,7 @@ from storm.expr import (
Sum,
Union,
)
-from storm.properties import JSON, Int, Unicode
+from storm.properties import JSON, Bool, DateTime, Int, Unicode
from storm.references import Reference
from storm.store import EmptyResultSet, Store
from zope.component import getAdapter, getUtility
@@ -81,17 +83,9 @@ from lp.registry.model.teammembership import TeamParticipation
from lp.services.config import config
from lp.services.database.bulk import create, load_referencing, load_related
from lp.services.database.constants import UTC_NOW
-from lp.services.database.datetimecol import UtcDateTimeCol
from lp.services.database.decoratedresultset import DecoratedResultSet
from lp.services.database.enumcol import DBEnum
from lp.services.database.interfaces import IStandbyStore, IStore
-from lp.services.database.sqlbase import SQLBase, cursor, sqlvalues
-from lp.services.database.sqlobject import (
- BoolCol,
- ForeignKey,
- IntCol,
- StringCol,
-)
from lp.services.database.stormbase import StormBase
from lp.services.database.stormexpr import BulkUpdate
from lp.services.features import getFeatureFlag
@@ -209,23 +203,21 @@ def storm_validate_external_dependencies(archive, attr, value):
@implementer(IArchive, IHasOwner, IHasBuildRecords)
-class Archive(SQLBase):
- _table = "Archive"
- _defaultOrder = "id"
-
- owner = ForeignKey(
- dbName="owner",
- foreignKey="Person",
- storm_validator=validate_person,
- notNull=True,
- )
+class Archive(StormBase):
+ __storm_table__ = "Archive"
+ __storm_order__ = "id"
+
+ id = Int(primary=True)
+
+ owner_id = Int(name="owner", validator=validate_person, allow_none=False)
+ owner = Reference(owner_id, "Person.id")
def _validate_archive_name(self, attr, value):
"""Only allow renaming of COPY archives.
Also assert the name is valid when set via an unproxied object.
"""
- if not self._SO_creating:
+ if not self._creating:
renamable = self.is_copy or (
self.is_ppa and self.status == ArchiveStatus.DELETED
)
@@ -277,13 +269,13 @@ class Archive(SQLBase):
return value
- name = StringCol(
- dbName="name", notNull=True, storm_validator=_validate_archive_name
+ name = Unicode(
+ name="name", allow_none=False, validator=_validate_archive_name
)
- displayname = StringCol(dbName="displayname", notNull=True)
+ displayname = Unicode(name="displayname", allow_none=False)
- description = StringCol(dbName="description", notNull=False, default=None)
+ description = Unicode(name="description", allow_none=True, default=None)
distribution_id = Int(name="distribution", allow_none=True)
distribution = Reference(distribution_id, "Distribution.id")
@@ -297,78 +289,76 @@ class Archive(SQLBase):
default=ArchiveStatus.ACTIVE,
)
- _enabled = BoolCol(dbName="enabled", notNull=True, default=True)
+ _enabled = Bool(name="enabled", allow_none=False, default=True)
enabled = property(lambda x: x._enabled)
- publish = BoolCol(dbName="publish", notNull=True, default=True)
+ publish = Bool(name="publish", allow_none=False, default=True)
- private = BoolCol(
- dbName="private",
- notNull=True,
+ private = Bool(
+ name="private",
+ allow_none=False,
default=False,
- storm_validator=_validate_archive_privacy,
+ validator=_validate_archive_privacy,
)
- require_virtualized = BoolCol(
- dbName="require_virtualized", notNull=True, default=True
+ require_virtualized = Bool(
+ name="require_virtualized", allow_none=False, default=True
)
- build_debug_symbols = BoolCol(
- dbName="build_debug_symbols", notNull=True, default=False
+ build_debug_symbols = Bool(
+ name="build_debug_symbols", allow_none=False, default=False
)
- publish_debug_symbols = BoolCol(
- dbName="publish_debug_symbols", notNull=False, default=False
+ publish_debug_symbols = Bool(
+ name="publish_debug_symbols", allow_none=True, default=False
)
- permit_obsolete_series_uploads = BoolCol(
- dbName="permit_obsolete_series_uploads", default=False
+ permit_obsolete_series_uploads = Bool(
+ name="permit_obsolete_series_uploads", default=False
)
- authorized_size = IntCol(dbName="authorized_size", notNull=False)
+ authorized_size = Int(name="authorized_size", allow_none=True)
- sources_cached = IntCol(dbName="sources_cached", notNull=False, default=0)
+ sources_cached = Int(name="sources_cached", allow_none=True, default=0)
- binaries_cached = IntCol(
- dbName="binaries_cached", notNull=False, default=0
- )
+ binaries_cached = Int(name="binaries_cached", allow_none=True, default=0)
- package_description_cache = StringCol(
- dbName="package_description_cache", notNull=False, default=None
+ package_description_cache = Unicode(
+ name="package_description_cache", allow_none=True, default=None
)
- total_count = IntCol(dbName="total_count", notNull=True, default=0)
+ total_count = Int(name="total_count", allow_none=False, default=0)
- pending_count = IntCol(dbName="pending_count", notNull=True, default=0)
+ pending_count = Int(name="pending_count", allow_none=False, default=0)
- succeeded_count = IntCol(dbName="succeeded_count", notNull=True, default=0)
+ succeeded_count = Int(name="succeeded_count", allow_none=False, default=0)
- building_count = IntCol(dbName="building_count", notNull=True, default=0)
+ building_count = Int(name="building_count", allow_none=False, default=0)
- failed_count = IntCol(dbName="failed_count", notNull=True, default=0)
+ failed_count = Int(name="failed_count", allow_none=False, default=0)
- date_created = UtcDateTimeCol(dbName="date_created")
+ date_created = DateTime(name="date_created", tzinfo=timezone.utc)
signing_key_owner_id = Int(name="signing_key_owner")
signing_key_owner = Reference(signing_key_owner_id, "Person.id")
signing_key_fingerprint = Unicode()
- relative_build_score = IntCol(
- dbName="relative_build_score", notNull=True, default=0
+ relative_build_score = Int(
+ name="relative_build_score", allow_none=False, default=0
)
# This field is specifically and only intended for OEM migration to
# Launchpad and should be re-examined in October 2010 to see if it
# is still relevant.
- external_dependencies = StringCol(
- dbName="external_dependencies",
- notNull=False,
+ external_dependencies = Unicode(
+ name="external_dependencies",
+ allow_none=True,
default=None,
- storm_validator=storm_validate_external_dependencies,
+ validator=storm_validate_external_dependencies,
)
- suppress_subscription_notifications = BoolCol(
- dbName="suppress_subscription_notifications",
- notNull=True,
+ suppress_subscription_notifications = Bool(
+ name="suppress_subscription_notifications",
+ allow_none=False,
default=False,
)
@@ -382,10 +372,50 @@ class Archive(SQLBase):
name="repository_format", allow_none=True, enum=ArchiveRepositoryFormat
)
- def _init(self, *args, **kw):
- """Provide the right interface for URL traversal."""
- SQLBase._init(self, *args, **kw)
+ _creating = False
+ def __init__(
+ self,
+ owner,
+ distribution,
+ name,
+ displayname,
+ purpose,
+ description=None,
+ publish=True,
+ require_virtualized=True,
+ signing_key_owner=None,
+ signing_key_fingerprint=None,
+ publishing_method=None,
+ repository_format=None,
+ ):
+ super().__init__()
+ try:
+ self._creating = True
+ self.owner = owner
+ self.distribution = distribution
+ self.name = name
+ self.displayname = displayname
+ self.purpose = purpose
+ self.description = description
+ self.publish = publish
+ self.require_virtualized = require_virtualized
+ self.signing_key_owner = signing_key_owner
+ self.signing_key_fingerprint = signing_key_fingerprint
+ self.publishing_method = publishing_method
+ self.repository_format = repository_format
+ except Exception:
+ # If validating references such as `owner` fails, then the new
+ # object may have been added to the store first. Remove it
+ # again in that case.
+ store = Store.of(self)
+ if store is not None:
+ store.remove(self)
+ raise
+ self.__storm_loaded__()
+ del self._creating
+
+ def __storm_loaded__(self):
# Provide the additional marker interface depending on what type
# of archive this is. See also the lp:url declarations in
# zcml/archive.zcml.
@@ -927,7 +957,7 @@ class Archive(SQLBase):
SourcePackagePublishingHistory.sourcepackagerelease_id
== SourcePackageReleaseFile.sourcepackagerelease_id,
SourcePackageReleaseFile.libraryfile_id == LibraryFileAlias.id,
- LibraryFileAlias.contentID == LibraryFileContent.id,
+ LibraryFileAlias.content_id == LibraryFileContent.id,
)
# Note: we can't use the LFC.sha1 instead of LFA.filename above
@@ -1216,7 +1246,7 @@ class Archive(SQLBase):
BinaryPackagePublishingHistory.binarypackagerelease_id
== BinaryPackageFile.binarypackagerelease_id,
BinaryPackageFile.libraryfile_id == LibraryFileAlias.id,
- LibraryFileAlias.contentID == LibraryFileContent.id,
+ LibraryFileAlias.content_id == LibraryFileContent.id,
)
# See `IArchive.sources_size`.
result = result.config(distinct=True)
@@ -2926,7 +2956,7 @@ class Archive(SQLBase):
== SourcePackageReleaseFile.sourcepackagerelease_id,
LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
LibraryFileAlias.filename.is_in(source_files),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
.config(distinct=True)
)
@@ -3246,7 +3276,7 @@ class ArchiveSet:
def get(self, archive_id):
"""See `IArchiveSet`."""
- return Archive.get(archive_id)
+ return IStore(Archive).get(Archive, archive_id)
def getByReference(self, reference, check_permissions=False, user=None):
"""See `IArchiveSet`."""
@@ -3351,8 +3381,12 @@ class ArchiveSet:
if name is None:
name = self._getDefaultArchiveNameByPurpose(purpose)
- return Archive.selectOneBy(
- distribution=distribution, purpose=purpose, name=name
+ return (
+ IStore(Archive)
+ .find(
+ Archive, distribution=distribution, purpose=purpose, name=name
+ )
+ .one()
)
def getByDistroAndName(self, distribution, name):
@@ -3443,11 +3477,16 @@ class ArchiveSet:
% (name, distribution.name)
)
else:
- archive = Archive.selectOneBy(
- owner=owner,
- distribution=distribution,
- name=name,
- purpose=ArchivePurpose.PPA,
+ archive = (
+ IStore(Archive)
+ .find(
+ Archive,
+ owner=owner,
+ distribution=distribution,
+ name=name,
+ purpose=ArchivePurpose.PPA,
+ )
+ .one()
)
if archive is not None:
raise AssertionError(
@@ -3477,12 +3516,12 @@ class ArchiveSet:
signing_key_owner=signing_key_owner,
signing_key_fingerprint=signing_key_fingerprint,
require_virtualized=require_virtualized,
- _publishing_method=publishing_method,
- _repository_format=repository_format,
+ publishing_method=publishing_method,
+ repository_format=repository_format,
)
# Upon creation archives are enabled by default.
- if enabled == False:
+ if not enabled:
new_archive.disable()
# Private teams cannot have public PPAs.
@@ -3508,11 +3547,12 @@ class ArchiveSet:
]
new_archive.setProcessors(processors)
+ Store.of(new_archive).flush()
return new_archive
def __iter__(self):
"""See `IArchiveSet`."""
- return iter(Archive.select())
+ return iter(IStore(Archive).find(Archive))
def getPPAOwnedByPerson(
self,
@@ -3545,9 +3585,9 @@ class ArchiveSet:
direct_membership = Select(
Archive.id,
where=And(
- Archive._enabled == True,
+ Is(Archive._enabled, True),
Archive.purpose == ArchivePurpose.PPA,
- TeamParticipation.team == Archive.ownerID,
+ TeamParticipation.team == Archive.owner_id,
TeamParticipation.person == user,
),
)
@@ -3576,7 +3616,7 @@ class ArchiveSet:
result.order_by(Archive.displayname)
def preload_owners(rows):
- load_related(Person, rows, ["ownerID"])
+ load_related(Person, rows, ["owner_id"])
return DecoratedResultSet(result, pre_iter_hook=preload_owners)
@@ -3611,7 +3651,7 @@ class ArchiveSet:
Archive,
Archive.signing_key_fingerprint == None,
Archive.purpose == purpose,
- Archive._enabled == True,
+ Is(Archive._enabled, True),
)
)
results.order_by(Archive.date_created)
@@ -3628,8 +3668,8 @@ class ArchiveSet:
SourcePackagePublishingHistory,
SourcePackagePublishingHistory.archive == Archive.id,
SourcePackagePublishingHistory.distroseries == DistroSeries.id,
- Archive.private == False,
- Archive._enabled == True,
+ Is(Archive.private, False),
+ Is(Archive._enabled, True),
Archive.distribution == distribution,
DistroSeries.distribution == distribution,
Archive.purpose == ArchivePurpose.PPA,
@@ -3642,28 +3682,24 @@ class ArchiveSet:
def getMostActivePPAsForDistribution(self, distribution):
"""See `IArchiveSet`."""
- cur = cursor()
- query = """
- SELECT a.id, count(*) as C
- FROM Archive a, SourcePackagePublishingHistory spph
- WHERE
- spph.archive = a.id AND
- a.private = FALSE AND
- spph.datecreated >= now() - INTERVAL '1 week' AND
- a.distribution = %s AND
- a.purpose = %s
- GROUP BY a.id
- ORDER BY C DESC, a.id
- LIMIT 5
- """ % sqlvalues(
- distribution.id, ArchivePurpose.PPA
- )
-
- cur.execute(query)
+ spph_count = Alias(Count(SourcePackagePublishingHistory.id))
+ results = (
+ IStore(Archive)
+ .find(
+ (Archive, spph_count),
+ SourcePackagePublishingHistory.archive == Archive.id,
+ Is(Archive.private, False),
+ SourcePackagePublishingHistory.datecreated
+ >= UTC_NOW - Cast(timedelta(weeks=1), "interval"),
+ Archive.distribution == distribution,
+ Archive.purpose == ArchivePurpose.PPA,
+ )
+ .group_by(Archive.id)
+ .order_by(Desc(spph_count), Archive.id)[:5]
+ )
most_active = []
- for archive_id, number_of_uploads in cur.fetchall():
- archive = Archive.get(int(archive_id))
+ for archive, number_of_uploads in results:
the_dict = {"archive": archive, "uploads": number_of_uploads}
most_active.append(the_dict)
@@ -3673,7 +3709,7 @@ class ArchiveSet:
"""See `IArchiveSet`."""
return IStore(Archive).find(
Archive,
- Archive.private == True,
+ Is(Archive.private, True),
Archive.purpose == ArchivePurpose.PPA,
)
@@ -3702,7 +3738,7 @@ class ArchiveSet:
extra_exprs.append(Archive.name == name)
public_archive = And(
- Archive.private == False, Archive._enabled == True
+ Is(Archive.private, False), Is(Archive._enabled, True)
)
if not check_permissions:
@@ -3720,14 +3756,14 @@ class ArchiveSet:
TeamParticipation.team_id,
where=And(
TeamParticipation.person == user,
- TeamParticipation.team_id == Archive.ownerID,
+ TeamParticipation.team_id == Archive.owner_id,
),
)
# Append the extra expression to capture either public
# archives, or archives owned by the user, or archives
# owned by a team of which the user is a member:
- # Note: 'Archive.ownerID == user.id'
+ # Note: 'Archive.owner_id == user.id'
# is unnecessary below because there is a TeamParticipation
# entry showing that each person is a member of the "team"
# that consists of themselves.
@@ -3736,7 +3772,7 @@ class ArchiveSet:
extra_exprs.append(
Or(
public_archive,
- Archive.ownerID.is_in(user_teams_subselect),
+ Archive.owner_id.is_in(user_teams_subselect),
)
)
else:
@@ -3745,7 +3781,7 @@ class ArchiveSet:
extra_exprs.append(public_archive)
if exclude_disabled:
- extra_exprs.append(Archive._enabled == True)
+ extra_exprs.append(Is(Archive._enabled, True))
if exclude_pristine:
extra_exprs.append(
@@ -3842,7 +3878,7 @@ class ArchiveSet:
# when a user is the direct owner of the PPA.
# Team ownership is accounted for in `get_enabled_archive_filter`
# below
- elif user.id == removeSecurityProxy(archive).ownerID:
+ elif user.id == removeSecurityProxy(archive).owner_id:
allowed_ids.add(archive.id)
else:
@@ -3888,7 +3924,7 @@ def get_archive_privacy_filter(user):
else:
privacy_filter = Or(
Not(Archive.private),
- Archive.ownerID.is_in(
+ Archive.owner_id.is_in(
Select(
TeamParticipation.team_id,
where=(TeamParticipation.person == user),
@@ -3912,8 +3948,8 @@ def get_enabled_archive_filter(
if include_public:
terms = [
purpose_term,
- Archive.private == False,
- Archive._enabled == True,
+ Is(Archive.private, False),
+ Is(Archive._enabled, True),
]
return And(*terms)
else:
@@ -3929,7 +3965,7 @@ def get_enabled_archive_filter(
TeamParticipation.team_id, where=TeamParticipation.person == user
)
- is_owner = Archive.ownerID.is_in(user_teams)
+ is_owner = Archive.owner_id.is_in(user_teams)
from lp.soyuz.model.archivesubscriber import ArchiveSubscriber
@@ -3970,6 +4006,6 @@ def get_enabled_archive_filter(
if include_public:
filter_terms.append(
- And(Archive._enabled == True, Archive.private == False)
+ And(Is(Archive._enabled, True), Is(Archive.private, False))
)
return And(purpose_term, Or(*filter_terms))
diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
index f9bbe36..215d0f5 100644
--- a/lib/lp/soyuz/model/archivefile.py
+++ b/lib/lp/soyuz/model/archivefile.py
@@ -143,7 +143,7 @@ class ArchiveFileSet:
clauses.extend(
[
ArchiveFile.library_file == LibraryFileAlias.id,
- LibraryFileAlias.contentID == LibraryFileContent.id,
+ LibraryFileAlias.content_id == LibraryFileContent.id,
LibraryFileContent.sha256 == sha256,
]
)
@@ -189,7 +189,7 @@ class ArchiveFileSet:
def eager_load(rows):
lfas = load_related(LibraryFileAlias, rows, ["library_file_id"])
- load_related(LibraryFileContent, lfas, ["contentID"])
+ load_related(LibraryFileContent, lfas, ["content_id"])
if eager_load:
return DecoratedResultSet(archive_files, pre_iter_hook=eager_load)
diff --git a/lib/lp/soyuz/model/archivesubscriber.py b/lib/lp/soyuz/model/archivesubscriber.py
index 0ff0cc7..d3173df 100644
--- a/lib/lp/soyuz/model/archivesubscriber.py
+++ b/lib/lp/soyuz/model/archivesubscriber.py
@@ -240,7 +240,7 @@ class ArchiveSubscriberSet:
archives = load_related(Archive, subscriptions, ["archive_id"])
list(
getUtility(IPersonSet).getPrecachedPersonsFromIDs(
- [archive.ownerID for archive in archives],
+ [archive.owner_id for archive in archives],
need_validity=True,
)
)
diff --git a/lib/lp/soyuz/model/binarypackagebuild.py b/lib/lp/soyuz/model/binarypackagebuild.py
index ca82a8e..a801a6a 100644
--- a/lib/lp/soyuz/model/binarypackagebuild.py
+++ b/lib/lp/soyuz/model/binarypackagebuild.py
@@ -341,7 +341,7 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
),
Join(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
]
results = store.using(*origin).find(
@@ -483,7 +483,7 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
BinaryPackageRelease.id
== BinaryPackageFile.binarypackagerelease_id,
LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
return result.order_by(
[LibraryFileAlias.filename, BinaryPackageRelease.id]
@@ -942,7 +942,7 @@ class BinaryPackageBuildSet(SpecificBuildFarmJobSourceMixin):
self._prefetchBuildData(builds)
das = load_related(DistroArchSeries, builds, ["distro_arch_series_id"])
archives = load_related(Archive, builds, ["archive_id"])
- load_related(Person, archives, ["ownerID"])
+ load_related(Person, archives, ["owner_id"])
distroseries = load_related(DistroSeries, das, ["distroseries_id"])
load_related(Distribution, distroseries, ["distribution_id"])
@@ -1352,7 +1352,7 @@ class BinaryPackageBuildSet(SpecificBuildFarmJobSourceMixin):
),
LeftJoin(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
LeftJoin(Builder, Builder.id == BinaryPackageBuild.builder_id),
)
diff --git a/lib/lp/soyuz/model/distributionsourcepackagerelease.py b/lib/lp/soyuz/model/distributionsourcepackagerelease.py
index d6b3ffa..0d0de46 100644
--- a/lib/lp/soyuz/model/distributionsourcepackagerelease.py
+++ b/lib/lp/soyuz/model/distributionsourcepackagerelease.py
@@ -33,8 +33,8 @@ from lp.soyuz.model.publishing import (
@implementer(IDistributionSourcePackageRelease)
@delegate_to(ISourcePackageRelease, context="sourcepackagerelease")
class DistributionSourcePackageRelease:
- """This is a "Magic Distribution Source Package Release". It is not an
- SQLObject, but it represents the concept of a specific source package
+ """This is a "Magic Distribution Source Package Release". It is not a
+ Storm model, but it represents the concept of a specific source package
release in the distribution. You can then query it for useful
information.
"""
diff --git a/lib/lp/soyuz/model/initializedistroseriesjob.py b/lib/lp/soyuz/model/initializedistroseriesjob.py
index e487713..e6ef349 100644
--- a/lib/lp/soyuz/model/initializedistroseriesjob.py
+++ b/lib/lp/soyuz/model/initializedistroseriesjob.py
@@ -141,7 +141,9 @@ class InitializeDistroSeriesJob(DistributionJobDerived):
parts += ", parent[overlay?/pockets/components]: "
parents = []
for i in range(len(self.overlays)):
- series = IStore(DistroSeries).get(DistroSeries, self.parents[i])
+ series = IStore(DistroSeries).get(
+ DistroSeries, int(self.parents[i])
+ )
parents.append(
"%s[%s/%s/%s]"
% (
diff --git a/lib/lp/soyuz/model/livefsbuild.py b/lib/lp/soyuz/model/livefsbuild.py
index d4a3396..3c4f567 100644
--- a/lib/lp/soyuz/model/livefsbuild.py
+++ b/lib/lp/soyuz/model/livefsbuild.py
@@ -277,7 +277,7 @@ class LiveFSBuild(PackageBuildMixin, StormBase):
(LiveFSFile, LibraryFileAlias, LibraryFileContent),
LiveFSFile.livefsbuild == self.id,
LibraryFileAlias.id == LiveFSFile.libraryfile_id,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
)
return result.order_by([LibraryFileAlias.filename, LiveFSFile.id])
@@ -429,7 +429,7 @@ class LiveFSBuildSet(SpecificBuildFarmJobSourceMixin):
load_related(Person, builds, ["requester_id"])
load_related(LibraryFileAlias, builds, ["log_id"])
archives = load_related(Archive, builds, ["archive_id"])
- load_related(Person, archives, ["ownerID"])
+ load_related(Person, archives, ["owner_id"])
load_related(LiveFS, builds, ["livefs_id"])
def getByBuildFarmJobs(self, build_farm_jobs):
diff --git a/lib/lp/soyuz/model/packagecloner.py b/lib/lp/soyuz/model/packagecloner.py
index 15029b4..fee1b4c 100644
--- a/lib/lp/soyuz/model/packagecloner.py
+++ b/lib/lp/soyuz/model/packagecloner.py
@@ -171,7 +171,7 @@ class PackageCloner:
bpph.binarypackagename
""" % sqlvalues(
destination_das.id,
- destination.archive,
+ destination.archive.id,
UTC_NOW,
UTC_NOW,
destination.pocket,
@@ -188,7 +188,7 @@ class PackageCloner:
PackagePublishingStatus.PENDING,
PackagePublishingStatus.PUBLISHED,
origin.pocket,
- origin.archive,
+ origin.archive.id,
)
if use_names:
@@ -230,7 +230,7 @@ class PackageCloner:
"""
% sqlvalues(
destination.distroseries.id,
- destination.archive,
+ destination.archive.id,
UTC_NOW,
UTC_NOW,
destination.pocket,
@@ -291,7 +291,7 @@ class PackageCloner:
spn.name = mcd.sourcepackagename AND
spr.version > mcd.t_version
""" % sqlvalues(
- origin.archive,
+ origin.archive.id,
PackagePublishingStatus.PENDING,
PackagePublishingStatus.PUBLISHED,
origin.distroseries.id,
@@ -334,7 +334,7 @@ class PackageCloner:
spn.name NOT IN (
SELECT sourcepackagename FROM tmp_merge_copy_data)
""" % sqlvalues(
- origin.archive,
+ origin.archive.id,
PackagePublishingStatus.PENDING,
PackagePublishingStatus.PUBLISHED,
origin.distroseries.id,
@@ -414,7 +414,7 @@ class PackageCloner:
secsrc.distroseries = %s AND
secsrc.pocket = %s
""" % sqlvalues(
- destination.archive,
+ destination.archive.id,
PackagePublishingStatus.PENDING,
PackagePublishingStatus.PUBLISHED,
destination.distroseries.id,
@@ -465,7 +465,7 @@ class PackageCloner:
spph.archive = %s
""" % sqlvalues(
destination.distroseries.id,
- destination.archive,
+ destination.archive.id,
UTC_NOW,
UTC_NOW,
destination.pocket,
@@ -473,7 +473,7 @@ class PackageCloner:
PackagePublishingStatus.PENDING,
PackagePublishingStatus.PUBLISHED,
origin.pocket,
- origin.archive,
+ origin.archive.id,
)
if sourcepackagenames and len(sourcepackagenames) > 0:
diff --git a/lib/lp/soyuz/model/packagediff.py b/lib/lp/soyuz/model/packagediff.py
index e21de8b..14dd76c 100644
--- a/lib/lp/soyuz/model/packagediff.py
+++ b/lib/lp/soyuz/model/packagediff.py
@@ -346,7 +346,7 @@ class PackageDiffSet:
def preload_hook(rows):
lfas = load(LibraryFileAlias, (pd.diff_content_id for pd in rows))
- load(LibraryFileContent, (lfa.contentID for lfa in lfas))
+ load(LibraryFileContent, (lfa.content_id for lfa in lfas))
sprs = load(
SourcePackageRelease,
itertools.chain.from_iterable(
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 3f52ac0..78749c0 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -34,8 +34,6 @@ from storm.info import ClassAlias
from storm.properties import DateTime, Int, Unicode
from storm.references import Reference
from storm.store import Store
-from storm.zope import IResultSet
-from storm.zope.interfaces import ISQLObjectResultSet
from zope.component import getUtility
from zope.interface import implementer
from zope.security.proxy import isinstance as zope_isinstance
@@ -550,7 +548,7 @@ class SourcePackagePublishingHistory(StormBase, ArchivePublisherBase):
files = self.sourcepackagerelease.files
lfas = bulk.load_related(LibraryFileAlias, files, ["libraryfile_id"])
- bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+ bulk.load_related(LibraryFileContent, lfas, ["content_id"])
return files
def getSourceAndBinaryLibraryFiles(self):
@@ -731,7 +729,7 @@ class SourcePackagePublishingHistory(StormBase, ArchivePublisherBase):
"""See `ISourcePackagePublishingHistory`."""
sources = Store.of(self).find(
(LibraryFileAlias, LibraryFileContent),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
SourcePackageReleaseFile.sourcepackagerelease
== self.sourcepackagerelease_id,
@@ -947,7 +945,7 @@ class BinaryPackagePublishingHistory(StormBase, ArchivePublisherBase):
"""See `IPublishing`."""
files = self.binarypackagerelease.files
lfas = bulk.load_related(LibraryFileAlias, files, ["libraryfile_id"])
- bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+ bulk.load_related(LibraryFileContent, lfas, ["content_id"])
return files
@property
@@ -1367,7 +1365,7 @@ class BinaryPackagePublishingHistory(StormBase, ArchivePublisherBase):
"""See `IBinaryPackagePublishingHistory`."""
binaries = Store.of(self).find(
(LibraryFileAlias, LibraryFileContent),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
BinaryPackageFile.binarypackagerelease
== self.binarypackagerelease_id,
@@ -1586,8 +1584,6 @@ class PublishingSet:
if len(bpphs) == 0:
return
else:
- if ISQLObjectResultSet.providedBy(bpphs):
- bpphs = IResultSet(bpphs)
if bpphs.is_empty():
return
@@ -1976,7 +1972,7 @@ class PublishingSet:
LibraryFileAlias,
LibraryFileContent,
),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
LibraryFileAlias.id == BinaryPackageFile.libraryfile_id,
BinaryPackageFile.binarypackagerelease == BinaryPackageRelease.id,
BinaryPackageRelease.build_id == BinaryPackageBuild.id,
@@ -2004,7 +2000,7 @@ class PublishingSet:
LibraryFileAlias,
LibraryFileContent,
),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
LibraryFileAlias.id == SourcePackageReleaseFile.libraryfile_id,
SourcePackageReleaseFile.sourcepackagerelease
== SourcePackagePublishingHistory.sourcepackagerelease_id,
@@ -2150,7 +2146,7 @@ class PublishingSet:
lfas = bulk.load_related(
LibraryFileAlias, sprfs, ["libraryfile_id"]
)
- bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+ bulk.load_related(LibraryFileContent, lfas, ["content_id"])
return DecoratedResultSet(spphs, pre_iter_hook=eager_load)
@@ -2204,7 +2200,7 @@ class PublishingSet:
lfas = bulk.load_related(
LibraryFileAlias, bpfs, ["libraryfile_id"]
)
- bulk.load_related(LibraryFileContent, lfas, ["contentID"])
+ bulk.load_related(LibraryFileContent, lfas, ["content_id"])
bulk.load_related(
SourcePackageName, sprs, ["sourcepackagename_id"]
)
@@ -2231,7 +2227,7 @@ class PublishingSet:
LibraryFileAlias,
LibraryFileContent,
),
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
LibraryFileAlias.id == PackageUpload.changes_file_id,
PackageUpload.id == PackageUploadSource.packageupload_id,
PackageUpload.status == PackageUploadStatus.DONE,
diff --git a/lib/lp/soyuz/model/queue.py b/lib/lp/soyuz/model/queue.py
index 639640a..0d477d7 100644
--- a/lib/lp/soyuz/model/queue.py
+++ b/lib/lp/soyuz/model/queue.py
@@ -451,7 +451,7 @@ class PackageUpload(StormBase):
AND bpf.libraryfile = lfa.id
AND lfa.filename IN (%%s)
""" % sqlvalues(
- self.archive, self.distroseries.distribution.id
+ self.archive_id, self.distroseries.distribution_id
)
# Inject the inner query.
query %= inner_query
@@ -1899,7 +1899,7 @@ def prefill_packageupload_caches(uploads, puses, pubs, pucs, logs):
)
puc_lfas = load_related(LibraryFileAlias, pucs, ["libraryfilealias_id"])
- load_related(LibraryFileContent, puc_lfas, ["contentID"])
+ load_related(LibraryFileContent, puc_lfas, ["content_id"])
for spr_cache in sprs:
get_property_cache(spr_cache).published_archives = []
diff --git a/lib/lp/soyuz/model/sourcepackagerelease.py b/lib/lp/soyuz/model/sourcepackagerelease.py
index 6e1222e..1f7b00f 100644
--- a/lib/lp/soyuz/model/sourcepackagerelease.py
+++ b/lib/lp/soyuz/model/sourcepackagerelease.py
@@ -480,7 +480,7 @@ class SourcePackageRelease(StormBase):
),
Join(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
]
results = store.using(*origin).find(
diff --git a/lib/lp/soyuz/scripts/gina/README b/lib/lp/soyuz/scripts/gina/README
index 1208e80..7f84725 100644
--- a/lib/lp/soyuz/scripts/gina/README
+++ b/lib/lp/soyuz/scripts/gina/README
@@ -28,7 +28,7 @@ Using the Gina output:
-> Tons of packages (13k binaries and 7k sources)*
-> Person-related information (name, multiple mails, gpgkey, etc)
- * the current implementation of Soyuz/SQLObject is quite slow to
+ * the current implementation of Soyuz/Storm is quite slow to
fetch all information from DB, so don't forget to use "renice"
to reduce the postgres and the zope priority if your machine is
`normal' (non-dual-Xeon), otherwise it will CRASH (I'm serious).
diff --git a/lib/lp/soyuz/scripts/initialize_distroseries.py b/lib/lp/soyuz/scripts/initialize_distroseries.py
index bcb6f28..6a35b71 100644
--- a/lib/lp/soyuz/scripts/initialize_distroseries.py
+++ b/lib/lp/soyuz/scripts/initialize_distroseries.py
@@ -857,7 +857,7 @@ class InitializeDistroSeries:
FROM Archivepermission WHERE packageset = %s
"""
% sqlvalues(
- self.distroseries.main_archive,
+ self.distroseries.main_archive.id,
child_ps.id,
parent_ps.id,
)
@@ -913,7 +913,7 @@ class InitializeDistroSeries:
WHERE pocket IS NOT NULL AND distroseries = %s
"""
% sqlvalues(
- self.distroseries.main_archive,
+ self.distroseries.main_archive.id,
self.distroseries.id,
parent.id,
)
diff --git a/lib/lp/soyuz/scripts/packagecopier.py b/lib/lp/soyuz/scripts/packagecopier.py
index d0553f5..f377107 100644
--- a/lib/lp/soyuz/scripts/packagecopier.py
+++ b/lib/lp/soyuz/scripts/packagecopier.py
@@ -171,7 +171,7 @@ def check_copy_permissions(
# checks on each source archive. Not all of this is currently
# preloadable.
archives = load_related(Archive, sources, ["archive_id"])
- load_related(Person, archives, ["ownerID"])
+ load_related(Person, archives, ["owner_id"])
# If there is a requester, check that they have upload permission into
# the destination (archive, component, pocket). This check is done
diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py
index a72d9b7..5c17662 100644
--- a/lib/lp/soyuz/tests/test_archive.py
+++ b/lib/lp/soyuz/tests/test_archive.py
@@ -44,6 +44,7 @@ from lp.buildmaster.interfaces.buildfarmjobbehaviour import (
IBuildFarmJobBehaviour,
)
from lp.buildmaster.interfaces.processor import IProcessorSet
+from lp.buildmaster.model.buildqueue import BuildQueue
from lp.registry.enums import PersonVisibility, TeamMembershipPolicy
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.person import IPersonSet
@@ -52,7 +53,6 @@ from lp.registry.interfaces.series import SeriesStatus
from lp.registry.interfaces.teammembership import TeamMembershipStatus
from lp.services.authserver.testing import InProcessAuthServerFixture
from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import sqlvalues
from lp.services.features import getFeatureFlag
from lp.services.features.testing import FeatureFixture
from lp.services.gpg.interfaces import (
@@ -116,6 +116,7 @@ from lp.soyuz.model.archivepermission import (
ArchivePermission,
ArchivePermissionSet,
)
+from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
from lp.soyuz.model.binarypackagerelease import (
BinaryPackageReleaseDownloadCount,
)
@@ -398,20 +399,18 @@ class TestArchiveEnableDisable(TestCaseWithFactory):
def _getBuildQueuesByStatus(self, archive, status):
# Return the count for archive build jobs with the given status.
- query = """
- SELECT COUNT(BuildQueue.id)
- FROM BinaryPackageBuild, BuildQueue
- WHERE
- BinaryPackageBuild.build_farm_job =
- BuildQueue.build_farm_job
- AND BinaryPackageBuild.archive = %s
- AND BinaryPackageBuild.status = %s
- AND BuildQueue.status = %s;
- """ % sqlvalues(
- archive, BuildStatus.NEEDSBUILD, status
- )
-
- return IStore(Archive).execute(query).get_one()[0]
+ return (
+ IStore(BuildQueue)
+ .find(
+ BuildQueue.id,
+ BinaryPackageBuild.build_farm_job_id
+ == BuildQueue._build_farm_job_id,
+ BinaryPackageBuild.archive == archive,
+ BinaryPackageBuild.status == BuildStatus.NEEDSBUILD,
+ BuildQueue.status == status,
+ )
+ .count()
+ )
def assertNoBuildQueuesHaveStatus(self, archive, status):
# Check that that the jobs attached to this archive do not have this
diff --git a/lib/lp/soyuz/tests/test_initializedistroseriesjob.py b/lib/lp/soyuz/tests/test_initializedistroseriesjob.py
index dd48340..d40f9b4 100644
--- a/lib/lp/soyuz/tests/test_initializedistroseriesjob.py
+++ b/lib/lp/soyuz/tests/test_initializedistroseriesjob.py
@@ -51,13 +51,15 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
def test_getOopsVars(self):
parent = self.factory.makeDistroSeries()
distroseries = self.factory.makeDistroSeries()
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
vars = job.getOopsVars()
naked_job = removeSecurityProxy(job)
self.assertIn(("distribution_id", distroseries.distribution.id), vars)
self.assertIn(("distroseries_id", distroseries.id), vars)
self.assertIn(("distribution_job_id", naked_job.context.id), vars)
- self.assertIn(("parent_distroseries_ids", [parent.id]), vars)
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ self.assertIn(("parent_distroseries_ids", [str(parent.id)]), vars)
def _getJobs(self):
"""Return the pending InitializeDistroSeriesJobs as a list."""
@@ -80,12 +82,14 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
overlay_components = ("main", "universe")
arches = ("i386", "amd64")
archindep_archtag = "amd64"
- packagesets = (packageset1.id, packageset2.id)
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ packagesets = (str(packageset1.id), str(packageset2.id))
rebuild = False
job = self.job_source.create(
distroseries,
- [parent1.id, parent2.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent1.id), str(parent2.id)],
arches,
archindep_archtag,
packagesets,
@@ -121,12 +125,14 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
# If there's already a pending InitializeDistroSeriesJob for a
# DistroSeries, InitializeDistroSeriesJob.create() raises an
# exception.
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
exception = self.assertRaises(
InitializationPending,
self.job_source.create,
distroseries,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
)
self.assertEqual(job, exception.job)
@@ -136,14 +142,16 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
# If there's already a completed InitializeDistroSeriesJob for a
# DistroSeries, InitializeDistroSeriesJob.create() raises an
# exception.
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
job.start()
job.complete()
exception = self.assertRaises(
InitializationCompleted,
self.job_source.create,
distroseries,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
)
self.assertEqual(job, exception.job)
@@ -153,10 +161,11 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
# If there's already a failed InitializeDistroSeriesJob for a
# DistroSeries, InitializeDistroSeriesJob.create() schedules a new
# job.
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
job.start()
job.fail()
- self.job_source.create(distroseries, [parent.id])
+ self.job_source.create(distroseries, [str(parent.id)])
def test_run_with_previous_series_already_set(self):
# InitializationError is raised if a parent series already exists
@@ -167,7 +176,8 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
distroseries, parent, initialized=True
)
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
expected_message = (
"Series {child.name} has already been initialised" "."
).format(child=distroseries)
@@ -189,7 +199,8 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
job = self.job_source.create(
distroseries,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
arches,
archindep_archtag,
packagesets,
@@ -205,7 +216,8 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
self.assertEqual(naked_job.archindep_archtag, archindep_archtag)
self.assertEqual(naked_job.packagesets, packagesets)
self.assertEqual(naked_job.rebuild, False)
- self.assertEqual(naked_job.parents, (parent.id,))
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ self.assertEqual(naked_job.parents, (str(parent.id),))
self.assertEqual(naked_job.overlays, overlays)
self.assertEqual(naked_job.overlay_pockets, overlay_pockets)
self.assertEqual(naked_job.overlay_components, overlay_components)
@@ -213,9 +225,10 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
def test_parent(self):
parent = self.factory.makeDistroSeries()
distroseries = self.factory.makeDistroSeries()
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
naked_job = removeSecurityProxy(job)
- self.assertEqual((parent.id,), naked_job.parents)
+ self.assertEqual((str(parent.id),), naked_job.parents)
def test_get(self):
# InitializeDistroSeriesJob.get() returns the initialization job for
@@ -224,8 +237,9 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
distroseries = self.factory.makeDistroSeries()
another_distroseries = self.factory.makeDistroSeries()
self.assertIs(None, self.job_source.get(distroseries))
- self.job_source.create(distroseries, [parent.id])
- self.job_source.create(another_distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ self.job_source.create(distroseries, [str(parent.id)])
+ self.job_source.create(another_distroseries, [str(parent.id)])
job = self.job_source.get(distroseries)
self.assertIsInstance(job, InitializeDistroSeriesJob)
self.assertEqual(job.distroseries, distroseries)
@@ -235,14 +249,16 @@ class InitializeDistroSeriesJobTests(TestCaseWithFactory):
# None when no error description is recorded.
parent = self.factory.makeDistroSeries()
distroseries = self.factory.makeDistroSeries()
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
self.assertIs(None, removeSecurityProxy(job).error_description)
def test_error_description_set_when_notifying_about_user_errors(self):
# error_description is set by notifyUserError().
parent = self.factory.makeDistroSeries()
distroseries = self.factory.makeDistroSeries()
- job = self.job_source.create(distroseries, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(distroseries, [str(parent.id)])
message = "This is an example message."
job.notifyUserError(InitializationError(message))
self.assertEqual(message, removeSecurityProxy(job).error_description)
@@ -273,6 +289,7 @@ def create_child(factory):
test1 = getUtility(IPackagesetSet).new(
"test1", "test 1 packageset", parent.owner, distroseries=parent
)
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
test1_packageset_id = str(test1.id)
test1.addSources("udev")
parent.updatePackageCount()
@@ -310,7 +327,8 @@ class InitializeDistroSeriesJobTestsWithPackages(TestCaseWithFactory):
def test_job(self):
parent, child, test1_packageset_id = create_child(self.factory)
- job = self.job_source.create(child, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job = self.job_source.create(child, [str(parent.id)])
switch_dbuser("initializedistroseries")
job.run()
@@ -323,7 +341,8 @@ class InitializeDistroSeriesJobTestsWithPackages(TestCaseWithFactory):
arch = parent.nominatedarchindep.architecturetag
job = self.job_source.create(
child,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
packagesets=(test1_packageset_id,),
arches=(arch,),
rebuild=True,
@@ -344,7 +363,8 @@ class InitializeDistroSeriesJobTestsWithPackages(TestCaseWithFactory):
parent, child, test1_packageset_id = create_child(self.factory)
job = self.job_source.create(
child,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
archindep_archtag=None,
packagesets=None,
arches=None,
@@ -363,7 +383,8 @@ class InitializeDistroSeriesJobTestsWithPackages(TestCaseWithFactory):
parent, child, test1_packageset_id = create_child(self.factory)
job = self.job_source.create(
child,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
archindep_archtag=None,
packagesets=None,
arches=None,
@@ -386,7 +407,8 @@ class InitializeDistroSeriesJobTestsWithPackages(TestCaseWithFactory):
self.setupDas(parent, "powerpc", "hppa")
job = self.job_source.create(
child,
- [parent.id],
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ [str(parent.id)],
archindep_archtag="amd64",
packagesets=None,
arches=None,
@@ -423,7 +445,8 @@ class TestViaCelery(TestCaseWithFactory):
parent, child, test1 = create_child(self.factory)
job_source = getUtility(IInitializeDistroSeriesJobSource)
with block_on_job():
- job_source.create(child, [parent.id])
+ # XXX cjwatson 2023-09-11: Our JS code passes IDs as strings.
+ job_source.create(child, [str(parent.id)])
transaction.commit()
child.updatePackageCount()
self.assertEqual(parent.sourcecount, child.sourcecount)
diff --git a/lib/lp/soyuz/tests/test_packagediff.py b/lib/lp/soyuz/tests/test_packagediff.py
index b86a26f..c5db107 100644
--- a/lib/lp/soyuz/tests/test_packagediff.py
+++ b/lib/lp/soyuz/tests/test_packagediff.py
@@ -102,7 +102,7 @@ class TestPackageDiffs(TestCaseWithFactory):
if expire:
update_map[LibraryFileAlias.expires] = datetime.now(timezone.utc)
if delete:
- update_map[LibraryFileAlias.contentID] = None
+ update_map[LibraryFileAlias.content_id] = None
with dbuser("launchpad"):
IStore(LibraryFileAlias).execute(
BulkUpdate(
diff --git a/lib/lp/soyuz/vocabularies.py b/lib/lp/soyuz/vocabularies.py
index 1b640b8..8254e63 100644
--- a/lib/lp/soyuz/vocabularies.py
+++ b/lib/lp/soyuz/vocabularies.py
@@ -11,6 +11,7 @@ __all__ = [
"PPAVocabulary",
]
+from storm.expr import Is
from storm.locals import And, Or
from zope.component import getUtility
from zope.interface import implementer
@@ -22,11 +23,7 @@ from lp.registry.model.person import Person
from lp.services.database.interfaces import IStore
from lp.services.database.stormexpr import fti_search
from lp.services.webapp.interfaces import ILaunchBag
-from lp.services.webapp.vocabulary import (
- IHugeVocabulary,
- SQLObjectVocabularyBase,
- StormVocabularyBase,
-)
+from lp.services.webapp.vocabulary import IHugeVocabulary, StormVocabularyBase
from lp.soyuz.enums import ArchivePurpose
from lp.soyuz.interfaces.archive import IArchiveSet
from lp.soyuz.model.archive import Archive, get_enabled_archive_filter
@@ -86,18 +83,17 @@ class PackageReleaseVocabulary(StormVocabularyBase):
@implementer(IHugeVocabulary)
-class PPAVocabulary(SQLObjectVocabularyBase):
+class PPAVocabulary(StormVocabularyBase):
_table = Archive
- _orderBy = ["Person.name, Archive.name"]
- _clauseTables = ["Person"]
+ _order_by = ["Person.name", "Archive.name"]
# This should probably also filter by privacy, but that becomes
# problematic when you need to remove a dependency that you can no
# longer see.
- _filter = And(
- Archive._enabled == True,
- Person.q.id == Archive.q.ownerID,
- Archive.q.purpose == ArchivePurpose.PPA,
- )
+ _clauses = [
+ Is(Archive._enabled, True),
+ Archive.owner == Person.id,
+ Archive.purpose == ArchivePurpose.PPA,
+ ]
displayname = "Select a PPA"
step_title = "Search"
@@ -121,7 +117,7 @@ class PPAVocabulary(SQLObjectVocabularyBase):
def search(self, query, vocab_filter=None):
"""Return a resultset of archives.
- This is a helper required by `SQLObjectVocabularyBase.searchForTerms`.
+ This is a helper required by `StormVocabularyBase.searchForTerms`.
"""
if not query:
return self.emptySelectResults()
@@ -147,17 +143,18 @@ class PPAVocabulary(SQLObjectVocabularyBase):
Person.name == owner_name, Archive.name == archive_name
)
- clause = And(
- self._filter,
+ extra_clauses = [
get_enabled_archive_filter(
getUtility(ILaunchBag).user,
purpose=ArchivePurpose.PPA,
include_public=True,
),
search_clause,
- )
- return self._table.select(
- clause, orderBy=self._orderBy, clauseTables=self._clauseTables
+ ]
+ return (
+ IStore(self._table)
+ .find(self._table, *self._clauses, *extra_clauses)
+ .order_by(self._order_by)
)
diff --git a/lib/lp/testing/__init__.py b/lib/lp/testing/__init__.py
index 32256e1..d8cbff9 100644
--- a/lib/lp/testing/__init__.py
+++ b/lib/lp/testing/__init__.py
@@ -579,7 +579,7 @@ class TestCase(testtools.TestCase, fixtures.TestWithFixtures):
to another date value. Trickery is required because SQLBuilder truth
semantics cause UTC_NOW to appear equal to all dates.
- :param sql_object: a security-proxied SQLObject instance.
+ :param sql_object: a security-proxied Storm instance.
:param attribute_name: the name of a database column in the table
associated to this object.
:param date: `datetime.datetime` object or `UTC_NOW`.
@@ -1108,8 +1108,11 @@ class AbstractYUITestCase(TestCase):
The tests are run during `setUp()`, but failures need to be reported
from here.
"""
- assert self.layer.browser
- results = self.layer.browser.run_tests(
+ # Circular import.
+ from lp.testing.layers import WebBrowserLayer
+
+ assert WebBrowserLayer.browser
+ results = WebBrowserLayer.browser.run_tests(
self.html_uri,
timeout=self.suite_timeout,
incremental_timeout=self.incremental_timeout,
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 60cb62e..7605ed7 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -3283,6 +3283,7 @@ class LaunchpadObjectFactory(ObjectFactory):
sha1=hashlib.sha1(content).hexdigest(),
md5=hashlib.md5(content).hexdigest(),
)
+ IStore(LibraryFileContent).add(lfc)
lfa = ProxyFactory(
LibraryFileAlias(
content=lfc,
@@ -3292,6 +3293,7 @@ class LaunchpadObjectFactory(ObjectFactory):
restricted=restricted,
)
)
+ IStore(LibraryFileAlias).flush()
else:
lfa = getUtility(ILibraryFileAliasSet).create(
filename,
diff --git a/lib/lp/testing/html5browser.py b/lib/lp/testing/html5browser.py
index c559c64..678eeab 100644
--- a/lib/lp/testing/html5browser.py
+++ b/lib/lp/testing/html5browser.py
@@ -125,4 +125,4 @@ class Browser:
return json.loads(results)
def close(self):
- self.driver.close()
+ self.driver.quit()
diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py
index ce68de6..cf94f50 100644
--- a/lib/lp/testing/layers.py
+++ b/lib/lp/testing/layers.py
@@ -1938,8 +1938,8 @@ class ZopelessAppServerLayer(LaunchpadZopelessLayer):
LayerProcessController.postTestInvariants()
-class YUITestLayer(FunctionalLayer):
- """The layer for all YUITests cases."""
+class WebBrowserLayer(BaseLayer):
+ """A layer that runs a web browser."""
browser = None
@@ -1966,6 +1966,32 @@ class YUITestLayer(FunctionalLayer):
pass
+class YUITestLayer(FunctionalLayer, WebBrowserLayer):
+ """The layer for all YUI test cases."""
+
+ browser = None
+
+ @classmethod
+ @profiled
+ def setUp(cls):
+ pass
+
+ @classmethod
+ @profiled
+ def tearDown(cls):
+ pass
+
+ @classmethod
+ @profiled
+ def testSetUp(cls):
+ pass
+
+ @classmethod
+ @profiled
+ def testTearDown(cls):
+ pass
+
+
class YUIAppServerLayer(MemcachedLayer):
"""The layer for all YUIAppServer test cases."""
diff --git a/lib/lp/testing/tests/test_html5browser.py b/lib/lp/testing/tests/test_html5browser.py
index bdc6229..d6ebc11 100644
--- a/lib/lp/testing/tests/test_html5browser.py
+++ b/lib/lp/testing/tests/test_html5browser.py
@@ -4,12 +4,14 @@
from tempfile import NamedTemporaryFile
from lp.testing import TestCase
-from lp.testing.html5browser import Browser
+from lp.testing.layers import WebBrowserLayer
class TestBrowser(TestCase):
"""Verify Browser methods."""
+ layer = WebBrowserLayer
+
def setUp(self):
super().setUp()
self.file = NamedTemporaryFile(
@@ -51,11 +53,9 @@ class TestBrowser(TestCase):
self.file.flush()
self.file_uri = "file://{}".format(self.file.name)
self.addCleanup(self.file.close)
- self.browser = Browser()
- self.addCleanup(self.browser.close)
def test_load_test_results(self):
- results = self.browser.run_tests(self.file_uri, timeout=10000)
+ results = self.layer.browser.run_tests(self.file_uri, timeout=10000)
self.assertEqual(results.status, results.Status.SUCCESS)
self.assertEqual(
results.results,
@@ -66,7 +66,7 @@ class TestBrowser(TestCase):
)
def test_timeout_error(self):
- results = self.browser.run_tests(self.file_uri, timeout=1500)
+ results = self.layer.browser.run_tests(self.file_uri, timeout=1500)
self.assertEqual(results.status, results.Status.TIMEOUT)
self.assertIsNone(results.results)
self.assertEqual(
@@ -75,7 +75,7 @@ class TestBrowser(TestCase):
)
def test_incremental_timeout_success(self):
- results = self.browser.run_tests(
+ results = self.layer.browser.run_tests(
self.file_uri, timeout=10000, incremental_timeout=3000
)
self.assertEqual(results.status, results.Status.SUCCESS)
@@ -88,7 +88,7 @@ class TestBrowser(TestCase):
)
def test_incremental_timeout_error(self):
- results = self.browser.run_tests(
+ results = self.layer.browser.run_tests(
self.file_uri, timeout=10000, incremental_timeout=1500
)
self.assertEqual(results.status, results.Status.TIMEOUT)
diff --git a/lib/lp/translations/doc/rosetta-karma.rst b/lib/lp/translations/doc/rosetta-karma.rst
index 9ae8415..48e08d9 100644
--- a/lib/lp/translations/doc/rosetta-karma.rst
+++ b/lib/lp/translations/doc/rosetta-karma.rst
@@ -3,8 +3,8 @@ Rosetta gives Karma to users who perform certain kinds of actions.
This test documents when and why Rosetta does it.
Note, that once we commit the transaction, we need to fetch again any
-SQLObject we need to use to be sure we have the right information. Seems
-like SQLObjects are not persistent between transactions.
+Storm instance we need to use to be sure we have the right information. Seems
+like Storm instances are not persistent between transactions.
>>> import transaction
>>> from lp.app.interfaces.launchpad import ILaunchpadCelebrities
diff --git a/lib/lp/translations/doc/rosetta-translation.rst b/lib/lp/translations/doc/rosetta-translation.rst
index 866b444..8c26c18 100644
--- a/lib/lp/translations/doc/rosetta-translation.rst
+++ b/lib/lp/translations/doc/rosetta-translation.rst
@@ -42,7 +42,7 @@ Get a translation for a particular message and check it has a translation.
Get a person to create a translation with.
>>> from lp.registry.model.person import Person
- >>> person = Person.get(1)
+ >>> person = IStore(Person).get(Person, 1)
>>> pofile.canEditTranslations(person)
True
@@ -96,9 +96,9 @@ Now we want to test the interaction of the "upstream" translations with the
"active translations". There are several things we want to be able to test.
First, let's setup some useful variables.
- >>> Pa = Person.get(50)
- >>> Pb = Person.get(46)
- >>> Pc = Person.get(16)
+ >>> Pa = IStore(Person).get(Person, 50)
+ >>> Pb = IStore(Person).get(Person, 46)
+ >>> Pc = IStore(Person).get(Person, 16)
Pa, Pb and Pc are three useful Persons.
diff --git a/lib/lp/translations/doc/translationmessage-destroy.rst b/lib/lp/translations/doc/translationmessage-destroy.rst
index 6c79ec3..05cf0ef 100644
--- a/lib/lp/translations/doc/translationmessage-destroy.rst
+++ b/lib/lp/translations/doc/translationmessage-destroy.rst
@@ -4,8 +4,7 @@ destroySelf
(Note: this test runs as rosettaadmin to obtain the necessary
privileges)
-With this method, we allow to remove a submission, it comes from SQLObject,
-but we test it here to be sure it appears in our public interface.
+With this method, we allow removing a submission.
We will need extra permissions to use this method.
diff --git a/lib/lp/translations/model/distroserieslanguage.py b/lib/lp/translations/model/distroserieslanguage.py
index d147f4b..c2385f4 100644
--- a/lib/lp/translations/model/distroserieslanguage.py
+++ b/lib/lp/translations/model/distroserieslanguage.py
@@ -36,7 +36,7 @@ from lp.translations.utilities.rosettastats import RosettaStats
class DistroSeriesLanguage(StormBase, RosettaStats):
"""See `IDistroSeriesLanguage`.
- A SQLObject based implementation of IDistroSeriesLanguage.
+ A Storm implementation of IDistroSeriesLanguage.
"""
__storm_table__ = "DistroSeriesLanguage"
diff --git a/lib/lp/translations/model/poexportrequest.py b/lib/lp/translations/model/poexportrequest.py
index 3313457..9dd9ec5 100644
--- a/lib/lp/translations/model/poexportrequest.py
+++ b/lib/lp/translations/model/poexportrequest.py
@@ -92,7 +92,7 @@ class POExportRequestSet:
)
query_params = {
- "person": quote(person),
+ "person": quote(person.id),
"format": quote(format),
"templates": potemplate_ids,
"pofiles": pofile_ids,
diff --git a/lib/lp/translations/model/pofile.py b/lib/lp/translations/model/pofile.py
index 14626b1..9548b3e 100644
--- a/lib/lp/translations/model/pofile.py
+++ b/lib/lp/translations/model/pofile.py
@@ -1,7 +1,7 @@
# Copyright 2009-2020 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-"""`SQLObject` implementation of `IPOFile` interface."""
+"""Storm implementation of `IPOFile` interface."""
__all__ = [
"PlaceholderPOFile",
@@ -456,7 +456,7 @@ class POFile(StormBase, POFileMixIn):
)
.config(distinct=True)
)
- contributors = contributors.order_by(*Person._storm_sortingColumns)
+ contributors = contributors.order_by(Person._separated_sortingColumns)
contributors = contributors.config(distinct=True)
return contributors
diff --git a/lib/lp/translations/model/potemplate.py b/lib/lp/translations/model/potemplate.py
index 87e79d5..8487230 100644
--- a/lib/lp/translations/model/potemplate.py
+++ b/lib/lp/translations/model/potemplate.py
@@ -1,7 +1,7 @@
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-"""`SQLObject` implementation of `IPOTemplate` interface."""
+"""Storm implementation of `IPOTemplate` interface."""
__all__ = [
"get_pofiles_for",
diff --git a/lib/lp/translations/model/translationgroup.py b/lib/lp/translations/model/translationgroup.py
index 943b38e..5471b10 100644
--- a/lib/lp/translations/model/translationgroup.py
+++ b/lib/lp/translations/model/translationgroup.py
@@ -175,10 +175,10 @@ class TranslationGroup(StormBase):
Translator,
Language,
Person,
- LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Person.iconID),
+ LeftJoin(LibraryFileAlias, LibraryFileAlias.id == Person.icon_id),
LeftJoin(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
]
tables = (
@@ -223,7 +223,7 @@ class TranslationGroup(StormBase):
)
get_precached_products(products, need_licences=True)
icons = bulk.load_related(LibraryFileAlias, products, ["icon_id"])
- bulk.load_related(LibraryFileContent, icons, ["contentID"])
+ bulk.load_related(LibraryFileContent, icons, ["content_id"])
return products
def fetchProjectGroupsForDisplay(self):
@@ -238,7 +238,7 @@ class TranslationGroup(StormBase):
),
LeftJoin(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
]
tables = (
@@ -271,7 +271,7 @@ class TranslationGroup(StormBase):
),
LeftJoin(
LibraryFileContent,
- LibraryFileContent.id == LibraryFileAlias.contentID,
+ LibraryFileContent.id == LibraryFileAlias.content_id,
),
]
tables = (
diff --git a/lib/lp/translations/scripts/fix_plural_forms.py b/lib/lp/translations/scripts/fix_plural_forms.py
index 8bdebac..1ad2836 100644
--- a/lib/lp/translations/scripts/fix_plural_forms.py
+++ b/lib/lp/translations/scripts/fix_plural_forms.py
@@ -9,7 +9,6 @@ __all__ = [
from lp.services.database.interfaces import IStore
from lp.services.database.sqlbase import cursor
-from lp.services.database.sqlobject import SQLObjectNotFound
from lp.translations.interfaces.translations import TranslationConstants
from lp.translations.model.pofile import POFile
from lp.translations.model.potmsgset import POTMsgSet
@@ -83,8 +82,6 @@ def fix_plurals_in_all_pofiles(ztm, logger):
cur.execute("""SELECT MAX(id) FROM POFile""")
(max_pofile_id,) = cur.fetchall()[0]
for pofile_id in range(1, max_pofile_id):
- try:
- pofile = IStore(POFile).get(POFile, pofile_id)
+ pofile = IStore(POFile).get(POFile, pofile_id)
+ if pofile is not None:
fix_pofile_plurals(pofile, logger, ztm)
- except SQLObjectNotFound:
- pass
diff --git a/lib/lp/translations/scripts/tests/test_remove_translations.py b/lib/lp/translations/scripts/tests/test_remove_translations.py
index 15c2638..dbea556 100644
--- a/lib/lp/translations/scripts/tests/test_remove_translations.py
+++ b/lib/lp/translations/scripts/tests/test_remove_translations.py
@@ -420,7 +420,7 @@ class TestRemoveTranslations(TestCase):
# on reviewer instead.
new_nl_message.reviewer = self.potemplate.owner
- self._removeMessages(submitter=carlos)
+ self._removeMessages(submitter=carlos.id)
self._checkInvariant()
def test_RemoveByReviewer(self):
@@ -434,7 +434,7 @@ class TestRemoveTranslations(TestCase):
new_nl_message.reviewer = carlos
new_de_message.reviewer = carlos
- self._removeMessages(reviewer=carlos)
+ self._removeMessages(reviewer=carlos.id)
self._checkInvariant()
def test_RemoveByDateCreated(self):
@@ -473,7 +473,7 @@ class TestRemoveTranslations(TestCase):
)
removeSecurityProxy(new_de_message).submitter = mark
- self._removeMessages(submitter=carlos, date_created="2015-05-12")
+ self._removeMessages(submitter=carlos.id, date_created="2015-05-12")
# First make sure we're not reading out of cache.
Store.of(self.nl_pofile).flush()
@@ -502,7 +502,7 @@ class TestRemoveTranslations(TestCase):
)
rowcount = self._removeMessages(
- submitter=carlos, date_created="2015-05-12"
+ submitter=carlos.id, date_created="2015-05-12"
)
self.assertEqual(rowcount, 1)
diff --git a/scripts/librarian-report.py b/scripts/librarian-report.py
index 8151b29..0281181 100755
--- a/scripts/librarian-report.py
+++ b/scripts/librarian-report.py
@@ -13,7 +13,7 @@ import sys
from optparse import OptionParser
from lp.services.database.postgresql import listReferences
-from lp.services.database.sqlbase import connect, quoteIdentifier, sqlvalues
+from lp.services.database.sqlbase import connect, quote_identifier, sqlvalues
from lp.services.scripts import db_options
@@ -76,8 +76,8 @@ def main():
for referring_table, referring_column in sorted(references):
if referring_table == "libraryfiledownloadcount":
continue
- quoted_referring_table = quoteIdentifier(referring_table)
- quoted_referring_column = quoteIdentifier(referring_column)
+ quoted_referring_table = quote_identifier(referring_table)
+ quoted_referring_column = quote_identifier(referring_column)
cur.execute(
"""
SELECT
diff --git a/utilities/make-dummy-hosted-branches b/utilities/make-dummy-hosted-branches
index 2b40eb8..866213e 100755
--- a/utilities/make-dummy-hosted-branches
+++ b/utilities/make-dummy-hosted-branches
@@ -29,7 +29,7 @@ from lp.code.model.branch import Branch
from lp.codehosting.tests.helpers import create_branch_with_one_revision
from lp.codehosting.vfs import branch_id_to_path
from lp.services.config import config
-from lp.services.database.sqlbase import sqlvalues
+from lp.services.database.interfaces import IStore
from lp.services.scripts import execute_zcml_for_scripts
@@ -51,8 +51,8 @@ def main(argv):
shutil.rmtree(config.codehosting.mirrored_branches_root)
execute_zcml_for_scripts()
try:
- branches = Branch.select(
- "Branch.branch_type = %s" % sqlvalues(BranchType.HOSTED)
+ branches = IStore(Branch).find(
+ Branch, Branch.branch_type == BranchType.HOSTED
)
for branch in branches:
make_bazaar_branch_and_tree(branch)
diff --git a/utilities/snakefood/Makefile b/utilities/snakefood/Makefile
index 06d92cc..7984534 100644
--- a/utilities/snakefood/Makefile
+++ b/utilities/snakefood/Makefile
@@ -4,11 +4,11 @@ default: lp-clustered.svg
# Generate import dependency graph
lp.sfood:
- sfood -i -u -I $(LIB_DIR)/sqlobject -I $(LIB_DIR)/schoolbell \
+ sfood -i -u -I $(LIB_DIR)/schoolbell \
-I $(LIB_DIR)/contrib \
-I $(LIB_DIR)/canonical/not-used $(LIB_DIR)/canonical \
$(LIB_DIR)/lp 2>/dev/null | grep -v contrib/ \
- | grep -v sqlobject | egrep -v 'BeautifulSoup|bs4' | grep -v psycopg \
+ | egrep -v 'BeautifulSoup|bs4' | grep -v psycopg \
| grep -v schoolbell | grep -v '/tests/' | grep -v '/ftests/' \
| grep -v 'lp/services/config' > lp.sfood.tmp
mv lp.sfood.tmp lp.sfood
Follow ups