launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #32148
[Merge] ~lgp171188/launchpad:manually-merge-stable-to-db-devel into launchpad:db-devel
Guruprasad has proposed merging ~lgp171188/launchpad:manually-merge-stable-to-db-devel into launchpad:db-devel.
Commit message:
Merge stable to db-devel manually
The automatic merges are broken because they need buildbot
to be passing on both branches and the db-devel one appears
to be broken currently because of the previous urllib3
upgrade issues.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~lgp171188/launchpad/+git/launchpad/+merge/480216
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~lgp171188/launchpad:manually-merge-stable-to-db-devel into launchpad:db-devel.
diff --git a/cronscripts/foaf-update-karma-cache.py b/cronscripts/foaf-update-karma-cache.py
index 844a518..a569461 100755
--- a/cronscripts/foaf-update-karma-cache.py
+++ b/cronscripts/foaf-update-karma-cache.py
@@ -5,6 +5,8 @@
import _pythonpath # noqa: F401
+from decimal import Decimal
+
from zope.component import getUtility
from lp.app.errors import NotFoundError
@@ -284,7 +286,15 @@ class KarmaCacheUpdater(LaunchpadCronScript):
at C_add_summed_totals to see how the summed entries are generated.
"""
(person_id, category_id, product_id, distribution_id, points) = entry
- points *= scaling[category_id] # Scaled. wow.
+ # XXX lgp171188 2025-01-22 In PostgreSQL 14, the points column is
+ # returned as a Decimal instead of a float as before. So, in that case,
+ # convert the operands to Decimal to perform the arithmetic, and then
+ # convert the result back to a float, so that precision is lost only at
+ # the final conversion and not during the arithmetic. This can be
+ # removed once we have upgraded to a PostgreSQL version >= 14.
+ if isinstance(points, Decimal):
+ scaling[category_id] = Decimal(scaling[category_id])
+ points = float(points * scaling[category_id]) # Scaled. wow.
self.logger.debug(
"Setting person_id=%d, category_id=%d, points=%d"
% (person_id, category_id, points)
diff --git a/database/schema/Makefile b/database/schema/Makefile
index 0b33795..b1ff5e1 100644
--- a/database/schema/Makefile
+++ b/database/schema/Makefile
@@ -117,7 +117,7 @@ create:
@ echo "* Patching the database schema"
@ ./upgrade.py --separate-sessions -d ${EMPTY_DBNAME}
@ echo "* Security setup"
- @ ./security.py -q -d ${EMPTY_DBNAME}
+ @ ./security.py -q -d ${EMPTY_DBNAME} --grant-create-on-public-schema
@ echo "* Disabling autovacuum"
@ ./unautovacuumable.py -d ${EMPTY_DBNAME}
@ echo "* Vacuuming"
diff --git a/database/schema/security.py b/database/schema/security.py
index d742577..7049552 100755
--- a/database/schema/security.py
+++ b/database/schema/security.py
@@ -646,6 +646,20 @@ def reset_permissions(con, config, options):
"GRANT USAGE ON SCHEMA %s TO PUBLIC"
% (quote_identifier(schema_name),)
)
+
+ # XXX 2025-01-27 lgp171188: PostgreSQL 15+ stopped granting
+ # the permission to create tables in the 'public' namespace
+ # for all non-owner users as a part of security strengthening,
+ # in favour of requiring the users to do this manually as needed.
+ # We conditionally enable it here as various Launchpad roles
+ # need it to be able to run the Launchpad test suite without any errors.
+ if (
+ options.grant_create_on_public_schema
+ and con.server_version // 10000 >= 15
+ ):
+ log.debug("Granting CREATE on schema 'public' on PostgreSQL 15+.")
+ cur.execute("GRANT CREATE on SCHEMA public TO PUBLIC")
+
for obj in schema.values():
if obj.schema not in public_schemas:
continue
@@ -760,6 +774,20 @@ if __name__ == "__main__":
default="postgres",
help="Owner of PostgreSQL objects",
)
+ parser.add_option(
+ "--grant-create-on-public-schema",
+ dest="grant_create_on_public_schema",
+ default=False,
+ action="store_true",
+ help=(
+ "Grant CREATE on the 'public' schema in PostgreSQL 15+ to "
+ "all users. PostgreSQL <= 14 allowed this access automatically. "
+ "This should only be used in the dev/test environments via the "
+ "'make schema' invocation and not anywhere in a production or "
+ "production-like environment."
+ ),
+ )
+
db_options(parser)
logger_options(parser)
diff --git a/lib/lp/app/stories/basics/xx-opstats.rst b/lib/lp/app/stories/basics/xx-opstats.rst
index 0dcd7d6..c338934 100644
--- a/lib/lp/app/stories/basics/xx-opstats.rst
+++ b/lib/lp/app/stories/basics/xx-opstats.rst
@@ -395,7 +395,7 @@ But our database connections are broken.
>>> IStore(Person).find(Person, name="janitor")
Traceback (most recent call last):
...
- storm.exceptions.DisconnectionError:
+ storm.exceptions.DisconnectionError:...
FATAL: database "nonexistent" does not exist
>>> _ = config.pop("no_db")
diff --git a/lib/lp/app/stories/launchpad-root/xx-featuredprojects.rst b/lib/lp/app/stories/launchpad-root/xx-featuredprojects.rst
index 79c90bd..38b310a 100644
--- a/lib/lp/app/stories/launchpad-root/xx-featuredprojects.rst
+++ b/lib/lp/app/stories/launchpad-root/xx-featuredprojects.rst
@@ -19,7 +19,7 @@ projects' pages in Launchpad.
>>> print(extract_text(featured.h2))
Featured projects
- >>> featured_list = featured.find("", "featured-projects-list")
+ >>> featured_list = featured.find(class_="featured-projects-list")
>>> for link in featured_list.find_all("a"):
... print(extract_text(link))
...
@@ -82,7 +82,7 @@ is now at index '4' and is therefore displayed as the top project:
>>> anon_browser.open("http://launchpad.test/")
>>> featured = find_tag_by_id(anon_browser.contents, "homepage-featured")
- >>> featured_list = featured.find("", "featured-projects-list")
+ >>> featured_list = featured.find(class_="featured-projects-list")
>>> for link in featured_list.find_all("a"):
... print(extract_text(link))
...
diff --git a/lib/lp/app/validators/validation.py b/lib/lp/app/validators/validation.py
index 4a2dc62..ea721cf 100644
--- a/lib/lp/app/validators/validation.py
+++ b/lib/lp/app/validators/validation.py
@@ -6,6 +6,7 @@ __all__ = [
"valid_cve_sequence",
"validate_new_team_email",
"validate_oci_branch_name",
+ "validate_content_templates",
]
import re
@@ -114,3 +115,28 @@ def validate_oci_branch_name(branch_name):
if "/" in segment:
return False
return True
+
+
+# XXX alvarocs 2024-12-13: Add merge proposal templates or other
+# template types as allowed keys once they are implemented.
+def validate_content_templates(value):
+ # Omit validation if None
+ if value is None:
+ return True
+ allowed_keys = {
+ "bug_templates",
+ }
+ for key, inner_dict in value.items():
+ # Validate allowed keys
+ if key not in allowed_keys:
+ raise ValueError(
+ f"Invalid key '{key}' in content_templates. "
+ f"Allowed keys: {allowed_keys}"
+ )
+ # Validate 'default' key exists
+ if "default" not in inner_dict:
+ raise ValueError(
+ f"The '{key}' dictionary must contain a 'default' key."
+ )
+ return True
diff --git a/lib/lp/archivepublisher/artifactory.py b/lib/lp/archivepublisher/artifactory.py
index 7cf7982..93d8efe 100644
--- a/lib/lp/archivepublisher/artifactory.py
+++ b/lib/lp/archivepublisher/artifactory.py
@@ -617,7 +617,7 @@ class ArtifactoryPool:
]
elif repository_format == ArchiveRepositoryFormat.RUST:
return [
- "*.tar.xz",
+ "*.crate",
]
elif repository_format == ArchiveRepositoryFormat.GENERIC:
return ["*"]
diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
index 77d44fa..f84d9e2 100644
--- a/lib/lp/archivepublisher/publishing.py
+++ b/lib/lp/archivepublisher/publishing.py
@@ -795,39 +795,51 @@ class Publisher:
archive=self.archive
):
spphs_by_spr[spph.sourcepackagerelease_id].append(spph)
- release_id = "source:%d" % spph.sourcepackagerelease_id
- releases_by_id.setdefault(release_id, spph.sourcepackagerelease)
+ launchpad_release_id = "source:%d" % spph.sourcepackagerelease_id
+ releases_by_id.setdefault(
+ launchpad_release_id, spph.sourcepackagerelease
+ )
self.log.debug(
- "Collecting %s for %s", release_id, spph.sourcepackagename
+ "Collecting %s for %s",
+ launchpad_release_id,
+ spph.sourcepackagename,
)
- pubs_by_id[release_id].append(spph)
+ pubs_by_id[launchpad_release_id].append(spph)
for bpph in publishing_set.getBinariesForPublishing(
archive=self.archive
):
bpphs_by_bpr[bpph.binarypackagerelease_id].append(bpph)
- release_id = "binary:%d" % bpph.binarypackagerelease_id
+ launchpad_release_id = "binary:%d" % bpph.binarypackagerelease_id
self.log.debug(
- "Collecting %s for %s", release_id, bpph.binarypackagename
+ "Collecting %s for %s",
+ launchpad_release_id,
+ bpph.binarypackagename,
+ )
+ releases_by_id.setdefault(
+ launchpad_release_id, bpph.binarypackagerelease
)
- releases_by_id.setdefault(release_id, bpph.binarypackagerelease)
- pubs_by_id[release_id].append(bpph)
+ pubs_by_id[launchpad_release_id].append(bpph)
artifacts = self._diskpool.getAllArtifacts(
self.archive.name, self.archive.repository_format
)
plan = []
for path, properties in sorted(artifacts.items()):
- release_id = properties.get("launchpad.release-id")
+ artifactory_release_id = properties.get("launchpad.release-id")
source_name = properties.get("launchpad.source-name")
source_version = properties.get("launchpad.source-version")
- if not release_id or not source_name or not source_version:
+ if (
+ not artifactory_release_id
+ or not source_name
+ or not source_version
+ ):
# Skip any files that Launchpad didn't put in Artifactory.
continue
plan.append(
(
source_name[0],
source_version[0],
- release_id[0],
+ artifactory_release_id[0],
path,
properties,
)
@@ -839,14 +851,14 @@ class Publisher:
# corresponding pool entries.
missing_sources = set()
missing_binaries = set()
- for _, _, release_id, _, _ in plan:
- if release_id in releases_by_id:
+ for _, _, artifactory_release_id, _, _ in plan:
+ if artifactory_release_id in releases_by_id:
continue
- match = re.match(r"^source:(\d+)$", release_id)
+ match = re.match(r"^source:(\d+)$", artifactory_release_id)
if match is not None:
missing_sources.add(int(match.group(1)))
else:
- match = re.match(r"^binary:(\d+)$", release_id)
+ match = re.match(r"^binary:(\d+)$", artifactory_release_id)
if match is not None:
missing_binaries.add(int(match.group(1)))
for spr in load(SourcePackageRelease, missing_sources):
@@ -860,14 +872,16 @@ class Publisher:
# in Debian-format source packages).
pub_files_by_path = defaultdict(set)
pubs_by_path = defaultdict(set)
- for source_name, source_version, release_id, _, _ in plan:
- for pub_file in releases_by_id[release_id].files:
+ for source_name, source_version, artifactory_release_id, _, _ in plan:
+ for pub_file in releases_by_id[artifactory_release_id].files:
path = self._diskpool.pathFor(
None, source_name, source_version, pub_file
)
pub_files_by_path[path].add(pub_file)
- if release_id in pubs_by_id:
- pubs_by_path[path].update(pubs_by_id[release_id])
+ if artifactory_release_id in pubs_by_id:
+ pubs_by_path[path].update(
+ pubs_by_id[artifactory_release_id]
+ )
root_path = ArtifactoryPath(self._config.archiveroot)
for source_name, source_version, _, path, properties in plan:
diff --git a/lib/lp/archivepublisher/tests/test_artifactory.py b/lib/lp/archivepublisher/tests/test_artifactory.py
index 3fc2aa5..a5212b7 100644
--- a/lib/lp/archivepublisher/tests/test_artifactory.py
+++ b/lib/lp/archivepublisher/tests/test_artifactory.py
@@ -290,7 +290,7 @@ class TestArtifactoryPool(TestCase):
def test_getArtifactPatterns_rust(self):
pool = self.makePool()
self.assertEqual(
- ["*.tar.xz"],
+ ["*.crate"],
pool.getArtifactPatterns(ArchiveRepositoryFormat.RUST),
)
diff --git a/lib/lp/archiveuploader/craftrecipeupload.py b/lib/lp/archiveuploader/craftrecipeupload.py
index 5e9d285..152a469 100644
--- a/lib/lp/archiveuploader/craftrecipeupload.py
+++ b/lib/lp/archiveuploader/craftrecipeupload.py
@@ -8,7 +8,11 @@ __all__ = [
]
import os
+import tarfile
+import tempfile
+from pathlib import Path
+import yaml
from zope.component import getUtility
from lp.archiveuploader.utils import UploadError
@@ -38,33 +42,70 @@ class CraftRecipeUpload:
"""Process this upload, loading it into the database."""
self.logger.debug("Beginning processing.")
- found_craft = False
- craft_paths = []
- for dirpath, _, filenames in os.walk(self.upload_path):
- if dirpath == self.upload_path:
- # All relevant files will be in a subdirectory.
- continue
- for craft_file in sorted(filenames):
- if craft_file.endswith(".tar.xz"):
- found_craft = True
- craft_paths.append(os.path.join(dirpath, craft_file))
-
- if not found_craft:
+ # Find all .tar.xz files in subdirectories
+ upload_path = Path(self.upload_path)
+ craft_paths = list(upload_path.rglob("*.tar.xz"))
+
+ # Skip files directly in upload_path
+ craft_paths = [p for p in craft_paths if p.parent != upload_path]
+
+ if not craft_paths:
raise UploadError("Build did not produce any craft files.")
- for craft_path in craft_paths:
- with open(craft_path, "rb") as file:
- libraryfile = self.librarian.create(
- os.path.basename(craft_path),
- os.stat(craft_path).st_size,
- file,
- filenameToContentType(craft_path),
- restricted=build.is_private,
- )
- build.addFile(libraryfile)
+ for craft_path in sorted(craft_paths):
+ # Check if archive contains .crate files
+ with tempfile.TemporaryDirectory() as tmpdir:
+ with tarfile.open(craft_path, "r:xz") as tar:
+ tar.extractall(path=tmpdir)
+
+ # Look for .crate files and metadata.yaml
+ crate_files = list(Path(tmpdir).rglob("*.crate"))
+ metadata_path = Path(tmpdir) / "metadata.yaml"
+
+ if crate_files and metadata_path.exists():
+ # If we found a crate file and metadata, upload it
+ try:
+ metadata = yaml.safe_load(metadata_path.read_text())
+ crate_name = metadata.get("name")
+ crate_version = metadata.get("version")
+ self.logger.debug(
+ "Found crate %s version %s",
+ crate_name,
+ crate_version,
+ )
+ except Exception as e:
+ self.logger.warning(
+ "Failed to parse metadata.yaml: %s", e
+ )
+
+ crate_path = crate_files[
+ 0
+ ] # Take the first (and should be only) crate file
+ with open(crate_path, "rb") as file:
+ libraryfile = self.librarian.create(
+ os.path.basename(str(crate_path)),
+ os.stat(crate_path).st_size,
+ file,
+ filenameToContentType(str(crate_path)),
+ restricted=build.is_private,
+ )
+ build.addFile(libraryfile)
+ else:
+ # If no crate file found, upload the original archive
+ self.logger.debug(
+ "No crate files found, uploading archive"
+ )
+ with open(craft_path, "rb") as file:
+ libraryfile = self.librarian.create(
+ os.path.basename(str(craft_path)),
+ os.stat(craft_path).st_size,
+ file,
+ filenameToContentType(str(craft_path)),
+ restricted=build.is_private,
+ )
+ build.addFile(libraryfile)
# The master verifies the status to confirm successful upload.
self.logger.debug("Updating %s" % build.title)
build.updateStatus(BuildStatus.FULLYBUILT)
-
self.logger.debug("Finished upload.")
diff --git a/lib/lp/archiveuploader/tests/test_craftrecipeupload.py b/lib/lp/archiveuploader/tests/test_craftrecipeupload.py
index 5f7f737..24ed0f0 100644
--- a/lib/lp/archiveuploader/tests/test_craftrecipeupload.py
+++ b/lib/lp/archiveuploader/tests/test_craftrecipeupload.py
@@ -4,8 +4,12 @@
"""Tests for `CraftRecipeUpload`."""
import os
+import tarfile
+import tempfile
+import yaml
from storm.store import Store
+from zope.security.proxy import removeSecurityProxy
from lp.archiveuploader.tests.test_uploadprocessor import (
TestUploadProcessorBase,
@@ -39,40 +43,120 @@ class TestCraftRecipeUploads(TestUploadProcessorBase):
self.layer.txn, builds=True
)
- def test_sets_build_and_state(self):
- # The upload processor uploads files and sets the correct status.
- self.assertFalse(self.build.verifySuccessfulUpload())
+ def _createArchiveWithCrate(
+ self, upload_dir, crate_name="test-crate", crate_version="0.1.0"
+ ):
+ """Helper to create a tar.xz archive containing a crate & metadata."""
+ # Create a temporary directory to build our archive
+ with tempfile.TemporaryDirectory() as tmpdir:
+ # Create metadata.yaml
+ metadata = {
+ "name": crate_name,
+ "version": crate_version,
+ }
+ metadata_path = os.path.join(tmpdir, "metadata.yaml")
+ with open(metadata_path, "w") as f:
+ yaml.safe_dump(metadata, f)
+
+ # Create dummy crate file
+ crate_path = os.path.join(
+ tmpdir, f"{crate_name}-{crate_version}.crate"
+ )
+ with open(crate_path, "wb") as f:
+ f.write(b"dummy crate contents")
+
+ # Create tar.xz archive
+ archive_path = os.path.join(upload_dir, "output.tar.xz")
+ with tarfile.open(archive_path, "w:xz") as tar:
+ tar.add(metadata_path, arcname="metadata.yaml")
+ tar.add(crate_path, arcname=os.path.basename(crate_path))
+
+ return archive_path
+
+ def _createArchiveWithoutCrate(self, upload_dir, filename="output.tar.xz"):
+ """Helper to create a tar.xz archive without crate files."""
+ archive_path = os.path.join(upload_dir, filename)
+ os.makedirs(os.path.dirname(archive_path), exist_ok=True)
+
+ with tarfile.open(archive_path, "w:xz") as tar:
+ # Add a dummy file
+ with tempfile.NamedTemporaryFile(mode="w") as tmp:
+ tmp.write("test content")
+ tmp.flush()
+ tar.add(tmp.name, arcname="test.txt")
+
+ return archive_path
+
+ def test_processes_crate_from_archive(self):
+ """Test that crates are properly extracted and processed
+ from archives."""
upload_dir = os.path.join(
- self.incoming_folder, "test", str(self.build.id), "ubuntu"
- )
- write_file(
- os.path.join(upload_dir, "foo_0_all.craft.tar.xz"), b"craft"
+ self.incoming_folder, "test", str(self.build.id)
)
- write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+ os.makedirs(upload_dir, exist_ok=True)
+
+ # Create archive with specific crate name and version
+ crate_name = "test-crate"
+ crate_version = "0.2.0"
+ self._createArchiveWithCrate(upload_dir, crate_name, crate_version)
+
handler = UploadHandler.forProcessor(
self.uploadprocessor, self.incoming_folder, "test", self.build
)
result = handler.processCraftRecipe(self.log)
- self.assertEqual(
- UploadStatusEnum.ACCEPTED,
- result,
- "Craft upload failed\nGot: %s" % self.log.getLogBuffer(),
+
+ # Verify upload succeeded
+ self.assertEqual(UploadStatusEnum.ACCEPTED, result)
+ self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
+
+ # Verify only the crate file was stored (not the archive)
+ build = removeSecurityProxy(self.build)
+ files = list(build.getFiles())
+ self.assertEqual(1, len(files))
+ stored_file = files[0][1]
+ expected_filename = f"{crate_name}-{crate_version}.crate"
+ self.assertEqual(expected_filename, stored_file.filename)
+
+ def test_uploads_archive_without_crate(self):
+ """Test that the original archive is uploaded when no crate
+ files exist."""
+ upload_dir = os.path.join(
+ self.incoming_folder, "test", str(self.build.id)
+ )
+ os.makedirs(upload_dir, exist_ok=True)
+
+ # Create archive without crate files
+ archive_name = "test-output.tar.xz"
+ self._createArchiveWithoutCrate(upload_dir, archive_name)
+
+ handler = UploadHandler.forProcessor(
+ self.uploadprocessor, self.incoming_folder, "test", self.build
)
+ result = handler.processCraftRecipe(self.log)
+
+ # Verify upload succeeded
+ self.assertEqual(UploadStatusEnum.ACCEPTED, result)
self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
- self.assertTrue(self.build.verifySuccessfulUpload())
+
+ # Verify the original archive was stored
+ build = removeSecurityProxy(self.build)
+ files = list(build.getFiles())
+ self.assertEqual(1, len(files))
+ stored_file = files[0][1]
+ self.assertEqual(archive_name, stored_file.filename)
def test_requires_craft(self):
- # The upload processor fails if the upload does not contain any
- # .craft files.
- self.assertFalse(self.build.verifySuccessfulUpload())
+ """Test that the upload fails if no .tar.xz files are found."""
upload_dir = os.path.join(
- self.incoming_folder, "test", str(self.build.id), "ubuntu"
+ self.incoming_folder, "test", str(self.build.id)
)
write_file(os.path.join(upload_dir, "foo_0_all.manifest"), b"manifest")
+
handler = UploadHandler.forProcessor(
self.uploadprocessor, self.incoming_folder, "test", self.build
)
result = handler.processCraftRecipe(self.log)
+
self.assertEqual(UploadStatusEnum.REJECTED, result)
self.assertIn(
"ERROR Build did not produce any craft files.",
diff --git a/lib/lp/blueprints/stories/blueprints/xx-productseries.rst b/lib/lp/blueprints/stories/blueprints/xx-productseries.rst
index d239261..3c340dd 100644
--- a/lib/lp/blueprints/stories/blueprints/xx-productseries.rst
+++ b/lib/lp/blueprints/stories/blueprints/xx-productseries.rst
@@ -71,7 +71,9 @@ so the targeting should NOT be automatically approved.
...
... Continue
... -----------------------------26999413214087432371486976730--
- ... """
+ ... """.replace(
+ ... "\n", "\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
@@ -124,7 +126,9 @@ OK, we will also pitch the e4x spec to the same series:
...
... Continue
... -----------------------------26999413214087432371486976730--
- ... """
+ ... """.replace(
+ ... "\n", "\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
@@ -232,7 +236,9 @@ because we are an admin, then we will move it back.
...
... Continue
... -----------------------------26999413214087432371486976730--
- ... """
+ ... """.replace(
+ ... "\n", "\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
@@ -278,7 +284,9 @@ And lets put it back:
...
... Continue
... -----------------------------26999413214087432371486976730--
- ... """
+ ... """.replace(
+ ... "\n", "\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
diff --git a/lib/lp/bugs/browser/tests/test_bugtarget_configure.py b/lib/lp/bugs/browser/tests/test_bugtarget_configure.py
index 88846c7..76b53c0 100644
--- a/lib/lp/bugs/browser/tests/test_bugtarget_configure.py
+++ b/lib/lp/bugs/browser/tests/test_bugtarget_configure.py
@@ -4,6 +4,7 @@
"""Unit tests for bug configuration views."""
from lp.app.enums import ServiceUsage
+from lp.app.validators.validation import validate_content_templates
from lp.registry.interfaces.person import TeamMembershipPolicy
from lp.testing import TestCaseWithFactory, login_person
from lp.testing.layers import DatabaseFunctionalLayer
@@ -192,3 +193,34 @@ class TestProductBugConfigurationView(TestCaseWithFactory):
{"bug_templates": {"default": "new lp template"}},
self.product.content_templates,
)
+
+
+class TestValidateContentTemplates(TestCaseWithFactory):
+ layer = DatabaseFunctionalLayer
+
+ # Test the validator for content templates
+ def test_none_value(self):
+ self.assertTrue(validate_content_templates(None))
+
+ def test_valid_content_templates(self):
+ valid_value = {
+ "bug_templates": {
+ "default": "A default bug template",
+ "security": "A bug template for security related bugs",
+ },
+ }
+ self.assertTrue(validate_content_templates(valid_value))
+
+ def test_invalid_key(self):
+ invalid_value = {"invalid_key": {"default": "A default bug template"}}
+ self.assertRaises(
+ ValueError, validate_content_templates, invalid_value
+ )
+
+ def test_missing_default(self):
+ invalid_value = {
+ "bug_templates": {"not_default": "A not default bug template"}
+ }
+ self.assertRaises(
+ ValueError, validate_content_templates, invalid_value
+ )
diff --git a/lib/lp/bugs/interfaces/bugtarget.py b/lib/lp/bugs/interfaces/bugtarget.py
index 2415010..2f4fec0 100644
--- a/lib/lp/bugs/interfaces/bugtarget.py
+++ b/lib/lp/bugs/interfaces/bugtarget.py
@@ -52,6 +52,7 @@ from lp.app.enums import (
PROPRIETARY_INFORMATION_TYPES,
InformationType,
)
+from lp.app.validators.validation import validate_content_templates
from lp.bugs.interfaces.bugtask import IBugTask
from lp.bugs.interfaces.bugtasksearch import (
BugBlueprintSearch,
@@ -276,26 +277,27 @@ class IBugTarget(IHasBugs):
)
)
- # XXX alvarocs 2024-11-27:
- # To be exported to the API once a validator is added.
- content_templates = Dict(
- title=("Templates to use for reporting a bug"),
- description=(
- "This pre-defined template will be given to the "
- "users to guide them when reporting a bug. "
- ),
- key_type=TextLine(),
- value_type=Dict(
+ content_templates = exported(
+ Dict(
+ title=("Templates to use for reporting a bug"),
+ description=(
+ "This pre-defined template will be given to the "
+ "users to guide them when reporting a bug. "
+ ),
key_type=TextLine(),
- value_type=Text(
+ value_type=Dict(
+ key_type=TextLine(),
+ value_type=Text(
+ required=False,
+ max_length=50000,
+ ),
required=False,
max_length=50000,
),
required=False,
max_length=50000,
- ),
- required=False,
- max_length=50000,
+ constraint=validate_content_templates,
+ )
)
bug_reported_acknowledgement = exported(
diff --git a/lib/lp/bugs/stories/bugs/xx-bugtask-assignee-widget.rst b/lib/lp/bugs/stories/bugs/xx-bugtask-assignee-widget.rst
index 99d759d..80a08a5 100644
--- a/lib/lp/bugs/stories/bugs/xx-bugtask-assignee-widget.rst
+++ b/lib/lp/bugs/stories/bugs/xx-bugtask-assignee-widget.rst
@@ -75,7 +75,9 @@ button:
...
... Save Changes
... -----------------------------19759086281403130373932339922--
- ... """
+ ... """.replace(
+ ... b"\n", b"\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
@@ -157,7 +159,9 @@ But, you can also assign the task to another person, of course:
...
... Save Changes
... -----------------------------19759086281403130373932339922--
- ... """
+ ... """.replace(
+ ... b"\n", b"\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
@@ -239,7 +243,9 @@ Lastly, the widget also allows you to simply assign the task to nobody
...
... Save Changes
... -----------------------------19759086281403130373932339922--
- ... """
+ ... """.replace(
+ ... b"\n", b"\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
diff --git a/lib/lp/bugs/stories/guided-filebug/xx-filebug-attachments.rst b/lib/lp/bugs/stories/guided-filebug/xx-filebug-attachments.rst
index d1afe24..a364223 100644
--- a/lib/lp/bugs/stories/guided-filebug/xx-filebug-attachments.rst
+++ b/lib/lp/bugs/stories/guided-filebug/xx-filebug-attachments.rst
@@ -110,7 +110,9 @@ treat all empty-equivalent values equally.
...
... Submit Bug Report
... -----------------------------2051078912280543729816242321--
- ... """
+ ... """.replace(
+ ... b"\n", b"\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other...
diff --git a/lib/lp/registry/interfaces/projectgroup.py b/lib/lp/registry/interfaces/projectgroup.py
index 0b39dc4..e8324c1 100644
--- a/lib/lp/registry/interfaces/projectgroup.py
+++ b/lib/lp/registry/interfaces/projectgroup.py
@@ -33,6 +33,7 @@ from lp.app.interfaces.launchpad import (
IServiceUsage,
)
from lp.app.validators.name import name_validator
+from lp.app.validators.validation import validate_content_templates
from lp.blueprints.interfaces.specificationtarget import IHasSpecifications
from lp.blueprints.interfaces.sprint import IHasSprints
from lp.bugs.interfaces.bugtarget import IHasBugs, IHasOfficialBugTags
@@ -384,26 +385,27 @@ class IProjectGroupPublic(
)
)
- # XXX alvarocs 2024-11-27:
- # To be exported to the API once a validator is added.
- content_templates = Dict(
- title=("Templates to use for reporting a bug"),
- description=(
- "This pre-defined template will be given to the "
- "users to guide them when reporting a bug. "
- ),
- key_type=TextLine(),
- value_type=Dict(
+ content_templates = exported(
+ Dict(
+ title=("Templates to use for reporting a bug"),
+ description=(
+ "This pre-defined template will be given to the "
+ "users to guide them when reporting a bug. "
+ ),
key_type=TextLine(),
- value_type=Text(
+ value_type=Dict(
+ key_type=TextLine(),
+ value_type=Text(
+ required=False,
+ max_length=50000,
+ ),
required=False,
max_length=50000,
),
required=False,
max_length=50000,
- ),
- required=False,
- max_length=50000,
+ constraint=validate_content_templates,
+ )
)
bug_reported_acknowledgement = exported(
diff --git a/lib/lp/registry/stories/productseries/xx-productseries-review.rst b/lib/lp/registry/stories/productseries/xx-productseries-review.rst
index 93d6c12..53ed212 100644
--- a/lib/lp/registry/stories/productseries/xx-productseries-review.rst
+++ b/lib/lp/registry/stories/productseries/xx-productseries-review.rst
@@ -21,7 +21,9 @@ to bazaar. Also changes the name of the productseries to 'newname'.
...
... Change
... -----------------------------10572808480422220968425074--
- ... """
+ ... """.replace(
+ ... "\n", "\r\n"
+ ... ) # Necessary to ensure it fits the HTTP standard
... )
... ) # noqa
HTTP/1.1 303 See Other
diff --git a/lib/lp/registry/stories/webservice/xx-distribution-source-package.rst b/lib/lp/registry/stories/webservice/xx-distribution-source-package.rst
index a848237..0a69eff 100644
--- a/lib/lp/registry/stories/webservice/xx-distribution-source-package.rst
+++ b/lib/lp/registry/stories/webservice/xx-distribution-source-package.rst
@@ -14,6 +14,7 @@ Source packages can be obtained from the context of a distribution.
>>> pprint_entry(mozilla_firefox)
bug_reported_acknowledgement: None
bug_reporting_guidelines: None
+ content_templates: None
display_name: 'mozilla-firefox in Debian'
distribution_link: 'http://.../debian'
name: 'mozilla-firefox'
diff --git a/lib/lp/registry/stories/webservice/xx-distribution.rst b/lib/lp/registry/stories/webservice/xx-distribution.rst
index 1d9a8e9..f873924 100644
--- a/lib/lp/registry/stories/webservice/xx-distribution.rst
+++ b/lib/lp/registry/stories/webservice/xx-distribution.rst
@@ -32,6 +32,7 @@ And for every distribution we publish most of its attributes.
code_admin_link: None
commercial_subscription_is_due: False
commercial_subscription_link: None
+ content_templates: None
current_series_link: 'http://.../ubuntu/hoary'
date_created: '2006-10-16T18:31:43.415195+00:00'
default_traversal_policy: 'Series'
diff --git a/lib/lp/registry/stories/webservice/xx-distroseries.rst b/lib/lp/registry/stories/webservice/xx-distroseries.rst
index 51a089a..1dd4aeb 100644
--- a/lib/lp/registry/stories/webservice/xx-distroseries.rst
+++ b/lib/lp/registry/stories/webservice/xx-distroseries.rst
@@ -75,6 +75,7 @@ For distroseries we publish a subset of its attributes.
bug_reporting_guidelines: None
changeslist: 'hoary-changes@xxxxxxxxxx'
component_names: ['main', 'restricted']
+ content_templates: None
date_created: '2006-10-16T18:31:43.483559+00:00'
datereleased: None
description: 'Hoary is the ...
diff --git a/lib/lp/registry/stories/webservice/xx-person.rst b/lib/lp/registry/stories/webservice/xx-person.rst
index 7e93f8b..4ae1a58 100644
--- a/lib/lp/registry/stories/webservice/xx-person.rst
+++ b/lib/lp/registry/stories/webservice/xx-person.rst
@@ -813,6 +813,7 @@ Subscribed packages can be listed with getBugSubscriberPackages:
---
bug_reported_acknowledgement: None
bug_reporting_guidelines: None
+ content_templates: None
display_name: '...'
distribution_link: '...'
name: 'fooix'
diff --git a/lib/lp/registry/stories/webservice/xx-project-registry.rst b/lib/lp/registry/stories/webservice/xx-project-registry.rst
index 37b1061..b6bdb0a 100644
--- a/lib/lp/registry/stories/webservice/xx-project-registry.rst
+++ b/lib/lp/registry/stories/webservice/xx-project-registry.rst
@@ -79,6 +79,7 @@ host.
bug_reported_acknowledgement: None
bug_reporting_guidelines: None
bug_tracker_link: None
+ content_templates: None
date_created: '...'
description: 'The Mozilla Project...'
display_name: 'The Mozilla Project'
@@ -177,6 +178,7 @@ Projects are available at their canonical URL on the API virtual host.
bug_tracker_link: None
commercial_subscription_is_due: False
commercial_subscription_link: None
+ content_templates: None
date_created: '2004-09-24T20:58:02.185708+00:00'
date_next_suggest_packaging: None
description: 'The Mozilla Firefox web browser'
@@ -841,6 +843,7 @@ virtual host.
branch_link: 'http://.../~babadoo-owner/babadoo/fooey'
bug_reported_acknowledgement: None
bug_reporting_guidelines: None
+ content_templates: None
date_created: '...'
display_name: 'foobadoo'
driver_link: None
diff --git a/lib/lp/registry/stories/webservice/xx-source-package.rst b/lib/lp/registry/stories/webservice/xx-source-package.rst
index d09785d..813e1b8 100644
--- a/lib/lp/registry/stories/webservice/xx-source-package.rst
+++ b/lib/lp/registry/stories/webservice/xx-source-package.rst
@@ -32,6 +32,7 @@ distribution series.
>>> pprint_entry(evolution)
bug_reported_acknowledgement: None
bug_reporting_guidelines: None
+ content_templates: None
displayname: 'evolution in My-distro My-series'
distribution_link: 'http://.../my-distro'
distroseries_link: 'http://.../my-distro/my-series'
diff --git a/lib/lp/services/database/doc/textsearching.rst b/lib/lp/services/database/doc/textsearching.rst
index c374934..56ccbf1 100644
--- a/lib/lp/services/database/doc/textsearching.rst
+++ b/lib/lp/services/database/doc/textsearching.rst
@@ -249,19 +249,26 @@ Repeated '-' are simply ignored by to_tsquery().
>>> ftq("---foo--- ---bar---")
---foo---&---bar--- <=> 'foo' & 'bar'
+
+XXX 2025-01-23 lgp171188: The following doctests have a lot of placeholders
+ignoring key values like '&', '<->', and '<2>' since it is not straightforward
+to test different values in a doctest based on different PostgreSQL versions.
+So these ignored values have been checked in the unit tests in the
+lp.services.database.tests.test_text_searching module.
+
Hyphens surrounded by two words are retained. This reflects the way
how to_tsquery() and to_tsvector() handle such strings.
>>> print(search_same("foo-bar"))
FTI data: 'bar':3 'foo':2 'foo-bar':1
- query: 'foo-bar' & 'foo' & 'bar'
+ query: 'foo-bar' ... 'foo' ... 'bar'
match: True
A '-' surrounded by numbers is treated as the sign of the right-hand number.
>>> print(search_same("123-456"))
FTI data: '-456':2 '123':1
- query: '123' & '-456'
+ query: '123' ... '-456'
match: True
Punctuation is handled consistently. If a string containing punctuation
@@ -272,31 +279,31 @@ string finds the indexed text.
>>> for symbol in punctuation:
... print(repr(symbol), search_same("foo%sbar" % symbol))
...
- "'" FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '"' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '#' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '$' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '%' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '*' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '+' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- ',' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
+ "'" FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '"' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '#' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '$' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '%' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '*' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '+' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ ',' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
'.' FTI data: 'foo.bar':1 query: 'foo.bar' match: True
'/' FTI data: 'foo/bar':1 query: 'foo/bar' match: True
- ':' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- ';' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '<' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '=' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '>' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '?' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '@' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '[' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '\\' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- ']' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '^' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '`' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '{' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '}' FTI data: 'bar':2 'foo':1 query: 'foo' & 'bar' match: True
- '~' FTI data: 'foo':1 '~bar':2 query: 'foo' & '~bar' match: True
+ ':' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ ';' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '<' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '=' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '>' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '?' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '@' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '[' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '\\' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ ']' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '^' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '`' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '{' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '}' FTI data: 'bar':2 'foo':1 query: 'foo' ... 'bar' match: True
+ '~' FTI data: 'foo':1 '~bar':2 query: 'foo' ... '~bar' match: True
>>> for symbol in punctuation:
... print(
@@ -399,14 +406,14 @@ Bug #44913 - Unicode characters in the wrong place.
>>> print(search_same("abc-a\N{LATIN SMALL LETTER C WITH CEDILLA}"))
FTI data: 'abc':2 'abc-aç':1 'aç':3
- query: 'abc-aç' & 'abc' & 'aç'
+ query: 'abc-aç' ... 'abc' ... 'aç'
match: True
Cut & Paste of 'Smart' quotes. Note that the quotation mark is retained
in the FTI.
>>> print(search_same("a-a\N{RIGHT DOUBLE QUOTATION MARK}"))
- FTI data: 'a-a”':1 'a”':3 query: 'a-a”' & 'a”' match: True
+ FTI data: 'a-a”':1 'a”':3 query: 'a-a”' ... 'a”' match: True
>>> print(
... search_same(
@@ -414,7 +421,7 @@ in the FTI.
... "\N{RIGHT SINGLE QUOTATION MARK}"
... )
... )
- FTI data: 'a’':2 '‘a':1 query: '‘a' & 'a’' match: True
+ FTI data: 'a’':2 '‘a':1 query: '‘a' ... 'a’' match: True
Bug #44913 - Nothing but stopwords in a query needing repair
@@ -543,7 +550,7 @@ or invalid leading operators
Bug #160236
>>> ftq("foo AND AND bar-baz")
- foo&bar-baz <=> 'foo' & 'bar-baz' & 'bar' & 'baz'
+ foo&bar-baz <=> 'foo' ... 'bar-baz' ... 'bar' ... 'baz'
>>> ftq("foo OR OR bar.baz")
foo|bar.baz <=> 'foo' | 'bar.baz'
diff --git a/lib/lp/services/database/tests/test_text_searching.py b/lib/lp/services/database/tests/test_text_searching.py
new file mode 100644
index 0000000..b375deb
--- /dev/null
+++ b/lib/lp/services/database/tests/test_text_searching.py
@@ -0,0 +1,159 @@
+# Copyright 2025 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Test text searching functionality."""
+
+from testtools.matchers import Equals, MatchesAny
+from zope.component import getUtility
+
+from lp.services.database.interfaces import (
+ DEFAULT_FLAVOR,
+ MAIN_STORE,
+ IStoreSelector,
+)
+from lp.services.helpers import backslashreplace
+from lp.testing import TestCaseWithFactory
+from lp.testing.layers import DatabaseFunctionalLayer
+
+
+def get_store():
+ return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
+
+
+def ftq(query):
+ store = get_store()
+ try:
+ result = store.execute("SELECT _ftq(%s), ftq(%s)", (query, query))
+ uncompiled, compiled = result.get_one()
+ except Exception:
+ store.rollback()
+ raise
+ if uncompiled is not None:
+ uncompiled = backslashreplace(uncompiled)
+ uncompiled = uncompiled.replace(" ", "")
+ if compiled is not None:
+ compiled = backslashreplace(compiled)
+ result = "%s <=> %s" % (uncompiled, compiled)
+ return result
+
+
+def search(text_to_search, search_phrase):
+ store = get_store()
+ result = store.execute("SELECT to_tsvector(%s)", (text_to_search,))
+ ts_vector = result.get_all()[0][0]
+ result = store.execute("SELECT ftq(%s)", (search_phrase,))
+ ts_query = result.get_all()[0][0]
+ result = store.execute(
+ "SELECT to_tsvector(%s) @@ ftq(%s)",
+ (text_to_search, search_phrase),
+ )
+ match = result.get_all()[0][0]
+ return "FTI data: %s query: %s match: %s" % (
+ ts_vector,
+ ts_query,
+ str(match),
+ )
+
+
+def search_same(text):
+ return search(text, text)
+
+
+class TestTextSearchingFTI(TestCaseWithFactory):
+ layer = DatabaseFunctionalLayer
+
+ def assert_result_matches(self, result, expected, placeholders_list):
+ matchers = [
+ Equals(expected.format(*placeholders))
+ for placeholders in placeholders_list
+ ]
+ self.assertThat(
+ result,
+ MatchesAny(
+ *matchers,
+ ),
+ )
+
+ def test_hyphens_surrounded_by_two_words_retained(self):
+ # Hyphens surrounded by two words are retained. This reflects the
+ # way to_tsquery() and to_tsvector() handle such strings.
+ result = search_same("foo-bar")
+ expected = (
+ "FTI data: 'bar':3 'foo':2 'foo-bar':1 query: "
+ "'foo-bar' {} 'foo' {} 'bar' match: True"
+ )
+ self.assert_result_matches(result, expected, (["&"] * 3, ["<->"] * 3))
+
+ def test_hyphen_surrounded_by_numbers_sign_of_right_number(self):
+ # A '-' surrounded by numbers is treated as the sign of the
+ # right-hand number.
+ result = search_same("123-456")
+ expected = (
+ "FTI data: '-456':2 '123':1 query: '123' {} '-456' match: True"
+ )
+ self.assert_result_matches(result, expected, (["&"], ["<->"]))
+
+ def test_consistent_handling_of_punctuation(self):
+ # Punctuation is handled consistently. If a string containing
+ # punctuation appears in an FTI, it can also be passed to ftq(),
+ # and a search for this string finds the indexed text.
+ result = search_same("foo'bar")
+ expected = (
+ "FTI data: 'bar':2 'foo':1 query: 'foo' {} 'bar' match: True"
+ )
+ placeholders = (["&"], ["<->"])
+ punctuations = "'\"#$%*+,:;<=>?@[\\]^`{}`"
+ for symbol in punctuations:
+ result = search_same(f"foo{symbol}bar")
+ self.assert_result_matches(
+ result,
+ expected,
+ placeholders,
+ )
+ result = search_same("foo.bar")
+ expected = "FTI data: 'foo.bar':1 query: 'foo.bar' match: True"
+ self.assert_result_matches(
+ result,
+ expected,
+ ([], []),
+ )
+
+ def test_unicode_characters_in_the_wrong_place(self):
+ # Bug #44913 - Unicode characters in the wrong place.
+ result = search_same("abc-a\N{LATIN SMALL LETTER C WITH CEDILLA}")
+ expected = (
+ "FTI data: 'abc':2 'abc-aç':1 'aç':3 query: 'abc-aç' {} 'abc' "
+ "{} 'aç' match: True"
+ )
+ self.assert_result_matches(
+ result,
+ expected,
+ (["&"] * 2, ["<->"] * 2),
+ )
+
+ def test_cut_and_past_of_smart_quotes(self):
+ # Cut & Paste of 'Smart' quotes. Note that the quotation mark is
+ # retained in the FTI.
+ result = search_same("a-a\N{RIGHT DOUBLE QUOTATION MARK}")
+ expected = (
+ "FTI data: 'a-a”':1 'a”':3 query: 'a-a”' {} 'a”' match: True"
+ )
+ self.assert_result_matches(
+ result,
+ expected,
+ (["&"], ["<2>"]),
+ )
+ result = search_same(
+ "\N{LEFT SINGLE QUOTATION MARK}a.a"
+ "\N{RIGHT SINGLE QUOTATION MARK}"
+ )
+ expected = "FTI data: 'a’':2 '‘a':1 query: '‘a' {} 'a’' match: True"
+ self.assert_result_matches(result, expected, (["&"], ["<->"]))
+
+ def test_bug_160236_ftq(self):
+ # filing a bug with summary "a&& a-a" oopses with sql syntax error
+ result = ftq("foo AND AND bar-baz")
+ expected = "foo&bar-baz <=> 'foo' {} 'bar-baz' {} 'bar' {} 'baz'"
+ self.assert_result_matches(
+ result, expected, (["&"] * 3, ["&", "<->", "<->"])
+ )
diff --git a/lib/lp/services/webapp/errorlog.py b/lib/lp/services/webapp/errorlog.py
index f2cb950..cd7d642 100644
--- a/lib/lp/services/webapp/errorlog.py
+++ b/lib/lp/services/webapp/errorlog.py
@@ -424,9 +424,7 @@ class ErrorReportingUtility:
"database does not allow connections",
"pgbouncer database is disabled",
):
- if message.startswith(ok) or message.startswith(
- "ERROR: " + ok
- ):
+ if ok in message or f"ERROR: {ok}" in message:
return True
return False
diff --git a/lib/lp/services/webapp/tests/test_error.py b/lib/lp/services/webapp/tests/test_error.py
index 0ebc506..c223924 100644
--- a/lib/lp/services/webapp/tests/test_error.py
+++ b/lib/lp/services/webapp/tests/test_error.py
@@ -5,6 +5,7 @@
import http.client
import logging
+import re
import socket
import time
from urllib.error import HTTPError
@@ -138,6 +139,28 @@ class TestDatabaseErrorViews(TestCase):
def __init__(self, message):
super().__init__(("DisconnectionError", message))
+ class DisconnectsWithMessageRegex:
+ def __init__(self, message_regex):
+ self.message_regex = message_regex
+
+ def match(self, actual):
+ if "DisconnectionError" != actual[0] or not re.match(
+ self.message_regex, actual[1]
+ ):
+
+ class DisconnectsWithMessageRegexMismatch:
+ def __init__(self, description):
+ self.description = description
+
+ def describe(self):
+ return self.description
+
+ return DisconnectsWithMessageRegexMismatch(
+ "reference = ('DisconnectionError', "
+ f"'{self.message_regex}')\n"
+ f"actual = ('{actual[0]}', '{actual[1]}')"
+ )
+
browser = Browser()
browser.raiseHttpErrors = False
with CaptureOops() as oopses:
@@ -146,6 +169,15 @@ class TestDatabaseErrorViews(TestCase):
self.assertThat(
browser.contents, Contains(DisconnectionErrorView.reason)
)
+ # XXX 2025-01-22 lgp171188: Since there isn't a straightforward
+ # way to query the Postgres version at test runtime and assert
+ # accordingly, I have added `MatchesAny` clauses in the existing
+ # style for Postgres 14 support. Once we upgrade to Postgres 14,
+ # all the code and assertions for older versions can be removed.
+ libpq_14_connection_error_prefix_regex = (
+ r'connection to server at "localhost" \(127\.0\.0\.1\), '
+ r"port .* failed"
+ )
self.assertThat(
[
(oops["type"], oops["value"].split("\n")[0])
@@ -164,9 +196,17 @@ class TestDatabaseErrorViews(TestCase):
]
* 2
+ [
- Disconnects(
- "could not connect to server: Connection refused"
- )
+ MatchesAny(
+ # libpq < 14.0
+ Disconnects(
+ "could not connect to server: Connection refused"
+ ),
+ # libpq >= 14.0
+ DisconnectsWithMessageRegex(
+ libpq_14_connection_error_prefix_regex
+ + ": Connection refused"
+ ),
+ ),
]
* 6
),
@@ -186,8 +226,16 @@ class TestDatabaseErrorViews(TestCase):
],
MatchesListwise(
[
- Disconnects(
- "could not connect to server: Connection refused"
+ MatchesAny(
+ # libpq < 14.0
+ Disconnects(
+ "could not connect to server: Connection refused"
+ ),
+ # libpq >= 14.0
+ DisconnectsWithMessageRegex(
+ libpq_14_connection_error_prefix_regex
+ + ": Connection refused"
+ ),
)
]
* 8
@@ -223,7 +271,21 @@ class TestDatabaseErrorViews(TestCase):
]
self.assertNotEqual([], disconnection_oopses)
self.assertThat(
- disconnection_oopses, AllMatch(Disconnects("database removed"))
+ disconnection_oopses,
+ AllMatch(
+ MatchesAny(
+ # libpq < 14.0
+ Disconnects("database removed"),
+ # libpq ~= 14.0
+ DisconnectsWithMessageRegex(
+ libpq_14_connection_error_prefix_regex
+ + ": ERROR: database does not allow connections: "
+ r"launchpad_ftest_.*"
+ ),
+ # libpq ~= 16.0
+ Disconnects("server closed the connection unexpectedly"),
+ )
+ ),
)
# A second request doesn't log any OOPSes.
diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py
index 3641dd1..4d6b03e 100644
--- a/lib/lp/soyuz/interfaces/archive.py
+++ b/lib/lp/soyuz/interfaces/archive.py
@@ -612,6 +612,11 @@ class IArchiveSubscriberView(Interface):
),
required=False,
),
+ order_by_date_ascending=Bool(
+ title=_("Order by ascending creation date"),
+ description=_("Return oldest results first."),
+ required=False,
+ ),
)
# Really ISourcePackagePublishingHistory, patched in
# lp.soyuz.interfaces.webservice.
@@ -628,6 +633,7 @@ class IArchiveSubscriberView(Interface):
created_since_date=None,
component_name=None,
order_by_date=False,
+ order_by_date_ascending=False,
):
"""All `ISourcePackagePublishingHistory` target to this archive."""
# It loads additional related objects only needed in the API call
@@ -644,6 +650,7 @@ class IArchiveSubscriberView(Interface):
eager_load=False,
component_name=None,
order_by_date=False,
+ order_by_date_ascending=False,
include_removed=True,
only_unpublished=False,
):
@@ -669,6 +676,8 @@ class IArchiveSubscriberView(Interface):
If not specified, publications are ordered by source
package name (lexicographically), then by descending version
and then descending ID.
+ :param order_by_date_ascending: Order publications by ascending
+ creation date and then by ascending ID.
:param include_removed: If True, include publications that have been
removed from disk as well as those that have not.
:param only_unpublished: If True, only include publications that
@@ -749,6 +758,11 @@ class IArchiveSubscriberView(Interface):
),
required=False,
),
+ order_by_date_ascending=Bool(
+ title=_("Order by ascending creation date"),
+ description=_("Return oldest results first."),
+ required=False,
+ ),
component_name=TextLine(title=_("Component name"), required=False),
)
# Really IBinaryPackagePublishingHistory, patched in
@@ -767,6 +781,7 @@ class IArchiveSubscriberView(Interface):
created_since_date=None,
ordered=True,
order_by_date=False,
+ order_by_date_ascending=False,
include_removed=True,
only_unpublished=False,
eager_load=False,
diff --git a/lib/lp/soyuz/interfaces/binarypackagebuild.py b/lib/lp/soyuz/interfaces/binarypackagebuild.py
index e72b450..0f0be6a 100644
--- a/lib/lp/soyuz/interfaces/binarypackagebuild.py
+++ b/lib/lp/soyuz/interfaces/binarypackagebuild.py
@@ -8,6 +8,7 @@ __all__ = [
"IBinaryPackageBuild",
"IBuildRescoreForm",
"IBinaryPackageBuildSet",
+ "MissingDependencies",
"UnparsableDependencies",
]
@@ -46,6 +47,10 @@ class UnparsableDependencies(Exception):
"""Raised when parsing invalid dependencies on a binary package."""
+class MissingDependencies(UnparsableDependencies):
+ """Raised when there are missing dependencies on a binary package."""
+
+
class IBinaryPackageBuildView(IPackageBuildView):
"""A Build interface for items requiring launchpad.View."""
diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py
index 12fa3ac..11c9b17 100644
--- a/lib/lp/soyuz/model/archive.py
+++ b/lib/lp/soyuz/model/archive.py
@@ -26,6 +26,7 @@ from storm.databases.postgres import JSON as PgJSON
from storm.expr import (
Alias,
And,
+ Asc,
Cast,
Count,
Desc,
@@ -698,6 +699,7 @@ class Archive(StormBase):
exact_match=False,
created_since_date=None,
order_by_date=False,
+ order_by_date_ascending=False,
component_name=None,
):
"""See `IArchive`."""
@@ -714,6 +716,7 @@ class Archive(StormBase):
eager_load=True,
component_name=component_name,
order_by_date=order_by_date,
+ order_by_date_ascending=order_by_date_ascending,
include_removed=True,
)
@@ -770,6 +773,7 @@ class Archive(StormBase):
eager_load=False,
component_name=None,
order_by_date=False,
+ order_by_date_ascending=False,
include_removed=True,
only_unpublished=False,
):
@@ -781,13 +785,18 @@ class Archive(StormBase):
Desc(SourcePackagePublishingHistory.datecreated),
Desc(SourcePackagePublishingHistory.id),
]
+ elif order_by_date_ascending:
+ order_by = [
+ Asc(SourcePackagePublishingHistory.datecreated),
+ Asc(SourcePackagePublishingHistory.id),
+ ]
else:
order_by = [
SourcePackageName.name,
Desc(SourcePackagePublishingHistory.id),
]
- if not order_by_date or name is not None:
+ if not (order_by_date or order_by_date_ascending) or name is not None:
clauses.append(
SourcePackagePublishingHistory.sourcepackagename_id
== SourcePackageName.id
@@ -804,7 +813,10 @@ class Archive(StormBase):
elif len(name) != 0:
clauses.append(SourcePackageName.name.is_in(name))
- if not order_by_date or version is not None:
+ if (
+ not (order_by_date or order_by_date_ascending)
+ or version is not None
+ ):
clauses.append(
SourcePackagePublishingHistory.sourcepackagerelease_id
== SourcePackageRelease.id
@@ -820,7 +832,7 @@ class Archive(StormBase):
Cast(SourcePackageRelease.version, "text")
== six.ensure_text(version)
)
- elif not order_by_date:
+ elif not (order_by_date or order_by_date_ascending):
order_by.insert(1, Desc(SourcePackageRelease.version))
if component_name is not None:
@@ -1001,6 +1013,7 @@ class Archive(StormBase):
created_since_date=None,
ordered=True,
order_by_date=False,
+ order_by_date_ascending=False,
include_removed=True,
only_unpublished=False,
need_bpr=False,
@@ -1012,7 +1025,7 @@ class Archive(StormBase):
"""
clauses = [BinaryPackagePublishingHistory.archive == self]
- if order_by_date:
+ if order_by_date or order_by_date_ascending:
ordered = False
if order_by_date:
@@ -1020,6 +1033,11 @@ class Archive(StormBase):
Desc(BinaryPackagePublishingHistory.datecreated),
Desc(BinaryPackagePublishingHistory.id),
]
+ elif order_by_date_ascending:
+ order_by = [
+ Asc(BinaryPackagePublishingHistory.datecreated),
+ Asc(BinaryPackagePublishingHistory.id),
+ ]
elif ordered:
order_by = [
BinaryPackageName.name,
@@ -1120,6 +1138,7 @@ class Archive(StormBase):
created_since_date=None,
ordered=True,
order_by_date=False,
+ order_by_date_ascending=False,
include_removed=True,
only_unpublished=False,
eager_load=False,
@@ -1140,6 +1159,7 @@ class Archive(StormBase):
created_since_date=created_since_date,
ordered=ordered,
order_by_date=order_by_date,
+ order_by_date_ascending=order_by_date_ascending,
include_removed=include_removed,
only_unpublished=only_unpublished,
component_name=component_name,
diff --git a/lib/lp/soyuz/model/archivejob.py b/lib/lp/soyuz/model/archivejob.py
index 100e3ba..9242e69 100644
--- a/lib/lp/soyuz/model/archivejob.py
+++ b/lib/lp/soyuz/model/archivejob.py
@@ -5,6 +5,7 @@ import io
import json
import logging
import os
+import os.path
import tarfile
import tempfile
import zipfile
@@ -14,10 +15,10 @@ from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
import zstandard
from lazr.delegates import delegate_to
+from packaging.utils import parse_wheel_filename
from pkginfo import SDist, Wheel
from storm.expr import And
from storm.locals import JSON, Int, Reference
-from wheel_filename import parse_wheel_filename
from zope.component import getUtility
from zope.interface import implementer, provider
from zope.security.proxy import removeSecurityProxy
@@ -330,6 +331,9 @@ class CIBuildUploadJob(ArchiveJobDerived):
SourcePackageFileType.GO_MODULE_MOD,
SourcePackageFileType.GO_MODULE_ZIP,
},
+ # XXX: ruinedyourlife 2024-12-06
+ # Remove the Rust format and its scanner as we don't need it from
+ # CI builds. We only care about crates in craft recipe uploads.
ArchiveRepositoryFormat.RUST: {
BinaryPackageFormat.CRATE,
},
@@ -431,10 +435,12 @@ class CIBuildUploadJob(ArchiveJobDerived):
) -> Dict[str, ArtifactMetadata]:
all_metadata = {}
for path in paths:
- if not path.name.endswith(".whl"):
+ filename = str(os.path.basename(os.fsdecode(path)))
+ if not filename.endswith(".whl"):
continue
try:
- parsed_path = parse_wheel_filename(str(path))
+ _, _, _, tags = parse_wheel_filename(str(filename))
+ platforms = [tag.platform for tag in set(tags)]
wheel = Wheel(str(path))
except Exception as e:
logger.warning(
@@ -448,7 +454,7 @@ class CIBuildUploadJob(ArchiveJobDerived):
version=wheel.version,
summary=wheel.summary or "",
description=wheel.description or "",
- architecturespecific="any" not in parsed_path.platform_tags,
+ architecturespecific="any" not in platforms, # != platform,
homepage=wheel.home_page or "",
)
return all_metadata
diff --git a/lib/lp/soyuz/model/binarypackagebuild.py b/lib/lp/soyuz/model/binarypackagebuild.py
index 4ac614c..30e60c2 100644
--- a/lib/lp/soyuz/model/binarypackagebuild.py
+++ b/lib/lp/soyuz/model/binarypackagebuild.py
@@ -71,6 +71,7 @@ from lp.soyuz.interfaces.binarypackagebuild import (
BuildSetStatus,
IBinaryPackageBuild,
IBinaryPackageBuildSet,
+ MissingDependencies,
UnparsableDependencies,
)
from lp.soyuz.interfaces.binarysourcereference import (
@@ -651,6 +652,14 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
apt_pkg.init_system()
# Check package build dependencies using debian.deb822
+
+ if self.dependencies is None:
+ raise MissingDependencies(
+ "Build dependencies for %s (%s) are missing.\n"
+ "It indicates that something is wrong in buildd-workers."
+ % (self.title, self.id)
+ )
+
try:
with warnings.catch_warnings():
warnings.simplefilter("error")
@@ -662,6 +671,28 @@ class BinaryPackageBuild(PackageBuildMixin, StormBase):
% (self.title, self.id, self.dependencies)
)
+ # For each dependency check if there is a name (without spaces) and
+ # version. See TestBuildUpdateDependencies.testInvalidDependencies()
+
+ def has_valid_name(dep):
+ return (
+ dep.get("name") is not None
+ and len(dep.get("name", "").split(" ")) == 1
+ )
+
+ def has_valid_version(dep):
+ return dep.get("version") is None or len(dep.get("version")) == 2
+
+ for or_dep in parsed_deps:
+ for dep in or_dep:
+ if not has_valid_name(dep) or not has_valid_version(dep):
+ raise UnparsableDependencies(
+ "Build dependencies for %s (%s) could not be parsed: "
+ "'%s'\nIt indicates that something is wrong in "
+ "buildd-workers."
+ % (self.title, self.id, self.dependencies)
+ )
+
remaining_deps = []
for or_dep in parsed_deps:
if not any(self._isDependencySatisfied(token) for token in or_dep):
diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py
index e5bb461..1fd3dd6 100644
--- a/lib/lp/soyuz/tests/test_archive.py
+++ b/lib/lp/soyuz/tests/test_archive.py
@@ -3814,6 +3814,29 @@ class TestGetPublishedSources(TestCaseWithFactory):
list(archive.getPublishedSources(order_by_date=True)),
)
+ def test_order_by_date_ascending(self):
+ archive = self.factory.makeArchive()
+ middle_spph = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ date_uploaded=datetime(2009, 1, 1, tzinfo=timezone.utc),
+ )
+ newest_spph = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ date_uploaded=datetime(2025, 1, 1, tzinfo=timezone.utc),
+ )
+ oldest_spph = self.factory.makeSourcePackagePublishingHistory(
+ archive=archive,
+ date_uploaded=datetime(2000, 1, 1, tzinfo=timezone.utc),
+ )
+ expected_order = [oldest_spph, middle_spph, newest_spph]
+
+ self.assertEqual(
+ expected_order,
+ list(
+ archive.api_getPublishedSources(order_by_date_ascending=True)
+ ),
+ )
+
def test_matches_version_as_text(self):
# Versions such as 0.7-4 and 0.07-4 are equal according to the
# "debversion" type, but for lookup purposes we compare the text of
@@ -5185,6 +5208,29 @@ class TestgetAllPublishedBinaries(TestCaseWithFactory):
list(archive.getAllPublishedBinaries(order_by_date=True)),
)
+ def test_order_by_date_ascending(self):
+ archive = self.factory.makeArchive()
+ middle_bpph = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ datecreated=datetime(2009, 1, 1, tzinfo=timezone.utc),
+ )
+ newest_bpph = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ datecreated=datetime(2025, 1, 1, tzinfo=timezone.utc),
+ )
+ oldest_bpph = self.factory.makeBinaryPackagePublishingHistory(
+ archive=archive,
+ datecreated=datetime(2000, 1, 1, tzinfo=timezone.utc),
+ )
+ expected_order = [oldest_bpph, middle_bpph, newest_bpph]
+
+ self.assertEqual(
+ expected_order,
+ list(
+ archive.getAllPublishedBinaries(order_by_date_ascending=True)
+ ),
+ )
+
def test_matches_version_as_text(self):
# Versions such as 0.7-4 and 0.07-4 are equal according to the
# "debversion" type, but for lookup purposes we compare the text of
diff --git a/lib/lp/soyuz/tests/test_archivejob.py b/lib/lp/soyuz/tests/test_archivejob.py
index b006a5f..d4c5488 100644
--- a/lib/lp/soyuz/tests/test_archivejob.py
+++ b/lib/lp/soyuz/tests/test_archivejob.py
@@ -1895,7 +1895,7 @@ class TestCIBuildUploadJob(TestCaseWithFactory):
expected_logs = [
"Running %r (ID %d) in status Waiting" % (job, job.job_id),
"Failed to scan _invalid.whl as a Python wheel: Invalid wheel "
- "filename: '_invalid.whl'",
+ "filename (wrong number of parts): '_invalid'",
"%r (ID %d) failed with user error %r."
% (
job,
diff --git a/lib/lp/soyuz/tests/test_binarypackagebuild.py b/lib/lp/soyuz/tests/test_binarypackagebuild.py
index 89c02e6..0193fe5 100644
--- a/lib/lp/soyuz/tests/test_binarypackagebuild.py
+++ b/lib/lp/soyuz/tests/test_binarypackagebuild.py
@@ -296,6 +296,21 @@ class TestBuildUpdateDependencies(TestCaseWithFactory):
# Missing comma between dependencies.
self.assertRaisesUnparsableDependencies(depwait_build, "name1 name2")
+ # Mismatched parentheses.
+ self.assertRaisesUnparsableDependencies(
+ depwait_build, "name (= version"
+ )
+
+ # Wrong parentheses.
+ self.assertRaisesUnparsableDependencies(
+ depwait_build, "name )= version("
+ )
+
+ # Invalid or.
+ self.assertRaisesUnparsableDependencies(
+ depwait_build, "name (>= version) | name2 ("
+ )
+
def testBug378828(self):
# `IBinaryPackageBuild.updateDependencies` copes with the
# scenario where the corresponding source publication is not
diff --git a/lib/lp/soyuz/tests/test_binarysourcereference.py b/lib/lp/soyuz/tests/test_binarysourcereference.py
index 4b50b7c..831b6c8 100644
--- a/lib/lp/soyuz/tests/test_binarysourcereference.py
+++ b/lib/lp/soyuz/tests/test_binarysourcereference.py
@@ -37,13 +37,24 @@ class TestBinarySourceReference(TestCaseWithFactory):
def test_createFromRelationship_nonsense(self):
bpr = self.factory.makeBinaryPackageRelease()
- expected_message = (
- r"Invalid Built-Using field; cannot be parsed by deb822: .*"
- )
+ # in newer versions of deb822, when the version can't be processed, it
+ # will return None. Therefore, nonsense version will raise an exception
+ # that the version must be strict
+ expected_message = r"Built-Using must contain strict dependencies: .*"
with ExpectedException(UnparsableBuiltUsing, expected_message):
self.reference_set.createFromRelationship(
bpr, "nonsense (", BinarySourceReferenceType.BUILT_USING
)
+ with ExpectedException(UnparsableBuiltUsing, expected_message):
+ self.reference_set.createFromRelationship(
+ bpr, "nonsense )= 1(", BinarySourceReferenceType.BUILT_USING
+ )
+ with ExpectedException(UnparsableBuiltUsing, expected_message):
+ self.reference_set.createFromRelationship(
+ bpr,
+ "nonsense (nonsense)",
+ BinarySourceReferenceType.BUILT_USING,
+ )
def test_createFromRelationship_alternatives(self):
bpr = self.factory.makeBinaryPackageRelease()
diff --git a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.rst b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.rst
index 49ac695..484024e 100644
--- a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.rst
+++ b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.rst
@@ -55,9 +55,9 @@ shown as static HTML.
>>> print(anon_browser.url)
http://translations.launchpad.test/firefox/1.0/+imports
>>> row = find_tags_by_class(anon_browser.contents, "import_entry_row")[1]
- >>> print(extract_text(row.find("", "import_source")))
+ >>> print(extract_text(row.find(class_="import_source")))
po/es.po in Mozilla Firefox 1.0 series
- >>> print(extract_text(row.find("", "import_status")))
+ >>> print(extract_text(row.find(class_="import_status")))
Needs Review
Some tarballs contain files whose names look like PO or POT files, but
diff --git a/requirements/launchpad.txt b/requirements/launchpad.txt
index 40e3c45..57fca91 100644
--- a/requirements/launchpad.txt
+++ b/requirements/launchpad.txt
@@ -17,7 +17,7 @@ backports.functools-lru-cache==1.5
# ztk-versions.cfg uses 3.2.0 on Python 3, but that drops support for Python
# 3.5. Pin to 3.1.7 until we no longer care about xenial.
bcrypt==3.1.7
-beautifulsoup4==4.7.1
+beautifulsoup4==4.12.3
billiard==3.6.4.0
bleach==6.1.0
bleach-allowlist==1.0.3
@@ -62,7 +62,7 @@ immutables==0.14
importlib==1.0.2
importlib-metadata==8.5.0
incremental==21.3.0
-ipython==7.9.0
+ipython==8.12.3
ipython-genutils==0.2.0
iso8601==0.1.12
jedi==0.17.2
@@ -72,7 +72,7 @@ keyring==0.6.2
keystoneauth1==4.1.0
kombu==4.6.11
launchpad-buildd==206
-launchpadlib==1.10.17
+launchpadlib==2.1.0
lazr.batchnavigator==1.3.1
lazr.config==2.2.3
lazr.delegates==2.0.4
@@ -94,7 +94,7 @@ mistune==0.8.3
monotonic==1.5
more-itertools==10.5.0
msgpack==1.0.2
-multipart==0.2.4
+multipart==1.2.1
netaddr==0.7.19
netifaces==0.11.0
oauth==1.0
@@ -123,8 +123,7 @@ pgbouncer==0.0.9
pickleshare==0.7.5
pkginfo==1.11.2
prettytable==0.7.2
-prompt-toolkit==2.0.10
-psutil==5.4.2
+psutil==6.1.0
psycopg2==2.8.6
ptyprocess==0.7.0
pyasn1==0.6.1
@@ -143,7 +142,7 @@ pyOpenSSL==17.5.0
pyparsing==3.1.4
pystache==0.6.6
python-dateutil==2.9.0.post0
-python-debian==0.1.32
+python-debian==0.1.49
python-keystoneclient==3.21.0
python3-openid==3.2
python-swiftclient==4.6.0
@@ -163,7 +162,7 @@ setuptools-git==1.2
setuptools-scm==3.4.3
sgmllib3k==1.0.0
soupmatchers==0.4
-soupsieve==1.9
+soupsieve==2.6
statsd==3.3.0
stevedore==1.32.0
# lp:~launchpad-committers/storm/lp
@@ -175,7 +174,6 @@ testscenarios==0.4
testtools==2.5.0
timeline==0.0.7
toml==0.10.2
-traitlets==4.3.3
transaction==3.0.1
treq==18.6.0
# lp:~launchpad/twisted:lp-backport
@@ -193,7 +191,6 @@ webencodings==0.5.1
WebOb==1.8.9
WebTest==2.0.35
Werkzeug==1.0.1
-wheel-filename==1.1.0
wrapt==1.12.1
wsgi-intercept==1.9.2
WSGIProxy2==0.4.6
diff --git a/requirements/types.txt b/requirements/types.txt
index 0b7a7d7..3c51cbb 100644
--- a/requirements/types.txt
+++ b/requirements/types.txt
@@ -5,6 +5,7 @@ types-bleach==3.3.1
types-oauthlib==3.1.0
types-psycopg2==2.9.21.4
types-python-dateutil==2.9.0.20241003
+types-PyYAML==6.0.12.20241230
types-requests==0.1.13
types-six==0.1.9
types-urllib3==1.26.25.4
Follow ups