← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] ~lgp171188/launchpad:merge-stable into launchpad:db-devel

 

Guruprasad has proposed merging ~lgp171188/launchpad:merge-stable into launchpad:db-devel.

Commit message:
Manual merge from stable up to 5ad0f578c061

This pulls in some resource limit fixes from the stable
branch, addressing issues that are causing the db-devel
buildbot runs to fail.


Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~lgp171188/launchpad/+git/launchpad/+merge/492178
-- 
Your team Launchpad code reviewers is requested to review the proposed merge of ~lgp171188/launchpad:merge-stable into launchpad:db-devel.
diff --git a/lib/lp/answers/interfaces/questioncollection.py b/lib/lp/answers/interfaces/questioncollection.py
index e6d370b..486d997 100644
--- a/lib/lp/answers/interfaces/questioncollection.py
+++ b/lib/lp/answers/interfaces/questioncollection.py
@@ -21,7 +21,7 @@ from lazr.restful.declarations import (
 )
 from lazr.restful.fields import ReferenceChoice
 from zope.interface import Attribute, Interface
-from zope.schema import Choice, Int, List, TextLine
+from zope.schema import Choice, Datetime, Int, List, TextLine
 
 from lp import _
 from lp.answers.enums import (
@@ -48,6 +48,20 @@ class IQuestionCollection(Interface):
             value_type=ReferenceChoice(vocabulary="Language"),
         ),
         sort=Choice(title=_("Sort"), required=False, vocabulary=QuestionSort),
+        created_before=Datetime(
+            title=_(
+                "Search for questions that were created "
+                "before the given date."
+            ),
+            required=False,
+        ),
+        created_since=Datetime(
+            title=_(
+                "Search for questions that were created "
+                "since the given date."
+            ),
+            required=False,
+        ),
     )
     @operation_returns_collection_of(Interface)  # IQuestion.
     @export_read_operation()
@@ -57,6 +71,8 @@ class IQuestionCollection(Interface):
         status=list(QUESTION_STATUS_DEFAULT_SEARCH),
         language=None,
         sort=None,
+        created_before=None,
+        created_since=None,
     ):
         """Return the questions from the collection matching search criteria.
 
@@ -74,6 +90,12 @@ class IQuestionCollection(Interface):
         :param sort: An attribute of QuestionSort. If None, a default value is
             used. When there is a search_text value, the default is to sort by
             RELEVANCY, otherwise results are sorted NEWEST_FIRST.
+
+        :param created_since: Only return results whose `datecreated` property
+            is greater than or equal to this date.
+
+        :param created_before: Only return results whose `datecreated` property
+            is smaller than this date.
         """
 
     def getQuestionLanguages():
@@ -118,6 +140,8 @@ class ISearchableByQuestionOwner(IQuestionCollection):
         sort=None,
         owner=None,
         needs_attention_from=None,
+        created_before=None,
+        created_since=None,
     ):
         """Return the questions from the collection matching search criteria.
 
@@ -139,6 +163,10 @@ class ISearchableByQuestionOwner(IQuestionCollection):
         :param sort: An attribute of QuestionSort. If None, a default value is
             used. When there is a search_text value, the default is to sort by
             RELEVANCY, otherwise results are sorted NEWEST_FIRST.
+        :param created_since: Only return results whose `datecreated` property
+            is greater than or equal to this date.
+        :param created_before: Only return results whose `datecreated` property
+            is smaller than this date.
         """
 
 
diff --git a/lib/lp/answers/interfaces/questionsperson.py b/lib/lp/answers/interfaces/questionsperson.py
index 2561a74..93086da 100644
--- a/lib/lp/answers/interfaces/questionsperson.py
+++ b/lib/lp/answers/interfaces/questionsperson.py
@@ -13,7 +13,7 @@ from lazr.restful.declarations import (
 )
 from lazr.restful.fields import ReferenceChoice
 from zope.interface import Interface
-from zope.schema import Bool, Choice, List, TextLine
+from zope.schema import Bool, Choice, Datetime, List, TextLine
 
 from lp import _
 from lp.answers.enums import (
@@ -67,6 +67,20 @@ class IQuestionsPerson(IQuestionCollection):
             title=_("Needs attentions from"), default=False, required=False
         ),
         sort=Choice(title=_("Sort"), required=False, vocabulary=QuestionSort),
+        created_before=Datetime(
+            title=_(
+                "Search for questions that were created "
+                "before the given date."
+            ),
+            required=False,
+        ),
+        created_since=Datetime(
+            title=_(
+                "Search for questions that were created "
+                "since the given date."
+            ),
+            required=False,
+        ),
     )
     @operation_returns_collection_of(Interface)  # IQuestion.
     @export_read_operation()
@@ -80,6 +94,8 @@ class IQuestionsPerson(IQuestionCollection):
         sort=None,
         participation=None,
         needs_attention=None,
+        created_before=None,
+        created_since=None,
     ):
         """Search the person's questions.
 
@@ -104,4 +120,8 @@ class IQuestionsPerson(IQuestionCollection):
         :param sort: An attribute of QuestionSort. If None, a default value is
             used. When there is a search_text value, the default is to sort by
             RELEVANCY, otherwise results are sorted NEWEST_FIRST.
+        :param created_since: Only return results whose `datecreated` property
+            is greater than or equal to this date.
+        :param created_before: Only return results whose `datecreated` property
+            is smaller than this date.
         """
diff --git a/lib/lp/answers/model/question.py b/lib/lp/answers/model/question.py
index bd760b9..853fe2d 100644
--- a/lib/lp/answers/model/question.py
+++ b/lib/lp/answers/model/question.py
@@ -887,6 +887,8 @@ class QuestionSet:
         language=None,
         status=QUESTION_STATUS_DEFAULT_SEARCH,
         sort=None,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionSet`"""
         return QuestionSearch(
@@ -894,6 +896,8 @@ class QuestionSet:
             status=status,
             language=language,
             sort=sort,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getQuestionLanguages(self):
@@ -1073,6 +1077,8 @@ class QuestionSearch:
         distribution=None,
         sourcepackagename=None,
         projectgroup=None,
+        created_before=None,
+        created_since=None,
     ):
         self.search_text = search_text
         self.nl_phrase_used = False
@@ -1098,6 +1104,8 @@ class QuestionSearch:
         self.distribution = distribution
         self.sourcepackagename = sourcepackagename
         self.projectgroup = projectgroup
+        self.created_before = created_before
+        self.created_since = created_since
 
     def getTargetConstraints(self):
         """Return the constraints related to the IQuestionTarget context."""
@@ -1221,6 +1229,12 @@ class QuestionSearch:
                 )
             )
 
+        if self.created_before:
+            constraints.append(Question.datecreated < self.created_before)
+
+        if self.created_since:
+            constraints.append(Question.datecreated >= self.created_since)
+
         return constraints
 
     def getPrejoins(self):
@@ -1348,6 +1362,8 @@ class QuestionTargetSearch(QuestionSearch):
         product=None,
         distribution=None,
         sourcepackagename=None,
+        created_before=None,
+        created_since=None,
     ):
         assert (
             product is not None
@@ -1365,6 +1381,8 @@ class QuestionTargetSearch(QuestionSearch):
             product=product,
             distribution=distribution,
             sourcepackagename=sourcepackagename,
+            created_before=created_before,
+            created_since=created_since,
         )
 
         if owner:
@@ -1450,6 +1468,8 @@ class QuestionPersonSearch(QuestionSearch):
         sort=None,
         participation=None,
         needs_attention=False,
+        created_before=None,
+        created_since=None,
     ):
         if needs_attention:
             needs_attention_from = person
@@ -1463,6 +1483,8 @@ class QuestionPersonSearch(QuestionSearch):
             language=language,
             needs_attention_from=needs_attention_from,
             sort=sort,
+            created_before=created_before,
+            created_since=created_since,
         )
 
         assert IPerson.providedBy(person), "expected IPerson, got %r" % person
diff --git a/lib/lp/answers/model/questionsperson.py b/lib/lp/answers/model/questionsperson.py
index 964c050..3c907cb 100644
--- a/lib/lp/answers/model/questionsperson.py
+++ b/lib/lp/answers/model/questionsperson.py
@@ -28,6 +28,8 @@ class QuestionsPersonMixin:
         sort=None,
         participation=None,
         needs_attention=None,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionsPerson`."""
         return QuestionPersonSearch(
@@ -38,6 +40,8 @@ class QuestionsPersonMixin:
             sort=sort,
             participation=participation,
             needs_attention=needs_attention,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getQuestionLanguages(self):
diff --git a/lib/lp/answers/tests/test_question.py b/lib/lp/answers/tests/test_question.py
index 29adfe5..f9599b3 100644
--- a/lib/lp/answers/tests/test_question.py
+++ b/lib/lp/answers/tests/test_question.py
@@ -1,6 +1,8 @@
 # Copyright 2013-2017 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+from datetime import datetime, timedelta, timezone
+
 from testtools.testcase import ExpectedException
 from zope.component import getUtility
 from zope.security.interfaces import Unauthorized
@@ -72,3 +74,47 @@ class TestQuestionSearch(TestCaseWithFactory):
         questions = list(getUtility(IQuestionSet).searchQuestions())
         self.assertIn(active_question, questions)
         self.assertNotIn(inactive_question, questions)
+
+    def test_created_before(self):
+        today = datetime.now(timezone.utc)
+        nine_days_ago = today - timedelta(days=9)
+
+        q_nine_days_ago = self.factory.makeQuestion(
+            datecreated=today - timedelta(days=9)
+        )
+        q_ten_days_ago = self.factory.makeQuestion(
+            datecreated=today - timedelta(days=10)
+        )
+
+        questions = list(
+            getUtility(IQuestionSet).searchQuestions(
+                created_before=nine_days_ago
+            )
+        )
+
+        # Requires using assertIn/assertNotIn instead of assertEqual
+        # because database already contains multiple questions
+        self.assertIn(q_ten_days_ago, questions)
+        self.assertNotIn(q_nine_days_ago, questions)
+
+    def test_created_since(self):
+        today = datetime.now(timezone.utc)
+        nine_days_ago = today - timedelta(days=9)
+
+        q_nine_days_ago = self.factory.makeQuestion(
+            datecreated=today - timedelta(days=9)
+        )
+        q_ten_days_ago = self.factory.makeQuestion(
+            datecreated=today - timedelta(days=10)
+        )
+
+        questions = list(
+            getUtility(IQuestionSet).searchQuestions(
+                created_since=nine_days_ago
+            )
+        )
+
+        # Requires using assertIn/assertNotIn instead of assertEqual
+        # because database already contains multiple questions
+        self.assertIn(q_nine_days_ago, questions)
+        self.assertNotIn(q_ten_days_ago, questions)
diff --git a/lib/lp/archiveuploader/buildinfofile.py b/lib/lp/archiveuploader/buildinfofile.py
index bd8851d..137b9ae 100644
--- a/lib/lp/archiveuploader/buildinfofile.py
+++ b/lib/lp/archiveuploader/buildinfofile.py
@@ -43,25 +43,25 @@ class BuildInfoFile(PackageUploadFile, SignableTagFile):
         )
         self.parse(verify_signature=not policy.unsigned_buildinfo_ok)
         arch_match = re_isbuildinfo.match(self.filename)
-        self.architecture = arch_match.group(3)
+        self.filename_archtag = arch_match.group(3)
 
     @property
     def is_sourceful(self):
         # XXX cjwatson 2017-03-29: We should get this from the parsed
         # Architecture field instead.
-        return self.architecture == "source"
+        return self.filename_archtag == "source"
 
     @property
     def is_binaryful(self):
         # XXX cjwatson 2017-03-29: We should get this from the parsed
         # Architecture field instead.
-        return self.architecture != "source"
+        return self.filename_archtag != "source"
 
     @property
     def is_archindep(self):
         # XXX cjwatson 2017-03-29: We should get this from the parsed
         # Architecture field instead.
-        return self.architecture == "all"
+        return self.filename_archtag == "all"
 
     def verify(self):
         """Verify the uploaded buildinfo file.
@@ -82,11 +82,11 @@ class BuildInfoFile(PackageUploadFile, SignableTagFile):
     def checkBuild(self, build):
         """See `PackageUploadFile`."""
         try:
-            das = self.policy.distroseries[self.architecture]
+            das = self.policy.distroseries[self.filename_archtag]
         except NotFoundError:
             raise UploadError(
                 "Upload to unknown architecture %s for distroseries %s"
-                % (self.architecture, self.policy.distroseries)
+                % (self.filename_archtag, self.policy.distroseries)
             )
 
         # Sanity check; raise an error if the build we've been
diff --git a/lib/lp/archiveuploader/changesfile.py b/lib/lp/archiveuploader/changesfile.py
index 6f0f81d..050f15b 100644
--- a/lib/lp/archiveuploader/changesfile.py
+++ b/lib/lp/archiveuploader/changesfile.py
@@ -336,6 +336,18 @@ class ChangesFile(SignableTagFile):
         return set(self._dict["Architecture"].decode().split())
 
     @property
+    def architecture_variants(self):
+        """Return set of strings specifying architecture variants listed in file.
+
+        Can be empty.
+
+        For instance {}, {'amd64v3'}.
+        """
+        return set(
+            self._dict.get("Architecture-Variant", b"").decode().split()
+        )
+
+    @property
     def binaries(self):
         """Return set of binary package names listed."""
         return set(self._dict.get("Binary", "").decode().strip().split())
diff --git a/lib/lp/archiveuploader/nascentupload.py b/lib/lp/archiveuploader/nascentupload.py
index e7ecb7c..9bee114 100644
--- a/lib/lp/archiveuploader/nascentupload.py
+++ b/lib/lp/archiveuploader/nascentupload.py
@@ -372,7 +372,7 @@ class NascentUpload:
                 ddeb_key = (
                     uploaded_file.package,
                     uploaded_file.version,
-                    uploaded_file.architecture,
+                    uploaded_file.filename_archtag,
                 )
                 if ddeb_key in unmatched_ddebs:
                     yield UploadError(
@@ -392,7 +392,7 @@ class NascentUpload:
                         (
                             uploaded_file.package + "-dbgsym",
                             uploaded_file.version,
-                            uploaded_file.architecture,
+                            uploaded_file.filename_archtag,
                         )
                     )
                 except KeyError:
@@ -680,7 +680,7 @@ class NascentUpload:
                     % (
                         uploaded_file.package,
                         uploaded_file.version,
-                        uploaded_file.architecture,
+                        uploaded_file.filename_archtag,
                     )
                 )
 
@@ -698,10 +698,10 @@ class NascentUpload:
                     override_name
                 )
 
-                if uploaded_file.architecture == "all":
+                if uploaded_file.filename_archtag == "all":
                     archtag = None
                 else:
-                    archtag = uploaded_file.architecture
+                    archtag = uploaded_file.filename_archtag
 
                 try:
                     spph = uploaded_file.findCurrentSourcePublication()
diff --git a/lib/lp/archiveuploader/nascentuploadfile.py b/lib/lp/archiveuploader/nascentuploadfile.py
index 01b2243..f1f7f47 100644
--- a/lib/lp/archiveuploader/nascentuploadfile.py
+++ b/lib/lp/archiveuploader/nascentuploadfile.py
@@ -570,7 +570,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         # Yeah, this is weird. Where else can I discover this without
         # unpacking the deb file, though?
         binary_match = re_isadeb.match(self.filename)
-        self.architecture = binary_match.group(3)
+        self.filename_archtag = binary_match.group(3)
 
     #
     # Useful properties.
@@ -583,7 +583,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         They can be build in any architecture and the result will fit all
         architectures available.
         """
-        return self.architecture.lower() == "all"
+        return self.filename_archtag.lower() == "all"
 
     @property
     def archtag(self):
@@ -593,7 +593,7 @@ class BaseBinaryUploadFile(PackageUploadFile):
         of the machine that has built it (it is encoded in the changesfile
         name).
         """
-        archtag = self.architecture
+        archtag = self.filename_archtag
         if archtag == "all":
             return self.changes.filename_archtag
         return archtag
@@ -733,33 +733,58 @@ class BaseBinaryUploadFile(PackageUploadFile):
                 % (filename_version, control_version_chopped)
             )
 
-    def verifyArchitecture(self):
-        """Check if the control architecture matches the changesfile.
+    def verifyABIAndISATags(self):
+        """Check if the control abi/isa matches the changesfile.
 
         Also check if it is a valid architecture in LP context.
         """
-        control_arch = six.ensure_text(self.control.get("Architecture", b""))
+        control_arch = self.control.get("Architecture", b"").decode()
+        control_arch_variant = self.control.get(
+            "Architecture-Variant", b""
+        ).decode()
+        abi_tag = control_arch
+        if control_arch_variant:
+            isa_tag = control_arch_variant
+        else:
+            isa_tag = abi_tag
+
         valid_archs = [
             a.architecturetag for a in self.policy.distroseries.architectures
         ]
 
-        if control_arch not in valid_archs and control_arch != "all":
+        if abi_tag != "all" and abi_tag not in valid_archs:
+            yield UploadError(
+                "%s: Unknown architecture: '%s'" % (self.filename, abi_tag)
+            )
+
+        if (
+            isa_tag != abi_tag
+            and isa_tag != "all"
+            and isa_tag not in valid_archs
+        ):
             yield UploadError(
-                "%s: Unknown architecture: '%s'"
-                % (self.filename, control_arch)
+                "%s: Unknown architecture variant: '%s'"
+                % (self.filename, isa_tag)
             )
 
-        if control_arch not in self.changes.architectures:
+        if abi_tag not in self.changes.architectures:
             yield UploadError(
                 "%s: control file lists arch as '%s' which isn't "
-                "in the changes file." % (self.filename, control_arch)
+                "in the changes file." % (self.filename, abi_tag)
             )
 
-        if control_arch != self.architecture:
+        if isa_tag != abi_tag:
+            if isa_tag not in self.changes.architecture_variants:
+                yield UploadError(
+                    "%s: control file lists arch variant as '%s' which isn't "
+                    "in the changes file." % (self.filename, isa_tag)
+                )
+
+        if isa_tag != self.filename_archtag:
             yield UploadError(
-                "%s: control file lists arch as '%s' which doesn't "
-                "agree with version '%s' in the filename."
-                % (self.filename, control_arch, self.architecture)
+                "%s: control file lists ISA as '%s' which doesn't "
+                "agree with '%s' in the filename."
+                % (self.filename, isa_tag, self.filename_archtag)
             )
 
     def verifyDepends(self):
@@ -1107,7 +1132,7 @@ class UdebBinaryUploadFile(BaseBinaryUploadFile):
         return [
             self.verifyPackage,
             self.verifyVersion,
-            self.verifyArchitecture,
+            self.verifyABIAndISATags,
             self.verifyDepends,
             self.verifySection,
             self.verifyPriority,
@@ -1126,7 +1151,7 @@ class DebBinaryUploadFile(BaseBinaryUploadFile):
         return [
             self.verifyPackage,
             self.verifyVersion,
-            self.verifyArchitecture,
+            self.verifyABIAndISATags,
             self.verifyDepends,
             self.verifySection,
             self.verifyPriority,
diff --git a/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.buildinfo b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.buildinfo
new file mode 100644
index 0000000..e839edc
--- /dev/null
+++ b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.buildinfo
@@ -0,0 +1,25 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA1
+
+Format: 1.0
+Source: bar
+Binary: bar
+Architecture: amd64
+Architecture-Variant: amd64v3
+Version: 1.0-1
+Checksums-Md5:
+ 224a95bb17616a86e02e8e3850851e2b 608 bar_1.0-1_amd64v3.deb
+Build-Origin: Ubuntu
+Build-Architecture: amd64
+Build-Date: Wed, 29 Mar 2017 00:01:21 +0100
+Installed-Build-Depends:
+ dpkg (= 1.18.22),
+ dpkg-dev (= 1.18.22)
+
+-----BEGIN PGP SIGNATURE-----
+
+iHQEARECADQWIQQ0DKO7Jw4nFsnuC3aOfrcIbGSoxQUCaIreyxYcZm9vLmJhckBj
+YW5vbmljYWwuY29tAAoJEI5+twhsZKjF/twAn1h3gkpBuqzQ/X8PUoq3wxvITTRS
+AJwIh6ElUPyc41i0qBnXWY7prTx7pA==
+=xkiw
+-----END PGP SIGNATURE-----
diff --git a/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.changes b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.changes
new file mode 100644
index 0000000..528661b
--- /dev/null
+++ b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.changes
@@ -0,0 +1,31 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA1
+
+Format: 1.7
+Date: Thu, 30 Mar 2006 01:36:14 +0100
+Source: bar
+Binary: bar
+Architecture: amd64
+Architecture-Variant: amd64v3
+Version: 1.0-1
+Distribution: breezy
+Urgency: low
+Maintainer: Launchpad team <launchpad@xxxxxxxxxxxxxxxxxxx>
+Changed-By: Daniel Silverstone <daniel.silverstone@xxxxxxxxxxxxx>
+Description: 
+ bar        - Stuff for testing
+Changes: 
+ bar (1.0-1) breezy; urgency=low
+ .
+   * A variant build
+Files: 
+ 224a95bb17616a86e02e8e3850851e2b 608 devel optional bar_1.0-1_amd64v3.deb
+ 92888986ce2a3fac29edcc42db3b12b6 609 devel optional bar_1.0-1_amd64v3.buildinfo
+
+-----BEGIN PGP SIGNATURE-----
+
+iHQEARECADQWIQQ0DKO7Jw4nFsnuC3aOfrcIbGSoxQUCaIre5RYcZm9vLmJhckBj
+YW5vbmljYWwuY29tAAoJEI5+twhsZKjF98UAn03R2Au8dzcnuxPIjvgIjP7UFbgo
+AJwMe0oIFkgAIMUcRqRwLQQx9QqXtg==
+=pdPo
+-----END PGP SIGNATURE-----
diff --git a/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.deb b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.deb
new file mode 100644
index 0000000..1c55544
Binary files /dev/null and b/lib/lp/archiveuploader/tests/data/suite/bar_1.0-1_variant/bar_1.0-1_amd64v3.deb differ
diff --git a/lib/lp/archiveuploader/tests/test_buildinfofile.py b/lib/lp/archiveuploader/tests/test_buildinfofile.py
index acd0d02..7260454 100644
--- a/lib/lp/archiveuploader/tests/test_buildinfofile.py
+++ b/lib/lp/archiveuploader/tests/test_buildinfofile.py
@@ -67,7 +67,7 @@ class TestBuildInfoFile(PackageUploadFileTestCase):
                 "0.42",
                 self.createChangesFile("foo_0.1-1_%s.changes" % arch, changes),
             )
-            self.assertEqual(arch, buildinfofile.architecture)
+            self.assertEqual(arch, buildinfofile.filename_archtag)
             self.assertEqual(is_sourceful, buildinfofile.is_sourceful)
             self.assertEqual(is_binaryful, buildinfofile.is_binaryful)
             self.assertEqual(is_archindep, buildinfofile.is_archindep)
diff --git a/lib/lp/archiveuploader/tests/test_changesfile.py b/lib/lp/archiveuploader/tests/test_changesfile.py
index 067b6bf..062e57e 100644
--- a/lib/lp/archiveuploader/tests/test_changesfile.py
+++ b/lib/lp/archiveuploader/tests/test_changesfile.py
@@ -296,6 +296,22 @@ class ChangesFileTests(TestCase):
         self.assertEqual("i386", changes.architecture_line)
         self.assertEqual({"i386"}, changes.architectures)
 
+    def test_architecture_variants(self):
+        # The architecture variants get extracted from the changes
+        # file and parsed correctly.
+        changes = self.createChangesFile(
+            "mypkg_0.1_i386.changes", self.getBaseChanges()
+        )
+        self.assertEqual(set(), changes.architecture_variants)
+        contents = self.getBaseChanges()
+        contents["Architecture-Variant"] = "i386sse"
+        changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
+        self.assertEqual({"i386sse"}, changes.architecture_variants)
+        contents = self.getBaseChanges()
+        contents["Architecture-Variant"] = "i386sse i386avx"
+        changes = self.createChangesFile("mypkg_0.1_i386.changes", contents)
+        self.assertEqual({"i386sse", "i386avx"}, changes.architecture_variants)
+
     def test_source(self):
         # The source package name gets extracted from the changes file.
         changes = self.createChangesFile(
diff --git a/lib/lp/archiveuploader/tests/test_nascentuploadfile.py b/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
index 0de2157..ce7cc7c 100644
--- a/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
+++ b/lib/lp/archiveuploader/tests/test_nascentuploadfile.py
@@ -43,8 +43,14 @@ from lp.soyuz.enums import (
     PackagePublishingStatus,
     PackageUploadCustomFormat,
 )
-from lp.testing import TestCaseWithFactory
-from lp.testing.layers import LaunchpadZopelessLayer, ZopelessDatabaseLayer
+from lp.testing import (
+    TestCase,
+    TestCaseWithFactory,
+)
+from lp.testing.layers import (
+    LaunchpadZopelessLayer,
+    ZopelessDatabaseLayer,
+)
 
 
 class NascentUploadFileTestCase(TestCaseWithFactory):
@@ -874,6 +880,39 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         self.assertEqual(BuildStatus.FULLYBUILT, build.status)
         self.assertIs(None, build.upload_log)
 
+    def test_checkBuild_variant(self):
+        # checkBuild() verifies consistency with a build.
+        self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries, architecturetag="amd64"
+        )
+        das = self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries,
+            architecturetag="amd64v3",
+            underlying_architecturetag="amd64",
+        )
+        build = self.factory.makeBinaryPackageBuild(
+            distroarchseries=das, archive=self.policy.archive
+        )
+        control = self.getBaseControl()
+        control["Architecture"] = b"amd64"
+        control["Architecture-Variant"] = b"amd64v3"
+        uploadfile = self.createDebBinaryUploadFile(
+            "foo_0.42_amd64v3.deb",
+            "main/python",
+            "unknown",
+            "mypkg",
+            "0.42",
+            None,
+            control=control,
+            data_format="gz",
+            control_format="gz",
+        )
+        uploadfile.checkBuild(build)
+        # checkBuild() sets the build status to FULLYBUILT and
+        # removes the upload log.
+        self.assertEqual(BuildStatus.FULLYBUILT, build.status)
+        self.assertIs(None, build.upload_log)
+
     def test_checkBuild_inconsistent(self):
         # checkBuild() raises UploadError if inconsistencies between build
         # and upload file are found.
@@ -983,3 +1022,198 @@ class DebBinaryUploadFileTests(PackageUploadFileTestCase):
         self.assertEqual(
             spph2.sourcepackagerelease, uploadfile.findSourcePackageRelease()
         )
+
+
+class FakeDAS:
+    def __init__(self, arch_tag):
+        self.architecturetag = arch_tag
+
+
+class FakeSeries:
+    def __init__(self, valid_arches):
+        self.architectures = [FakeDAS(arch_tag) for arch_tag in valid_arches]
+
+
+class FakePolicy:
+    def __init__(self, series):
+        self.distroseries = series
+
+
+class FakeChanges:
+    def __init__(self, architectures, architecture_variants):
+        self.architectures = architectures
+        self.architecture_variants = architecture_variants
+
+
+class DebBinaryUploadFileUnitTests(TestCase):
+
+    layer = ZopelessDatabaseLayer
+
+    def abiIsaErrorsFor(
+        self, *, filename, control_fields, changes_fields, valid_arches
+    ):
+
+        policy = FakePolicy(FakeSeries(valid_arches))
+
+        changes = FakeChanges(
+            architectures=changes_fields["Architecture"].split(),
+            architecture_variants=set(
+                changes_fields.get("Architecture-Variant", "").split()
+            ),
+        )
+
+        uploadfile = DebBinaryUploadFile(
+            filepath=filename,
+            md5={},
+            size=0,
+            component_and_section="main/misc",
+            priority_name="optional",
+            package=None,
+            version=None,
+            changes=changes,
+            policy=policy,
+            logger=None,
+        )
+        uploadfile.parseControl(
+            {k: v.encode() for k, v in control_fields.items()},
+        )
+        result = []
+        for error in uploadfile.verifyABIAndISATags():
+            if not isinstance(error, UploadError):
+                self.fail(
+                    f"verifyABIAndISATags yielded wrong kind of error: {error}"
+                )
+            error = str(error)
+            if not error.startswith(filename + ": "):
+                self.fail(
+                    "verifyABIAndISATags yielded error without expected "
+                    f"prefix: {error}"
+                )
+            error = error.replace(filename + ": ", "", 1)
+            result.append(str(error))
+        return result
+
+    def test_verifyABIAndISATags_ok_no_variant(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64.deb",
+                control_fields={"Architecture": "amd64"},
+                changes_fields={"Architecture": "amd64"},
+                valid_arches=["amd64"],
+            ),
+            [],
+        )
+
+    def test_verifyABIAndISATags_ok_with_variant(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64v3.deb",
+                control_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                changes_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                valid_arches=["amd64", "amd64v3"],
+            ),
+            [],
+        )
+
+    def test_verifyABIAndISATags_arch_does_not_match_filename(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_i386.deb",
+                control_fields={"Architecture": "amd64"},
+                changes_fields={"Architecture": "amd64"},
+                valid_arches=["amd64"],
+            ),
+            [
+                "control file lists ISA as 'amd64' which doesn't agree with "
+                "'i386' in the filename.",
+            ],
+        )
+
+    def test_verifyABIAndISATags_unknown_abi_tag(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64.deb",
+                control_fields={"Architecture": "amd64"},
+                changes_fields={"Architecture": "amd64"},
+                valid_arches=["i386"],
+            ),
+            [
+                "Unknown architecture: 'amd64'",
+            ],
+        )
+
+    def test_verifyABIAndISATags_arch_not_in_changes(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64.deb",
+                control_fields={"Architecture": "amd64"},
+                changes_fields={"Architecture": "i386"},
+                valid_arches=["amd64"],
+            ),
+            [
+                "control file lists arch as 'amd64' which isn't in the "
+                "changes file.",
+            ],
+        )
+
+    def test_verifyABIAndISATags_variant_does_not_match_filename(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64.deb",
+                control_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                changes_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                valid_arches=["amd64", "amd64v3"],
+            ),
+            [
+                "control file lists ISA as 'amd64v3' which doesn't agree "
+                "with 'amd64' in the filename."
+            ],
+        )
+
+    def test_verifyABIAndISATags_variant_not_in_changes(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64v3.deb",
+                control_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                changes_fields={
+                    "Architecture": "amd64",
+                },
+                valid_arches=["amd64", "amd64v3"],
+            ),
+            [
+                "control file lists arch variant as 'amd64v3' which isn't in "
+                "the changes file.",
+            ],
+        )
+
+    def test_verifyABIAndISATags_unknown_variant(self):
+        self.assertEqual(
+            self.abiIsaErrorsFor(
+                filename="foo_1.0_amd64v3.deb",
+                control_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                changes_fields={
+                    "Architecture": "amd64",
+                    "Architecture-Variant": "amd64v3",
+                },
+                valid_arches=["amd64"],
+            ),
+            ["Unknown architecture variant: 'amd64v3'"],
+        )
diff --git a/lib/lp/archiveuploader/tests/test_uploadprocessor.py b/lib/lp/archiveuploader/tests/test_uploadprocessor.py
index 95c1b8b..bcc5939 100644
--- a/lib/lp/archiveuploader/tests/test_uploadprocessor.py
+++ b/lib/lp/archiveuploader/tests/test_uploadprocessor.py
@@ -2893,6 +2893,64 @@ class TestUploadHandler(TestUploadProcessorBase):
         # Upon full build the upload log is unset.
         self.assertIs(None, build.upload_log)
 
+    def testBinaryPackageBuilds_variant(self):
+        # Properly uploaded binaries should result in the
+        # build status changing to FULLYBUILT.
+        # Upload a source package
+        self.switchToAdmin()
+        self.factory.makeBuildableDistroArchSeries(
+            distroseries=self.breezy, architecturetag="amd64"
+        )
+        das_amd64v3 = self.factory.makeBuildableDistroArchSeries(
+            distroseries=self.breezy,
+            architecturetag="amd64v3",
+            underlying_architecturetag="amd64",
+        )
+        archive = self.breezy.distribution.main_archive
+        procs = list(archive.processors)
+        procs.append(das_amd64v3.processor)
+        removeSecurityProxy(archive).processors = procs
+
+        self.switchToUploader()
+        source_upload_dir = self.queueUpload("bar_1.0-1")
+        self.processUpload(self.uploadprocessor, source_upload_dir)
+        source_pub = self.publishPackage("bar", "1.0-1")
+        builds = source_pub.createMissingBuilds()
+        for b in builds:
+            if b.distro_arch_series.architecturetag == "amd64v3":
+                build = b
+
+        # Move the source from the accepted queue.
+        self.switchToAdmin()
+        [queue_item] = self.breezy.getPackageUploads(
+            status=PackageUploadStatus.ACCEPTED, name="bar", version="1.0-1"
+        )
+        queue_item.setDone()
+
+        build.buildqueue_record.markAsBuilding(self.factory.makeBuilder())
+        build.updateStatus(BuildStatus.UPLOADING)
+        self.switchToUploader()
+
+        # Upload and accept a binary for the primary archive source.
+
+        # Commit so the build cookie has the right ids.
+        self.layer.txn.commit()
+        behaviour = IBuildFarmJobBehaviour(build)
+        leaf_name = behaviour.getUploadDirLeaf(build.build_cookie)
+        build_upload_dir = self.queueUpload(
+            "bar_1.0-1_variant", queue_entry=leaf_name
+        )
+        self.options.context = "buildd"
+        self.options.builds = True
+        pop_notifications()
+        self.processUpload(self.uploadprocessor, build_upload_dir)
+        self.layer.txn.commit()
+        # No emails are sent on success
+        self.assertEmailQueueLength(0)
+        self.assertEqual(BuildStatus.FULLYBUILT, build.status)
+        # Upon full build the upload log is unset.
+        self.assertIs(None, build.upload_log)
+
     def doSuccessRecipeBuild(self):
         # Upload a source package
         self.switchToAdmin()
diff --git a/lib/lp/bugs/interfaces/bug.py b/lib/lp/bugs/interfaces/bug.py
index 634be7e..caef426 100644
--- a/lib/lp/bugs/interfaces/bug.py
+++ b/lib/lp/bugs/interfaces/bug.py
@@ -115,6 +115,7 @@ class CreateBugParams:
         importance=None,
         milestone=None,
         assignee=None,
+        validate_assignee=True,
         cve=None,
         metadata=None,
         check_permissions=True,
@@ -136,6 +137,7 @@ class CreateBugParams:
         self.importance = importance
         self.milestone = milestone
         self.assignee = assignee
+        self.validate_assignee = validate_assignee
         self.cve = cve
         self.metadata = metadata
         self.check_permissions = check_permissions
diff --git a/lib/lp/bugs/interfaces/cve.py b/lib/lp/bugs/interfaces/cve.py
index 204dcd7..4efd60c 100644
--- a/lib/lp/bugs/interfaces/cve.py
+++ b/lib/lp/bugs/interfaces/cve.py
@@ -189,6 +189,9 @@ class ICve(Interface):
     def setCVSSVectorForAuthority(authority, vector_string):
         """Set the CVSS vector string from an authority."""
 
+    def getDistributionVulnerability(self, distribution):
+        """Return the linked vulnerability for the given distribution."""
+
     def getVulnerabilitiesVisibleToUser(user):
         """Return the linked vulnerabilities visible to the given user."""
 
diff --git a/lib/lp/bugs/model/bug.py b/lib/lp/bugs/model/bug.py
index 2d3f90a..d48b166 100644
--- a/lib/lp/bugs/model/bug.py
+++ b/lib/lp/bugs/model/bug.py
@@ -248,6 +248,7 @@ def snapshot_bug_params(bug_params):
             "importance",
             "milestone",
             "assignee",
+            "validate_assignee",
             "cve",
             "metadata",
             "check_permissions",
@@ -3337,7 +3338,9 @@ class BugSet:
 
         bug_task = bug.default_bugtask
         if params.assignee:
-            bug_task.transitionToAssignee(params.assignee)
+            bug_task.transitionToAssignee(
+                params.assignee, validate=params.validate_assignee
+            )
         if params.importance:
             bug_task.transitionToImportance(params.importance, params.owner)
         if params.milestone:
diff --git a/lib/lp/bugs/model/cve.py b/lib/lp/bugs/model/cve.py
index 3bf1194..ce24b2d 100644
--- a/lib/lp/bugs/model/cve.py
+++ b/lib/lp/bugs/model/cve.py
@@ -115,6 +115,18 @@ class Cve(StormBase, BugLinkTargetMixin):
             sorted(bulk.load(Bug, bug_ids), key=operator.attrgetter("id"))
         )
 
+    def getDistributionVulnerability(self, distribution):
+        """See `ICve`."""
+        return (
+            Store.of(self)
+            .find(
+                Vulnerability,
+                Vulnerability.cve == self,
+                Vulnerability.distribution_id == distribution.id,
+            )
+            .one()
+        )
+
     def getVulnerabilitiesVisibleToUser(self, user):
         """See `ICve`."""
         vulnerabilities = Store.of(self).find(
diff --git a/lib/lp/bugs/model/importvulnerabilityjob.py b/lib/lp/bugs/model/importvulnerabilityjob.py
index 1a33c54..4ef6e08 100644
--- a/lib/lp/bugs/model/importvulnerabilityjob.py
+++ b/lib/lp/bugs/model/importvulnerabilityjob.py
@@ -10,9 +10,9 @@ import re
 
 from zope.component import getUtility
 from zope.interface import implementer, provider
+from zope.security.proxy import removeSecurityProxy
 
 from lp.app.enums import InformationType
-from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.bugs.enums import VulnerabilityHandlerEnum
 from lp.bugs.interfaces.vulnerabilityjob import (
     IImportVulnerabilityJob,
@@ -27,13 +27,13 @@ from lp.bugs.model.vulnerabilityjob import (
 )
 from lp.bugs.scripts.soss.models import SOSSRecord
 from lp.bugs.scripts.soss.sossimport import SOSSImporter
+from lp.code.errors import GitRepositoryScanFault
 from lp.code.interfaces.githosting import IGitHostingClient
 from lp.code.interfaces.gitlookup import IGitLookup
 from lp.services.config import config
 from lp.services.database.interfaces import IPrimaryStore, IStore
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
-from lp.testing import person_logged_in
 
 CVE_PATTERN = re.compile(r"^CVE-\d{4}-\d+$")
 logger = logging.getLogger(__name__)
@@ -104,12 +104,12 @@ class ImportVulnerabilityJob(VulnerabilityJobDerived):
             VulnerabilityJob.job_id == Job.id,
             VulnerabilityJob.job_type == cls.class_job_type,
             VulnerabilityJob.handler == handler,
+            Job._status.is_in(
+                (JobStatus.WAITING, JobStatus.RUNNING, JobStatus.SUSPENDED)
+            ),
         ).one()
 
-        if vulnerability_job is not None and (
-            vulnerability_job.job.status == JobStatus.WAITING
-            or vulnerability_job.job.status == JobStatus.RUNNING
-        ):
+        if vulnerability_job is not None:
             raise VulnerabilityJobInProgress(cls(vulnerability_job))
 
         # Schedule the initialization.
@@ -185,8 +185,6 @@ class ImportVulnerabilityJob(VulnerabilityJobDerived):
 
     def run(self):
         """See `IRunnableJob`."""
-        admin = getUtility(ILaunchpadCelebrities).admin
-
         # InformationType is passed as a value as DBItem is not serializable
         information_type = InformationType.items[self.information_type]
         parser, importer = self._get_parser_importer(
@@ -195,26 +193,34 @@ class ImportVulnerabilityJob(VulnerabilityJobDerived):
 
         # Get git repository
         git_lookup = getUtility(IGitLookup)
-        repository = git_lookup.getByUrl(self.git_repository)
+        repository = removeSecurityProxy(git_lookup.get(self.git_repository))
         if not repository:
             exception = VulnerabilityJobException("Git repository not found")
             self.notifyUserError(exception)
             raise exception
 
         # Get git reference
-        ref = repository.getRefByPath(self.git_ref)
+        ref = removeSecurityProxy(repository.getRefByPath(self.git_ref))
         if not ref:
             exception = VulnerabilityJobException("Git ref not found")
             self.notifyUserError(exception)
             raise exception
 
         # turnip API call to get added/modified files
-        stats = getUtility(IGitHostingClient).getDiffStats(
-            path=self.git_repository,
-            old=self.import_since_commit_sha1,
-            new=ref.commit_sha1,
-            logger=logger,
-        )
+        try:
+            stats = getUtility(IGitHostingClient).getDiffStats(
+                path=repository.getInternalPath(),
+                old=self.import_since_commit_sha1,
+                new=ref.commit_sha1,
+                logger=logger,
+            )
+        except GitRepositoryScanFault:
+            exception = VulnerabilityJobException(
+                f"Git diff between {self.import_since_commit_sha1} and "
+                f"{ref.commit_sha1} for {self.git_ref} not found"
+            )
+            self.notifyUserError(exception)
+            raise exception
 
         files = [*stats.get("added", ()), *stats.get("modified", ())]
         for file in files:
@@ -248,9 +254,7 @@ class ImportVulnerabilityJob(VulnerabilityJobDerived):
                 )
                 record = parser(blob)
 
-                # Logged as admin
-                with person_logged_in(admin):
-                    bug, vulnerability = importer(record, cve_sequence)
+                bug, vulnerability = importer(record, cve_sequence)
 
                 if bug and vulnerability:
                     self.metadata["result"]["succeeded"].append(cve_sequence)
diff --git a/lib/lp/bugs/model/tests/test_vulnerability.py b/lib/lp/bugs/model/tests/test_vulnerability.py
index adc0f6b..effa984 100644
--- a/lib/lp/bugs/model/tests/test_vulnerability.py
+++ b/lib/lp/bugs/model/tests/test_vulnerability.py
@@ -701,7 +701,7 @@ class TestVulnerabilitySetImportData(TestCaseWithFactory):
             job = getUtility(IImportVulnerabilityJobSource).get(self.handler)
             naked_job = removeSecurityProxy(job)
             self.assertIsInstance(naked_job, ImportVulnerabilityJob)
-            self.assertEqual(naked_job.git_repository, repo.git_https_url)
+            self.assertEqual(naked_job.git_repository, repo.id)
             self.assertEqual(naked_job.git_ref, self.git_ref)
             self.assertEqual(naked_job.git_paths, self.git_paths)
             self.assertEqual(
@@ -850,8 +850,9 @@ class TestVulnerabilitySetImportData(TestCaseWithFactory):
             )
 
     def test_importData_wrong_import_since_commit_sha1(self):
-        """Test that we cannot create a ImportVulnerabilityJob when
-        import_since_commit_sha1 is not in git_ref.
+        """Test that we can create a ImportVulnerabilityJob when
+        import_since_commit_sha1 is not in git_ref. The job will check that
+        later.
         """
         self.useContext(feature_flags())
         set_feature_flag(VULNERABILITY_IMPORT_ENABLED_FEATURE_FLAG, "true")
@@ -869,11 +870,7 @@ class TestVulnerabilitySetImportData(TestCaseWithFactory):
 
         import_since_commit_sha1 = "1" * 40
         with person_logged_in(self.requester):
-            self.assertRaisesWithContent(
-                NotFoundError,
-                f"'{import_since_commit_sha1} does not exist in "
-                f"{self.git_ref}'",
-                getUtility(IVulnerabilitySet).importData,
+            getUtility(IVulnerabilitySet).importData(
                 self.requester,
                 self.handler,
                 repo,
@@ -883,6 +880,19 @@ class TestVulnerabilitySetImportData(TestCaseWithFactory):
                 import_since_commit_sha1,
             )
 
+            job = getUtility(IImportVulnerabilityJobSource).get(self.handler)
+            naked_job = removeSecurityProxy(job)
+            self.assertIsInstance(naked_job, ImportVulnerabilityJob)
+            self.assertEqual(naked_job.git_repository, repo.id)
+            self.assertEqual(naked_job.git_ref, self.git_ref)
+            self.assertEqual(naked_job.git_paths, self.git_paths)
+            self.assertEqual(
+                naked_job.information_type, self.information_type.value
+            )
+            self.assertEqual(
+                naked_job.import_since_commit_sha1, import_since_commit_sha1
+            )
+
     def test_importData_duplicated(self):
         """Test that we cannot create a duplicated ImportVulnerabilityJob
         if there is already a peding one for the same handler.
diff --git a/lib/lp/bugs/model/vulnerability.py b/lib/lp/bugs/model/vulnerability.py
index 504cea4..c337b8f 100644
--- a/lib/lp/bugs/model/vulnerability.py
+++ b/lib/lp/bugs/model/vulnerability.py
@@ -443,19 +443,14 @@ class VulnerabilitySet:
                 f"{git_ref} does not exist in the specified git repository"
             )
 
-        # Check import_since_commit_sha1 exists in git_ref
-        if import_since_commit_sha1 and not git_repository.checkCommitInRef(
-            import_since_commit_sha1, git_ref
-        ):
-            raise NotFoundError(
-                f"{import_since_commit_sha1} does not exist in {git_ref}"
-            )
+        # The job will check that import_since_commit_sha1 exists in git_ref
+        # by making a call to turnip's API
 
         # Trigger the import job after validations pass
         job_source = getUtility(IImportVulnerabilityJobSource)
         job_source.create(
             handler,
-            git_repository.git_https_url,
+            git_repository.id,
             git_ref,
             git_paths,
             information_type.value,
diff --git a/lib/lp/bugs/scripts/cveimport.py b/lib/lp/bugs/scripts/cveimport.py
index 66a6dcd..57b25b2 100644
--- a/lib/lp/bugs/scripts/cveimport.py
+++ b/lib/lp/bugs/scripts/cveimport.py
@@ -374,6 +374,16 @@ class CVEUpdater(LaunchpadCronScript):
             # extract the outer zip file
             with zipfile.ZipFile(outer_zip_path) as outer_zf:
                 if delta:
+                    target_dir = os.path.join(temp_dir, "deltaCves")
+
+                    # If there are no delta cves, we return an empty dir
+                    if not outer_zf.namelist():
+                        self.logger.info(
+                            "Zip file is empty: there are no delta changes"
+                        )
+                        os.mkdir(target_dir)
+                        return target_dir
+
                     # for delta, extract deltacves directory
                     members = [
                         m
@@ -381,7 +391,6 @@ class CVEUpdater(LaunchpadCronScript):
                         if m.startswith("deltaCves/")
                     ]
                     outer_zf.extractall(temp_dir, members=members)
-                    target_dir = os.path.join(temp_dir, "deltaCves")
                 else:
                     # for baseline, handle nested zip structure
                     outer_zf.extract("cves.zip", temp_dir)
@@ -727,7 +736,7 @@ class CVEUpdater(LaunchpadCronScript):
         for ref in references:
             url = ref.get("url")
             source = "external"  # default source
-            content = ref.get("name", "")
+            content = ref.get("name", url)
 
             # look for existing reference
             was_there_previously = False
diff --git a/lib/lp/bugs/scripts/soss/models.py b/lib/lp/bugs/scripts/soss/models.py
index c557812..f8fe1a9 100644
--- a/lib/lp/bugs/scripts/soss/models.py
+++ b/lib/lp/bugs/scripts/soss/models.py
@@ -7,6 +7,7 @@ from enum import Enum
 from typing import Any, Dict, List, Optional
 
 import yaml
+from packaging.version import Version
 
 __all__ = [
     "SOSSRecord",
@@ -53,6 +54,14 @@ class SOSSRecord:
         def __str__(self) -> str:
             return self.value
 
+        def __lt__(self, other) -> bool:
+            try:
+                self_ver = self.value.split(":")[-1].split("/")[0]
+                other_ver = other.value.split(":")[-1].split("/")[0]
+                return Version(self_ver) < Version(other_ver)
+            except Exception:
+                return self.value < other.value
+
     @dataclass
     class CVSS:
         source: str
@@ -95,6 +104,12 @@ class SOSSRecord:
                 "Note": self.note,
             }
 
+        def __lt__(self, other: "SOSSRecord.Package") -> bool:
+            if self.name == other.name:
+                return self.channel < other.channel
+
+            return self.name < other.name
+
     references: List[str]
     notes: List[str]
     priority: PriorityEnum
diff --git a/lib/lp/bugs/scripts/soss/sossexport.py b/lib/lp/bugs/scripts/soss/sossexport.py
new file mode 100644
index 0000000..9a75eb6
--- /dev/null
+++ b/lib/lp/bugs/scripts/soss/sossexport.py
@@ -0,0 +1,162 @@
+#  Copyright 2025 Canonical Ltd.  This software is licensed under the
+#  GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""A SOSS (SOSS CVE Tracker) bug exporter"""
+import logging
+from collections import defaultdict
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from lp.app.enums import InformationType
+from lp.bugs.model.bug import Bug as BugModel
+from lp.bugs.model.bugtask import BugTask
+from lp.bugs.model.cve import Cve as CveModel
+from lp.bugs.model.vulnerability import Vulnerability
+from lp.bugs.scripts.soss.models import SOSSRecord
+from lp.bugs.scripts.soss.sossimport import (
+    PACKAGE_STATUS_MAP,
+    PACKAGE_TYPE_MAP,
+    PRIORITY_ENUM_MAP,
+)
+from lp.registry.model.distribution import Distribution
+
+__all__ = [
+    "SOSSExporter",
+]
+
+logger = logging.getLogger(__name__)
+
+# Reverse mappings of the importer's enum maps, used to export back to SOSS
+PRIORITY_ENUM_MAP_REVERSE = {v: k for k, v in PRIORITY_ENUM_MAP.items()}
+
+PACKAGE_TYPE_MAP_REVERSE = {v: k for k, v in PACKAGE_TYPE_MAP.items()}
+
+PACKAGE_STATUS_MAP_REVERSE = {v: k for k, v in PACKAGE_STATUS_MAP.items()}
+
+
+class SOSSExporter:
+    """
+    SOSSExporter is used to export Launchpad Vulnerability data to SOSS CVE
+    files.
+    """
+
+    def __init__(
+        self,
+        information_type: InformationType = InformationType.PROPRIETARY,
+    ) -> None:
+        self.information_type = information_type
+
+    def _get_packages(
+        self, bugtasks: List[BugTask]
+    ) -> Dict[SOSSRecord.PackageTypeEnum, SOSSRecord.Package]:
+        """Get a dict of SOSSRecord.PackageTypeEnum: SOSSRecord.Package from a
+        bugtask list.
+        """
+        packages = defaultdict(list)
+        for bugtask in bugtasks:
+            pkg = SOSSRecord.Package(
+                name=bugtask.target.name,
+                channel=SOSSRecord.Channel(
+                    value="/".join(s for s in bugtask.channel if s is not None)
+                ),
+                repositories=bugtask.metadata.get("repositories"),
+                status=SOSSRecord.PackageStatusEnum(
+                    PACKAGE_STATUS_MAP_REVERSE[bugtask.status]
+                ),
+                note=bugtask.status_explanation or "",
+            )
+            packages[PACKAGE_TYPE_MAP_REVERSE[bugtask.packagetype]].append(pkg)
+
+        ordered_packages = {
+            k: sorted(packages[k])
+            for k in PACKAGE_TYPE_MAP_REVERSE.values()
+            if packages[k]
+        }
+
+        return ordered_packages
+
+    def _get_cvss(self, cvss: Dict) -> List[SOSSRecord.CVSS]:
+        """Get a list of SOSSRecord.CVSS from a cvss dict"""
+        cvss_list = []
+        for authority in cvss.keys():
+            for c in cvss[authority]:
+                cvss_list.append(
+                    SOSSRecord.CVSS(
+                        c.get("source"),
+                        c.get("vector"),
+                        c.get("baseScore"),
+                        c.get("baseSeverity"),
+                    )
+                )
+        return cvss_list
+
+    def to_record(
+        self,
+        lp_cve: CveModel,
+        distribution: Distribution,
+        bug: BugModel,
+        vulnerability: Vulnerability,
+    ) -> SOSSRecord:
+        """Return a SOSSRecord exporting Launchpad data for the specified
+        cve_sequence.
+        """
+        self._validate_to_record_args(lp_cve, distribution, bug, vulnerability)
+
+        # Parse bug
+        desc_parts = bug.description.rsplit("\n\nReferences:\n", maxsplit=1)
+        references = desc_parts[1].split("\n") if len(desc_parts) > 1 else []
+
+        # Parse bug.bugtasks
+        packages = self._get_packages(bug.bugtasks)
+        assigned_to = (
+            bug.bugtasks[0].assignee.name if bug.bugtasks[0].assignee else ""
+        )
+
+        # Parse vulnerability
+        public_date = self._normalize_date_without_timezone(
+            vulnerability.date_made_public
+        )
+        notes = vulnerability.notes.split("\n") if vulnerability.notes else []
+        priority = SOSSRecord.PriorityEnum(
+            PRIORITY_ENUM_MAP_REVERSE[vulnerability.importance]
+        )
+
+        return SOSSRecord(
+            references=references,
+            notes=notes,
+            priority=priority,
+            priority_reason=vulnerability.importance_explanation,
+            assigned_to=assigned_to,
+            packages=packages,
+            candidate=f"CVE-{lp_cve.sequence}",
+            description=vulnerability.description,
+            cvss=self._get_cvss(vulnerability.cvss),
+            public_date=public_date,
+        )
+
+    def _validate_to_record_args(
+        self,
+        lp_cve: CveModel,
+        distribution: Distribution,
+        bug: BugModel,
+        vulnerability: Vulnerability,
+    ):
+        required_args = {
+            "Cve": lp_cve,
+            "Bug": bug,
+            "Vulnerability": vulnerability,
+            "Distribution": distribution,
+        }
+
+        for name, value in required_args.items():
+            if value is None:
+                logger.error(f"[SOSSExporter] {name} can't be None")
+                raise ValueError(f"{name} can't be None")
+
+    def _normalize_date_without_timezone(
+        self, date_obj: datetime
+    ) -> Optional[datetime]:
+        """Normalize date to no timezone if needed."""
+        if date_obj and date_obj.tzinfo is not None:
+            return date_obj.replace(tzinfo=None)
+        return date_obj
diff --git a/lib/lp/bugs/scripts/soss/sossimport.py b/lib/lp/bugs/scripts/soss/sossimport.py
index 773caf4..9f59e92 100644
--- a/lib/lp/bugs/scripts/soss/sossimport.py
+++ b/lib/lp/bugs/scripts/soss/sossimport.py
@@ -13,6 +13,7 @@ from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
 from lp.app.enums import InformationType
+from lp.app.errors import NotFoundError
 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
 from lp.bugs.enums import VulnerabilityStatus
 from lp.bugs.interfaces.bug import CreateBugParams, IBugSet
@@ -36,10 +37,13 @@ from lp.registry.model.distribution import Distribution
 from lp.registry.model.externalpackage import ExternalPackage
 from lp.registry.model.person import Person
 from lp.registry.security import SecurityAdminDistribution
-from lp.testing import person_logged_in
 
 __all__ = [
     "SOSSImporter",
+    "PRIORITY_ENUM_MAP",
+    "PACKAGE_TYPE_MAP",
+    "PACKAGE_STATUS_MAP",
+    "DISTRIBUTION_NAME",
 ]
 
 logger = logging.getLogger(__name__)
@@ -55,11 +59,11 @@ PRIORITY_ENUM_MAP = {
 }
 
 PACKAGE_TYPE_MAP = {
-    SOSSRecord.PackageTypeEnum.UNPACKAGED: ExternalPackageType.GENERIC,
-    SOSSRecord.PackageTypeEnum.PYTHON: ExternalPackageType.PYTHON,
-    SOSSRecord.PackageTypeEnum.MAVEN: ExternalPackageType.MAVEN,
     SOSSRecord.PackageTypeEnum.CONDA: ExternalPackageType.CONDA,
+    SOSSRecord.PackageTypeEnum.MAVEN: ExternalPackageType.MAVEN,
+    SOSSRecord.PackageTypeEnum.PYTHON: ExternalPackageType.PYTHON,
     SOSSRecord.PackageTypeEnum.RUST: ExternalPackageType.CARGO,
+    SOSSRecord.PackageTypeEnum.UNPACKAGED: ExternalPackageType.GENERIC,
 }
 
 PACKAGE_STATUS_MAP = {
@@ -81,7 +85,7 @@ class SOSSImporter:
 
     def __init__(
         self,
-        information_type: InformationType = InformationType.PRIVATESECURITY,
+        information_type: InformationType = InformationType.PROPRIETARY,
         dry_run: bool = False,
     ) -> None:
         self.information_type = information_type
@@ -93,11 +97,13 @@ class SOSSImporter:
         self.vulnerability_set = getUtility(IVulnerabilitySet)
         self.bug_set = getUtility(IBugSet)
         self.cve_set = getUtility(ICveSet)
-        self.soss = getUtility(IDistributionSet).getByName(DISTRIBUTION_NAME)
+        self.soss = removeSecurityProxy(
+            getUtility(IDistributionSet).getByName(DISTRIBUTION_NAME)
+        )
 
         if self.soss is None:
             logger.error("[SOSSImporter] SOSS distribution not found")
-            raise Exception("SOSS distribution not found")
+            raise NotFoundError("SOSS distribution not found")
 
     def import_cve_from_file(
         self, cve_path: str
@@ -109,9 +115,7 @@ class SOSSImporter:
         with open(cve_path, encoding="utf-8") as file:
             soss_record = SOSSRecord.from_yaml(file)
 
-        with person_logged_in(self.bug_importer):
-            bug, vulnerability = self.import_cve(soss_record, cve_sequence)
-
+        bug, vulnerability = self.import_cve(soss_record, cve_sequence)
         return bug, vulnerability
 
     def import_cve(
@@ -131,7 +135,7 @@ class SOSSImporter:
         else:
             bug = self._update_bug(bug, soss_record, lp_cve)
 
-        vulnerability = self._find_existing_vulnerability(bug, self.soss)
+        vulnerability = self._find_existing_vulnerability(lp_cve, self.soss)
         if not vulnerability:
             vulnerability = self._create_vulnerability(
                 bug, soss_record, lp_cve, self.soss
@@ -169,22 +173,25 @@ class SOSSImporter:
         metadata = {"repositories": package.repositories}
 
         # Create the bug, only first bugtask
-        bug, _ = self.bug_set.createBug(
-            CreateBugParams(
-                comment=self._make_bug_description(soss_record),
-                title=lp_cve.sequence,
-                information_type=self.information_type,
-                owner=self.bug_importer,
-                target=externalpackage,
-                status=PACKAGE_STATUS_MAP[package.status],
-                status_explanation=package.note,
-                assignee=assignee,
-                importance=PRIORITY_ENUM_MAP[soss_record.priority],
-                cve=lp_cve,
-                metadata=metadata,
-                check_permissions=False,
-            ),
-            notify_event=False,
+        bug, _ = removeSecurityProxy(
+            self.bug_set.createBug(
+                CreateBugParams(
+                    comment=self._make_bug_description(soss_record),
+                    title=lp_cve.sequence,
+                    information_type=self.information_type,
+                    owner=self.bug_importer,
+                    target=externalpackage,
+                    status=PACKAGE_STATUS_MAP[package.status],
+                    status_explanation=package.note,
+                    assignee=assignee,
+                    validate_assignee=False,
+                    importance=PRIORITY_ENUM_MAP[soss_record.priority],
+                    cve=lp_cve,
+                    metadata=metadata,
+                    check_permissions=False,
+                ),
+                notify_event=False,
+            )
         )
 
         # Create next bugtasks
@@ -231,23 +238,25 @@ class SOSSImporter:
         :param distribution: a Distribution affected by the vulnerability
         :return: a Vulnerability
         """
-        vulnerability: Vulnerability = self.vulnerability_set.new(
-            distribution=distribution,
-            status=VulnerabilityStatus.NEEDS_TRIAGE,
-            importance=PRIORITY_ENUM_MAP[soss_record.priority],
-            creator=bug.owner,
-            information_type=self.information_type,
-            cve=lp_cve,
-            description=soss_record.description,
-            notes="\n".join(soss_record.notes),
-            mitigation=None,
-            importance_explanation=soss_record.priority_reason,
-            date_made_public=self._normalize_date_with_timezone(
-                soss_record.public_date
-            ),
-            date_notice_issued=None,
-            date_coordinated_release=None,
-            cvss=self._prepare_cvss_data(soss_record),
+        vulnerability: Vulnerability = removeSecurityProxy(
+            self.vulnerability_set.new(
+                distribution=distribution,
+                status=VulnerabilityStatus.NEEDS_TRIAGE,
+                importance=PRIORITY_ENUM_MAP[soss_record.priority],
+                creator=bug.owner,
+                information_type=self.information_type,
+                cve=lp_cve,
+                description=soss_record.description,
+                notes="\n".join(soss_record.notes),
+                mitigation=None,
+                importance_explanation=soss_record.priority_reason,
+                date_made_public=self._normalize_date_with_timezone(
+                    soss_record.public_date
+                ),
+                date_notice_issued=None,
+                date_coordinated_release=None,
+                cvss=self._prepare_cvss_data(soss_record),
+            )
         )
         vulnerability.linkBug(bug, bug.owner)
 
@@ -260,7 +269,7 @@ class SOSSImporter:
 
     def _update_vulnerability(
         self, vulnerability: Vulnerability, soss_record: SOSSRecord
-    ) -> None:
+    ) -> Vulnerability:
         """
         Update a Vulnerability model with the information
         contained in a SOSSRecord
@@ -285,6 +294,7 @@ class SOSSImporter:
             "[SOSSImporter] Updated Vulnerability with ID: "
             f"{vulnerability.id} for {vulnerability.distribution.name}",
         )
+        return vulnerability
 
     def _find_existing_bug(
         self,
@@ -293,30 +303,30 @@ class SOSSImporter:
         distribution: Distribution,
     ) -> Optional[BugModel]:
         """Find existing bug for the given CVE."""
-        for vulnerability in lp_cve.vulnerabilities:
-            if vulnerability.distribution == distribution:
-                bugs = vulnerability.bugs
-                if len(bugs) > 1:
-                    raise ValueError(
-                        "Multiple existing bugs found for CVE ",
-                        soss_record.sequence,
-                    )
-                if bugs:
-                    return bugs[0]
+        vulnerability = self._find_existing_vulnerability(lp_cve, distribution)
+        if not vulnerability:
+            return None
+
+        bugs = vulnerability.bugs
+        if len(bugs) > 1:
+            raise ValueError(
+                "Multiple existing bugs found for CVE ",
+                soss_record.sequence,
+            )
+        if bugs:
+            return removeSecurityProxy(bugs[0])
+
         return None
 
     def _find_existing_vulnerability(
-        self, bug: BugModel, distribution: Distribution
+        self, lp_cve: CveModel, distribution: Distribution
     ) -> Optional[Vulnerability]:
         """Find existing vulnerability for the current distribution"""
-        if not bug:
+        if not lp_cve:
             return None
 
-        vulnerability = next(
-            (v for v in bug.vulnerabilities if v.distribution == distribution),
-            None,
-        )
-        return vulnerability
+        vulnerability = lp_cve.getDistributionVulnerability(distribution)
+        return removeSecurityProxy(vulnerability)
 
     def _create_or_update_bugtasks(
         self, bug: BugModel, soss_record: SOSSRecord
@@ -337,7 +347,9 @@ class SOSSImporter:
         assignee = self._get_assignee(soss_record.assigned_to)
 
         # Build a lookup dict for existing bug tasks
-        bugtask_by_target = {task.target: task for task in bug.bugtasks}
+        bugtask_by_target = {
+            task.target: removeSecurityProxy(task) for task in bug.bugtasks
+        }
 
         for packagetype, package_list in packages:
             for package in package_list:
@@ -351,14 +363,16 @@ class SOSSImporter:
                 )
 
                 if target not in bugtask_by_target:
-                    self.bugtask_set.createTask(
-                        bug,
-                        self.bug_importer,
-                        target,
-                        status=PACKAGE_STATUS_MAP[package.status],
-                        importance=PRIORITY_ENUM_MAP[soss_record.priority],
-                        assignee=assignee,
-                        metadata=metadata,
+                    bugtask = removeSecurityProxy(
+                        self.bugtask_set.createTask(
+                            bug,
+                            self.bug_importer,
+                            target,
+                            status=PACKAGE_STATUS_MAP[package.status],
+                            importance=PRIORITY_ENUM_MAP[soss_record.priority],
+                            assignee=assignee,
+                            metadata=metadata,
+                        )
                     )
                 else:
                     bugtask = bugtask_by_target[target]
@@ -377,6 +391,8 @@ class SOSSImporter:
                     bugtask.transitionToAssignee(assignee, validate=False)
                     bugtask.metadata = metadata
 
+                bugtask.status_explanation = package.note
+
         # Remove bugtasks that were deleted from the record
         for bugtask in bugtask_by_target.values():
             bugtask.destroySelf()
diff --git a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2005-1544 b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2005-1544
index 8788e47..15536ad 100644
--- a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2005-1544
+++ b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2005-1544
@@ -31,3 +31,4 @@ Packages:
     - soss-src-stable-local
     Status: ignored
     Note: ''
+Candidate: CVE-2005-1544
diff --git a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2011-5000 b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2011-5000
index e92427b..76ce151 100644
--- a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2011-5000
+++ b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2011-5000
@@ -12,3 +12,4 @@ Packages:
     - soss-conda-stable-local
     Status: ignored
     Note: ''
+Candidate: CVE-2011-5000
diff --git a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2021-21300 b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2021-21300
index 22ae13d..3997a96 100644
--- a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2021-21300
+++ b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2021-21300
@@ -11,3 +11,4 @@ Packages:
     - soss-conda-candidate-local
     Status: ignored
     Note: was ignored
+Candidate: CVE-2021-21300
diff --git a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2025-1979 b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2025-1979
index 5d90588..a562efa 100644
--- a/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2025-1979
+++ b/lib/lp/bugs/scripts/soss/tests/sampledata/CVE-2025-1979
@@ -16,39 +16,32 @@ Priority-Reason: 'Unrealistic exploitation scenario. Logs are stored locally and
   in this priority assignment. '
 Assigned-To: janitor
 Packages:
-  unpackaged:
+  conda:
+  - Name: ray
+    Channel: jammy:1.17.0/stable
+    Repositories:
+    - nvidia-pb3-python-stable-local
+    Status: not-affected
+    Note: 2.22.0+soss.1
+  maven:
   - Name: vllm
     Channel: noble:0.7.3/stable
     Repositories:
     - soss-src-stable-local
-    Status: needed
+    Status: needs-triage
     Note: ''
   python:
-  - Name: ray
-    Channel: jammy:2.22.0/stable
-    Repositories:
-    - nvidia-pb3-python-stable-local
-    Status: released
-    Note: 2.22.0+soss.1
   - Name: pyyaml
     Channel: jammy:2.22.0/stable
     Repositories:
     - nvidia-pb3-python-stable-local
     Status: not-affected
     Note: ''
-  maven:
-  - Name: vllm
-    Channel: noble:0.7.3/stable
-    Repositories:
-    - soss-src-stable-local
-    Status: needs-triage
-    Note: ''
-  conda:
   - Name: ray
-    Channel: jammy:1.17.0/stable
+    Channel: jammy:2.22.0/stable
     Repositories:
     - nvidia-pb3-python-stable-local
-    Status: not-affected
+    Status: released
     Note: 2.22.0+soss.1
   rust:
   - Name: ray
@@ -57,6 +50,13 @@ Packages:
     - nvidia-pb3-python-stable-local
     Status: deferred
     Note: 2.22.0+soss.1
+  unpackaged:
+  - Name: vllm
+    Channel: noble:0.7.3/stable
+    Repositories:
+    - soss-src-stable-local
+    Status: needed
+    Note: ''
 Candidate: CVE-2025-1979
 Description: "Versions of the package ray before 2.43.0 are vulnerable to Insertion\
   \ of Sensitive Information into Log File where the redis password is being logged\
diff --git a/lib/lp/bugs/scripts/soss/tests/test_sossexport.py b/lib/lp/bugs/scripts/soss/tests/test_sossexport.py
new file mode 100644
index 0000000..6c1e901
--- /dev/null
+++ b/lib/lp/bugs/scripts/soss/tests/test_sossexport.py
@@ -0,0 +1,109 @@
+from pathlib import Path
+
+from zope.component import getUtility
+from zope.security.proxy import removeSecurityProxy
+
+from lp.app.enums import InformationType
+from lp.app.interfaces.launchpad import ILaunchpadCelebrities
+from lp.bugs.interfaces.cve import ICveSet
+from lp.bugs.scripts.soss import SOSSRecord
+from lp.bugs.scripts.soss.sossexport import SOSSExporter
+from lp.bugs.scripts.soss.sossimport import SOSSImporter
+from lp.testing import TestCaseWithFactory
+from lp.testing.layers import LaunchpadZopelessLayer
+
+
+class TestSOSSExporter(TestCaseWithFactory):
+
+    layer = LaunchpadZopelessLayer
+
+    def setUp(self):
+        super().setUp()
+        self.cve_set = getUtility(ICveSet)
+        self.bug_importer = getUtility(ILaunchpadCelebrities).bug_importer
+
+        self.sampledata = Path(__file__).parent / "sampledata"
+        self.factory.makePerson(name="octagalland")
+        self.soss = self.factory.makeDistribution(
+            name="soss",
+            displayname="SOSS",
+            information_type=InformationType.PROPRIETARY,
+        )
+        self._makeCves()
+
+        self.soss_importer = SOSSImporter()
+        self.soss_exporter = SOSSExporter()
+
+    def _makeCves(self):
+        for file in self.sampledata.iterdir():
+            cve_sequence = file.name.lstrip("CVE-")
+            if not self.cve_set[cve_sequence]:
+                self.factory.makeCVE(sequence=cve_sequence)
+
+    def test_get_packages(self):
+        """Test get SOSSRecord.Package list from bugtasks."""
+        for file in self.sampledata.iterdir():
+            with open(file) as f:
+                soss_record = SOSSRecord.from_yaml(f)
+
+            bug, _ = self.soss_importer.import_cve_from_file(file)
+
+            naked_bug = removeSecurityProxy(bug)
+            packages = self.soss_exporter._get_packages(naked_bug.bugtasks)
+            self.assertEqual(soss_record.packages, packages)
+
+    def test_get_cvss(self):
+        """Test get SOSSRecord.CVSS list from vulnerability.cvss."""
+        for file in self.sampledata.iterdir():
+            cve_sequence = file.name.lstrip("CVE-")
+            if not self.cve_set[cve_sequence]:
+                self.factory.makeCVE(sequence=cve_sequence)
+
+            with open(file) as f:
+                soss_record = SOSSRecord.from_yaml(f)
+
+            _, vulnerability = self.soss_importer.import_cve_from_file(file)
+            naked_vulnerability = removeSecurityProxy(vulnerability)
+            cvss = self.soss_exporter._get_cvss(naked_vulnerability.cvss)
+
+            self.assertEqual(soss_record.cvss, cvss)
+
+    def test_to_record(self):
+        """Test that imported and exported SOSSRecords match."""
+        soss_importer = SOSSImporter(
+            information_type=InformationType.PROPRIETARY
+        )
+
+        for file in self.sampledata.iterdir():
+            with open(file) as f:
+                soss_record = SOSSRecord.from_yaml(f)
+
+            bug, vulnerability = soss_importer.import_cve_from_file(file)
+
+            cve_sequence = file.name.lstrip("CVE-")
+            lp_cve = self.cve_set[cve_sequence]
+            exported = self.soss_exporter.to_record(
+                lp_cve, self.soss, bug, vulnerability
+            )
+
+            self.assertEqual(soss_record, exported)
+
+    def test_import_export(self):
+        """Integration test that checks that CVE files imported and exported
+        match."""
+        soss_importer = SOSSImporter(
+            information_type=InformationType.PROPRIETARY
+        )
+
+        for file in self.sampledata.iterdir():
+
+            bug, vulnerability = soss_importer.import_cve_from_file(file)
+
+            cve_sequence = file.name.lstrip("CVE-")
+            lp_cve = self.cve_set[cve_sequence]
+            exported = self.soss_exporter.to_record(
+                lp_cve, self.soss, bug, vulnerability
+            )
+
+            with open(file) as f:
+                self.assertEqual(f.read(), exported.to_yaml())
diff --git a/lib/lp/bugs/scripts/soss/tests/test_sossimport.py b/lib/lp/bugs/scripts/soss/tests/test_sossimport.py
index d577e6a..ef21825 100644
--- a/lib/lp/bugs/scripts/soss/tests/test_sossimport.py
+++ b/lib/lp/bugs/scripts/soss/tests/test_sossimport.py
@@ -11,7 +11,7 @@ from lp.bugs.scripts.soss import SOSSRecord
 from lp.bugs.scripts.soss.sossimport import SOSSImporter
 from lp.registry.interfaces.externalpackage import ExternalPackageType
 from lp.registry.interfaces.sourcepackagename import ISourcePackageNameSet
-from lp.testing import TestCaseWithFactory, person_logged_in
+from lp.testing import TestCaseWithFactory
 from lp.testing.layers import LaunchpadZopelessLayer
 
 
@@ -33,6 +33,7 @@ class TestSOSSImporter(TestCaseWithFactory):
             name="soss",
             displayname="SOSS",
             owner=self.owner,
+            information_type=InformationType.PROPRIETARY,
         )
         transaction.commit()
 
@@ -76,24 +77,27 @@ class TestSOSSImporter(TestCaseWithFactory):
                     channel=("jammy:2.22.0", "stable"),
                 ),
                 BugTaskStatus.INVALID,
+                "",
                 {"repositories": ["nvidia-pb3-python-stable-local"]},
             ),
             (
                 self.soss.getExternalPackage(
                     name=ray,
-                    packagetype=ExternalPackageType.PYTHON,
-                    channel=("jammy:2.22.0", "stable"),
+                    packagetype=ExternalPackageType.CONDA,
+                    channel=("jammy:1.17.0", "stable"),
                 ),
-                BugTaskStatus.FIXRELEASED,
+                BugTaskStatus.INVALID,
+                "2.22.0+soss.1",
                 {"repositories": ["nvidia-pb3-python-stable-local"]},
             ),
             (
                 self.soss.getExternalPackage(
                     name=ray,
-                    packagetype=ExternalPackageType.CONDA,
-                    channel=("jammy:1.17.0", "stable"),
+                    packagetype=ExternalPackageType.PYTHON,
+                    channel=("jammy:2.22.0", "stable"),
                 ),
-                BugTaskStatus.INVALID,
+                BugTaskStatus.FIXRELEASED,
+                "2.22.0+soss.1",
                 {"repositories": ["nvidia-pb3-python-stable-local"]},
             ),
             (
@@ -103,24 +107,27 @@ class TestSOSSImporter(TestCaseWithFactory):
                     channel=("focal:0.27.0", "stable"),
                 ),
                 BugTaskStatus.DEFERRED,
+                "2.22.0+soss.1",
                 {"repositories": ["nvidia-pb3-python-stable-local"]},
             ),
             (
                 self.soss.getExternalPackage(
                     name=vllm,
-                    packagetype=ExternalPackageType.GENERIC,
+                    packagetype=ExternalPackageType.MAVEN,
                     channel=("noble:0.7.3", "stable"),
                 ),
-                BugTaskStatus.NEW,
+                BugTaskStatus.UNKNOWN,
+                "",
                 {"repositories": ["soss-src-stable-local"]},
             ),
             (
                 self.soss.getExternalPackage(
                     name=vllm,
-                    packagetype=ExternalPackageType.MAVEN,
+                    packagetype=ExternalPackageType.GENERIC,
                     channel=("noble:0.7.3", "stable"),
                 ),
-                BugTaskStatus.UNKNOWN,
+                BugTaskStatus.NEW,
+                "",
                 {"repositories": ["soss-src-stable-local"]},
             ),
         ]
@@ -165,9 +172,14 @@ class TestSOSSImporter(TestCaseWithFactory):
     ):
         self.assertEqual(len(bugtasks), len(bugtask_reference))
 
-        for i, (target, status, metadata) in enumerate(bugtask_reference):
+        for i, (target, status, status_explanation, metadata) in enumerate(
+            bugtask_reference
+        ):
             self.assertEqual(bugtasks[i].target, target)
             self.assertEqual(bugtasks[i].status, status)
+            self.assertEqual(
+                bugtasks[i].status_explanation, status_explanation
+            )
             self.assertEqual(bugtasks[i].importance, importance)
             self.assertEqual(bugtasks[i].assignee, assignee)
             self.assertEqual(bugtasks[i].metadata, metadata)
@@ -176,7 +188,7 @@ class TestSOSSImporter(TestCaseWithFactory):
         """Helper function to check the imported bug"""
         self.assertEqual(bug.description, self.description)
         self.assertEqual(bug.title, self.cve.sequence)
-        self.assertEqual(bug.information_type, InformationType.PRIVATESECURITY)
+        self.assertEqual(bug.information_type, InformationType.PROPRIETARY)
         self.assertEqual(bug.owner, self.bug_importer)
 
         self._check_bugtasks(
@@ -199,7 +211,7 @@ class TestSOSSImporter(TestCaseWithFactory):
         self.assertEqual(vulnerability.date_notice_issued, None)
         self.assertEqual(vulnerability.date_coordinated_release, None)
         self.assertEqual(
-            vulnerability.information_type, InformationType.PRIVATESECURITY
+            vulnerability.information_type, InformationType.PROPRIETARY
         )
         self.assertEqual(vulnerability.importance, BugTaskImportance.LOW)
         self.assertEqual(
@@ -224,7 +236,7 @@ class TestSOSSImporter(TestCaseWithFactory):
         file = self.sampledata / "CVE-2025-1979"
 
         soss_importer = SOSSImporter(
-            information_type=InformationType.PRIVATESECURITY
+            information_type=InformationType.PROPRIETARY
         )
         bug, vulnerability = soss_importer.import_cve_from_file(file)
 
@@ -234,10 +246,14 @@ class TestSOSSImporter(TestCaseWithFactory):
         # Check vulnerability
         self._check_vulnerability_fields(vulnerability, bug)
 
+        # Import again to check that it doesn't create new objects
+        bug_copy, vulnerability_copy = soss_importer.import_cve_from_file(file)
+        self.assertEqual(bug, bug_copy)
+        self.assertEqual(vulnerability, vulnerability_copy)
+
     def test_create_update_bug(self):
         """Test create and update a bug from a SOSS cve file"""
-        with person_logged_in(self.bug_importer):
-            bug = SOSSImporter()._create_bug(self.soss_record, self.cve)
+        bug = SOSSImporter()._create_bug(self.soss_record, self.cve)
 
         self._check_bug_fields(bug, self.bugtask_reference)
 
@@ -262,14 +278,14 @@ class TestSOSSImporter(TestCaseWithFactory):
         self.soss_record.packages.pop(SOSSRecord.PackageTypeEnum.RUST)
 
         bug = SOSSImporter(
-            information_type=InformationType.PUBLICSECURITY
+            information_type=InformationType.PROPRIETARY
         )._update_bug(bug, self.soss_record, new_cve)
         transaction.commit()
 
         # Check bug fields
         self.assertEqual(bug.description, new_description)
         self.assertEqual(bug.title, new_cve.sequence)
-        self.assertEqual(bug.information_type, InformationType.PUBLICSECURITY)
+        self.assertEqual(bug.information_type, InformationType.PROPRIETARY)
 
         # Check bugtasks
         bugtasks = bug.bugtasks
@@ -281,11 +297,10 @@ class TestSOSSImporter(TestCaseWithFactory):
     def test_create_update_vulnerability(self):
         """Test create and update a vulnerability from a SOSS cve file"""
         soss_importer = SOSSImporter()
-        with person_logged_in(self.bug_importer):
-            bug = soss_importer._create_bug(self.soss_record, self.cve)
-            vulnerability = soss_importer._create_vulnerability(
-                bug, self.soss_record, self.cve, self.soss
-            )
+        bug = soss_importer._create_bug(self.soss_record, self.cve)
+        vulnerability = soss_importer._create_vulnerability(
+            bug, self.soss_record, self.cve, self.soss
+        )
 
         self.assertEqual(vulnerability.distribution, self.soss)
         self.assertEqual(
@@ -298,7 +313,7 @@ class TestSOSSImporter(TestCaseWithFactory):
         self.assertEqual(vulnerability.date_notice_issued, None)
         self.assertEqual(vulnerability.date_coordinated_release, None)
         self.assertEqual(
-            vulnerability.information_type, InformationType.PRIVATESECURITY
+            vulnerability.information_type, InformationType.PROPRIETARY
         )
         self.assertEqual(vulnerability.importance, BugTaskImportance.LOW)
         self.assertEqual(
@@ -321,8 +336,7 @@ class TestSOSSImporter(TestCaseWithFactory):
     def test_create_or_update_bugtasks(self):
         """Test update bugtasks"""
         soss_importer = SOSSImporter()
-        with person_logged_in(self.bug_importer):
-            bug = soss_importer._create_bug(self.soss_record, self.cve)
+        bug = soss_importer._create_bug(self.soss_record, self.cve)
 
         self._check_bugtasks(
             bug.bugtasks,
@@ -356,6 +370,7 @@ class TestSOSSImporter(TestCaseWithFactory):
                 channel=("noble:4.23.1", "stable"),
             ),
             BugTaskStatus.DEFERRED,
+            "test note",
             {"repositories": ["test-repo"]},
         )
 
@@ -407,13 +422,13 @@ class TestSOSSImporter(TestCaseWithFactory):
             ],
             SOSSRecord.PackageTypeEnum.UNPACKAGED,
         )
-        self.assertEqual(generic_pkg, self.bugtask_reference[4][0])
+        self.assertEqual(generic_pkg, self.bugtask_reference[5][0])
 
         maven_pkg = soss_importer._get_or_create_external_package(
             self.soss_record.packages[SOSSRecord.PackageTypeEnum.MAVEN][0],
             SOSSRecord.PackageTypeEnum.MAVEN,
         )
-        self.assertEqual(maven_pkg, self.bugtask_reference[5][0])
+        self.assertEqual(maven_pkg, self.bugtask_reference[4][0])
 
     def test_prepare_cvss_data(self):
         """Test prepare the cvss json"""
diff --git a/lib/lp/bugs/scripts/soss/tests/test_sossrecord.py b/lib/lp/bugs/scripts/soss/tests/test_sossrecord.py
index fc3377c..a1cff07 100644
--- a/lib/lp/bugs/scripts/soss/tests/test_sossrecord.py
+++ b/lib/lp/bugs/scripts/soss/tests/test_sossrecord.py
@@ -47,48 +47,39 @@ class TestSOSSRecord(TestCase):
             ),
             assigned_to="janitor",
             packages={
-                SOSSRecord.PackageTypeEnum.UNPACKAGED: [
+                SOSSRecord.PackageTypeEnum.CONDA: [
+                    SOSSRecord.Package(
+                        name="ray",
+                        channel=SOSSRecord.Channel("jammy:1.17.0/stable"),
+                        repositories=["nvidia-pb3-python-stable-local"],
+                        status=SOSSRecord.PackageStatusEnum.NOT_AFFECTED,
+                        note="2.22.0+soss.1",
+                    )
+                ],
+                SOSSRecord.PackageTypeEnum.MAVEN: [
                     SOSSRecord.Package(
                         name="vllm",
                         channel=SOSSRecord.Channel("noble:0.7.3/stable"),
                         repositories=["soss-src-stable-local"],
-                        status=SOSSRecord.PackageStatusEnum.NEEDED,
+                        status=SOSSRecord.PackageStatusEnum.NEEDS_TRIAGE,
                         note="",
                     )
                 ],
                 SOSSRecord.PackageTypeEnum.PYTHON: [
                     SOSSRecord.Package(
-                        name="ray",
-                        channel=SOSSRecord.Channel("jammy:2.22.0/stable"),
-                        repositories=["nvidia-pb3-python-stable-local"],
-                        status=SOSSRecord.PackageStatusEnum.RELEASED,
-                        note="2.22.0+soss.1",
-                    ),
-                    SOSSRecord.Package(
                         name="pyyaml",
                         channel=SOSSRecord.Channel("jammy:2.22.0/stable"),
                         repositories=["nvidia-pb3-python-stable-local"],
                         status=SOSSRecord.PackageStatusEnum.NOT_AFFECTED,
                         note="",
                     ),
-                ],
-                SOSSRecord.PackageTypeEnum.MAVEN: [
-                    SOSSRecord.Package(
-                        name="vllm",
-                        channel=SOSSRecord.Channel("noble:0.7.3/stable"),
-                        repositories=["soss-src-stable-local"],
-                        status=SOSSRecord.PackageStatusEnum.NEEDS_TRIAGE,
-                        note="",
-                    )
-                ],
-                SOSSRecord.PackageTypeEnum.CONDA: [
                     SOSSRecord.Package(
                         name="ray",
-                        channel=SOSSRecord.Channel("jammy:1.17.0/stable"),
+                        channel=SOSSRecord.Channel("jammy:2.22.0/stable"),
                         repositories=["nvidia-pb3-python-stable-local"],
-                        status=SOSSRecord.PackageStatusEnum.NOT_AFFECTED,
+                        status=SOSSRecord.PackageStatusEnum.RELEASED,
                         note="2.22.0+soss.1",
-                    )
+                    ),
                 ],
                 SOSSRecord.PackageTypeEnum.RUST: [
                     SOSSRecord.Package(
@@ -99,6 +90,15 @@ class TestSOSSRecord(TestCase):
                         note="2.22.0+soss.1",
                     )
                 ],
+                SOSSRecord.PackageTypeEnum.UNPACKAGED: [
+                    SOSSRecord.Package(
+                        name="vllm",
+                        channel=SOSSRecord.Channel("noble:0.7.3/stable"),
+                        repositories=["soss-src-stable-local"],
+                        status=SOSSRecord.PackageStatusEnum.NEEDED,
+                        note="",
+                    )
+                ],
             },
             candidate="CVE-2025-1979",
             description=(
@@ -159,48 +159,39 @@ class TestSOSSRecord(TestCase):
             ),
             "Assigned-To": "janitor",
             "Packages": {
-                "unpackaged": [
+                "conda": [
+                    {
+                        "Name": "ray",
+                        "Channel": "jammy:1.17.0/stable",
+                        "Repositories": ["nvidia-pb3-python-stable-local"],
+                        "Status": "not-affected",
+                        "Note": "2.22.0+soss.1",
+                    }
+                ],
+                "maven": [
                     {
                         "Name": "vllm",
                         "Channel": "noble:0.7.3/stable",
                         "Repositories": ["soss-src-stable-local"],
-                        "Status": "needed",
+                        "Status": "needs-triage",
                         "Note": "",
                     }
                 ],
                 "python": [
                     {
-                        "Name": "ray",
-                        "Channel": "jammy:2.22.0/stable",
-                        "Repositories": ["nvidia-pb3-python-stable-local"],
-                        "Status": "released",
-                        "Note": "2.22.0+soss.1",
-                    },
-                    {
                         "Name": "pyyaml",
                         "Channel": "jammy:2.22.0/stable",
                         "Repositories": ["nvidia-pb3-python-stable-local"],
                         "Status": "not-affected",
                         "Note": "",
                     },
-                ],
-                "maven": [
-                    {
-                        "Name": "vllm",
-                        "Channel": "noble:0.7.3/stable",
-                        "Repositories": ["soss-src-stable-local"],
-                        "Status": "needs-triage",
-                        "Note": "",
-                    }
-                ],
-                "conda": [
                     {
                         "Name": "ray",
-                        "Channel": "jammy:1.17.0/stable",
+                        "Channel": "jammy:2.22.0/stable",
                         "Repositories": ["nvidia-pb3-python-stable-local"],
-                        "Status": "not-affected",
+                        "Status": "released",
                         "Note": "2.22.0+soss.1",
-                    }
+                    },
                 ],
                 "rust": [
                     {
@@ -211,6 +202,15 @@ class TestSOSSRecord(TestCase):
                         "Note": "2.22.0+soss.1",
                     }
                 ],
+                "unpackaged": [
+                    {
+                        "Name": "vllm",
+                        "Channel": "noble:0.7.3/stable",
+                        "Repositories": ["soss-src-stable-local"],
+                        "Status": "needed",
+                        "Note": "",
+                    }
+                ],
             },
             "Candidate": "CVE-2025-1979",
             "Description": (
diff --git a/lib/lp/bugs/scripts/tests/test_cveimport.py b/lib/lp/bugs/scripts/tests/test_cveimport.py
index f39c7e6..07f4550 100644
--- a/lib/lp/bugs/scripts/tests/test_cveimport.py
+++ b/lib/lp/bugs/scripts/tests/test_cveimport.py
@@ -4,8 +4,10 @@
 import gzip
 import io
 import json
+import os
 import shutil
 import tempfile
+import zipfile
 from datetime import datetime, timezone
 from pathlib import Path
 
@@ -203,6 +205,20 @@ class TestCVEUpdater(TestCase):
         self.assertIsNotNone(cve)
         self.assertEqual("Delta CVE", cve.description)
 
+    def test_process_delta_directory_empty(self):
+        """Test processing an empty directory of delta CVE files."""
+        # Create empty test delta directory
+        delta_dir = Path(self.temp_dir) / "deltaCves"
+        delta_dir.mkdir()
+
+        # Process the directory using the script infrastructure
+        updater = self.make_updater([str(delta_dir)])
+        processed, errors = updater.process_delta_directory(str(delta_dir))
+
+        # Verify results
+        self.assertEqual(0, processed)
+        self.assertEqual(0, errors)
+
     def test_construct_github_url(self):
         """Test GitHub URL construction for different scenarios."""
         updater = CVEUpdater(
@@ -262,3 +278,66 @@ class TestCVEUpdater(TestCase):
         # Verify the update
         updated_cve = cveset["2024-0004"]
         self.assertEqual(new_desc, updated_cve.description)
+
+    def test_extract_github_zip(self):
+        """Test extract_github_zip for complete releases."""
+        updater = self.make_updater()
+        outer_buffer = io.BytesIO()
+
+        with zipfile.ZipFile(outer_buffer, "w") as outer_zip:
+            # create inner cves.zip in memory
+            inner_buffer = io.BytesIO()
+            with zipfile.ZipFile(inner_buffer, "w") as inner_zip:
+                inner_zip.writestr("cves/CVE-2025-8941.json", "CVE data")
+            outer_zip.writestr("cves.zip", inner_buffer.getvalue())
+
+        target_dir = updater.extract_github_zip(outer_buffer.getvalue())
+        self.assertTrue(target_dir.endswith("cves"))
+        self.assertEqual(os.listdir(target_dir), ["CVE-2025-8941.json"])
+
+    def test_extract_empty_github_zip(self):
+        """Test that extract_github_zip for complete releases raises
+        LaunchpadScriptFailure when the zip is empty.
+        """
+        updater = self.make_updater()
+        buffer = io.BytesIO()
+
+        # Empty zipfile buffer
+        with zipfile.ZipFile(buffer, "w"):
+            pass
+
+        self.assertRaisesWithContent(
+            LaunchpadScriptFailure,
+            "Failed to extract ZIP files: \"There is no item named 'cves.zip' "
+            'in the archive"',
+            updater.extract_github_zip,
+            buffer.getvalue(),
+        )
+
+    def test_extract_delta_github_zip(self):
+        """Test extract_github_zip for delta releases."""
+        updater = self.make_updater()
+        buffer = io.BytesIO()
+
+        with zipfile.ZipFile(buffer, "w") as zf:
+            zf.writestr("deltaCves/CVE-2025-8941.json", "delta CVE data")
+
+        empty_dir = updater.extract_github_zip(buffer.getvalue(), delta=True)
+        self.assertTrue(empty_dir.endswith("deltaCves"))
+        self.assertEqual(os.listdir(empty_dir), ["CVE-2025-8941.json"])
+
+    def test_extract_empty_delta_github_zip(self):
+        """Test that extract_github_zip for delta releases returns an empty
+        dir if the zip is empty. There can be hours when no CVEs are updated,
+        so we return an empty dir and do not import CVEs.
+        """
+        updater = self.make_updater()
+        buffer = io.BytesIO()
+
+        # Empty zipfile buffer
+        with zipfile.ZipFile(buffer, "w"):
+            pass
+
+        empty_dir = updater.extract_github_zip(buffer.getvalue(), delta=True)
+        self.assertTrue(empty_dir.endswith("deltaCves"))
+        self.assertEqual(os.listdir(empty_dir), [])
diff --git a/lib/lp/bugs/tests/test_bug.py b/lib/lp/bugs/tests/test_bug.py
index d4265f9..3bc678c 100644
--- a/lib/lp/bugs/tests/test_bug.py
+++ b/lib/lp/bugs/tests/test_bug.py
@@ -334,6 +334,26 @@ class TestBugCreation(TestCaseWithFactory):
             assignee=person_2,
         )
 
+    def test_CreateBugParams_accepts_validate_asignee_changes(self):
+        # createBug() will accept any assignee change if validate_assignee
+        # is False.
+        person = self.factory.makePerson()
+        person_2 = self.factory.makePerson()
+        target = self.factory.makeProduct()
+        # Setting the target's bug supervisor means that
+        # canTransitionToAssignee() will return False for `person` if
+        # another Person is passed as `assignee`.
+        with person_logged_in(target.owner):
+            target.bug_supervisor = target.owner
+
+        bug = self.createBug(
+            owner=person,
+            target=target,
+            assignee=person_2,
+            validate_assignee=False,
+        )
+        self.assertEqual(person_2, bug.default_bugtask.assignee)
+
     def test_CreateBugParams_rejects_not_allowed_milestone_changes(self):
         # createBug() will reject any importance value passed by users
         # who don't have the right to set the milestone.
diff --git a/lib/lp/bugs/tests/test_cve.py b/lib/lp/bugs/tests/test_cve.py
index 601c672..23fd423 100644
--- a/lib/lp/bugs/tests/test_cve.py
+++ b/lib/lp/bugs/tests/test_cve.py
@@ -10,11 +10,13 @@ from testtools.testcase import ExpectedException
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
+from lp.app.enums import InformationType
 from lp.bugs.interfaces.bugtasksearch import BugTaskSearchParams
 from lp.bugs.interfaces.cve import CveStatus, ICveSet
 from lp.bugs.scripts.uct.models import CVSS
 from lp.testing import (
     TestCaseWithFactory,
+    admin_logged_in,
     login_person,
     person_logged_in,
     verifyObject,
@@ -357,3 +359,27 @@ class TestCve(TestCaseWithFactory):
             },
             unproxied_cve.cvss,
         )
+
+    def test_getDistributionVulnerability(self):
+        cve = self.factory.makeCVE(sequence="2099-1234")
+        distribution = self.factory.makeDistribution(
+            information_type=InformationType.PROPRIETARY
+        )
+        vulnerability = self.factory.makeVulnerability(
+            distribution=distribution,
+            cve=cve,
+            information_type=InformationType.PROPRIETARY,
+        )
+
+        # getDistributionVulnerability returns the vulnerability although we
+        # are not logged in
+        self.assertEqual(
+            vulnerability, cve.getDistributionVulnerability(distribution)
+        )
+
+        # As we are not logged in as a user, cve.vulnerabilities is empty
+        self.assertEqual(len(list(cve.vulnerabilities)), 0)
+
+        # Admin can see the PROPRIETARY vulnerability
+        with admin_logged_in():
+            self.assertEqual(vulnerability, cve.vulnerabilities[0])
diff --git a/lib/lp/bugs/tests/test_importvulnerabilityjob.py b/lib/lp/bugs/tests/test_importvulnerabilityjob.py
index 51468e8..aa1640c 100644
--- a/lib/lp/bugs/tests/test_importvulnerabilityjob.py
+++ b/lib/lp/bugs/tests/test_importvulnerabilityjob.py
@@ -1,8 +1,11 @@
 # Copyright 2025 Canonical Ltd.  This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
+import re
+from contextlib import contextmanager
 from pathlib import Path
 
+import responses
 import transaction
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
@@ -21,6 +24,10 @@ from lp.code.tests.helpers import GitHostingFixture
 from lp.services.features.testing import FeatureFixture
 from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.tests import block_on_job
+from lp.services.timeout import (
+    get_default_timeout_function,
+    set_default_timeout_function,
+)
 from lp.testing import TestCaseWithFactory, person_logged_in
 from lp.testing.layers import CeleryJobLayer, DatabaseFunctionalLayer
 
@@ -30,6 +37,19 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
     layer = DatabaseFunctionalLayer
 
+    @contextmanager
+    def mockRequests(self, method, set_default_timeout=True, **kwargs):
+        with responses.RequestsMock() as requests_mock:
+            requests_mock.add(method, re.compile(r".*"), **kwargs)
+            original_timeout_function = get_default_timeout_function()
+            if set_default_timeout:
+                set_default_timeout_function(lambda: 60.0)
+            try:
+                yield
+            finally:
+                set_default_timeout_function(original_timeout_function)
+            self.requests = [call.request for call in requests_mock.calls]
+
     def setUp(self):
         super().setUp()
         self.repository = self.factory.makeGitRepository()
@@ -56,7 +76,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
     def test_getOopsVars(self):
         """Test getOopsVars method."""
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -90,7 +110,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
     def test___repr__(self):
         """Test __repr__ method."""
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -127,18 +147,19 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
         )
         self.assertEqual(expected, repr(job))
 
-    def test_create_with_existing_in_progress_job(self):
-        """If there's already a waiting/running ImportVulnerabilityJob for the
-        handler ImportVulnerabilityJob.create() raises an exception.
+    def test_create_with_existing_pending_job(self):
+        """If there's already a waiting/running/suspended
+        ImportVulnerabilityJob for the handler ImportVulnerabilityJob.create()
+        raises the VulnerabilityJobInProgress exception.
         """
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
         import_since_commit_sha1 = None
 
-        # Job waiting status
+        # Job WAITING status
         job = self.job_source.create(
             handler,
             git_repository,
@@ -173,12 +194,26 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
         )
         self.assertEqual(job, running_exception.job)
 
+        # Job status from RUNNING to SUSPENDED
+        job.suspend()
+        running_exception = self.assertRaises(
+            VulnerabilityJobInProgress,
+            self.job_source.create,
+            handler,
+            git_repository,
+            git_ref,
+            git_paths,
+            information_type,
+            import_since_commit_sha1,
+        )
+        self.assertEqual(job, running_exception.job)
+
     def test_create_with_existing_completed_job(self):
         """If there's already a completed ImportVulnerabilityJob for the
         handler the job can be run again.
         """
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -208,12 +243,26 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
         job_duplicated.complete()
         self.assertEqual(job_duplicated.status, JobStatus.COMPLETED)
 
+        # There's a 3rd run to check that the query returns only pending jobs,
+        # and one() doesn't raise an exception
+        job_duplicated = self.job_source.create(
+            handler,
+            git_repository,
+            git_ref,
+            git_paths,
+            information_type,
+            import_since_commit_sha1,
+        )
+        job_duplicated.start()
+        job_duplicated.complete()
+        self.assertEqual(job_duplicated.status, JobStatus.COMPLETED)
+
     def test_create_with_existing_failed_job(self):
         """If there's a failed ImportVulnerabilityJob for the handler the job
         can be run again.
         """
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -250,7 +299,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
         self.useFixture(GitHostingFixture(blob=b"Some text"))
 
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -306,7 +355,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository=self.repository.git_https_url,
+            git_repository=self.repository.id,
             git_ref="ref/tags/v1.0",
             git_paths=["cves"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -330,6 +379,119 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
             },
         )
 
+    def test_run_import_with_private_repo(self):
+        """Run ImportVulnerabilityJob using a PRIVATESECURITY git
+        repository."""
+        private_repo = removeSecurityProxy(
+            self.factory.makeGitRepository(
+                information_type=InformationType.PRIVATESECURITY
+            )
+        )
+        refs = self.factory.makeGitRefs(
+            repository=private_repo,
+            paths=("ref/heads/main", "ref/tags/v1.0"),
+        )
+        with open(self.cve_path, encoding="utf-8") as file:
+            self.useFixture(
+                GitHostingFixture(
+                    blob=file.read(),
+                    refs=refs,
+                    diff_stats={"added": ["cves/CVE-2025-1979"]},
+                )
+            )
+
+        cve = self.factory.makeCVE("2025-1979")
+
+        # This is a PUBLIC distribution that can contain PRIVATESECURITY bugs
+        # and vulnerabilities. Launchpad distributions can be only PUBLIC or
+        # PROPRIETARY.
+        self.factory.makeDistribution(name="soss")
+
+        job = self.job_source.create(
+            handler=VulnerabilityHandlerEnum.SOSS,
+            git_repository=private_repo.id,
+            git_ref="ref/tags/v1.0",
+            git_paths=["cves"],
+            information_type=InformationType.PRIVATESECURITY.value,
+            import_since_commit_sha1=None,
+        )
+        job.run()
+
+        # Check that it created the bug and vulnerability
+        self.assertEqual(len(cve.bugs), 1)
+
+        admin = getUtility(ILaunchpadCelebrities).admin
+        with person_logged_in(admin):
+            self.assertEqual(len(list(cve.vulnerabilities)), 1)
+
+        self.assertEqual(
+            job.metadata.get("result"),
+            {
+                "succeeded": ["CVE-2025-1979"],
+                "failed": [],
+                "error_description": [],
+            },
+        )
+
+    def test_run_import_with_proprietary_repo(self):
+        """Run ImportVulnerabilityJob using a PROPRIETARY git repository. It
+        also creates the bug and vulnerability in a PROPRIETARY distribution.
+        """
+        project = self.factory.makeProduct()
+        self.factory.makeAccessPolicy(pillar=project)
+        proprietary_repo = removeSecurityProxy(
+            self.factory.makeGitRepository(
+                target=project, information_type=InformationType.PROPRIETARY
+            )
+        )
+        refs = self.factory.makeGitRefs(
+            repository=proprietary_repo,
+            paths=("ref/heads/main", "ref/tags/v1.0"),
+        )
+        with open(self.cve_path, encoding="utf-8") as file:
+            self.useFixture(
+                GitHostingFixture(
+                    blob=file.read(),
+                    refs=refs,
+                    diff_stats={"added": ["cves/CVE-2025-1979"]},
+                )
+            )
+
+        cve = self.factory.makeCVE("2025-1979")
+
+        # This is a PROPRIETARY distribution that allows PROPRIETARY bugs and
+        # vulnerabilities. Launchpad distributions can only be PUBLIC or
+        # PROPRIETARY.
+        self.factory.makeDistribution(
+            name="soss", information_type=InformationType.PROPRIETARY
+        )
+
+        job = self.job_source.create(
+            handler=VulnerabilityHandlerEnum.SOSS,
+            git_repository=proprietary_repo.id,
+            git_ref="ref/tags/v1.0",
+            git_paths=["cves"],
+            information_type=InformationType.PROPRIETARY.value,
+            import_since_commit_sha1=None,
+        )
+        job.run()
+
+        # Check that it created the bug and vulnerability
+        self.assertEqual(len(cve.bugs), 1)
+
+        admin = getUtility(ILaunchpadCelebrities).admin
+        with person_logged_in(admin):
+            self.assertEqual(len(list(cve.vulnerabilities)), 1)
+
+        self.assertEqual(
+            job.metadata.get("result"),
+            {
+                "succeeded": ["CVE-2025-1979"],
+                "failed": [],
+                "error_description": [],
+            },
+        )
+
     def test_run_import_with_wrong_git_paths(self):
         """Run ImportVulnerabilityJob with wrong git_paths."""
         with open(self.cve_path, encoding="utf-8") as file:
@@ -346,7 +508,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository=self.repository.git_https_url,
+            git_repository=self.repository.id,
             git_ref="ref/tags/v1.0",
             git_paths=["wrong_path"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -377,7 +539,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository="wrong_url",
+            git_repository=1111111111,
             git_ref="ref/heads/main",
             git_paths=["cves"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -411,7 +573,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository=self.repository.git_https_url,
+            git_repository=self.repository.id,
             git_ref="ref/heads/wrong-ref",
             git_paths=["cves"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -447,7 +609,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository=self.repository.git_https_url,
+            git_repository=self.repository.id,
             git_ref="ref/tags/v1.0",
             git_paths=["cves"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -487,7 +649,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
 
         job = self.job_source.create(
             handler=VulnerabilityHandlerEnum.SOSS,
-            git_repository=self.repository.git_https_url,
+            git_repository=self.repository.id,
             git_ref="ref/tags/v1.0",
             git_paths=["cves"],
             information_type=InformationType.PRIVATESECURITY.value,
@@ -511,12 +673,61 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
             },
         )
 
+    def test_run_import_with_wrong_import_since_commit_sha1(self):
+        """Run ImportVulnerabilityJob using import_since_commit_sha1"""
+        self.factory.makeDistribution(name="soss")
+        commit_sha1 = "1" * 40
+
+        # This is the second ref we created in setUp()
+        ref = "ref/tags/v1.0"
+        self.assertEqual(self.repository.refs[1].path, ref)
+
+        job = self.job_source.create(
+            handler=VulnerabilityHandlerEnum.SOSS,
+            git_repository=self.repository.id,
+            git_ref=ref,
+            git_paths=["cves"],
+            information_type=InformationType.PRIVATESECURITY.value,
+            import_since_commit_sha1=commit_sha1,
+        )
+
+        error_msg = (
+            f"Git diff between {commit_sha1} and "
+            f"{self.repository.refs[1].commit_sha1} for {ref} not found"
+        )
+
+        # Turnip API call will return a 404
+        with self.mockRequests("GET", status=404):
+            self.assertRaisesWithContent(
+                VulnerabilityJobException,
+                error_msg,
+                job.run,
+            )
+
+        self.assertEqual(
+            job.metadata,
+            {
+                "request": {
+                    "git_repository": self.repository.id,
+                    "git_ref": ref,
+                    "git_paths": ["cves"],
+                    "information_type": InformationType.PRIVATESECURITY.value,
+                    "import_since_commit_sha1": commit_sha1,
+                },
+                "result": {
+                    "error_description": [error_msg],
+                    "succeeded": [],
+                    "failed": [],
+                },
+            },
+        )
+
     def test_get(self):
         """ImportVulnerabilityJob.get() returns the import job for the given
         handler.
         """
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -542,7 +753,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
         """The ImportVulnerabilityJob.error_description property returns
         None when no error description is recorded."""
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -561,7 +772,7 @@ class ImportVulnerabilityJobTests(TestCaseWithFactory):
     def test_error_description_set_when_notifying_about_user_errors(self):
         """Test that error_description is set by notifyUserError()."""
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
@@ -605,7 +816,7 @@ class TestViaCelery(TestCaseWithFactory):
         job_source = getUtility(IImportVulnerabilityJobSource)
 
         handler = VulnerabilityHandlerEnum.SOSS
-        git_repository = self.repository.git_https_url
+        git_repository = self.repository.id
         git_ref = "ref/heads/main"
         git_paths = ["cves"]
         information_type = InformationType.PRIVATESECURITY.value
diff --git a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
index 3aadc6f..0b8a106 100644
--- a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
@@ -27,8 +27,13 @@ class BuildArgs(TypedDict, total=False):
     # True if this build should build architecture-independent packages
     # as well as architecture-dependent packages [binarypackage].
     arch_indep: bool
-    # The architecture tag to build for.
+    # The architecture tag to build for. (deprecated in favor of
+    # abi_tag and isa_tag)
     arch_tag: str
+    # The architecture (i.e. ABI) tag to build for.
+    abi_tag: str
+    # The ISA tag to build for.
+    isa_tag: str
     # Whether this is a build in a private archive.  (This causes URLs
     # in the build log to be sanitized.)
     archive_private: bool
diff --git a/lib/lp/buildmaster/model/buildfarmjobbehaviour.py b/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
index 279c0a2..9aa0d6a 100644
--- a/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/model/buildfarmjobbehaviour.py
@@ -110,8 +110,16 @@ class BuildFarmJobBehaviourBase:
 
         launchpad_server_url = config.vhost.mainsite.hostname
         launchpad_instance = launchpad_urls.get(launchpad_server_url, "devel")
+        if self.distro_arch_series.underlying_architecturetag is not None:
+            abi_tag = self.distro_arch_series.underlying_architecturetag
+            isa_tag = self.distro_arch_series.architecturetag
+        else:
+            abi_tag = isa_tag = self.distro_arch_series.architecturetag
+
         return {
             "arch_tag": self.distro_arch_series.architecturetag,
+            "abi_tag": abi_tag,
+            "isa_tag": isa_tag,
             "archive_private": self.archive.private,
             "build_url": canonical_url(self.build),
             "builder_constraints": removeSecurityProxy(
diff --git a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
index 4de9565..7e8fefa 100644
--- a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
@@ -13,6 +13,7 @@ from datetime import datetime
 import six
 from fixtures import MockPatchObject
 from testtools import ExpectedException
+from testtools.matchers import ContainsAll
 from testtools.twistedsupport import AsynchronousDeferredRunTest
 from twisted.internet import defer
 from zope.component import getUtility
@@ -108,11 +109,13 @@ class TestBuildFarmJobBehaviourBase(TestCaseWithFactory):
             buildfarmjob = removeSecurityProxy(buildfarmjob)
         return BuildFarmJobBehaviourBase(buildfarmjob)
 
-    def _makeBuild(self):
+    def _makeBuild(self, arch="x86", underlying_arch=None):
         """Create a `Build` object."""
         x86 = getUtility(IProcessorSet).getByName("386")
         distroarchseries = self.factory.makeDistroArchSeries(
-            architecturetag="x86", processor=x86
+            architecturetag=arch,
+            processor=x86,
+            underlying_architecturetag=underlying_arch,
         )
         distroseries = distroarchseries.distroseries
         archive = self.factory.makeArchive(
@@ -153,6 +156,40 @@ class TestBuildFarmJobBehaviourBase(TestCaseWithFactory):
         behaviour.setBuilder(self.factory.makeBuilder(virtualized=False), None)
         self.assertIs(False, behaviour.extraBuildArgs()["fast_cleanup"])
 
+    def test_extraBuildArgs_arch(self):
+        # Without an underlying architecture tag, extraBuildArgs sends
+        # identical arch_tag, abi_tag, and isa_tag values.
+        behaviour = self._makeBehaviour(self._makeBuild(arch="riscv64"))
+        behaviour.setBuilder(self.factory.makeBuilder(virtualized=True), None)
+        self.assertThat(
+            behaviour.extraBuildArgs().items(),
+            ContainsAll(
+                [
+                    ("arch_tag", "riscv64"),
+                    ("abi_tag", "riscv64"),
+                    ("isa_tag", "riscv64"),
+                ]
+            ),
+        )
+
+    def test_extraBuildArgs_arch_variant(self):
+        # With an underlying architecture tag set, abi_tag is the underlying
+        # tag while arch_tag and isa_tag are the variant tag.
+        behaviour = self._makeBehaviour(
+            self._makeBuild(arch="riscv64rva23", underlying_arch="riscv64")
+        )
+        behaviour.setBuilder(self.factory.makeBuilder(virtualized=True), None)
+        self.assertThat(
+            behaviour.extraBuildArgs().items(),
+            ContainsAll(
+                [
+                    ("arch_tag", "riscv64rva23"),
+                    ("abi_tag", "riscv64"),
+                    ("isa_tag", "riscv64rva23"),
+                ]
+            ),
+        )
+
     def test_extractBuildStatus_baseline(self):
         # extractBuildStatus picks the name of the build status out of a
         # dict describing the worker's status.
diff --git a/lib/lp/charms/tests/test_charmrecipebuildbehaviour.py b/lib/lp/charms/tests/test_charmrecipebuildbehaviour.py
index 4751440..1329e45 100644
--- a/lib/lp/charms/tests/test_charmrecipebuildbehaviour.py
+++ b/lib/lp/charms/tests/test_charmrecipebuildbehaviour.py
@@ -310,6 +310,8 @@ class TestAsyncCharmRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -355,6 +357,8 @@ class TestAsyncCharmRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -518,6 +522,8 @@ class TestAsyncCharmRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
diff --git a/lib/lp/code/interfaces/branch.py b/lib/lp/code/interfaces/branch.py
index 0643fca..14caf4c 100644
--- a/lib/lp/code/interfaces/branch.py
+++ b/lib/lp/code/interfaces/branch.py
@@ -764,7 +764,7 @@ class IBranchView(
 
     @operation_parameters(
         status=List(
-            title=_("A list of merge proposal statuses to filter by."),
+            title=_("A list of statuses to filter the merge proposals by."),
             value_type=Choice(vocabulary=BranchMergeProposalStatus),
         ),
         merged_revnos=List(
diff --git a/lib/lp/code/interfaces/gitref.py b/lib/lp/code/interfaces/gitref.py
index a3a5394..3fdaf3f 100644
--- a/lib/lp/code/interfaces/gitref.py
+++ b/lib/lp/code/interfaces/gitref.py
@@ -415,7 +415,7 @@ class IGitRefView(IHasMergeProposals, IHasRecipes, IInformationType):
 
     @operation_parameters(
         status=List(
-            title=_("A list of merge proposal statuses to filter by."),
+            title=_("A list of statuses to filter the merge proposals by."),
             value_type=Choice(vocabulary=BranchMergeProposalStatus),
         ),
         merged_revision_ids=List(
diff --git a/lib/lp/code/interfaces/gitrepository.py b/lib/lp/code/interfaces/gitrepository.py
index 86cf227..eae554c 100644
--- a/lib/lp/code/interfaces/gitrepository.py
+++ b/lib/lp/code/interfaces/gitrepository.py
@@ -739,7 +739,7 @@ class IGitRepositoryView(IHasRecipes, IAccessTokenTarget):
 
     @operation_parameters(
         status=List(
-            title=_("A list of merge proposal statuses to filter by."),
+            title=_("A list of statuses to filter the merge proposals by."),
             value_type=Choice(vocabulary=BranchMergeProposalStatus),
         ),
         merged_revision_ids=List(
@@ -770,12 +770,6 @@ class IGitRepositoryView(IHasRecipes, IAccessTokenTarget):
         "yet been scanned."
     )
 
-    def checkCommitInRef(commit, ref):
-        """Check if a commit exists in a git ref.
-        :param commit: the commit sha1 to look for.
-        :param ref: the git reference path where it will look.
-        """
-
     def updateMergeCommitIDs(paths):
         """Update commit SHA1s of merge proposals for this repository.
 
diff --git a/lib/lp/code/interfaces/hasbranches.py b/lib/lp/code/interfaces/hasbranches.py
index b51afa2..3b437f0 100644
--- a/lib/lp/code/interfaces/hasbranches.py
+++ b/lib/lp/code/interfaces/hasbranches.py
@@ -87,7 +87,7 @@ class IHasMergeProposals(Interface):
 
     @operation_parameters(
         status=List(
-            title=_("A list of merge proposal statuses to filter by."),
+            title=_("A list of statuses to filter the merge proposals by."),
             value_type=Choice(vocabulary=BranchMergeProposalStatus),
         )
     )
diff --git a/lib/lp/code/interfaces/webservice.py b/lib/lp/code/interfaces/webservice.py
index 568f4a9..90dff64 100644
--- a/lib/lp/code/interfaces/webservice.py
+++ b/lib/lp/code/interfaces/webservice.py
@@ -79,7 +79,7 @@ from lp.code.interfaces.sourcepackagerecipe import ISourcePackageRecipe
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuild,
 )
-from lp.registry.interfaces.person import IPerson
+from lp.registry.interfaces.person import IPerson, IPersonViewRestricted
 from lp.registry.interfaces.product import IProduct
 from lp.registry.interfaces.sourcepackage import ISourcePackage
 from lp.services.fields import InlineObject
@@ -260,3 +260,8 @@ patch_collection_property(
 patch_collection_property(
     ISourcePackageRecipe, "completed_builds", ISourcePackageRecipeBuild
 )
+
+# IPersonViewRestricted
+patch_collection_return_type(
+    IPersonViewRestricted, "getMergeProposals", IBranchMergeProposal
+)
diff --git a/lib/lp/code/model/branchcollection.py b/lib/lp/code/model/branchcollection.py
index 848383d..91c81f6 100644
--- a/lib/lp/code/model/branchcollection.py
+++ b/lib/lp/code/model/branchcollection.py
@@ -377,6 +377,8 @@ class GenericBranchCollection:
         merged_revnos=None,
         merged_revision=None,
         eager_load=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IBranchCollection`."""
         if for_branches is not None and not for_branches:
@@ -392,6 +394,8 @@ class GenericBranchCollection:
             or prerequisite_branch is not None
             or merged_revnos is not None
             or merged_revision is not None
+            or created_before is not None
+            or created_since is not None
         ):
             return self._naiveGetMergeProposals(
                 statuses,
@@ -400,6 +404,8 @@ class GenericBranchCollection:
                 prerequisite_branch,
                 merged_revnos,
                 merged_revision,
+                created_before=created_before,
+                created_since=created_since,
                 eager_load=eager_load,
             )
         else:
@@ -419,6 +425,8 @@ class GenericBranchCollection:
         merged_revnos=None,
         merged_revision=None,
         eager_load=False,
+        created_before=None,
+        created_since=None,
     ):
         Target = ClassAlias(Branch, "target")
         extra_tables = list(
@@ -478,6 +486,14 @@ class GenericBranchCollection:
             expressions.append(
                 BranchMergeProposal.queue_status.is_in(statuses)
             )
+        if created_before is not None:
+            expressions.append(
+                BranchMergeProposal.date_created < created_before
+            )
+        if created_since is not None:
+            expressions.append(
+                BranchMergeProposal.date_created >= created_since
+            )
         resultset = self.store.using(*tables).find(
             BranchMergeProposal, *expressions
         )
diff --git a/lib/lp/code/model/gitcollection.py b/lib/lp/code/model/gitcollection.py
index fe7055c..f0554e2 100644
--- a/lib/lp/code/model/gitcollection.py
+++ b/lib/lp/code/model/gitcollection.py
@@ -312,6 +312,8 @@ class GenericGitCollection:
         merged_revision_ids=None,
         merge_proposal_ids=None,
         eager_load=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IGitCollection`."""
         if merged_revision_ids is not None and not merged_revision_ids:
@@ -325,6 +327,8 @@ class GenericGitCollection:
             or prerequisite_path is not None
             or merged_revision_ids is not None
             or merge_proposal_ids is not None
+            or created_before is not None
+            or created_since is not None
         ):
             return self._naiveGetMergeProposals(
                 statuses,
@@ -334,6 +338,8 @@ class GenericGitCollection:
                 prerequisite_path,
                 merged_revision_ids,
                 merge_proposal_ids,
+                created_before=created_before,
+                created_since=created_since,
                 eager_load=eager_load,
             )
         else:
@@ -354,6 +360,8 @@ class GenericGitCollection:
         merged_revision_ids=None,
         merge_proposal_ids=None,
         eager_load=False,
+        created_before=None,
+        created_since=None,
     ):
         Target = ClassAlias(GitRepository, "target")
         extra_tables = list(
@@ -413,6 +421,14 @@ class GenericGitCollection:
             expressions.append(
                 BranchMergeProposal.queue_status.is_in(statuses)
             )
+        if created_before is not None:
+            expressions.append(
+                BranchMergeProposal.date_created < created_before
+            )
+        if created_since is not None:
+            expressions.append(
+                BranchMergeProposal.date_created >= created_since
+            )
         resultset = self.store.using(*tables).find(
             BranchMergeProposal, *expressions
         )
diff --git a/lib/lp/code/model/gitrepository.py b/lib/lp/code/model/gitrepository.py
index 465b876..483f986 100644
--- a/lib/lp/code/model/gitrepository.py
+++ b/lib/lp/code/model/gitrepository.py
@@ -1424,16 +1424,6 @@ class GitRepository(
         )
         return not jobs.is_empty()
 
-    def checkCommitInRef(self, commit, ref):
-        """See `IGitRepository`."""
-        store = Store.of(self)
-        return not store.find(
-            GitRef.commit_sha1,
-            GitRef.repository_id == self.id,
-            GitRef.commit_sha1 == commit,
-            GitRef.path == ref,
-        ).is_empty()
-
     def updateMergeCommitIDs(self, paths):
         """See `IGitRepository`."""
         store = Store.of(self)
diff --git a/lib/lp/code/model/hasbranches.py b/lib/lp/code/model/hasbranches.py
index 17c08ca..5c5f2f2 100644
--- a/lib/lp/code/model/hasbranches.py
+++ b/lib/lp/code/model/hasbranches.py
@@ -55,7 +55,12 @@ class HasMergeProposalsMixin:
     """A mixin implementation class for `IHasMergeProposals`."""
 
     def getMergeProposals(
-        self, status=None, visible_by_user=None, eager_load=False
+        self,
+        status=None,
+        visible_by_user=None,
+        eager_load=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IHasMergeProposals`."""
         # Circular import.
@@ -71,7 +76,12 @@ class HasMergeProposalsMixin:
         def _getProposals(interface):
             collection = removeSecurityProxy(interface(self))
             collection = collection.visibleByUser(visible_by_user)
-            return collection.getMergeProposals(status, eager_load=False)
+            return collection.getMergeProposals(
+                status,
+                created_before=created_before,
+                created_since=created_since,
+                eager_load=False,
+            )
 
         # SourcePackage Bazaar branches are an aberration which was not
         # replicated for Git, so SourcePackage does not support Git.
diff --git a/lib/lp/code/model/tests/test_branchjob.py b/lib/lp/code/model/tests/test_branchjob.py
index ea830a3..62e1010 100644
--- a/lib/lp/code/model/tests/test_branchjob.py
+++ b/lib/lp/code/model/tests/test_branchjob.py
@@ -6,6 +6,7 @@
 import os
 import shutil
 from datetime import datetime, timedelta, timezone
+from resource import RLIMIT_AS  # Maximum memory that can be taken by a process
 from typing import Optional
 
 import transaction
@@ -63,7 +64,7 @@ from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.model.job import Job
 from lp.services.job.runner import JobRunner
 from lp.services.job.tests import block_on_job
-from lp.services.osutils import override_environ
+from lp.services.osutils import override_environ, preserve_rlimit
 from lp.services.webapp import canonical_url
 from lp.testing import TestCaseWithFactory, person_logged_in
 from lp.testing.dbuser import dbuser, switch_dbuser
@@ -210,7 +211,11 @@ class TestBranchScanJob(TestCaseWithFactory):
             MockPatch("lp.code.model.branchjob.BranchScanJob.run", mock_run)
         )
         runner = JobRunner([job])
-        with dbuser("branchscanner"):
+
+        # Preserve the virtual memory resource limit because runJobHandleError
+        # changes it which causes the whole test worker process to have
+        # limited memory
+        with dbuser("branchscanner"), preserve_rlimit(RLIMIT_AS):
             runner.runJobHandleError(job)
         self.assertEqual(1, len(self.oopses))
         actions = [action[2:4] for action in self.oopses[0]["timeline"]]
diff --git a/lib/lp/code/model/tests/test_cibuildbehaviour.py b/lib/lp/code/model/tests/test_cibuildbehaviour.py
index dce979b..0f8a3f7 100644
--- a/lib/lp/code/model/tests/test_cibuildbehaviour.py
+++ b/lib/lp/code/model/tests/test_cibuildbehaviour.py
@@ -296,6 +296,8 @@ class TestAsyncCIBuildBehaviour(StatsMixin, TestCIBuildBehaviourBase):
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "fast_cleanup": Is(True),
@@ -435,6 +437,8 @@ class TestAsyncCIBuildBehaviour(StatsMixin, TestCIBuildBehaviourBase):
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "fast_cleanup": Is(True),
@@ -613,6 +617,8 @@ class TestAsyncCIBuildBehaviour(StatsMixin, TestCIBuildBehaviourBase):
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "fast_cleanup": Is(True),
diff --git a/lib/lp/code/model/tests/test_gitrepository.py b/lib/lp/code/model/tests/test_gitrepository.py
index e1fe89f..4e48291 100644
--- a/lib/lp/code/model/tests/test_gitrepository.py
+++ b/lib/lp/code/model/tests/test_gitrepository.py
@@ -2560,80 +2560,6 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
             hosting_fixture.getRefs.extract_kwargs(),
         )
 
-        master_sha1 = hashlib.sha1(b"refs/heads/master").hexdigest()
-        author = self.factory.makePerson()
-        with person_logged_in(author):
-            author_email = author.preferredemail.email
-        author_date = datetime(2015, 1, 1, tzinfo=timezone.utc)
-        committer_date = datetime(2015, 1, 2, tzinfo=timezone.utc)
-        self.useFixture(
-            GitHostingFixture(
-                commits=[
-                    {
-                        "sha1": master_sha1,
-                        "message": "tip of master",
-                        "author": {
-                            "name": author.displayname,
-                            "email": author_email,
-                            "time": int(seconds_since_epoch(author_date)),
-                        },
-                        "committer": {
-                            "name": "New Person",
-                            "email": "new-person@xxxxxxxxxxx",
-                            "time": int(seconds_since_epoch(committer_date)),
-                        },
-                        "parents": [],
-                        "tree": hashlib.sha1(b"").hexdigest(),
-                    }
-                ]
-            )
-        )
-
-    def test_checkCommitInRef(self):
-        """Test that a commit is on a GitRef."""
-        repository = self.factory.makeGitRepository()
-        paths = ["refs/heads/master", "refs/heads/other-branch"]
-        ref = self.factory.makeGitRefs(repository=repository, paths=paths)
-
-        # Check that ref[0] is master
-        self.assertEqual(ref[0].path, "refs/heads/master")
-
-        # Check that the commit we created exists in the specified branch
-        result = repository.checkCommitInRef(
-            ref[0].commit_sha1, "refs/heads/master"
-        )
-        self.assertEqual(result, True)
-
-    def test_checkCommitInRef_wrong_branch(self):
-        """Test that checkCommitInRef returns False when checking a commit from
-        a different branch.
-        """
-        repository = self.factory.makeGitRepository()
-        paths = ["refs/heads/master", "refs/heads/other-branch"]
-        ref = self.factory.makeGitRefs(repository=repository, paths=paths)
-
-        # Check that ref[0] is master
-        self.assertEqual(ref[0].path, "refs/heads/master")
-
-        # Check that the commit from master does not exist in other-branch
-        result = repository.checkCommitInRef(
-            ref[0].commit_sha1, "refs/heads/other-branch"
-        )
-        self.assertEqual(result, False)
-
-    def test_checkCommitInRef_wrong_commit_sha1(self):
-        """Test that checkCommitInRef returns False when checking a wrong
-        commit_sha1.
-        """
-        repository = self.factory.makeGitRepository()
-        paths = ["refs/heads/master", "refs/heads/other-branch"]
-        self.factory.makeGitRefs(repository=repository, paths=paths)
-
-        # Check that a commit that does not exist in this repo does not exist
-        # in master
-        result = repository.checkCommitInRef("1" * 40, "refs/heads/master")
-        self.assertEqual(result, False)
-
     def test_fetchRefCommits(self):
         # fetchRefCommits fetches detailed tip commit metadata for the
         # requested refs.
diff --git a/lib/lp/code/model/tests/test_recipebuilder.py b/lib/lp/code/model/tests/test_recipebuilder.py
index 85ba544..35d2a3e 100644
--- a/lib/lp/code/model/tests/test_recipebuilder.py
+++ b/lib/lp/code/model/tests/test_recipebuilder.py
@@ -204,6 +204,8 @@ class TestAsyncRecipeBuilder(TestRecipeBuilderBase):
         self.assertEqual(
             {
                 "arch_tag": "i386",
+                "abi_tag": "i386",
+                "isa_tag": "i386",
                 "archive_private": False,
                 "archive_purpose": "PPA",
                 "archives": expected_archives,
@@ -308,6 +310,8 @@ class TestAsyncRecipeBuilder(TestRecipeBuilderBase):
         self.assertEqual(
             {
                 "arch_tag": "i386",
+                "abi_tag": "i386",
+                "isa_tag": "i386",
                 "archive_private": False,
                 "archive_purpose": "PPA",
                 "archives": expected_archives,
@@ -359,6 +363,8 @@ class TestAsyncRecipeBuilder(TestRecipeBuilderBase):
         self.assertEqual(
             {
                 "arch_tag": "i386",
+                "abi_tag": "i386",
+                "isa_tag": "i386",
                 "archive_private": False,
                 "archive_purpose": "PPA",
                 "archives": expected_archives,
diff --git a/lib/lp/crafts/model/craftrecipebuildjob.py b/lib/lp/crafts/model/craftrecipebuildjob.py
index dc92ad2..81b4912 100644
--- a/lib/lp/crafts/model/craftrecipebuildjob.py
+++ b/lib/lp/crafts/model/craftrecipebuildjob.py
@@ -691,6 +691,11 @@ class CraftPublishingJob(CraftRecipeBuildJobDerived):
         new_properties["soss.source_url"] = [self._recipe_git_url()]
         new_properties["soss.type"] = ["source"]
         new_properties["soss.license"] = [self._get_license_metadata()]
+        version_str = self._get_version_metadata()
+        channel_value = (
+            f"{version_str}/stable" if version_str else "unknown/stable"
+        )
+        new_properties["launchpad.channel"] = [channel_value]
 
         # Repo name is derived from the URL
         # refer to schema-lazr.conf for more details about the URL structure
@@ -781,31 +786,46 @@ class CraftPublishingJob(CraftRecipeBuildJobDerived):
             )
             return "unknown"
 
-    def _get_license_metadata(self) -> str:
-        """Get the license metadata from the build files."""
+    def _get_artifact_metadata(self) -> dict:
+        """Load and cache metadata from metadata.yaml if present.
+
+        Returns an empty dict when metadata is missing or cannot be parsed.
+        """
+        if hasattr(self, "_artifact_metadata"):
+            return self._artifact_metadata
+
         for _, lfa, _ in self.build.getFiles():
             if lfa.filename == "metadata.yaml":
                 lfa.open()
                 try:
                     content = lfa.read().decode("utf-8")
-                    metadata = yaml.safe_load(content)
-
-                    if "license" not in metadata:
-                        log.info(
-                            "No license found in metadata.yaml, returning \
-                            'unknown'."
-                        )
-                        return "unknown"
-
-                    return metadata.get("license")
-
+                    metadata = yaml.safe_load(content) or {}
+                    self._artifact_metadata = metadata
+                    return self._artifact_metadata
                 except yaml.YAMLError as e:
                     self.error_message = f"Failed to parse metadata.yaml: {e}"
-
                     log.info(self.error_message)
-                    return "unknown"
+                    self._artifact_metadata = {}
+                    return self._artifact_metadata
                 finally:
                     lfa.close()
 
         log.info("No metadata.yaml file found in the build files.")
-        return "unknown"
+        self._artifact_metadata = {}
+        return self._artifact_metadata
+
+    def _get_license_metadata(self) -> str:
+        """Get the license metadata from the cached metadata."""
+        metadata = self._get_artifact_metadata()
+        if "license" not in metadata:
+            log.info("No license found in metadata.yaml, returning 'unknown'.")
+            return "unknown"
+        return metadata.get("license")
+
+    def _get_version_metadata(self) -> str:
+        """Get the version metadata from the cached metadata."""
+        metadata = self._get_artifact_metadata()
+        if "version" not in metadata:
+            log.info("No version found in metadata.yaml, returning 'unknown'.")
+            return "unknown"
+        return str(metadata.get("version"))
diff --git a/lib/lp/crafts/tests/test_craftrecipebuildbehaviour.py b/lib/lp/crafts/tests/test_craftrecipebuildbehaviour.py
index 972e596..f4f4731 100644
--- a/lib/lp/crafts/tests/test_craftrecipebuildbehaviour.py
+++ b/lib/lp/crafts/tests/test_craftrecipebuildbehaviour.py
@@ -331,6 +331,8 @@ class TestAsyncCraftRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -378,6 +380,8 @@ class TestAsyncCraftRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -513,6 +517,8 @@ class TestAsyncCraftRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
diff --git a/lib/lp/crafts/tests/test_craftrecipebuildjob.py b/lib/lp/crafts/tests/test_craftrecipebuildjob.py
index e7a4fc4..6b95efd 100644
--- a/lib/lp/crafts/tests/test_craftrecipebuildjob.py
+++ b/lib/lp/crafts/tests/test_craftrecipebuildjob.py
@@ -8,6 +8,7 @@ import os
 import tarfile
 import tempfile
 from pathlib import Path
+from resource import RLIMIT_AS  # Maximum memory that can be taken by a process
 
 from artifactory import ArtifactoryPath
 from fixtures import FakeLogger
@@ -34,6 +35,7 @@ from lp.services.job.interfaces.job import JobStatus
 from lp.services.job.runner import JobRunner
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
 from lp.services.librarian.utils import copy_and_close
+from lp.services.osutils import preserve_rlimit
 from lp.testing import TestCaseWithFactory
 from lp.testing.layers import CeleryJobLayer, ZopelessDatabaseLayer
 
@@ -95,7 +97,12 @@ class TestCraftPublishingJob(TestCaseWithFactory):
     def run_job(self, job):
         """Helper to run a job and return the result."""
         job = getUtility(ICraftPublishingJobSource).create(self.build)
-        JobRunner([job]).runAll()
+
+        # Preserve the virtual memory resource limit because runAll changes it
+        # which causes the whole test worker process to have limited memory
+        with preserve_rlimit(RLIMIT_AS):
+            JobRunner([job]).runAll()
+
         job = removeSecurityProxy(job)
         return job
 
@@ -421,7 +428,7 @@ class TestCraftPublishingJob(TestCaseWithFactory):
 
         # Add a metadata file with license information
         license_value = "Apache-2.0"
-        metadata_yaml = f"license: {license_value}\n"
+        metadata_yaml = f"license: {license_value}\nversion: 0.1.0\n"
         librarian = getUtility(ILibraryFileAliasSet)
         metadata_lfa = librarian.create(
             "metadata.yaml",
@@ -540,6 +547,9 @@ class TestCraftPublishingJob(TestCaseWithFactory):
         )
         self.assertEqual(artifact["properties"]["soss.type"], "source")
         self.assertEqual(artifact["properties"]["soss.license"], license_value)
+        self.assertEqual(
+            artifact["properties"].get("launchpad.channel"), "0.1.0/stable"
+        )
 
     def test_run_missing_maven_config(self):
         """
@@ -625,7 +635,7 @@ class TestCraftPublishingJob(TestCaseWithFactory):
 
         # Create a metadata file with license information
         license_value = "Apache-2.0"
-        metadata_yaml = f"license: {license_value}\n"
+        metadata_yaml = f"license: {license_value}\nversion: 0.1.0\n"
         librarian = getUtility(ILibraryFileAliasSet)
         metadata_lfa = librarian.create(
             "metadata.yaml",
@@ -762,6 +772,9 @@ class TestCraftPublishingJob(TestCaseWithFactory):
         )
         self.assertEqual(artifact["properties"]["soss.type"], "source")
         self.assertEqual(artifact["properties"]["soss.license"], license_value)
+        self.assertEqual(
+            artifact["properties"].get("launchpad.channel"), "0.1.0/stable"
+        )
 
     def test__publish_properties_sets_expected_properties(self):
         """Test that _publish_properties sets the correct properties in
@@ -805,6 +818,8 @@ class TestCraftPublishingJob(TestCaseWithFactory):
         )
         self.assertEqual(props["soss.type"], "source")
         self.assertEqual(props["soss.license"], "MIT")
+        self.assertIn("launchpad.channel", props)
+        self.assertEqual(props["launchpad.channel"], "unknown/stable")
 
     def test__publish_properties_artifact_not_found(self):
         """Test that _publish_properties raises NotFoundError if artifact is
@@ -844,10 +859,16 @@ class TestCraftPublishingJob(TestCaseWithFactory):
             lambda self: "https://example.com/repo.git",
         )
 
-        JobRunner([job]).runAll()
+        # Preserve the virtual memory resource limit because runAll changes it
+        # which causes the whole test worker process to have limited memory
+        with preserve_rlimit(RLIMIT_AS):
+            JobRunner([job]).runAll()
 
         artifact = self._artifactory_search("repository", "artifact.file")
         self.assertEqual(artifact["properties"]["soss.license"], "unknown")
+        self.assertEqual(
+            artifact["properties"].get("launchpad.channel"), "unknown/stable"
+        )
 
     def test__publish_properties_no_license_in_metadata_yaml(self):
         """Test that _publish_properties sets license to 'unknown' if no
@@ -883,10 +904,16 @@ class TestCraftPublishingJob(TestCaseWithFactory):
             lambda self: "https://example.com/repo.git",
         )
 
-        JobRunner([job]).runAll()
+        # Preserve the virtual memory resource limit because runAll changes it
+        # which causes the whole test worker process to have limited memory
+        with preserve_rlimit(RLIMIT_AS):
+            JobRunner([job]).runAll()
 
         artifact = self._artifactory_search("repository", "artifact.file")
         self.assertEqual(artifact["properties"]["soss.license"], "unknown")
+        self.assertEqual(
+            artifact["properties"].get("launchpad.channel"), "unknown/stable"
+        )
 
     def test__publish_properties_license_from_metadata_yaml(self):
         """Test that _publish_properties gets license from metadata.yaml
@@ -894,7 +921,7 @@ class TestCraftPublishingJob(TestCaseWithFactory):
 
         # Create a metadata.yaml file with a license
         license_value = "Apache-2.0"
-        metadata_yaml = f"license: {license_value}\n"
+        metadata_yaml = f"license: {license_value}\nversion: 0.1.0\n"
         librarian = getUtility(ILibraryFileAliasSet)
         metadata_lfa = librarian.create(
             "metadata.yaml",
@@ -923,10 +950,16 @@ class TestCraftPublishingJob(TestCaseWithFactory):
             lambda self: "https://example.com/repo.git",
         )
 
-        JobRunner([job]).runAll()
+        # Preserve the virtual memory resource limit because runAll changes it
+        # which causes the whole test worker process to have limited memory
+        with preserve_rlimit(RLIMIT_AS):
+            JobRunner([job]).runAll()
 
         artifact = self._artifactory_search("repository", "artifact.file")
         self.assertEqual(artifact["properties"]["soss.license"], license_value)
+        self.assertEqual(
+            artifact["properties"].get("launchpad.channel"), "0.1.0/stable"
+        )
 
     def test__publish_properties_git_repository_source_url(self):
         """Test that _publish_properties gets git_repository as source_url."""
diff --git a/lib/lp/oci/tests/test_ocirecipebuildbehaviour.py b/lib/lp/oci/tests/test_ocirecipebuildbehaviour.py
index 64d3c01..1d24551 100644
--- a/lib/lp/oci/tests/test_ocirecipebuildbehaviour.py
+++ b/lib/lp/oci/tests/test_ocirecipebuildbehaviour.py
@@ -490,6 +490,8 @@ class TestAsyncOCIRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_file": Equals(job.build.recipe.build_file),
                     "build_args": Equals(
                         {"BUILD_VAR": "123", "LAUNCHPAD_BUILD_ARCH": arch_tag}
@@ -569,6 +571,8 @@ class TestAsyncOCIRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_file": Equals(job.build.recipe.build_file),
                     "build_args": Equals(
                         {"BUILD_VAR": "123", "LAUNCHPAD_BUILD_ARCH": arch_tag}
@@ -660,6 +664,8 @@ class TestAsyncOCIRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_file": Equals(job.build.recipe.build_file),
                     "build_args": Equals(
                         {"BUILD_VAR": "123", "LAUNCHPAD_BUILD_ARCH": arch_tag}
diff --git a/lib/lp/registry/interfaces/distroseries.py b/lib/lp/registry/interfaces/distroseries.py
index 9389bc9..e010c43 100644
--- a/lib/lp/registry/interfaces/distroseries.py
+++ b/lib/lp/registry/interfaces/distroseries.py
@@ -1000,7 +1000,14 @@ class IDistroSeriesPublic(
         :return: A new `PackageUpload`.
         """
 
-    def newArch(architecturetag, processor, official, owner, enabled=True):
+    def newArch(
+        architecturetag,
+        processor,
+        official,
+        owner,
+        enabled=True,
+        underlying_architecturetag=None,
+    ):
         """Create a new port or DistroArchSeries for this DistroSeries."""
 
     def getPOFileContributorsByLanguage(language):
diff --git a/lib/lp/registry/interfaces/person.py b/lib/lp/registry/interfaces/person.py
index a3cb45b..03e4163 100644
--- a/lib/lp/registry/interfaces/person.py
+++ b/lib/lp/registry/interfaces/person.py
@@ -91,6 +91,7 @@ from lp.app.validators.email import email_validator
 from lp.app.validators.name import name_validator
 from lp.blueprints.interfaces.specificationtarget import IHasSpecifications
 from lp.bugs.interfaces.bugtarget import IHasBugs
+from lp.code.enums import BranchMergeProposalStatus
 from lp.code.interfaces.hasbranches import (
     IHasBranches,
     IHasMergeProposals,
@@ -1176,8 +1177,8 @@ class IPersonViewRestricted(
     safe_email_or_blank = TextLine(
         title=_("Safe email for display"),
         description=_(
-            "The person's preferred email if they have"
-            "one and do not choose to hide it. Otherwise"
+            "The person's preferred email if they have "
+            "one and do not choose to hide it. Otherwise "
             "the empty string."
         ),
         readonly=True,
@@ -1930,6 +1931,40 @@ class IPersonViewRestricted(
         If no orderby is provided, Person.sortingColumns is used.
         """
 
+    @operation_parameters(
+        status=List(
+            title=_("A list of statuses to filter the merge proposals by."),
+            value_type=Choice(vocabulary=BranchMergeProposalStatus),
+        ),
+        created_before=Datetime(
+            title=_(
+                "Search for merge proposals that were created "
+                "before the given date."
+            ),
+            required=False,
+        ),
+        created_since=Datetime(
+            title=_(
+                "Search for merge proposals that were created "
+                "since the given date."
+            ),
+            required=False,
+        ),
+    )
+    @call_with(visible_by_user=REQUEST_USER, eager_load=True)
+    # Really IBranchMergeProposal
+    @operation_returns_collection_of(Interface)
+    @export_read_operation()
+    @operation_for_version("beta")
+    def getMergeProposals(
+        status=None,
+        visible_by_user=None,
+        eager_load=False,
+        created_before=None,
+        created_since=None,
+    ):
+        """Return matching BranchMergeProposals."""
+
 
 class IPersonEditRestricted(Interface):
     """IPerson attributes that require launchpad.Edit permission."""
@@ -3100,7 +3135,7 @@ class IAdminTeamMergeSchema(Interface):
         required=True,
         vocabulary="ValidTeam",
         description=_(
-            "The duplicated team found in Launchpad."
+            "The duplicated team found in Launchpad. "
             "This team will be removed."
         ),
     )
diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py
index 646475d..d34e4e0 100644
--- a/lib/lp/registry/model/distribution.py
+++ b/lib/lp/registry/model/distribution.py
@@ -1443,6 +1443,8 @@ class Distribution(
         owner=None,
         needs_attention_from=None,
         unsupported=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionCollection`."""
         if unsupported:
@@ -1459,6 +1461,8 @@ class Distribution(
             owner=owner,
             needs_attention_from=needs_attention_from,
             unsupported_target=unsupported_target,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getTargetTypes(self):
diff --git a/lib/lp/registry/model/distroseries.py b/lib/lp/registry/model/distroseries.py
index d67dc65..2c10ea2 100644
--- a/lib/lp/registry/model/distroseries.py
+++ b/lib/lp/registry/model/distroseries.py
@@ -1385,7 +1385,13 @@ class DistroSeries(
         return DecoratedResultSet(package_caches, result_to_dsbp)
 
     def newArch(
-        self, architecturetag, processor, official, owner, enabled=True
+        self,
+        architecturetag,
+        processor,
+        official,
+        owner,
+        enabled=True,
+        underlying_architecturetag=None,
     ):
         """See `IDistroSeries`."""
         das = DistroArchSeries(
@@ -1395,6 +1401,7 @@ class DistroSeries(
             distroseries=self,
             owner=owner,
             enabled=enabled,
+            underlying_architecturetag=underlying_architecturetag,
         )
         IStore(das).flush()
         return das
diff --git a/lib/lp/registry/model/product.py b/lib/lp/registry/model/product.py
index c1bd11e..d6a4c56 100644
--- a/lib/lp/registry/model/product.py
+++ b/lib/lp/registry/model/product.py
@@ -1270,6 +1270,8 @@ class Product(
         owner=None,
         needs_attention_from=None,
         unsupported=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionCollection`."""
         if unsupported:
@@ -1286,6 +1288,8 @@ class Product(
             owner=owner,
             needs_attention_from=needs_attention_from,
             unsupported_target=unsupported_target,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getTargetTypes(self):
diff --git a/lib/lp/registry/model/projectgroup.py b/lib/lp/registry/model/projectgroup.py
index 1ad7db5..31df8f9 100644
--- a/lib/lp/registry/model/projectgroup.py
+++ b/lib/lp/registry/model/projectgroup.py
@@ -376,6 +376,8 @@ class ProjectGroup(
         owner=None,
         needs_attention_from=None,
         unsupported=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionCollection`."""
         if unsupported:
@@ -392,6 +394,8 @@ class ProjectGroup(
             owner=owner,
             needs_attention_from=needs_attention_from,
             unsupported_target=unsupported_target,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getQuestionLanguages(self):
diff --git a/lib/lp/registry/model/sourcepackage.py b/lib/lp/registry/model/sourcepackage.py
index 9197ca9..e3a5954 100644
--- a/lib/lp/registry/model/sourcepackage.py
+++ b/lib/lp/registry/model/sourcepackage.py
@@ -106,6 +106,8 @@ class SourcePackageQuestionTargetMixin(QuestionTargetMixin):
         owner=None,
         needs_attention_from=None,
         unsupported=False,
+        created_before=None,
+        created_since=None,
     ):
         """See `IQuestionCollection`."""
         if unsupported:
@@ -123,6 +125,8 @@ class SourcePackageQuestionTargetMixin(QuestionTargetMixin):
             owner=owner,
             needs_attention_from=needs_attention_from,
             unsupported_target=unsupported_target,
+            created_before=created_before,
+            created_since=created_since,
         ).getResults()
 
     def getAnswerContactsForLanguage(self, language):
diff --git a/lib/lp/registry/tests/test_person.py b/lib/lp/registry/tests/test_person.py
index 2d60691..f69a239 100644
--- a/lib/lp/registry/tests/test_person.py
+++ b/lib/lp/registry/tests/test_person.py
@@ -908,6 +908,66 @@ class TestPerson(TestCaseWithFactory):
         )
 
 
+# Tests for user specific merge proposal operations. Shared behavior
+# is mostly tested in TestHasMergeProposalsWebservice (test_hasbranches.py)
+class TestPersonMergeProposals(TestCaseWithFactory):
+    layer = DatabaseFunctionalLayer
+
+    def setUp(self):
+        super().setUp()
+
+    def _makeMergeProposalWithDateCreated(self, user, date_created):
+        branch = self.factory.makeBranch(owner=user)
+        return self.factory.makeBranchMergeProposal(
+            registrant=user, source_branch=branch, date_created=date_created
+        )
+
+    def test_all_merge_proposals(self):
+        user = self.factory.makePerson()
+        today = datetime.now(timezone.utc)
+
+        mp_ten_days_ago = self._makeMergeProposalWithDateCreated(
+            user, today - timedelta(days=10)
+        )
+        mp_nine_days_ago = self._makeMergeProposalWithDateCreated(
+            user, today - timedelta(days=9)
+        )
+
+        merge_proposals = user.getMergeProposals()
+        self.assertEqual(
+            [mp_nine_days_ago, mp_ten_days_ago], list(merge_proposals)
+        )
+
+    def test_created_before(self):
+        user = self.factory.makePerson()
+        today = datetime.now(timezone.utc)
+
+        nine_days_ago = today - timedelta(days=9)
+
+        mp_ten_days_ago = self._makeMergeProposalWithDateCreated(
+            user, today - timedelta(days=10)
+        )
+
+        merge_proposals = user.getMergeProposals(created_before=nine_days_ago)
+        self.assertEqual([mp_ten_days_ago], list(merge_proposals))
+
+    def test_created_since(self):
+        user = self.factory.makePerson()
+        today = datetime.now(timezone.utc)
+
+        nine_days_ago = today - timedelta(days=9)
+
+        self._makeMergeProposalWithDateCreated(
+            user, today - timedelta(days=10)
+        )
+        mp_nine_days_ago = self._makeMergeProposalWithDateCreated(
+            user, today - timedelta(days=9)
+        )
+
+        merge_proposals = user.getMergeProposals(created_since=nine_days_ago)
+        self.assertEqual([mp_nine_days_ago], list(merge_proposals))
+
+
 class TestPersonStates(TestCaseWithFactory):
     layer = DatabaseFunctionalLayer
 
diff --git a/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py b/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py
index 312ad76..1f5b185 100644
--- a/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py
+++ b/lib/lp/rocks/tests/test_rockrecipebuildbehaviour.py
@@ -309,6 +309,8 @@ class TestAsyncRockRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -355,6 +357,8 @@ class TestAsyncRockRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
@@ -490,6 +494,8 @@ class TestAsyncRockRecipeBuildBehaviour(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
                     "channels": Equals({}),
diff --git a/lib/lp/services/librarianserver/librariangc.py b/lib/lp/services/librarianserver/librariangc.py
index 5c04c55..57e5d92 100644
--- a/lib/lp/services/librarianserver/librariangc.py
+++ b/lib/lp/services/librarianserver/librariangc.py
@@ -673,8 +673,8 @@ def delete_unwanted_files(con):
     con.rollback()
     orig_autocommit = con.autocommit
     try:
-        # Disable autocommit so that we can use named cursors.
-        con.autocommit = False
+        # Enable autocommit so that each query gets closed automatically.
+        con.autocommit = True
         delete_unwanted_disk_files(con)
         swift_enabled = getFeatureFlag("librarian.swift.enabled") or False
         if swift_enabled:
diff --git a/lib/lp/services/osutils.py b/lib/lp/services/osutils.py
index 5472b79..d79529e 100644
--- a/lib/lp/services/osutils.py
+++ b/lib/lp/services/osutils.py
@@ -20,7 +20,9 @@ import os.path
 import shutil
 import time
 from contextlib import contextmanager
+from resource import getrlimit, setrlimit
 from signal import SIGKILL, SIGTERM
+from typing import Tuple
 
 
 def remove_tree(path):
@@ -184,3 +186,18 @@ def process_exists(pid):
         # All is well - the process doesn't exist.
         return False
     return True
+
+
+@contextmanager
+def preserve_rlimit(resource_type: int):
+    """Context manager to preserve and restore a specific resource limit."""
+    current_limits: Tuple[int, int] = getrlimit(resource_type)
+    try:
+        yield
+    finally:
+        try:
+            setrlimit(resource_type, current_limits)
+        except ValueError:
+            # throws a ValueError if the resource_type has no enum counterpart
+            # inside the "resource" package such as "RLIMIT_AS"
+            pass
diff --git a/lib/lp/snappy/tests/test_snapbuildbehaviour.py b/lib/lp/snappy/tests/test_snapbuildbehaviour.py
index 91ce9cf..9b253d9 100644
--- a/lib/lp/snappy/tests/test_snapbuildbehaviour.py
+++ b/lib/lp/snappy/tests/test_snapbuildbehaviour.py
@@ -769,6 +769,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "branch": Equals(branch.bzr_identity),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
@@ -824,6 +826,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
@@ -870,6 +874,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
@@ -920,6 +926,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
@@ -993,6 +1001,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
@@ -1039,6 +1049,8 @@ class TestAsyncSnapBuildBehaviourBuilderProxy(
                     "archive_private": Is(False),
                     "archives": Equals(expected_archives),
                     "arch_tag": Equals("i386"),
+                    "isa_tag": Equals("i386"),
+                    "abi_tag": Equals("i386"),
                     "build_source_tarball": Is(False),
                     "build_url": Equals(canonical_url(job.build)),
                     "builder_constraints": Equals([]),
diff --git a/lib/lp/soyuz/interfaces/distroarchseries.py b/lib/lp/soyuz/interfaces/distroarchseries.py
index 4e5c67a..03915a7 100644
--- a/lib/lp/soyuz/interfaces/distroarchseries.py
+++ b/lib/lp/soyuz/interfaces/distroarchseries.py
@@ -105,6 +105,18 @@ class IDistroArchSeriesPublic(IHasBuildRecords, IHasOwner):
         ),
         exported_as="architecture_tag",
     )
+    underlying_architecturetag = exported(
+        TextLine(
+            title=_("Underlying Architecture Tag"),
+            description=_(
+                "If set, identifies architecture_tag as a 'variant' of the "
+                "specified architecture."
+            ),
+            required=False,
+            constraint=name_validator,
+        ),
+        exported_as="underlying_architecture_tag",
+    )
     official = exported(
         Bool(
             title=_("Official Support"),
diff --git a/lib/lp/soyuz/model/distroarchseries.py b/lib/lp/soyuz/model/distroarchseries.py
index c2960e9..d02365a 100644
--- a/lib/lp/soyuz/model/distroarchseries.py
+++ b/lib/lp/soyuz/model/distroarchseries.py
@@ -56,6 +56,7 @@ class DistroArchSeries(StormBase):
     processor_id = Int(name="processor", allow_none=False)
     processor = Reference(processor_id, Processor.id)
     architecturetag = Unicode(allow_none=False)
+    underlying_architecturetag = Unicode(allow_none=True)
     official = Bool(allow_none=False)
     owner_id = Int(
         name="owner", validator=validate_public_person, allow_none=False
@@ -79,6 +80,7 @@ class DistroArchSeries(StormBase):
         official,
         owner,
         enabled=True,
+        underlying_architecturetag=None,
     ):
         super().__init__()
         self.distroseries = distroseries
@@ -87,6 +89,7 @@ class DistroArchSeries(StormBase):
         self.official = official
         self.owner = owner
         self.enabled = enabled
+        self.underlying_architecturetag = underlying_architecturetag
 
     def __getitem__(self, name):
         return self.getBinaryPackage(name)
diff --git a/lib/lp/soyuz/model/publishing.py b/lib/lp/soyuz/model/publishing.py
index 2214e2e..ebd26cf 100644
--- a/lib/lp/soyuz/model/publishing.py
+++ b/lib/lp/soyuz/model/publishing.py
@@ -1410,9 +1410,15 @@ def expand_binary_requests(distroseries, binaries):
         published, as a sequence of (`DistroArchSeries`,
         `BinaryPackageRelease`, (overrides)) tuples.
     """
-
     archs = list(distroseries.enabled_architectures)
     arch_map = {arch.architecturetag: arch for arch in archs}
+    variant_map = {}
+    for arch in archs:
+        spf = arch.getSourceFilter()
+        if spf and arch.underlying_architecturetag:
+            variant_map.setdefault(arch.underlying_architecturetag, []).append(
+                (arch, spf)
+            )
 
     expanded = []
     for bpr, overrides in binaries.items():
@@ -1421,7 +1427,15 @@ def expand_binary_requests(distroseries, binaries):
             # build arch tag. If it does not exist or is disabled, we should
             # not publish.
             target_arch = arch_map.get((bpr.build or bpr.ci_build).arch_tag)
-            target_archs = [target_arch] if target_arch is not None else []
+            if target_arch is None:
+                continue
+            target_archs = [target_arch]
+            for variant_arch, spf in variant_map.get(
+                target_arch.architecturetag, []
+            ):
+                spn = bpr.build.source_package_release.sourcepackagename
+                if not spf.isSourceIncluded(spn):
+                    target_archs.append(variant_arch)
         else:
             target_archs = archs
         for target_arch in target_archs:
diff --git a/lib/lp/soyuz/scripts/initialize_distroseries.py b/lib/lp/soyuz/scripts/initialize_distroseries.py
index 4097a10..6626a47 100644
--- a/lib/lp/soyuz/scripts/initialize_distroseries.py
+++ b/lib/lp/soyuz/scripts/initialize_distroseries.py
@@ -470,10 +470,23 @@ class InitializeDistroSeries:
         self._store.execute(
             """
             INSERT INTO DistroArchSeries
-            (distroseries, processor, architecturetag, owner, official)
-            SELECT %s, processor, architecturetag, %s, bool_and(official)
+            (
+                distroseries,
+                processor,
+                architecturetag,
+                owner,
+                official,
+                underlying_architecturetag
+            )
+            SELECT
+                %s,
+                processor,
+                architecturetag,
+                %s,
+                bool_and(official),
+                underlying_architecturetag
             FROM DistroArchSeries WHERE enabled = TRUE %s
-            GROUP BY processor, architecturetag
+            GROUP BY processor, architecturetag, underlying_architecturetag
             """
             % (
                 sqlvalues(self.distroseries.id, self.distroseries.owner.id)
diff --git a/lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py b/lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py
index c0d6b57..ff60dc4 100644
--- a/lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py
+++ b/lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py
@@ -52,14 +52,17 @@ class InitializationHelperTestCase(TestCaseWithFactory):
     # - setup/populate parents with packages;
     # - initialize a child from parents.
 
-    def setupDas(self, parent, processor_name, arch_tag):
+    def setupDas(self, parent, processor_name, arch_tag, **kw):
         try:
             processor = getUtility(IProcessorSet).getByName(processor_name)
         except ProcessorNotFound:
             processor = self.factory.makeProcessor(name=processor_name)
         processor.supports_virtualized = True
         parent_das = self.factory.makeDistroArchSeries(
-            distroseries=parent, processor=processor, architecturetag=arch_tag
+            distroseries=parent,
+            processor=processor,
+            architecturetag=arch_tag,
+            **kw,
         )
         lf = self.factory.makeLibraryFileAlias()
         transaction.commit()
@@ -872,6 +875,42 @@ class TestInitializeDistroSeries(InitializationHelperTestCase):
             das[0].architecturetag, self.parent_das.architecturetag
         )
 
+    def test_initialize_variant(self):
+        # Initializing a distroseries when a distroarchseries has an
+        # underlying_architecturetag preserves the underlying_architecturetag.
+        parent, parent_das = self.setupParent()
+        underlying_archtag = parent_das.architecturetag
+        variant_archtag = underlying_archtag + "v2"
+        # Add a variant of parent_das
+        parent_das_v = self.setupDas(
+            parent=parent,
+            processor_name=parent_das.processor.name + "v2",
+            arch_tag=variant_archtag,
+            underlying_architecturetag=underlying_archtag,
+        )
+        child = self._fullInitialize(
+            [parent],
+            distribution=parent.distribution,
+            previous_series=parent,
+        )
+        # Both dases were copied.
+        self.assertEqual(
+            IStore(DistroArchSeries)
+            .find(DistroArchSeries, distroseries=child)
+            .count(),
+            2,
+        )
+        # Both architecturetag and underlying_architecturetag were copied when
+        # creating the dases for the child.
+        child_das = child[parent_das.architecturetag]
+        child_das_v = child[parent_das_v.architecturetag]
+        self.assertEqual(child_das.architecturetag, underlying_archtag)
+        self.assertIs(child_das.underlying_architecturetag, None)
+        self.assertEqual(child_das_v.architecturetag, variant_archtag)
+        self.assertEqual(
+            child_das_v.underlying_architecturetag, underlying_archtag
+        )
+
     def test_copying_packagesets(self):
         # If a parent series has packagesets, we should copy them.
         self.parent, self.parent_das = self.setupParent()
diff --git a/lib/lp/soyuz/stories/webservice/xx-distroarchseries.rst b/lib/lp/soyuz/stories/webservice/xx-distroarchseries.rst
index 9f16056..682b99d 100644
--- a/lib/lp/soyuz/stories/webservice/xx-distroarchseries.rst
+++ b/lib/lp/soyuz/stories/webservice/xx-distroarchseries.rst
@@ -49,6 +49,7 @@ For a distroarchseries we publish a subset of its attributes.
     self_link: 'http://.../ubuntu/hoary/i386'
     supports_virtualized: True
     title: 'The Hoary Hedgehog Release for i386 (386)'
+    underlying_architecture_tag: None
     web_link: 'http://launchpad.../ubuntu/hoary/i386'
 
 DistroArchSeries.enabled is published in the API devel version.
@@ -73,4 +74,5 @@ DistroArchSeries.enabled is published in the API devel version.
     self_link: 'http://.../ubuntu/hoary/i386'
     supports_virtualized: True
     title: 'The Hoary Hedgehog Release for i386 (386)'
+    underlying_architecture_tag: None
     web_link: 'http://launchpad.../ubuntu/hoary/i386'
diff --git a/lib/lp/soyuz/tests/test_binarypackagebuildbehaviour.py b/lib/lp/soyuz/tests/test_binarypackagebuildbehaviour.py
index 625bf5b..edae653 100644
--- a/lib/lp/soyuz/tests/test_binarypackagebuildbehaviour.py
+++ b/lib/lp/soyuz/tests/test_binarypackagebuildbehaviour.py
@@ -164,6 +164,8 @@ class TestBinaryBuildPackageBehaviour(StatsMixin, TestCaseWithFactory):
         extra_args = {
             "arch_indep": arch_indep,
             "arch_tag": das.architecturetag,
+            "abi_tag": das.architecturetag,
+            "isa_tag": das.architecturetag,
             "archive_private": archive.private,
             "archive_purpose": archive_purpose.name,
             "archives": archives,
diff --git a/lib/lp/soyuz/tests/test_livefsbuildbehaviour.py b/lib/lp/soyuz/tests/test_livefsbuildbehaviour.py
index fd1e96b..ea0b4f3 100644
--- a/lib/lp/soyuz/tests/test_livefsbuildbehaviour.py
+++ b/lib/lp/soyuz/tests/test_livefsbuildbehaviour.py
@@ -204,6 +204,8 @@ class TestAsyncLiveFSBuildBehaviour(TestLiveFSBuildBehaviourBase):
                 "archive_private": False,
                 "archives": expected_archives,
                 "arch_tag": "i386",
+                "isa_tag": "i386",
+                "abi_tag": "i386",
                 "build_url": canonical_url(job.build),
                 "builder_constraints": [],
                 "datestamp": "20140425-103800",
diff --git a/lib/lp/soyuz/tests/test_publishing.py b/lib/lp/soyuz/tests/test_publishing.py
index efcc4dd..8d4be0a 100644
--- a/lib/lp/soyuz/tests/test_publishing.py
+++ b/lib/lp/soyuz/tests/test_publishing.py
@@ -52,6 +52,7 @@ from lp.soyuz.enums import (
     ArchivePurpose,
     ArchiveRepositoryFormat,
     BinaryPackageFormat,
+    DistroArchSeriesFilterSense,
     PackageUploadStatus,
 )
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
@@ -2115,6 +2116,36 @@ class TestPublishBinaries(TestCaseWithFactory):
         )
         self.assertEqual(PackagePublishingStatus.PENDING, bpph.status)
 
+    def test_architecture_variant(self):
+        # When a package is not built for a variant, the binaries for
+        # the underlying architecture are published to the variant DAS.
+        arch_tag = self.factory.getUniqueString("arch-")
+        orig_das = self.factory.makeDistroArchSeries(architecturetag=arch_tag)
+        target_das = self.factory.makeDistroArchSeries(
+            architecturetag=arch_tag
+        )
+        target_variant_das = self.factory.makeDistroArchSeries(
+            distroseries=target_das.distroseries,
+            architecturetag=arch_tag + "v2",
+            underlying_architecturetag=arch_tag,
+        )
+        dasf = self.factory.makeDistroArchSeriesFilter(
+            distroarchseries=target_variant_das,
+            sense=DistroArchSeriesFilterSense.EXCLUDE,
+        )
+        build = self.factory.makeBinaryPackageBuild(distroarchseries=orig_das)
+        dasf.packageset.add([build.source_package_release.sourcepackagename])
+        bpr = self.factory.makeBinaryPackageRelease(
+            build=build, architecturespecific=True
+        )
+        args = self.makeArgs([bpr], target_das.distroseries)
+        bpphes = list(getUtility(IPublishingSet).publishBinaries(**args))
+        self.assertEqual(len(bpphes), 2)
+        actual_target_dases = {bpph.distroarchseries for bpph in bpphes}
+        actual_bprs = {bpph.binarypackagerelease for bpph in bpphes}
+        self.assertEqual(actual_target_dases, {target_das, target_variant_das})
+        self.assertEqual(actual_bprs, {bpr})
+
     def test_architecture_independent(self):
         # Architecture-independent binaries get published to all enabled
         # DASes in the series.
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 52d93e5..c8d5f9d 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -3618,6 +3618,7 @@ class LaunchpadObjectFactory(ObjectFactory):
         official=True,
         owner=None,
         enabled=True,
+        underlying_architecturetag=None,
     ):
         """Create a new distroarchseries"""
 
@@ -3634,7 +3635,12 @@ class LaunchpadObjectFactory(ObjectFactory):
             architecturetag = self.getUniqueString("arch")
         return ProxyFactory(
             distroseries.newArch(
-                architecturetag, processor, official, owner, enabled
+                architecturetag,
+                processor,
+                official,
+                owner,
+                enabled,
+                underlying_architecturetag,
             )
         )
 
diff --git a/lib/lp/translations/tests/test_translationtemplatesbuildbehaviour.py b/lib/lp/translations/tests/test_translationtemplatesbuildbehaviour.py
index f19d0e9..de52348 100644
--- a/lib/lp/translations/tests/test_translationtemplatesbuildbehaviour.py
+++ b/lib/lp/translations/tests/test_translationtemplatesbuildbehaviour.py
@@ -109,6 +109,8 @@ class TestTranslationTemplatesBuildBehaviour(
                 {},
                 {
                     "arch_tag": das.architecturetag,
+                    "isa_tag": das.architecturetag,
+                    "abi_tag": das.architecturetag,
                     "archive_private": False,
                     "branch_url": behaviour.build.branch.composePublicURL(),
                     "build_url": canonical_url(behaviour.build),

References